List of usage examples for java.io.PrintWriter.println
public void println(Object x)
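println(Object x) writes String.valueOf(x) followed by the platform line separator. Before the project examples below, here is a minimal, self-contained sketch of the overload; the file name and the printed objects are illustrative only and are not taken from the sources listed on this page.

import java.io.IOException;
import java.io.PrintWriter;
import java.time.LocalDateTime;

public class PrintlnObjectDemo {
    public static void main(String[] args) throws IOException {
        // try-with-resources flushes and closes the writer automatically
        try (PrintWriter out = new PrintWriter("demo.log")) {
            Object timestamp = LocalDateTime.now();
            out.println(timestamp);      // println(Object) prints String.valueOf(timestamp)
            out.println("plain string"); // println(String) overload used by most examples below
        }
    }
}

Running this leaves demo.log with the timestamp's toString() form on the first line and the literal string on the second.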
From source file:ch.epfl.lsir.xin.test.GlobalMeanTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub
    PrintWriter logger = new PrintWriter(".//results//GlobalMean");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File("conf//GlobalMean.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    logger.flush();
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }
    for (int folder = 1; folder <= F; folder++) {
        System.out.println("Folder: " + folder);
        logger.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { // test data
                testRatings.addAll(folders.get(i));
            } else { // training data
                trainRatings.addAll(folders.get(i));
            }
        }
        // create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());
        logger.println("Initialize a recommendation model based on global average method.");
        GlobalAverage algo = new GlobalAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();
        System.out.println(trainRatings.size() + " vs. " + testRatings.size());
        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(rating.getUserID(), rating.getItemID());
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        // System.out.println("MAE: " + MAE + " RMSE: " + RMSE);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }
    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    logger.close();
    // MAE: 0.9338607074893257 RMSE: 1.1170971131112037 (MovieLens1M)
    // MAE: 0.9446876509332618 RMSE: 1.1256517870920375 (MovieLens100K)
}
From source file:ch.epfl.lsir.xin.test.ItemAverageTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub
    PrintWriter logger = new PrintWriter(".//results//ItemAverage");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File(".//conf//ItemAverage.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    logger.flush();
    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }
    for (int folder = 1; folder <= F; folder++) {
        logger.println("Folder: " + folder);
        logger.flush();
        System.out.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { // test data
                testRatings.addAll(folders.get(i));
            } else { // training data
                trainRatings.addAll(folders.get(i));
            }
        }
        // create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        trainRatingMatrix.calculateGlobalAverage();
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());
        logger.println("Initialize a recommendation model based on item average method.");
        ItemAverage algo = new ItemAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();
        System.out.println(trainRatings.size() + " vs. " + testRatings.size());
        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()));
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();
        // System.out.println("MAE: " + MAE + " RMSE: " + RMSE);
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }
    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    // MAE: 0.8173633324758338 RMSE: 1.0251973503888645 (MovieLens 100K)
}
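The five-fold split used by the two ch.epfl.lsir.xin.test examples above (and again by the SVD++ and SocialReg examples further down) draws two fresh java.util.Random instances for every rating and removes elements from the ArrayList one at a time. Below is a minimal sketch of the same fold-assignment idea using a single shared Random and one shuffle; the class and method names are hypothetical and are not part of those projects.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;

class FoldSplitter {
    // Shuffle once, then deal the ratings round-robin into F folds.
    static <T> List<List<T>> split(List<T> ratings, int folds, long seed) {
        List<T> shuffled = new ArrayList<>(ratings);
        Collections.shuffle(shuffled, new Random(seed));
        List<List<T>> result = new ArrayList<>();
        for (int i = 0; i < folds; i++) {
            result.add(new ArrayList<>());
        }
        for (int i = 0; i < shuffled.size(); i++) {
            result.get(i % folds).add(shuffled.get(i));
        }
        return result;
    }
}

Seeding the single Random also makes the fold assignment reproducible across runs, which a fresh new Random() per rating cannot guarantee.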
From source file:com.act.biointerpretation.ProductExtractor.java
public static void main(String[] args) throws Exception {
    CLIUtil cliUtil = new CLIUtil(ProductExtractor.class, HELP_MESSAGE, OPTION_BUILDERS);
    CommandLine cl = cliUtil.parseCommandLine(args);
    String orgPrefix = cl.getOptionValue(OPTION_ORGANISM_PREFIX);
    LOGGER.info("Using organism prefix %s", orgPrefix);
    MongoDB db = new MongoDB(DEFAULT_DB_HOST, DEFAULT_DB_PORT, cl.getOptionValue(OPTION_DB_NAME));
    Map<Long, String> validOrganisms = new TreeMap<>();
    DBIterator orgIter = db.getDbIteratorOverOrgs();
    Organism o = null;
    while ((o = db.getNextOrganism(orgIter)) != null) {
        if (!o.getName().isEmpty() && o.getName().startsWith(orgPrefix)) {
            validOrganisms.put(o.getUUID(), o.getName());
        }
    }
    LOGGER.info("Found %d valid organisms", validOrganisms.size());
    Set<Long> productIds = new TreeSet<>(); // Use something with implicit ordering we can traverse in order.
    DBIterator reactionIterator = db.getIteratorOverReactions();
    Reaction r;
    while ((r = db.getNextReaction(reactionIterator)) != null) {
        Set<JSONObject> proteins = r.getProteinData();
        boolean valid = false;
        for (JSONObject j : proteins) {
            if (j.has("organism") && validOrganisms.containsKey(j.getLong("organism"))) {
                valid = true;
                break;
            } else if (j.has("organisms")) {
                JSONArray organisms = j.getJSONArray("organisms");
                for (int i = 0; i < organisms.length(); i++) {
                    if (validOrganisms.containsKey(organisms.getLong(i))) {
                        valid = true;
                        break;
                    }
                }
            }
        }
        if (valid) {
            for (Long id : r.getProducts()) {
                productIds.add(id);
            }
            for (Long id : r.getProductCofactors()) {
                productIds.add(id);
            }
        }
    }
    LOGGER.info("Found %d valid product ids for '%s'", productIds.size(), orgPrefix);
    PrintWriter writer = cl.hasOption(OPTION_OUTPUT_FILE)
            ? new PrintWriter(new FileWriter(cl.getOptionValue(OPTION_OUTPUT_FILE)))
            : new PrintWriter(System.out);
    for (Long id : productIds) {
        Chemical c = db.getChemicalFromChemicalUUID(id);
        String inchi = c.getInChI();
        if (inchi.startsWith("InChI=") && !inchi.startsWith("InChI=/FAKE")) {
            writer.println(inchi);
        }
    }
    if (cl.hasOption(OPTION_OUTPUT_FILE)) {
        writer.close();
    }
    LOGGER.info("Done.");
}
From source file:client.Client.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    Socket st = new Socket("127.0.0.1", 1604);
    BufferedReader r = new BufferedReader(new InputStreamReader(st.getInputStream()));
    PrintWriter p = new PrintWriter(st.getOutputStream());
    while (true) {
        String s = r.readLine();
        new Thread() {
            @Override
            public void run() {
                String[] ar = s.split("\\|");
                if (s.startsWith("HALLO")) {
                    String str = "";
                    try {
                        str = InetAddress.getLocalHost().getHostName();
                    } catch (Exception e) {
                    }
                    p.println("info|" + s.split("\\|")[1] + "|" + System.getProperty("user.name") + "|"
                            + System.getProperty("os.name") + "|" + str);
                    p.flush();
                }
                if (s.startsWith("msg")) {
                    String text = fromHex(ar[1]);
                    String title = ar[2];
                    int i = Integer.parseInt(ar[3]);
                    JOptionPane.showMessageDialog(null, text, title, i);
                }
                if (s.startsWith("execute")) {
                    String cmd = ar[1];
                    try {
                        Runtime.getRuntime().exec(cmd);
                    } catch (Exception e) {
                    }
                }
                if (s.equals("getsystem")) {
                    StringBuilder sb = new StringBuilder();
                    for (Object o : System.getProperties().entrySet()) {
                        Map.Entry e = (Map.Entry) o;
                        sb.append("\n" + e.getKey() + "|" + e.getValue());
                    }
                    p.println("systeminfos|" + toHex(sb.toString().substring(1)));
                    p.flush();
                }
            }
        }.start();
    }
}
From source file:EchoClient.java
public static void main(String[] args) throws IOException {
    ServerSocket serverSocket = null;
    try {
        serverSocket = new ServerSocket(4444);
    } catch (IOException e) {
        System.err.println("Could not listen on port: 4444.");
        System.exit(1);
    }
    Socket clientSocket = null;
    try {
        clientSocket = serverSocket.accept();
    } catch (IOException e) {
        System.err.println("Accept failed.");
        System.exit(1);
    }
    PrintWriter out = new PrintWriter(clientSocket.getOutputStream(), true);
    BufferedReader in = new BufferedReader(new InputStreamReader(clientSocket.getInputStream()));
    String inputLine, outputLine;
    KnockKnockProtocol kkp = new KnockKnockProtocol();
    outputLine = kkp.processInput(null);
    out.println(outputLine);
    while ((inputLine = in.readLine()) != null) {
        outputLine = kkp.processInput(inputLine);
        out.println(outputLine);
        if (outputLine.equals("Bye."))
            break;
    }
    out.close();
    in.close();
    clientSocket.close();
    serverSocket.close();
}
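In the server above, the second constructor argument turns on auto-flush, so each println pushes its line to the socket immediately; with the default single-argument constructor the reply could sit in the writer's buffer until an explicit flush. A minimal sketch of that difference follows; the socket handling is illustrative only and not taken from the example.

import java.io.IOException;
import java.io.PrintWriter;
import java.net.Socket;

class AutoFlushSketch {
    static void reply(Socket client) throws IOException {
        // autoFlush = true: println, printf and format flush the underlying stream
        PrintWriter out = new PrintWriter(client.getOutputStream(), true);
        out.println("Bye."); // reaches the client without an explicit flush()
        // With autoFlush = false (the default), out.flush() would be needed here.
    }
}

Note that auto-flush applies to println, printf and format, but not to print or write.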
From source file:com.bluemarsh.jswat.console.Main.java
/**
 * Kicks off the application.
 *
 * @param args the command line arguments.
 */
public static void main(String[] args) {
    //
    // An attempt was made early on to use the Console class in java.io,
    // but that is not designed to handle asynchronous output from
    // multiple threads, so we just use the usual System.out for output
    // and System.in for input. Note that automatic flushing is used to
    // ensure output is shown in a timely fashion.
    //
    // Where console mode seems to work:
    // - bash
    // - emacs
    //
    // Where console mode does not seem to work:
    // - NetBeans: output from event listeners is never shown and the
    //   cursor sometimes lags behind the output
    //
    // Turn on flushing so printing the prompt will flush
    // all buffered output generated from other threads.
    PrintWriter output = new PrintWriter(System.out, true);
    // Make sure we have the JPDA classes.
    try {
        Bootstrap.virtualMachineManager();
    } catch (NoClassDefFoundError ncdfe) {
        output.println(NbBundle.getMessage(Main.class, "MSG_Main_NoJPDA"));
        System.exit(1);
    }
    // Ensure we can create the user directory by requesting the
    // platform service. Simply asking for it has the desired effect.
    PlatformProvider.getPlatformService();
    // Define the logging configuration.
    LogManager manager = LogManager.getLogManager();
    InputStream is = Main.class.getResourceAsStream("logging.properties");
    try {
        manager.readConfiguration(is);
    } catch (IOException ioe) {
        ioe.printStackTrace();
        System.exit(1);
    }
    // Print out some useful debugging information.
    logSystemDetails();
    // Add a shutdown hook to make sure we exit cleanly.
    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
        @Override
        public void run() {
            // Save the command aliases.
            CommandParser parser = CommandProvider.getCommandParser();
            parser.saveSettings();
            // Save the runtimes to persistent storage.
            RuntimeManager rm = RuntimeProvider.getRuntimeManager();
            rm.saveRuntimes();
            // Save the sessions to persistent storage and have them
            // close down in preparation to exit.
            SessionManager sm = SessionProvider.getSessionManager();
            sm.saveSessions(true);
        }
    }));
    // Initialize the command parser and load the aliases.
    CommandParser parser = CommandProvider.getCommandParser();
    parser.loadSettings();
    parser.setOutput(output);
    // Create an OutputAdapter to display debuggee output.
    OutputAdapter adapter = new OutputAdapter(output);
    SessionManager sessionMgr = SessionProvider.getSessionManager();
    sessionMgr.addSessionManagerListener(adapter);
    // Create a SessionWatcher to monitor the session status.
    SessionWatcher swatcher = new SessionWatcher();
    sessionMgr.addSessionManagerListener(swatcher);
    // Create a BreakpointWatcher to monitor the breakpoints.
    BreakpointWatcher bwatcher = new BreakpointWatcher();
    sessionMgr.addSessionManagerListener(bwatcher);
    // Add the watchers and adapters to the open sessions.
    Iterator<Session> iter = sessionMgr.iterateSessions();
    while (iter.hasNext()) {
        Session s = iter.next();
        s.addSessionListener(adapter);
        s.addSessionListener(swatcher);
    }
    // Find and run the RC file.
    try {
        runStartupFile(parser, output);
    } catch (IOException ioe) {
        logger.log(Level.SEVERE, null, ioe);
    }
    // Process command line arguments.
    try {
        processArguments(args);
    } catch (ParseException pe) {
        // Report the problem and keep going.
        System.err.println("Option parsing failed: " + pe.getMessage());
        logger.log(Level.SEVERE, null, pe);
    }
    // Display a helpful greeting.
    output.println(NbBundle.getMessage(Main.class, "MSG_Main_Welcome"));
    if (jdbEmulationMode) {
        output.println(NbBundle.getMessage(Main.class, "MSG_Main_Jdb_Emulation"));
    }
    // Enter the main loop of processing user input.
    BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
    while (true) {
        // Keep the prompt format identical to jdb for compatibility
        // with emacs and other possible wrappers.
        output.print("> ");
        output.flush();
        try {
            String command = input.readLine();
            // A null value indicates end of stream.
            if (command != null) {
                performCommand(output, parser, command);
            }
            // Sleep briefly to give the event processing threads,
            // and the automatic output flushing, a chance to catch
            // up before printing the input prompt again.
            Thread.sleep(250);
        } catch (InterruptedException ie) {
            logger.log(Level.WARNING, null, ie);
        } catch (IOException ioe) {
            logger.log(Level.SEVERE, null, ioe);
            output.println(NbBundle.getMessage(Main.class, "ERR_Main_IOError", ioe));
        } catch (Exception x) {
            // Don't ever let an internal bug (e.g. in the command parser)
            // hork the console, as it really irritates people.
            logger.log(Level.SEVERE, null, x);
            output.println(NbBundle.getMessage(Main.class, "ERR_Main_Exception", x));
        }
    }
}
From source file:flashcrawler.FlashCrawler.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws FileNotFoundException {
    Scanner scn = new Scanner(new File("input.txt"));
    ArrayList<String> ins = new ArrayList();
    while (scn.hasNextLine()) {
        String input = scn.nextLine();
        ins.add(input);
    }
    String URL;
    PrintWriter writer = null;
    writer = new PrintWriter(new FileOutputStream(new File("error-log.txt"), true));
    String File;
    for (String name : ins) {
        File offlinePath = new File("/games/" + name + ".swf");
        String onlinePath = "http://wsh.gamib.com/x/" + name + "/" + name + ".swf";
        System.out.println("Downloading " + onlinePath + " into " + offlinePath);
        URL url = null;
        try {
            System.out.println("...");
            url = new URL(onlinePath);
        } catch (MalformedURLException ex) {
            System.out.println("Failed to create url object");
            writer.println("Error when creating url: " + onlinePath + "\tname");
        }
        try {
            System.out.println("...");
            FileUtils.copyURLToFile(url, offlinePath);
            System.out.println("Success.");
        } catch (IOException ex) {
            System.out.println("Error when downloading game: " + offlinePath);
            writer.println("Error when downloading game: " + offlinePath);
        }
    }
    writer.close();
    System.out.println("Process complete!");
}
From source file:ch.epfl.lsir.xin.test.SVDPPTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub
    PrintWriter logger = new PrintWriter(".//results//SVDPP");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File("conf//SVDPlusPlus.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    logger.flush();
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    logger.flush();
    double totalMAE = 0;
    double totalRMSE = 0;
    double totalPrecision = 0;
    double totalRecall = 0;
    double totalMAP = 0;
    double totalNDCG = 0;
    double totalMRR = 0;
    double totalAUC = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    logger.flush();
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }
    for (int folder = 1; folder <= F; folder++) {
        System.out.println("Folder: " + folder);
        logger.println("Folder: " + folder);
        logger.flush();
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { // test data
                testRatings.addAll(folders.get(i));
            } else { // training data
                trainRatings.addAll(folders.get(i));
            }
        }
        // create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            if (testRatings.get(i).getValue() < 5)
                continue;
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());
        logger.println("Initialize a SVD++ recommendation model.");
        logger.flush();
        SVDPlusPlus algo = new SVDPlusPlus(trainRatingMatrix, false,
                ".//localModels//" + config.getString("NAME"));
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();
        // rating prediction accuracy
        double RMSE = 0;
        double MAE = 0;
        double precision = 0;
        double recall = 0;
        double map = 0;
        double ndcg = 0;
        double mrr = 0;
        double auc = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()), false);
            if (prediction > algo.getMaxRating())
                prediction = algo.getMaxRating();
            if (prediction < algo.getMinRating())
                prediction = algo.getMinRating();
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
        System.out.println("Folder --- MAE: " + MAE + " RMSE: " + RMSE);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Folder --- MAE: "
                + MAE + " RMSE: " + RMSE);
        // ranking accuracy
        if (algo.getTopN() > 0) {
            HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>();
            for (int i = 0; i < trainRatingMatrix.getRow(); i++) {
                ArrayList<ResultUnit> rec = algo.getRecommendationList(i);
                if (rec == null)
                    continue;
                int total = testRatingMatrix.getUserRatingNumber(i);
                if (total == 0) // this user is ignored
                    continue;
                results.put(i, rec);
            }
            RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix);
            precision = generator.getPrecisionN();
            totalPrecision = totalPrecision + precision;
            recall = generator.getRecallN();
            totalRecall = totalRecall + recall;
            map = generator.getMAPN();
            totalMAP = totalMAP + map;
            ndcg = generator.getNDCGN();
            totalNDCG = totalNDCG + ndcg;
            mrr = generator.getMRRN();
            totalMRR = totalMRR + mrr;
            auc = generator.getAUC();
            totalAUC = totalAUC + auc;
            System.out.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map
                    + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
            logger.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map
                    + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
        }
        logger.flush();
    }
    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    System.out.println("Precision@N: " + totalPrecision / F);
    System.out.println("Recall@N: " + totalRecall / F);
    System.out.println("MAP@N: " + totalMAP / F);
    System.out.println("MRR@N: " + totalMRR / F);
    System.out.println("NDCG@N: " + totalNDCG / F);
    System.out.println("AUC@N: " + totalAUC / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + totalPrecision / F + "\n"
            + "Recall@N: " + totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: "
            + totalMRR / F + "\n" + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F);
    logger.flush();
    logger.close();
}
From source file:com.kse.bigdata.main.Driver.java
public static void main(String[] args) throws Exception {
    /**********************************************************************************
     ** Merge the source files into one.                                             **
     ** Should change the directories of each file before executing the program.    **
     **********************************************************************************/
    // String inputFileDirectory = "/media/bk/??/BigData_Term_Project/Debug";
    // String resultFileDirectory = "/media/bk/??/BigData_Term_Project/debug.csv";
    // File resultFile = new File(resultFileDirectory);
    // if (!resultFile.exists())
    //     new SourceFileMerger(inputFileDirectory, resultFileDirectory).mergeFiles();

    /**********************************************************************************
     * Hadoop Operation.
     * Before Start, Check the Length of Sequence We Want to Predict.
     **********************************************************************************/
    Configuration conf = new Configuration();
    // Enable MapReduce intermediate compression as Snappy
    conf.setBoolean("mapred.compress.map.output", true);
    conf.set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    // Enable Profiling
    // conf.setBoolean("mapred.task.profile", true);
    String testPath = null;
    String inputPath = null;
    String outputPath = null;
    int sampleSize = 1;
    ArrayList<String> results = new ArrayList<String>();
    for (int index = 0; index < args.length; index++) {
        /*
         * Mandatory command
         */
        // Extract input path string from command line.
        if (args[index].equals("-in"))
            inputPath = args[index + 1];
        // Extract output path string from command line.
        if (args[index].equals("-out"))
            outputPath = args[index + 1];
        // Extract test data path string from command line.
        if (args[index].equals("-test"))
            testPath = args[index + 1];
        /*
         * Optional command
         */
        // Extract a number of neighbors.
        if (args[index].equals("-nn"))
            conf.setInt(Reduce.NUMBER_OF_NEAREAST_NEIGHBOR, Integer.parseInt(args[index + 1]));
        // Whether job uses normalization or not.
        if (args[index].equals("-norm"))
            conf.setBoolean(Map.NORMALIZATION, true);
        // Extract the number of sample size to test.
        if (args[index].equals("-s"))
            sampleSize = Integer.valueOf(args[index + 1]);
        // Whether job uses mean or median
        // [Default : mean]
        if (args[index].equals("-med"))
            conf.setBoolean(Reduce.MEDIAN, true);
    }
    String outputFileName = "part-r-00000";
    SequenceSampler sampler = new SequenceSampler(testPath, sampleSize);
    LinkedList<Sequence> testSequences = sampler.getRandomSample();
    // Test Sequence
    // String testSeqString = "13.591-13.674-13.778-13.892-13.958-14.049-14.153-14.185-14.169-14.092-13.905-13.702-13.438-13.187-13.0-12.914-12.868-12.766-12.62-12.433-12.279-12.142-12.063-12.025-100";
    // Sequence testSeq = new Sequence(testSeqString);
    // LinkedList<Sequence> testSequences = new LinkedList<>();
    // testSequences.add(testSeq);
    for (Sequence seq : testSequences) {
        /* ******************** Hadoop Launch *********************** */
        System.out.println(seq.getTailString());
        conf.set(Map.INPUT_SEQUENCE, seq.toString());
        Job job = new Job(conf);
        job.setJarByClass(Driver.class);
        job.setJobName("term-project-driver");
        job.setMapperClass(Map.class);
        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(Text.class);
        // Should think another way to implement the combiner class
        // Current Implementation is not helpful to Job.
        // job.setCombinerClass(Combiner.class);
        // Set 1 for number of reduce task for keeping 100 most neighbors in sorted set.
        job.setNumReduceTasks(1);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        FileInputFormat.setInputPaths(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));
        job.waitForCompletion(true);
        /*
         * if job finishes, get result of the job and store it in results(list).
         */
        try {
            FileSystem hdfs = FileSystem.get(new Configuration());
            BufferedReader fileReader = new BufferedReader(
                    new InputStreamReader(hdfs.open(new Path(outputPath + "/" + outputFileName))));
            String line;
            while ((line = fileReader.readLine()) != null) {
                results.add(seq.getSeqString() + " " + line);
            }
            fileReader.close();
            hdfs.delete(new Path(outputPath), true);
            hdfs.close();
        } catch (IOException e) {
            e.printStackTrace();
            System.exit(1);
        }
    }
    /*
     * if all jobs finish, store results of jobs to output/result.txt file.
     */
    String finalOutputPath = "output/result.csv";
    try {
        FileSystem hdfs = FileSystem.get(new Configuration());
        Path file = new Path(finalOutputPath);
        if (hdfs.exists(file)) {
            hdfs.delete(file, true);
        }
        OutputStream os = hdfs.create(file);
        PrintWriter printWriter = new PrintWriter(new OutputStreamWriter(os, "UTF-8"));
        // CSV File Header
        printWriter.println("Actual,Predicted,MER,MAE");
        printWriter.flush();
        for (String result : results) {
            String[] tokens = result.split("\\s+");
            printWriter.println(tokens[0] + "," + tokens[1] + "," + tokens[2] + "," + tokens[3]);
            printWriter.flush();
        }
        printWriter.close();
        hdfs.close();
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
    }
}
From source file:ch.epfl.lsir.xin.test.SocialRegTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub
    PrintWriter logger = new PrintWriter(".//results//SocialReg");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File("conf//SocialReg.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    logger.flush();
    DataLoaderFile loader = new DataLoaderFile(".//data//Epinions-ratings.txt");
    loader.readSimple();
    // read social information
    loader.readRelation(".//data//Epinions-trust.txt");
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    logger.flush();
    double totalMAE = 0;
    double totalRMSE = 0;
    double totalPrecision = 0;
    double totalRecall = 0;
    double totalMAP = 0;
    double totalNDCG = 0;
    double totalMRR = 0;
    double totalAUC = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    logger.flush();
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }
    for (int folder = 1; folder <= F; folder++) {
        System.out.println("Folder: " + folder);
        logger.println("Folder: " + folder);
        logger.flush();
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { // test data
                testRatings.addAll(folders.get(i));
            } else { // training data
                trainRatings.addAll(folders.get(i));
            }
        }
        // create rating matrix
        HashMap<String, Integer> userIDIndexMapping = dataset.getUserIDMapping();
        HashMap<String, Integer> itemIDIndexMapping = dataset.getItemIDMapping();
        // for( int i = 0 ; i < dataset.getUserIDs().size() ; i++ )
        // {
        //     userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        // }
        // for( int i = 0 ; i < dataset.getItemIDs().size() ; i++ )
        // {
        //     itemIDIndexMapping.put(dataset.getItemIDs().get(i) , i);
        // }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());
        logger.println("Initialize a social regularization recommendation model.");
        logger.flush();
        SocialReg algo = new SocialReg(trainRatingMatrix, dataset.getRelationships(), false,
                ".//localModels//" + config.getString("NAME"));
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();
        System.out.println(trainRatings.size() + " vs. " + testRatings.size());
        // rating prediction accuracy
        double RMSE = 0;
        double MAE = 0;
        double precision = 0;
        double recall = 0;
        double map = 0;
        double ndcg = 0;
        double mrr = 0;
        double auc = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()));
            if (prediction > algo.getMaxRating())
                prediction = algo.getMaxRating();
            if (prediction < algo.getMinRating())
                prediction = algo.getMinRating();
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
        System.out.println("Folder --- MAE: " + MAE + " RMSE: " + RMSE);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Folder --- MAE: "
                + MAE + " RMSE: " + RMSE);
        // ranking accuracy
        // if( algo.getTopN() > 0 )
        // {
        //     HashMap<Integer , ArrayList<ResultUnit>> results = new HashMap<Integer , ArrayList<ResultUnit>>();
        //     for( int i = 0 ; i < trainRatingMatrix.getRow() ; i++ )
        //     {
        //         ArrayList<ResultUnit> rec = algo.getRecommendationList(i);
        //         results.put(i, rec);
        //     }
        //     RankResultGenerator generator = new RankResultGenerator(results , algo.getTopN() , testRatingMatrix);
        //     precision = generator.getPrecisionN();
        //     totalPrecision = totalPrecision + precision;
        //     recall = generator.getRecallN();
        //     totalRecall = totalRecall + recall;
        //     map = generator.getMAPN();
        //     totalMAP = totalMAP + map;
        //     ndcg = generator.getNDCGN();
        //     totalNDCG = totalNDCG + ndcg;
        //     mrr = generator.getMRRN();
        //     totalMRR = totalMRR + mrr;
        //     auc = generator.getAUC();
        //     totalAUC = totalAUC + auc;
        //     System.out.println("Folder --- precision: " + precision + " recall: " +
        //             recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
        //     logger.println("Folder --- precision: " + precision + " recall: " +
        //             recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
        // }
        logger.flush();
    }
    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    System.out.println("Precision@N: " + totalPrecision / F);
    System.out.println("Recall@N: " + totalRecall / F);
    System.out.println("MAP@N: " + totalMAP / F);
    System.out.println("MRR@N: " + totalMRR / F);
    System.out.println("NDCG@N: " + totalNDCG / F);
    System.out.println("AUC@N: " + totalAUC / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + totalPrecision / F + "\n"
            + "Recall@N: " + totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: "
            + totalMRR / F + "\n" + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F);
    logger.flush();
    logger.close();
}