List of usage examples for java.util.HashMap.put
public V put(K key, V value)
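Before the real-world examples below, a minimal, self-contained sketch of put's contract (the class name PutBasics is illustrative): put returns the value previously mapped to the key (or null if there was none), replaces the value on a duplicate key, and HashMap permits one null key and any number of null values.

import java.util.HashMap;

public class PutBasics {
    public static void main(String[] args) {
        HashMap<String, Integer> ages = new HashMap<>();
        Integer prev = ages.put("alice", 30);  // no prior mapping, so put returns null
        System.out.println(prev);              // null
        prev = ages.put("alice", 31);          // same key: the old value is replaced
        System.out.println(prev);              // 30
        ages.put(null, 0);                     // one null key is allowed
        ages.put("bob", null);                 // null values are allowed
        System.out.println(ages.size());       // 3
    }
}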
From source file:ch.epfl.lsir.xin.test.BiasedMFTest.java
/**
 * Runs a 5-fold cross validation of a biased matrix factorization recommender
 * on the MovieLens 100K data set.
 * @param args
 */
public static void main(String[] args) throws Exception {
    PrintWriter logger = new PrintWriter(".//results//BiasedMF");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File("conf//biasedMF.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    logger.flush();
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    double totalMAE = 0;
    double totalRMSE = 0;
    double totalPrecision = 0;
    double totalRecall = 0;
    double totalMAP = 0;
    double totalNDCG = 0;
    double totalMRR = 0;
    double totalAUC = 0;
    int F = 5;
    logger.println(F + "-fold cross validation.");
    logger.flush();

    // Randomly distribute the ratings over F folds.
    ArrayList<ArrayList<NumericRating>> folds = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folds.add(new ArrayList<NumericRating>());
    }
    Random random = new Random();
    while (dataset.getRatings().size() > 0) {
        int index = random.nextInt(dataset.getRatings().size());
        int r = random.nextInt(F);
        folds.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int fold = 1; fold <= F; fold++) {
        System.out.println("Fold: " + fold);
        logger.println("Fold: " + fold);
        logger.flush();
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folds.size(); i++) {
            if (i == fold - 1) { // test data
                testRatings.addAll(folds.get(i));
            } else { // training data
                trainRatings.addAll(folds.get(i));
            }
        }

        // Create the rating matrices; put() maps each raw user/item ID to a dense matrix index.
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a biased matrix factorization recommendation model.");
        logger.flush();
        BiasedMF algo = new BiasedMF(trainRatingMatrix, false, ".//localModels//" + config.getString("NAME"));
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();

        // Rating prediction accuracy.
        double RMSE = 0;
        double MAE = 0;
        double precision = 0;
        double recall = 0;
        double map = 0;
        double ndcg = 0;
        double mrr = 0;
        double auc = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()), false);
            if (prediction > algo.getMaxRating())
                prediction = algo.getMaxRating();
            if (prediction < algo.getMinRating())
                prediction = algo.getMinRating();
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
        System.out.println("Fold --- MAE: " + MAE + " RMSE: " + RMSE);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Fold --- MAE: " + MAE
                + " RMSE: " + RMSE);

        // Ranking accuracy.
        if (algo.getTopN() > 0) {
            HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>();
            for (int i = 0; i < trainRatingMatrix.getRow(); i++) {
                ArrayList<ResultUnit> rec = algo.getRecommendationList(i);
                if (rec == null)
                    continue;
                int total = testRatingMatrix.getUserRatingNumber(i);
                if (total == 0) // this user is ignored
                    continue;
                results.put(i, rec);
            }
            RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix,
                    trainRatingMatrix);
            precision = generator.getPrecisionN();
            totalPrecision = totalPrecision + precision;
            recall = generator.getRecallN();
            totalRecall = totalRecall + recall;
            map = generator.getMAPN();
            totalMAP = totalMAP + map;
            ndcg = generator.getNDCGN();
            totalNDCG = totalNDCG + ndcg;
            mrr = generator.getMRRN();
            totalMRR = totalMRR + mrr;
            auc = generator.getAUC();
            totalAUC = totalAUC + auc;
            System.out.println("Fold --- precision: " + precision + " recall: " + recall + " map: " + map
                    + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
            logger.println("Fold --- precision: " + precision + " recall: " + recall + " map: " + map
                    + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
        }
        logger.flush();
    }

    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    System.out.println("Precision@N: " + totalPrecision / F);
    System.out.println("Recall@N: " + totalRecall / F);
    System.out.println("MAP@N: " + totalMAP / F);
    System.out.println("MRR@N: " + totalMRR / F);
    System.out.println("NDCG@N: " + totalNDCG / F);
    System.out.println("AUC@N: " + totalAUC / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: " + totalMAE / F
            + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + totalPrecision / F + "\n" + "Recall@N: "
            + totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: " + totalMRR / F + "\n"
            + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F);
    logger.flush();
    logger.close();
}
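The cross-validation tests in this listing all build their rating matrices with the same put() idiom: assign each raw string ID a dense integer index by its position in the ID list. A minimal sketch of that idiom in isolation (class and variable names here are illustrative, not from the project):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class IdIndexMapping {
    public static void main(String[] args) {
        List<String> userIDs = List.of("u7", "u2", "u9");
        Map<String, Integer> userIDIndexMapping = new HashMap<>();
        for (int i = 0; i < userIDs.size(); i++) {
            userIDIndexMapping.put(userIDs.get(i), i); // raw ID -> dense matrix row index
        }
        System.out.println(userIDIndexMapping.get("u9")); // 2
    }
}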
From source file:edu.illinois.cs.cogcomp.nlp.tokenizer.HashCollisionReport.java
/**
 * Read each test file in the directory, tokenize, and create the token view. Then check for
 * collisions.
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    if (args.length == 0)
        error("Must pass in the name of a directory with files to test against.");
    File dir = new File(args[0]);
    if (!dir.exists()) {
        error("The directory did not exist : " + dir);
    }
    if (!dir.isDirectory()) {
        error("The path was not a directory : " + dir);
    }
    File[] files = dir.listFiles();
    for (File file : files) {
        if (file.isFile()) {
            String normal = FileUtils.readFileToString(file);
            TextAnnotationBuilder tabldr = new TokenizerTextAnnotationBuilder(new StatefulTokenizer());
            TextAnnotation taNormal = tabldr.createTextAnnotation("test", "normal", normal);
            List<Constituent> normalToks = taNormal.getView(ViewNames.TOKENS).getConstituents();
            HashMap<Integer, Constituent> hashmap = new HashMap<>();
            // Add each constituent to the map keyed by its hash code. Check first to see if the
            // hash code is already used; if it is, report the collision.
            for (Constituent c : normalToks) {
                int code = c.hashCode();
                if (hashmap.containsKey(code)) {
                    Constituent dup = hashmap.get(code);
                    System.err.println(c + " == " + dup);
                } else {
                    hashmap.put(code, c);
                }
            }
        }
    }
}
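The collision check above is the classic check-then-put idiom. On Java 8+, putIfAbsent collapses the containsKey/get/put sequence into a single call; a minimal sketch with String standing in for Constituent ("Aa" and "BB" really do share a hash code in Java):

import java.util.HashMap;

public class CollisionCheck {
    public static void main(String[] args) {
        HashMap<Integer, String> seen = new HashMap<>();
        for (String token : new String[] { "Aa", "BB", "Ab" }) {
            // putIfAbsent inserts only when the key is new; otherwise it returns the existing value
            String dup = seen.putIfAbsent(token.hashCode(), token);
            if (dup != null && !dup.equals(token)) {
                System.err.println(token + " collides with " + dup); // BB collides with Aa
            }
        }
    }
}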
From source file:ch.epfl.lsir.xin.test.UserBasedCFTest.java
/**
 * Runs a 5-fold cross validation of a user-based collaborative filtering
 * recommender on the MovieLens 100K data set.
 * @param args
 */
public static void main(String[] args) throws Exception {
    PrintWriter logger = new PrintWriter(".//results//UserBasedCF");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File(".//conf//UserBasedCF.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    double totalMAE = 0;
    double totalRMSE = 0;
    double totalPrecision = 0;
    double totalRecall = 0;
    double totalMAP = 0;
    double totalNDCG = 0;
    double totalMRR = 0;
    double totalAUC = 0;
    int F = 5;
    logger.println(F + "-fold cross validation.");

    // Randomly distribute the ratings over F folds.
    ArrayList<ArrayList<NumericRating>> folds = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folds.add(new ArrayList<NumericRating>());
    }
    Random random = new Random();
    while (dataset.getRatings().size() > 0) {
        int index = random.nextInt(dataset.getRatings().size());
        int r = random.nextInt(F);
        folds.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int fold = 1; fold <= F; fold++) {
        logger.println("Fold: " + fold);
        System.out.println("Fold: " + fold);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folds.size(); i++) {
            if (i == fold - 1) { // test data
                testRatings.addAll(folds.get(i));
            } else { // training data
                trainRatings.addAll(folds.get(i));
            }
        }

        // Create the rating matrices; put() maps each raw user/item ID to a dense matrix index.
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        trainRatingMatrix.calculateGlobalAverage();
        trainRatingMatrix.calculateUsersMean();
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }

        logger.println("Initialize a user based collaborative filtering recommendation model.");
        UserBasedCF algo = new UserBasedCF(trainRatingMatrix, false,
                ".//localModels//" + config.getString("NAME"));
        algo.setLogger(logger);
        algo.build(); // if reading a local model, there is no need to build it
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        System.out.println(trainRatings.size() + " vs. " + testRatings.size());
        logger.flush();

        // Rating prediction accuracy.
        double RMSE = 0;
        double MAE = 0;
        double precision = 0;
        double recall = 0;
        double map = 0;
        double ndcg = 0;
        double mrr = 0;
        double auc = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()), false);
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            if (prediction > algo.getMaxRating())
                prediction = algo.getMaxRating();
            if (prediction < algo.getMinRating())
                prediction = algo.getMinRating();
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
        System.out.println("Fold --- MAE: " + MAE + " RMSE: " + RMSE);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Fold --- MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();

        // Ranking accuracy.
        if (algo.getTopN() > 0) {
            HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>();
            for (int i = 0; i < testRatingMatrix.getRow(); i++) {
                ArrayList<ResultUnit> rec = algo.getRecommendationList(i);
                if (rec == null)
                    continue;
                int total = testRatingMatrix.getUserRatingNumber(i);
                if (total == 0) // this user is ignored
                    continue;
                results.put(i, rec);
            }
            RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix,
                    trainRatingMatrix);
            precision = generator.getPrecisionN();
            totalPrecision = totalPrecision + precision;
            recall = generator.getRecallN();
            totalRecall = totalRecall + recall;
            map = generator.getMAPN();
            totalMAP = totalMAP + map;
            ndcg = generator.getNDCGN();
            totalNDCG = totalNDCG + ndcg;
            mrr = generator.getMRRN();
            totalMRR = totalMRR + mrr;
            auc = generator.getAUC();
            totalAUC = totalAUC + auc;
            System.out.println("Fold --- precision: " + precision + " recall: " + recall + " map: " + map
                    + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
            logger.println("Fold --- precision: " + precision + " recall: " + recall + " map: " + map
                    + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
        }
    }

    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    System.out.println("Precision@N: " + totalPrecision / F);
    System.out.println("Recall@N: " + totalRecall / F);
    System.out.println("MAP@N: " + totalMAP / F);
    System.out.println("MRR@N: " + totalMRR / F);
    System.out.println("NDCG@N: " + totalNDCG / F);
    System.out.println("AUC@N: " + totalAUC / F);
    // Reference results on MovieLens 100K:
    // MAE: 0.7343907480119425 RMSE: 0.9405808357192891 (shrinkage 25, neighbor size 60, PCC)
    // MAE: 0.7522376630596646 RMSE: 0.9520931265724659 (no shrinkage, neighbor size 40, COSINE)
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: " + totalMAE / F
            + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + totalPrecision / F + "\n" + "Recall@N: "
            + totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: " + totalMRR / F + "\n"
            + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F);
    logger.flush();
    logger.close();
}
From source file:ch.epfl.lsir.xin.test.MFTest.java
/**
 * Runs a 5-fold cross validation of a matrix factorization recommender on the
 * MovieLens 100K data set.
 * @param args
 */
public static void main(String[] args) throws Exception {
    PrintWriter logger = new PrintWriter(".//results//MF");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File("conf//MF.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    logger.flush();
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    double totalMAE = 0;
    double totalRMSE = 0;
    double totalPrecision = 0;
    double totalRecall = 0;
    double totalMAP = 0;
    double totalNDCG = 0;
    double totalMRR = 0;
    double totalAUC = 0;
    int F = 5;
    logger.println(F + "-fold cross validation.");
    logger.flush();

    // Randomly distribute the ratings over F folds.
    ArrayList<ArrayList<NumericRating>> folds = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folds.add(new ArrayList<NumericRating>());
    }
    Random random = new Random();
    while (dataset.getRatings().size() > 0) {
        int index = random.nextInt(dataset.getRatings().size());
        int r = random.nextInt(F);
        folds.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int fold = 1; fold <= F; fold++) {
        System.out.println("Fold: " + fold);
        logger.println("Fold: " + fold);
        logger.flush();
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folds.size(); i++) {
            if (i == fold - 1) { // test data
                testRatings.addAll(folds.get(i));
            } else { // training data
                trainRatings.addAll(folds.get(i));
            }
        }

        // Create the rating matrices; put() maps each raw user/item ID to a dense matrix index.
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a matrix factorization based recommendation model.");
        logger.flush();
        MatrixFactorization algo = new MatrixFactorization(trainRatingMatrix, false,
                ".//localModels//" + config.getString("NAME"));
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();

        // Rating prediction accuracy.
        double RMSE = 0;
        double MAE = 0;
        double precision = 0;
        double recall = 0;
        double map = 0;
        double ndcg = 0;
        double mrr = 0;
        double auc = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()), false);
            if (prediction > algo.getMaxRating())
                prediction = algo.getMaxRating();
            if (prediction < algo.getMinRating())
                prediction = algo.getMinRating();
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
        System.out.println("Fold --- MAE: " + MAE + " RMSE: " + RMSE);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Fold --- MAE: " + MAE
                + " RMSE: " + RMSE);

        // Ranking accuracy.
        if (algo.getTopN() > 0) {
            HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>();
            for (int i = 0; i < trainRatingMatrix.getRow(); i++) {
                ArrayList<ResultUnit> rec = algo.getRecommendationList(i);
                if (rec == null)
                    continue;
                int total = testRatingMatrix.getUserRatingNumber(i);
                if (total == 0) // this user is ignored
                    continue;
                results.put(i, rec);
            }
            RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix,
                    trainRatingMatrix);
            precision = generator.getPrecisionN();
            totalPrecision = totalPrecision + precision;
            recall = generator.getRecallN();
            totalRecall = totalRecall + recall;
            map = generator.getMAPN();
            totalMAP = totalMAP + map;
            ndcg = generator.getNDCGN();
            totalNDCG = totalNDCG + ndcg;
            mrr = generator.getMRRN();
            totalMRR = totalMRR + mrr;
            auc = generator.getAUC();
            totalAUC = totalAUC + auc;
            System.out.println("Fold --- precision: " + precision + " recall: " + recall + " map: " + map
                    + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
            logger.println("Fold --- precision: " + precision + " recall: " + recall + " map: " + map
                    + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc);
        }
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();
    }

    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    System.out.println("Precision@N: " + totalPrecision / F);
    System.out.println("Recall@N: " + totalRecall / F);
    System.out.println("MAP@N: " + totalMAP / F);
    System.out.println("MRR@N: " + totalMRR / F);
    System.out.println("NDCG@N: " + totalNDCG / F);
    System.out.println("AUC@N: " + totalAUC / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: " + totalMAE / F
            + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + totalPrecision / F + "\n" + "Recall@N: "
            + totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: " + totalMRR / F + "\n"
            + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F);
    logger.flush();
    logger.close();
}
From source file:com.git.ifly6.components.Census.java
public static void main(String[] args) {
    Scanner scan = new Scanner(System.in);
    try {
        region = new NSRegion(args[0]);
    } catch (ArrayIndexOutOfBoundsException e) {
        System.out.print("Please input the name of your region: \t");
        region = new NSRegion(scan.nextLine());
    }
    try {
        HashMap<String, Integer> endoMap = new HashMap<String, Integer>();
        String[] waMembers = region.getWAMembers();
        int[] valueCount = new int[waMembers.length];
        System.out.println(
                "[INFO] This census will take: " + time((int) Math.round(waitTime * waMembers.length)));
        for (int i = 0; i < waMembers.length; i++) {
            NSNation nation = new NSNation(waMembers[i]);
            valueCount[i] = nation.getEndoCount();
            endoMap.put(waMembers[i], valueCount[i]); // autoboxing; no need for new Integer(...)
            System.out.println("[LOG] Fetched information for: " + waMembers[i] + ", " + (i + 1) + " of "
                    + waMembers.length);
        }
        TreeMap<String, Integer> sortedMap = sortByValue(endoMap);
        int current = 0;
        int previous = sortedMap.firstEntry().getValue();
        System.out.printf("%-35s %12s %12s%n", "Nations", "Endorsements", "Difference");
        System.out.println("-------------------------------------------------------------");
        for (Map.Entry<String, Integer> entry : sortedMap.entrySet()) {
            String nationName = StringUtils.capitalize(entry.getKey().replace('_', ' '));
            current = entry.getValue();
            if ((previous - current) != 0) {
                System.out.printf("%-35s %12s %12s%n", nationName, entry.getValue(), (previous - current));
            } else {
                System.out.printf("%-35s %12s %12s%n", nationName, entry.getValue(), "-");
            }
            previous = entry.getValue();
        }
        System.out.println("-------------------------------------------------------------");
        System.out.printf("%-35s %12s %12s%n", "Delegate", "Endorsements", "Proportion");
        System.out.printf("%-35s %12s %12s%n",
                StringUtils.capitalize(sortedMap.firstEntry().getKey().replace('_', ' ')),
                sortedMap.firstEntry().getValue(),
                // cast before dividing, otherwise integer division truncates the proportion to 0
                (double) sortedMap.firstEntry().getValue() / waMembers.length);
    } catch (IOException e) {
        printError("Failed to fetch WA members or get endorsements in this region. "
                + "Check your internet connection or the state of the API.");
    }
    scan.close();
}
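endoMap.put(waMembers[i], valueCount[i]) above relies on autoboxing; the original new Integer(...) wrapper is never needed (and that constructor is deprecated since Java 9). For the closely related tallying pattern, merge expresses "put if absent, otherwise combine" in one call; a minimal sketch:

import java.util.HashMap;

public class Tally {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<>();
        for (String s : new String[] { "x", "y", "x" }) {
            counts.merge(s, 1, Integer::sum); // put 1 on first sight, or add 1 to the existing value
        }
        System.out.println(counts); // {x=2, y=1}
    }
}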
From source file:edu.cmu.lti.oaqa.annographix.apps.SolrSimpleIndexApp.java
public static void main(String[] args) {
    Options options = new Options();
    options.addOption("i", null, true, "Input File");
    options.addOption("u", null, true, "Solr URI");
    options.addOption("n", null, true, "Batch size");
    CommandLineParser parser = new org.apache.commons.cli.GnuParser();
    try {
        CommandLine cmd = parser.parse(options, args);
        if (cmd.hasOption("i")) {
            inputFile = cmd.getOptionValue("i");
        } else {
            Usage("Specify Input File");
        }
        if (cmd.hasOption("u")) {
            solrURI = cmd.getOptionValue("u");
        } else {
            Usage("Specify Solr URI");
        }
        if (cmd.hasOption("n")) {
            batchQty = Integer.parseInt(cmd.getOptionValue("n"));
        }
        SolrServerWrapper solrServer = new SolrServerWrapper(solrURI);
        BufferedReader inpText = new BufferedReader(
                new InputStreamReader(CompressUtils.createInputStream(inputFile)));
        XmlHelper xmlHlp = new XmlHelper();
        String docText = XmlHelper.readNextXMLIndexEntry(inpText);
        for (int docNum = 1; docText != null; ++docNum, docText = XmlHelper.readNextXMLIndexEntry(inpText)) {
            // 1. Read the document text and parse its fields.
            Map<String, String> docFields = null;
            HashMap<String, Object> objDocFields = new HashMap<String, Object>();
            try {
                docFields = xmlHlp.parseXMLIndexEntry(docText);
            } catch (SAXException e) {
                System.err.println("Parsing error, offending DOC:" + NL + docText);
                throw new Exception("Parsing error.");
            }
            // 2. Copy the String-valued fields into the Object-valued map the Solr wrapper expects.
            for (Map.Entry<String, String> e : docFields.entrySet()) {
                objDocFields.put(e.getKey(), e.getValue());
            }
            solrServer.indexDocument(objDocFields);
            if ((docNum - 1) % batchQty == 0)
                solrServer.indexCommit();
        }
        solrServer.indexCommit();
    } catch (ParseException e) {
        Usage("Cannot parse arguments");
    } catch (Exception e) {
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }
}
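The field-copy loop above re-puts every String value into a HashMap<String, Object> one entry at a time. Because the HashMap copy constructor (and putAll) accepts a Map<? extends K, ? extends V>, the loop can be replaced by a single copy when no per-entry transformation is needed; a minimal sketch (field names illustrative):

import java.util.HashMap;
import java.util.Map;

public class WidenCopy {
    public static void main(String[] args) {
        Map<String, String> docFields = Map.of("title", "t", "body", "b");
        // HashMap(Map<? extends K, ? extends V>) accepts the narrower value type directly
        HashMap<String, Object> objDocFields = new HashMap<>(docFields);
        objDocFields.put("docNum", 42); // the Object-valued map can now hold non-String values
        System.out.println(objDocFields);
    }
}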
From source file:edu.pitt.dbmi.facebase.hd.HumanDataController.java
/**
 * Entry point to the application. It first gathers properties from the hd.properties file on the
 * CLASSPATH. Most properties are localisms to do with file path information, database connection
 * strings, and TrueCrypt program operation; the application should run after editing only
 * hd.properties. That file has the usual java properties fragility plus app-specific fragility:
 * for example, sshServerUrl must end with a colon, as in root@server:
 * Program properties can also be set below, by filling in the empty strings that follow each
 * declaration AND commenting out the try-catch clause that gathers them from hd.properties.
 */
public static void main(String[] args) {
    /** holds human-readable error data to be passed to addError() */
    String errorString = "";
    log.info("HumanDataController Started");
    /** length of time to sleep between polling loops; 5 secs is responsive, 5 mins is a lot */
    String sleepFor = "";
    /** prefix path where TrueCrypt write operations are performed (i.e. /tmp or /var/tmp), no trailing slash */
    String trueCryptBasePath = "";
    /** TrueCrypt volume file extension (probably .tc or .zip) */
    String trueCryptExtension = "";
    /** middle-of-path directory name to be created where the TrueCrypt volume will be mounted */
    String trueCryptMountpoint = "";
    /** Human Data Server database credentials */
    String hdDbUser = "";
    String hdPasswd = "";
    String hdJdbcUrl = "";
    /** Hub database credentials */
    String fbDbUser = "";
    String fbPasswd = "";
    String fbJdbcUrl = "";
    /** full path to the truecrypt binary (i.e. /usr/bin/truecrypt) */
    String trueCryptBin = "";
    /** full path to the scp binary (i.e. /usr/bin/scp) */
    String scpBin = "";
    /** user@host portion of the scp destination argument (i.e. root@www.server.com:) */
    String sshServerUrl = "";
    /** file path portion of the scp destination argument (i.e. /usr/local/downloads/) */
    String finalLocation = "";
    /** full path to the touch binary (i.e. /bin/touch) */
    String touchBin = "";
    /** hardcoded truecrypt parameters; run "truecrypt -h" to learn about these */
    String algorithm = "";
    String hash = "";
    String filesystem = "";
    String volumeType = "";
    String randomSource = "";
    String protectHidden = "";
    String extraArgs = "";
    /** truecrypt parameters are packed into a map so only one argument (this map) is passed to the method invoking truecrypt */
    HashMap<String, String> trueCryptParams = new HashMap<String, String>();
    trueCryptParams.put("trueCryptBin", "");
    trueCryptParams.put("scpBin", "");
    trueCryptParams.put("sshServerUrl", "");
    trueCryptParams.put("finalLocation", "");
    trueCryptParams.put("touchBin", "");
    trueCryptParams.put("algorithm", "");
    trueCryptParams.put("hash", "");
    trueCryptParams.put("filesystem", "");
    trueCryptParams.put("volumeType", "");
    trueCryptParams.put("randomSource", "");
    trueCryptParams.put("protectHidden", "");
    trueCryptParams.put("extraArgs", "");
    try {
        /** the properties file name is hardcoded to hd.properties; it must be at the root of the classpath */
        final Configuration config = new PropertiesConfiguration("hd.properties");
        sleepFor = config.getString("sleepFor");
        hubURL = config.getString("hubURL");
        responseTrigger = config.getString("responseTrigger");
        trueCryptBasePath = config.getString("trueCryptBasePath");
        trueCryptExtension = config.getString("trueCryptExtension");
        trueCryptMountpoint = config.getString("trueCryptMountpoint");
        hdDbUser = config.getString("hdDbUser");
        hdPasswd = config.getString("hdPasswd");
        hdJdbcUrl = config.getString("hdJdbcUrl");
        fbDbUser = config.getString("fbDbUser");
        fbPasswd = config.getString("fbPasswd");
        fbJdbcUrl = config.getString("fbJdbcUrl");
        trueCryptBin = config.getString("trueCryptBin");
        scpBin = config.getString("scpBin");
        sshServerUrl = config.getString("sshServerUrl");
        finalLocation = config.getString("finalLocation");
        touchBin = config.getString("touchBin");
        algorithm = config.getString("algorithm");
        hash = config.getString("hash");
        filesystem = config.getString("filesystem");
        volumeType = config.getString("volumeType");
        randomSource = config.getString("randomSource");
        protectHidden = config.getString("protectHidden");
        extraArgs = config.getString("extraArgs");
        // Overwrite the placeholder entries with the loaded values; put() replaces on duplicate keys.
        trueCryptParams.put("trueCryptBin", trueCryptBin);
        trueCryptParams.put("scpBin", scpBin);
        trueCryptParams.put("sshServerUrl", sshServerUrl);
        trueCryptParams.put("finalLocation", finalLocation);
        trueCryptParams.put("touchBin", touchBin);
        trueCryptParams.put("algorithm", algorithm);
        trueCryptParams.put("hash", hash);
        trueCryptParams.put("filesystem", filesystem);
        trueCryptParams.put("volumeType", volumeType);
        trueCryptParams.put("randomSource", randomSource);
        trueCryptParams.put("protectHidden", protectHidden);
        trueCryptParams.put("extraArgs", extraArgs);
        log.debug("properties file loaded successfully");
    } catch (final ConfigurationException e) {
        errorString = "Properties file problem";
        String logString = e.getMessage();
        addError(errorString, logString);
        log.error(errorString);
    }
    log.debug("initialize static class variable HumanDataManager declared earlier");
    hdm = new HumanDataManager(hdDbUser, hdPasswd, hdJdbcUrl);
    log.debug("declare and initialize InstructionQueueManager");
    InstructionQueueManager iqm = new InstructionQueueManager(fbDbUser, fbPasswd, fbJdbcUrl);
    log.debug("pass to the logfile/console all startup parameters for troubleshooting");
    log.info("HumanDataController started with these settings from hd.properties: " + "hubURL=" + hubURL + " "
            + "responseTrigger=" + responseTrigger + " " + "trueCryptBasePath=" + trueCryptBasePath + " "
            + "trueCryptExtension=" + trueCryptExtension + " " + "trueCryptMountpoint=" + trueCryptMountpoint
            + " " + "hdDbUser=" + hdDbUser + " " + "hdPasswd=" + hdPasswd + " " + "hdJdbcUrl=" + hdJdbcUrl + " "
            + "fbDbUser=" + fbDbUser + " " + "fbPasswd=" + fbPasswd + " " + "fbJdbcUrl=" + fbJdbcUrl + " "
            + "trueCryptBin=" + trueCryptBin + " " + "scpBin=" + scpBin + " " + "sshServerUrl=" + sshServerUrl
            + " " + "finalLocation=" + finalLocation + " " + "touchBin=" + touchBin + " " + "algorithm="
            + algorithm + " " + "hash=" + hash + " " + "filesystem=" + filesystem + " " + "volumeType="
            + volumeType + " " + "randomSource=" + randomSource + " " + "protectHidden=" + protectHidden + " "
            + "extraArgs=" + extraArgs);
    log.debug("Enter infinite loop where the program continuously polls the Hub server database for new requests");
    while (true) {
        log.debug("LOOP START");
        try {
            Thread.sleep(Integer.parseInt(sleepFor) * 1000);
        } catch (InterruptedException ie) {
            errorString = "Failed to sleep, got interrupted.";
            log.error(errorString, ie);
            addError(errorString, ie.getMessage());
        }
        log.debug("About to invoke InstructionQueueManager.queryInstructions()--Hibernate to fb_queue starts NOW");
        List<InstructionQueueItem> aiqi = iqm.queryInstructions();
        log.debug("Currently there are " + aiqi.size() + " items in the queue");
        InstructionQueueItem iqi;
        String instructionName = "";
        log.debug("About to send http request -status- telling Hub we are alive:");
        httpGetter("status", "0");
        if (aiqi.size() > 0) {
            log.debug("There is at least one request, status=pending, queue item; commence processing of most recent item");
            iqi = aiqi.get(0);
            log.debug("About to get existing user key, or create a new one, via fb_keychain Hibernate");
            FbKey key = hdm.queryKey(iqi.getUid());
            log.debug("About to pull the JSON Instructions string, and other items, from the InstructionQueueItem");
            String instructionsString = iqi.getInstructions();
            instructionName = iqi.getName();
            log.debug("About to create a new FileManager object with:");
            log.debug(instructionName + trueCryptBasePath + trueCryptExtension + trueCryptMountpoint);
            FileManager fm = new FileManager(instructionName, trueCryptBasePath, trueCryptExtension,
                    trueCryptMountpoint);
            ArrayList<Instructions> ali = new ArrayList<Instructions>();
            log.debug("FileManager.makeInstructionsObjects() creates multiple Instruction objects from the InstructionQueueItem.getInstructions() value");
            if (fm.makeInstructionsObjects(instructionsString, ali)) {
                log.debug("FileManager.makeInstructionsObjects() returned true");
            } else {
                errorString = "FileManager.makeInstructionsObjects() returned false";
                log.error(errorString);
                addError(errorString, "");
            }
            log.debug("FileManager.makeFiles() uses its list of Instruction objects to make/get the requested data files");
            if (fm.makeFiles(ali)) {
                log.debug("FileManager.makeFiles() returned true");
            } else {
                errorString = "FileManager.makeFiles() returned false";
                log.error(errorString);
                addError(errorString, "");
            }
            // Sends size/100000 as seconds (100k/sec)... this should really be measured seconds.
            long bytesPerSecond = 100000;
            Long timeToMake = Long.valueOf(fm.getSize() / bytesPerSecond);
            String timeToMakeString = timeToMake.toString();
            log.debug("Send http request -status- to Hub with total creation time estimate:");
            log.debug(timeToMakeString);
            httpGetter("status", timeToMakeString);
            log.debug("Update the queue_item row with the total size of the data being packaged with InstructionQueueManager.updateInstructionSize()");
            if (iqm.updateInstructionSize(fm.getSize(), iqi.getQid())) {
                log.debug("InstructionQueueManager.updateInstructionSize() returned true");
            } else {
                errorString = "InstructionQueueManager.updateInstructionSize() returned false";
                log.error(errorString);
                addError(errorString, "");
            }
            log.debug("About to make new TrueCryptManager with these args:");
            log.debug(key.getEncryption_key() + fm.getSize() + fm.getTrueCryptPath()
                    + fm.getTrueCryptVolumePath() + trueCryptParams);
            TrueCryptManager tcm = new TrueCryptManager(key.getEncryption_key(), fm.getSize(),
                    fm.getTrueCryptPath(), fm.getTrueCryptVolumePath(), trueCryptParams);
            if (tcm.touchVolume()) {
                log.debug("TrueCryptManager.touchVolume() returned true, touched file");
            } else {
                errorString = "TrueCryptManager.touchVolume() returned false";
                log.error(errorString);
                addError(errorString, "");
            }
            if (tcm.makeVolume()) {
                log.debug("TrueCryptManager.makeVolume() returned true, created TrueCrypt volume");
            } else {
                errorString = "TrueCryptManager.makeVolume() returned false";
                log.error(errorString);
                addError(errorString, "");
            }
            if (tcm.mountVolume()) {
                log.debug("TrueCryptManager.mountVolume() returned true, mounted TrueCrypt volume");
            } else {
                errorString = "TrueCryptManager.mountVolume() returned false";
                log.error(errorString);
                addError(errorString, "");
            }
            if (fm.copyFilesToVolume(ali)) {
                log.debug("FileManager.copyFilesToVolume() returned true, copied requested files to mounted volume");
            } else {
                errorString = "FileManager.copyFilesToVolume() returned false";
                log.error(errorString);
                addError(errorString, "");
            }
            if (tcm.disMountVolume()) {
                log.debug("TrueCryptManager.disMountVolume() returned true, unmounted TrueCrypt volume");
            } else {
                errorString = "TrueCryptManager.disMountVolume() returned false";
                log.error(errorString);
                addError(errorString, "");
            }
            if (tcm.sendVolumeToFinalLocation()) {
                log.debug("TrueCryptManager.sendVolumeToFinalLocation() returned true, copied TrueCrypt volume to retrievable, final location");
            } else {
                errorString = "TrueCryptManager.sendVolumeToFinalLocation() returned false";
                log.error(errorString);
                addError(errorString, "");
            }
            if (iqm.updateInstructionToCompleted(tcm.getFinalLocation() + fm.getTrueCryptFilename(),
                    fm.getSize(), iqi.getQid(), getErrors(), getLogs())) {
                log.debug("InstructionQueueManager.updateInstructionToCompleted() returned true");
                log.debug("Processing of queue item is almost finished, updated fb_queue item row with location, size, status, errors, logs:");
                log.debug(tcm.getFinalLocation() + fm.getTrueCryptFilename() + fm.getSize() + iqi.getQid()
                        + getErrors() + getLogs());
            } else {
                errorString = "InstructionQueueManager.updateInstructionToCompleted() returned false";
                log.error(errorString);
                addError(errorString, "");
            }
            log.debug("About to send http request -update- telling Hub which item is finished.");
            httpGetter("update", iqi.getHash());
            log.debug("Finished processing pending queue item, status should now be complete or error");
        } else {
            log.debug("Zero queue items");
        }
        log.debug("LOOP END");
    }
}
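The controller above seeds trueCryptParams with empty-string placeholders and then overwrites each entry after loading hd.properties, leaning on put's replace-on-duplicate-key semantics. A condensed sketch of that defaults-then-override pattern (keys and values illustrative):

import java.util.HashMap;

public class Defaults {
    public static void main(String[] args) {
        HashMap<String, String> params = new HashMap<>();
        params.put("hash", "");         // placeholder default
        params.put("algorithm", "");    // placeholder default
        params.put("hash", "SHA-512");  // a later put for the same key replaces the default
        System.out.println(params);     // {hash=SHA-512, algorithm=}
    }
}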
From source file:ISMAGS.CommandLineInterface.java
public static void main(String[] args) throws IOException {
    String folder = null, files = null, motifspec = null, output = null;
    Options opts = new Options();
    opts.addOption("folder", true, "Folder name");
    opts.addOption("linkfiles", true,
            "Link files separated by spaces (format: linktype[char] directed[d/u] filename)");
    opts.addOption("motif", true, "Motif description by two strings (format: linktypes)");
    opts.addOption("output", true, "Output file name");
    CommandLineParser parser = new PosixParser();
    try {
        CommandLine cmd = parser.parse(opts, args);
        if (cmd.hasOption("folder")) {
            folder = cmd.getOptionValue("folder");
        }
        if (cmd.hasOption("linkfiles")) {
            files = cmd.getOptionValue("linkfiles");
        }
        if (cmd.hasOption("motif")) {
            motifspec = cmd.getOptionValue("motif");
        }
        if (cmd.hasOption("output")) {
            output = cmd.getOptionValue("output");
        }
    } catch (ParseException e) {
        Die("Error: Parsing error");
    }
    if (print) {
        printBanner(folder, files, motifspec, output);
    }
    if (folder == null || files == null || motifspec == null || output == null) {
        Die("Error: not all options are provided");
    } else {
        ArrayList<String> linkfiles = new ArrayList<String>();
        ArrayList<String> linkTypes = new ArrayList<String>();
        ArrayList<String> sourcenetworks = new ArrayList<String>();
        ArrayList<String> destinationnetworks = new ArrayList<String>();
        ArrayList<Boolean> directed = new ArrayList<Boolean>();
        StringTokenizer st = new StringTokenizer(files, " ");
        while (st.hasMoreTokens()) {
            linkTypes.add(st.nextToken());
            directed.add(st.nextToken().equals("d"));
            sourcenetworks.add(st.nextToken());
            destinationnetworks.add(st.nextToken());
            linkfiles.add(folder + st.nextToken());
        }
        // Canonicalize link types: one LinkType instance per leading character, reused on repeats.
        ArrayList<LinkType> allLinkTypes = new ArrayList<LinkType>();
        HashMap<Character, LinkType> typeTranslation = new HashMap<Character, LinkType>();
        for (int i = 0; i < linkTypes.size(); i++) {
            String n = linkTypes.get(i);
            char nn = n.charAt(0);
            LinkType t = typeTranslation.get(nn);
            if (t == null) {
                t = new LinkType(directed.get(i), n, i, nn, sourcenetworks.get(i), destinationnetworks.get(i));
            }
            allLinkTypes.add(t);
            typeTranslation.put(nn, t);
        }
        if (print) {
            System.out.println("Reading network..");
        }
        Network network = Network.readNetworkFromFiles(linkfiles, allLinkTypes);
        Motif motif = getMotif(motifspec, typeTranslation);
        if (print) {
            System.out.println("Starting the search..");
        }
        MotifFinder mf = new MotifFinder(network);
        long tijd = System.nanoTime();
        Set<MotifInstance> motifs = mf.findMotif(motif, false);
        tijd = System.nanoTime() - tijd;
        if (print) {
            System.out.println("Completed search in " + tijd / 1000000 + " milliseconds");
        }
        if (print) {
            System.out.println("Found " + motifs.size() + " instances of " + motifspec + " motif");
        }
        if (print) {
            System.out.println("Writing instances to file: " + output);
        }
        printMotifs(motifs, output);
        if (print) {
            System.out.println("Done.");
        }
    }
}
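typeTranslation above implements interning/canonicalization: get, construct on a miss, then put so later occurrences of the same character reuse the same LinkType. On Java 8+, computeIfAbsent captures that whole idiom in one call; a minimal sketch with String standing in for LinkType:

import java.util.HashMap;

public class Canonicalize {
    public static void main(String[] args) {
        HashMap<Character, String> typeTranslation = new HashMap<>();
        for (String name : new String[] { "ppi", "phos", "ppi2" }) {
            char key = name.charAt(0);
            // construct once per key; subsequent lookups reuse the cached instance
            String t = typeTranslation.computeIfAbsent(key, k -> "LinkType(" + k + ")");
            System.out.println(name + " -> " + t);
        }
    }
}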
From source file:com.appeligo.epg.EpgIndexer.java
public static void main(String[] args) throws Exception {
    HessianProxyFactory factory = new HessianProxyFactory();
    EPGProvider epg = (EPGProvider) factory.create(EPGProvider.class, "http://localhost/epg/channel.epg");
    Calendar cal = Calendar.getInstance();
    cal.add(Calendar.HOUR, -5);
    cal.set(Calendar.DATE, 1);
    String[] lineups = new String[] { "SDTW-C", "P-C", "P-DC", "P-S", "M-C", "M-DC", "M-S", "E-C", "E-DC",
            "E-S", "H-C", "H-DC", "H-S" };
    List<String> ids = epg.getModifiedProgramIds(cal.getTime());
    int count = 0;
    long average = 0;
    int counter = 0;
    int added = 0;
    while (count < ids.size()) {
        System.err.println("in loop: " + counter + ", " + count + "," + ids.size());
        int subsetSize = (ids.size() < 100 ? ids.size() : 100);
        counter++;
        if (count % 1000 == 0) {
            log.debug("Index programs into the Lucene Index. Currently processed " + count
                    + " programs out of " + ids.size());
        }
        int endIndex = (count + subsetSize > ids.size() ? ids.size() : count + subsetSize);
        List<String> subset = ids.subList(count, endIndex);
        count += subsetSize;
        long time = System.currentTimeMillis();
        // Group each program's showings by program id, one list per program.
        HashMap<String, List<ScheduledProgram>> schedules = new HashMap<String, List<ScheduledProgram>>();
        for (String lineup : lineups) {
            ScheduledProgram[] programs = epg.getNextShowingList(lineup, subset);
            for (ScheduledProgram program : programs) {
                if (program != null) {
                    List<ScheduledProgram> schedule = schedules.get(program.getProgramId());
                    if (schedule == null) {
                        schedule = new ArrayList<ScheduledProgram>();
                        schedules.put(program.getProgramId(), schedule);
                    }
                    schedule.add(program);
                    added++;
                }
            }
        }
        long after = System.currentTimeMillis();
        long diff = after - time;
        average += diff;
        System.err.println(diff + " - " + (average / counter) + " added: " + added);
    }
}
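The schedules map above is the standard multimap-via-HashMap pattern: look up the list for a key, create and put it on first sight, then append. computeIfAbsent condenses those four lines into one; a minimal sketch (keys and values illustrative):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class Grouping {
    public static void main(String[] args) {
        HashMap<String, List<String>> schedules = new HashMap<>();
        for (String s : new String[] { "p1:mon", "p2:tue", "p1:wed" }) {
            String[] parts = s.split(":");
            // create the list on first use, then append to it
            schedules.computeIfAbsent(parts[0], k -> new ArrayList<>()).add(parts[1]);
        }
        System.out.println(schedules); // {p1=[mon, wed], p2=[tue]}
    }
}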
From source file:biomine.nodeimportancecompression.ImportanceCompressionReport.java
public static void main(String[] args) throws IOException, java.text.ParseException {
    opts.addOption("algorithm", true,
            "Used algorithm for compression. Possible values are 'brute-force', "
                    + "'brute-force-edges','brute-force-merges','randomized','randomized-merges',"
                    + "'randomized-edges','fast-brute-force',"
                    + "'fast-brute-force-merges','fast-brute-force-merge-edges'. Default is 'brute-force'.");
    opts.addOption("query", true, "Query nodes ids, separated by comma.");
    opts.addOption("queryfile", true, "Read query nodes from file.");
    opts.addOption("ratio", true, "Goal ratio");
    opts.addOption("importancefile", true, "Read importances straight from file");
    opts.addOption("keepedges", false, "Don't remove edges during merges");
    opts.addOption("connectivity", false, "Compute and output connectivities in edge oriented case");
    opts.addOption("paths", false, "Do path oriented compression");
    opts.addOption("edges", false, "Do edge oriented compression");

    double sigma = 1.0;
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(opts, args);
    } catch (ParseException e) {
        e.printStackTrace();
        System.exit(0);
    }
    String queryStr = cmd.getOptionValue("query");
    String[] queryNodeIDs = {};
    double[] queryNodeIMP = {};
    if (queryStr != null) {
        queryNodeIDs = queryStr.split(",");
        queryNodeIMP = new double[queryNodeIDs.length];
        for (int i = 0; i < queryNodeIDs.length; i++) {
            String s = queryNodeIDs[i];
            String[] es = s.split("=");
            queryNodeIMP[i] = 1;
            if (es.length == 2) {
                queryNodeIDs[i] = es[0];
                queryNodeIMP[i] = Double.parseDouble(es[1]);
            } else if (es.length > 2) {
                System.out.println("Too many '=' in querynode specification: " + s);
            }
        }
    }
    String queryFile = cmd.getOptionValue("queryfile");
    Map<String, Double> queryNodes = Collections.emptyMap();
    if (queryFile != null) {
        File in = new File(queryFile);
        BufferedReader read = new BufferedReader(new FileReader(in));
        queryNodes = readMap(read);
        read.close();
    }
    String impfile = cmd.getOptionValue("importancefile");
    Map<String, Double> importances = null;
    if (impfile != null) {
        File in = new File(impfile);
        BufferedReader read = new BufferedReader(new FileReader(in));
        importances = readMap(read);
        read.close();
    }
    String algoStr = cmd.getOptionValue("algorithm");
    CompressionAlgorithm algo = null;
    if (algoStr == null || algoStr.equals("brute-force")) {
        algo = new BruteForceCompression();
    } else if (algoStr.equals("brute-force-edges")) {
        algo = new BruteForceCompressionOnlyEdges();
    } else if (algoStr.equals("brute-force-merges")) {
        algo = new BruteForceCompressionOnlyMerges();
    } else if (algoStr.equals("fast-brute-force-merges")) {
        algo = new FastBruteForceCompression(true, false);
    } else if (algoStr.equals("fast-brute-force-edges")) {
        algo = new FastBruteForceCompression(false, true);
    } else if (algoStr.equals("fast-brute-force")) {
        algo = new FastBruteForceCompression(true, true);
    } else if (algoStr.equals("randomized-edges")) {
        algo = new RandomizedCompressionOnlyEdges(); // modified
    } else if (algoStr.equals("randomized")) {
        algo = new RandomizedCompression();
    } else if (algoStr.equals("randomized-merges")) {
        algo = new RandomizedCompressionOnlyMerges();
    } else {
        System.out.println("Unsupported algorithm: " + algoStr);
        printHelp();
    }
    String ratioStr = cmd.getOptionValue("ratio");
    double ratio = 0;
    if (ratioStr != null) {
        ratio = Double.parseDouble(ratioStr);
    } else {
        System.out.println("Goal ratio not specified");
        printHelp();
    }
    String infile = null;
    if (cmd.getArgs().length != 0) {
        infile = cmd.getArgs()[0];
    } else {
        printHelp();
    }
    BMGraph bmg = BMGraphUtils.readBMGraph(new File(infile));
    // Re-key the query map by node object; put() pairs each resolved node with its importance.
    HashMap<BMNode, Double> queryBMNodes = new HashMap<BMNode, Double>();
    for (String id : queryNodes.keySet()) {
        queryBMNodes.put(bmg.getNode(id), queryNodes.get(id));
    }
    long startMillis = System.currentTimeMillis();
    ImportanceGraphWrapper wrap = QueryImportance.queryImportanceGraph(bmg, queryBMNodes);
    if (importances != null) {
        for (String id : importances.keySet()) {
            wrap.setImportance(bmg.getNode(id), importances.get(id));
        }
    }
    ImportanceMerger merger = null;
    if (cmd.hasOption("edges")) {
        merger = new ImportanceMergerEdges(wrap.getImportanceGraph());
    } else if (cmd.hasOption("paths")) {
        merger = new ImportanceMergerPaths(wrap.getImportanceGraph());
    } else {
        System.out.println("Specify either 'paths' or 'edges'.");
        System.exit(1);
    }
    if (cmd.hasOption("keepedges")) {
        merger.setKeepEdges(true);
    }
    algo.compress(merger, ratio);
    long endMillis = System.currentTimeMillis();

    // write importance
    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("importance.txt", false));
        for (BMNode nod : bmg.getNodes()) {
            wr.write(nod + " " + wrap.getImportance(nod) + "\n");
        }
        wr.close();
    }
    // write uncompressed edges
    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("edges.txt", false));
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph ucom = merger.getUncompressedGraph();
        for (int i = 0; i <= orig.getMaxNodeId(); i++) {
            String iname = wrap.intToNode(i).toString();
            HashSet<Integer> ne = new HashSet<Integer>();
            ne.addAll(orig.getNeighbors(i));
            ne.addAll(ucom.getNeighbors(i));
            for (int j : ne) {
                if (i < j)
                    continue;
                String jname = wrap.intToNode(j).toString();
                double a = orig.getEdgeWeight(i, j);
                double b = ucom.getEdgeWeight(i, j);
                wr.write(iname + " " + jname + " " + a + " " + b + " " + Math.abs(a - b));
                wr.write("\n");
            }
        }
        wr.close();
    }
    // write distance
    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("distance.txt", true)); // modified by Fang
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph ucom = merger.getUncompressedGraph();
        double error = 0;
        for (int i = 0; i <= orig.getMaxNodeId(); i++) {
            HashSet<Integer> ne = new HashSet<Integer>();
            ne.addAll(orig.getNeighbors(i));
            ne.addAll(ucom.getNeighbors(i));
            for (int j : ne) {
                if (i <= j)
                    continue;
                double a = orig.getEdgeWeight(i, j);
                double b = ucom.getEdgeWeight(i, j);
                // modified by Fang: weight each term by imp(u)*imp(v)
                error += (a - b) * (a - b) * wrap.getImportance(i) * wrap.getImportance(j);
            }
        }
        error = Math.sqrt(error);
        wr.write("" + error);
        wr.write("\n");
        wr.close();
    }
    // write sizes
    {
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph comp = merger.getCurrentGraph();
        BufferedWriter wr = new BufferedWriter(new FileWriter("sizes.txt", true)); // modified by Fang
        wr.write(orig.getNodeCount() + " " + orig.getEdgeCount() + " " + comp.getNodeCount() + " "
                + comp.getEdgeCount());
        wr.write("\n");
        wr.close();
    }
    // write time
    {
        System.out.println("writing time");
        BufferedWriter wr = new BufferedWriter(new FileWriter("time.txt", true)); // modified by Fang
        double secs = (endMillis - startMillis) * 0.001;
        wr.write("" + secs + "\n");
        wr.close();
    }
    // write change of connectivity for the edge-oriented case (added by Fang)
    {
        if (cmd.hasOption("connectivity")) {
            BufferedWriter wr = new BufferedWriter(new FileWriter("connectivity.txt", true));
            ImportanceGraph orig = wrap.getImportanceGraph();
            ImportanceGraph ucom = merger.getUncompressedGraph();
            double diff = 0;
            for (int i = 0; i <= orig.getMaxNodeId(); i++) {
                ProbDijkstra pdori = new ProbDijkstra(orig, i);
                ProbDijkstra pducom = new ProbDijkstra(ucom, i);
                for (int j = i + 1; j <= orig.getMaxNodeId(); j++) {
                    double oriconn = pdori.getProbTo(j);
                    double ucomconn = pducom.getProbTo(j);
                    diff = diff + (oriconn - ucomconn) * (oriconn - ucomconn) * wrap.getImportance(i)
                            * wrap.getImportance(j);
                }
            }
            diff = Math.sqrt(diff);
            wr.write("" + diff);
            wr.write("\n");
            wr.close();
        }
    }
    // write output graph
    {
        BMGraph output = bmg;
        int no = 0;
        BMNode[] nodes = new BMNode[merger.getGroups().size()];
        for (ArrayList<Integer> gr : merger.getGroups()) {
            BMNode bmgroup = new BMNode("Group", "" + (no + 1));
            bmgroup.setAttributes(new HashMap<String, String>());
            bmgroup.put("autoedges", "0");
            nodes[no] = bmgroup;
            no++;
            if (gr.size() == 0)
                continue;
            for (int x : gr) {
                BMNode nod = output.getNode(wrap.intToNode(x).toString());
                BMEdge belongs = new BMEdge(nod, bmgroup, "belongs_to");
                output.ensureHasEdge(belongs);
            }
            output.ensureHasNode(bmgroup);
        }
        for (int i = 0; i < nodes.length; i++) {
            for (int x : merger.getCurrentGraph().getNeighbors(i)) {
                if (x == i) {
                    nodes[x].put("selfedge", "" + merger.getCurrentGraph().getEdgeWeight(i, x));
                    continue;
                }
                BMEdge ge = new BMEdge(nodes[x], nodes[i], "groupedge");
                ge.setAttributes(new HashMap<String, String>());
                ge.put("goodness", "" + merger.getCurrentGraph().getEdgeWeight(i, x));
                output.ensureHasEdge(ge);
            }
        }
        System.out.println(output.getGroupNodes());
        BMGraphUtils.writeBMGraph(output, "output.bmg");
    }
}