List of usage examples for java.lang.Math.abs
@HotSpotIntrinsicCandidate public static double abs(double a)
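Before the longer, real-world examples below, a minimal standalone sketch (not drawn from any of the listed source files) of the abs overloads and their documented edge cases:

public class AbsBasics {
    public static void main(String[] args) {
        // double overload: the one declared above
        System.out.println(Math.abs(-3.5));              // 3.5
        System.out.println(Math.abs(-0.0));              // 0.0 (positive zero)
        System.out.println(Math.abs(Double.NaN));        // NaN

        // int and long overloads share the same name
        System.out.println(Math.abs(-42));               // 42

        // Caveat: the int overload overflows for Integer.MIN_VALUE
        System.out.println(Math.abs(Integer.MIN_VALUE)); // -2147483648
    }
}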
From source file:FaceRatios.java
@SuppressWarnings("serial") public static void main(String[] args) { int r = FSDK.ActivateLibrary(FACE_SDK_LICENSE); if (r == FSDK.FSDKE_OK) { FSDK.Initialize();// ww w . j a va2 s.c o m FSDK.SetFaceDetectionParameters(true, true, 384); Map<String, Map<String, ArrayList<Double>>> faceProperties = new HashMap<>(); for (String directory : new File(FACE_DIRECTORY).list()) { if (new File(FACE_DIRECTORY + directory).isDirectory()) { Map<String, ArrayList<Double>> properties = new HashMap<String, ArrayList<Double>>() { { for (String property : propertyNames) put(property, new ArrayList<Double>()); } }; File[] files = new File(FACE_DIRECTORY + directory).listFiles(); System.out.println("Analyzing " + directory + " with " + files.length + " files\n"); for (File file : files) { if (file.isFile()) { HImage imageHandle = new HImage(); FSDK.LoadImageFromFileW(imageHandle, file.getAbsolutePath()); FSDK.TFacePosition.ByReference facePosition = new FSDK.TFacePosition.ByReference(); if (FSDK.DetectFace(imageHandle, facePosition) == FSDK.FSDKE_OK) { FSDK_Features.ByReference facialFeatures = new FSDK_Features.ByReference(); FSDK.DetectFacialFeaturesInRegion(imageHandle, (FSDK.TFacePosition) facePosition, facialFeatures); Point[] featurePoints = new Point[FSDK.FSDK_FACIAL_FEATURE_COUNT]; for (int i = 0; i < FSDK.FSDK_FACIAL_FEATURE_COUNT; i++) { featurePoints[i] = new Point(0, 0); featurePoints[i].x = facialFeatures.features[i].x; featurePoints[i].y = facialFeatures.features[i].y; } double eyeDistance = featureDistance(featurePoints, FeatureID.LEFT_EYE, FeatureID.RIGHT_EYE); double rightEyeSize = featureDistance(featurePoints, FeatureID.RIGHT_EYE_INNER_CORNER, FeatureID.RIGHT_EYE_OUTER_CORNER); double leftEyeSize = featureDistance(featurePoints, FeatureID.LEFT_EYE_INNER_CORNER, FeatureID.LEFT_EYE_OUTER_CORNER); double averageEyeSize = (rightEyeSize + leftEyeSize) / 2; double mouthLength = featureDistance(featurePoints, FeatureID.MOUTH_RIGHT_CORNER, FeatureID.MOUTH_LEFT_CORNER); double mouthHeight = featureDistance(featurePoints, FeatureID.MOUTH_BOTTOM, FeatureID.MOUTH_TOP); double noseHeight = featureDistance(featurePoints, FeatureID.NOSE_BOTTOM, FeatureID.NOSE_BRIDGE); double chinHeight = featureDistance(featurePoints, FeatureID.CHIN_BOTTOM, FeatureID.MOUTH_BOTTOM); double chinToBridgeHeight = featureDistance(featurePoints, FeatureID.CHIN_BOTTOM, FeatureID.NOSE_BRIDGE); double faceContourLeft = (featurePoints[FeatureID.CHIN_BOTTOM.getIndex()].getY() - featurePoints[FeatureID.FACE_CONTOUR2.getIndex()].getY()) / (featurePoints[FeatureID.CHIN_BOTTOM.getIndex()].getX() - featurePoints[FeatureID.FACE_CONTOUR2.getIndex()].getX()); double faceContourRight = (featurePoints[FeatureID.CHIN_BOTTOM.getIndex()].getY() - featurePoints[FeatureID.FACE_CONTOUR12.getIndex()].getY()) / (featurePoints[FeatureID.CHIN_BOTTOM.getIndex()].getX() - featurePoints[FeatureID.FACE_CONTOUR12.getIndex()].getX()); double bridgeLeftEyeDistance = featureDistance(featurePoints, FeatureID.LEFT_EYE_INNER_CORNER, FeatureID.NOSE_BRIDGE); double bridgeRightEyeDistance = featureDistance(featurePoints, FeatureID.RIGHT_EYE_INNER_CORNER, FeatureID.NOSE_BRIDGE); properties.get("eyeSize/eyeDistance").add(averageEyeSize / eyeDistance); properties.get("eyeSizeDisparity") .add(Math.abs(leftEyeSize - rightEyeSize) / averageEyeSize); properties.get("bridgeToEyeDisparity") .add(Math.abs(bridgeLeftEyeDistance - bridgeRightEyeDistance) / ((bridgeLeftEyeDistance + bridgeRightEyeDistance) / 2)); properties.get("eyeDistance/mouthLength").add(eyeDistance / 
mouthLength); properties.get("eyeDistance/noseHeight").add(eyeDistance / noseHeight); properties.get("eyeSize/mouthLength").add(eyeDistance / mouthLength); properties.get("eyeSize/noseHeight").add(eyeDistance / noseHeight); properties.get("mouthLength/mouthHeight").add(mouthLength / mouthHeight); properties.get("chinHeight/noseHeight").add(chinHeight / noseHeight); properties.get("chinHeight/chinToBridgeHeight") .add(chinHeight / chinToBridgeHeight); properties.get("noseHeight/chinToBridgeHeight") .add(noseHeight / chinToBridgeHeight); properties.get("mouthHeight/chinToBridgeHeight") .add(mouthHeight / chinToBridgeHeight); properties.get("faceCountourAngle") .add(Math.toDegrees(Math.atan((faceContourLeft - faceContourRight) / (1 + faceContourLeft * faceContourRight)))); } FSDK.FreeImage(imageHandle); } } System.out.format("%32s\t%8s\t%8s\t%3s%n", "Property", "", "", "c"); System.out.println(new String(new char[76]).replace("\0", "-")); ArrayList<Entry<String, ArrayList<Double>>> propertyList = new ArrayList<>( properties.entrySet()); Collections.sort(propertyList, new Comparator<Entry<String, ArrayList<Double>>>() { @Override public int compare(Entry<String, ArrayList<Double>> arg0, Entry<String, ArrayList<Double>> arg1) { DescriptiveStatistics dStats0 = new DescriptiveStatistics(listToArray(arg0.getValue())); DescriptiveStatistics dStats1 = new DescriptiveStatistics(listToArray(arg1.getValue())); return new Double(dStats0.getStandardDeviation() / dStats0.getMean()) .compareTo(dStats1.getStandardDeviation() / dStats1.getMean()); } }); for (Entry<String, ArrayList<Double>> property : propertyList) { DescriptiveStatistics dStats = new DescriptiveStatistics(listToArray(property.getValue())); System.out.format("%32s\t%4f\t%4f\t%3s%n", property.getKey(), dStats.getMean(), dStats.getStandardDeviation(), Math.round(dStats.getStandardDeviation() / dStats.getMean() * 100) + "%"); } System.out.println("\n"); faceProperties.put(directory, properties); } } for (String propertyName : propertyNames) { DefaultBoxAndWhiskerCategoryDataset dataset = new DefaultBoxAndWhiskerCategoryDataset(); for (Entry<String, Map<String, ArrayList<Double>>> face : faceProperties.entrySet()) { dataset.add(face.getValue().get(propertyName), "Default Series", face.getKey()); } PropertyBoxWhisker plot = new PropertyBoxWhisker(propertyName, dataset); plot.pack(); plot.setVisible(true); } } }
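The calls above use Math.abs to score facial symmetry: the absolute difference of two paired measurements divided by their average. A distilled sketch of just that pattern, with an illustrative helper name and no dependency on the FaceSDK types used in the example:

final class FaceSymmetry {
    /** Relative disparity of two paired measurements, e.g. left vs. right eye size. */
    static double disparity(double left, double right) {
        double average = (left + right) / 2;
        return Math.abs(left - right) / average; // 0.0 means perfectly symmetric
    }

    public static void main(String[] args) {
        System.out.println(disparity(31.0, 29.0)); // ~0.067
    }
}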
From source file:edu.upenn.egricelab.AlignerBoost.FilterSAMAlignPE.java
public static void main(String[] args) { if (args.length == 0) { printUsage();/*from w w w. j a va 2 s . co m*/ return; } try { parseOptions(args); } catch (IllegalArgumentException e) { System.err.println("Error: " + e.getMessage()); printUsage(); return; } // Read in chrList, if specified if (chrFile != null) { chrFilter = new HashSet<String>(); try { BufferedReader chrFilterIn = new BufferedReader(new FileReader(chrFile)); String chr = null; while ((chr = chrFilterIn.readLine()) != null) chrFilter.add(chr); chrFilterIn.close(); if (verbose > 0) System.err.println( "Only looking at alignments on " + chrFilter.size() + " specified chromosomes"); } catch (IOException e) { System.err.println("Error: " + e.getMessage()); return; } } if (verbose > 0) { // Start the processMonitor processMonitor = new Timer(); // Start the ProcessStatusTask statusTask = new ProcessStatusTask(); // Schedule to show the status every 1 second processMonitor.scheduleAtFixedRate(statusTask, 0, statusFreq); } // Read in known SNP file, if specified if (knownSnpFile != null) { if (verbose > 0) System.err.println("Checking known SNPs from user specified VCF file"); knownVCF = new VCFFileReader(new File(knownSnpFile)); } SamReaderFactory readerFac = SamReaderFactory.makeDefault(); SAMFileWriterFactory writerFac = new SAMFileWriterFactory(); if (!isSilent) readerFac.validationStringency(ValidationStringency.LENIENT); // use LENIENT stringency else readerFac.validationStringency(ValidationStringency.SILENT); // use SILENT stringency SamReader in = readerFac.open(new File(inFile)); SAMFileHeader inHeader = in.getFileHeader(); if (inHeader.getGroupOrder() == GroupOrder.reference && inHeader.getSortOrder() == SortOrder.coordinate) System.err.println("Warning: Input file '" + inFile + "' might be sorted by coordinate and cannot be correctly processed!"); SAMFileHeader header = inHeader.clone(); // copy the inFile header as outFile header // Add new programHeader SAMProgramRecord progRec = new SAMProgramRecord(progName); progRec.setProgramName(progName); progRec.setProgramVersion(progVer); progRec.setCommandLine(StringUtils.join(" ", args)); header.addProgramRecord(progRec); //System.err.println(inFile + " groupOrder: " + in.getFileHeader().getGroupOrder() + " sortOrder: " + in.getFileHeader().getSortOrder()); // reset the orders header.setGroupOrder(groupOrder); header.setSortOrder(sortOrder); // write SAMHeader String prevID = null; SAMRecord prevRecord = null; List<SAMRecord> alnList = new ArrayList<SAMRecord>(); List<SAMRecordPair> alnPEList = null; // Estimate fragment length distribution by scan one-pass through the alignments SAMRecordIterator results = in.iterator(); if (!NO_ESTIMATE) { if (verbose > 0) { System.err.println("Estimating insert fragment size distribution ..."); statusTask.reset(); statusTask.setInfo("alignments scanned"); } long N = 0; double fragL_S = 0; // fragLen sum double fragL_SS = 0; // fragLen^2 sum while (results.hasNext()) { SAMRecord record = results.next(); if (verbose > 0) statusTask.updateStatus(); if (record.getFirstOfPairFlag() && !record.isSecondaryOrSupplementary()) { double fragLen = Math.abs(record.getInferredInsertSize()); if (fragLen != 0 && fragLen >= MIN_FRAG_LEN && fragLen <= MAX_FRAG_LEN) { // only consider certain alignments N++; fragL_S += fragLen; fragL_SS += fragLen * fragLen; } // stop estimate if already enough if (MAX_ESTIMATE_SCAN > 0 && N >= MAX_ESTIMATE_SCAN) break; } } if (verbose > 0) statusTask.finish(); // estimate fragment size if (N >= MIN_ESTIMATE_BASE) { // 
override command line values MEAN_FRAG_LEN = fragL_S / N; SD_FRAG_LEN = Math.sqrt((N * fragL_SS - fragL_S * fragL_S) / (N * (N - 1))); String estStr = String.format("Estimated fragment size distribution: N(%.1f, %.1f)", MEAN_FRAG_LEN, SD_FRAG_LEN); if (verbose > 0) System.err.println(estStr); // also add the estimation to comment header.addComment(estStr); } else { System.err.println( "Unable to estimate the fragment size distribution due to too few observed alignments"); System.err.println( "You have to specify the '--mean-frag-len' and '--sd-frag-len' on the command line and re-run this step"); statusTask.cancel(); processMonitor.cancel(); return; } // Initiate the normal model normModel = new NormalDistribution(MEAN_FRAG_LEN, SD_FRAG_LEN); // reset the iterator, if necessary if (in.type() == SamReader.Type.SAM_TYPE) { try { in.close(); } catch (IOException e) { System.err.println(e.getMessage()); } in = readerFac.open(new File(inFile)); } results.close(); results = in.iterator(); } // end of NO_ESTIMATE SAMFileWriter out = OUT_IS_SAM ? writerFac.makeSAMWriter(header, false, new File(outFile)) : writerFac.makeBAMWriter(header, false, new File(outFile)); // check each alignment again if (verbose > 0) { System.err.println("Filtering alignments ..."); statusTask.reset(); statusTask.setInfo("alignments processed"); } while (results.hasNext()) { SAMRecord record = results.next(); if (verbose > 0) statusTask.updateStatus(); String ID = record.getReadName(); // fix read and quality string for this read, if is a secondary hit from multiple hits, used for BWA alignment if (ID.equals(prevID) && record.getReadLength() == 0) SAMAlignFixer.fixSAMRecordRead(record, prevRecord); if (chrFilter != null && !chrFilter.contains(record.getReferenceName())) { prevID = ID; prevRecord = record; continue; } // fix MD:Z string for certain aligners with invalid format (i.e. 
seqAlto) if (fixMD) SAMAlignFixer.fixMisStr(record); // fix alignment, ignore if failed (unmapped or empty) if (!SAMAlignFixer.fixSAMRecord(record, knownVCF, DO_1DP)) { prevID = ID; prevRecord = record; continue; } if (!record.getReadPairedFlag()) { System.err.println("Error: alignment is not from a paired-end read at\n" + record.getSAMString()); out.close(); statusTask.cancel(); processMonitor.cancel(); return; } if (!ID.equals(prevID) && prevID != null || !results.hasNext()) { // a non-first new ID meet, or end of alignments // create alnPEList from filtered alnList alnPEList = createAlnPEListFromAlnList(alnList); //System.err.printf("%d alignments for %s transformed to %d alnPairs%n", alnList.size(), prevID, alnPEList.size()); int totalPair = alnPEList.size(); // filter highly unlikely PEhits filterPEHits(alnPEList, MIN_ALIGN_RATE, MIN_IDENTITY); // calculate posterior mapQ for each pair calcPEHitPostP(alnPEList, totalPair, MAX_HIT); // filter hits by mapQ if (MIN_MAPQ > 0) filterPEHits(alnPEList, MIN_MAPQ); // sort the list first with an anonymous class of comparator, with DESCREASING order Collections.sort(alnPEList, Collections.reverseOrder()); // control max-best if (MAX_BEST != 0 && alnPEList.size() > MAX_BEST) { // potential too much best hits int nBestStratum = 0; int bestMapQ = alnPEList.get(0).getPEMapQ(); // best mapQ from first PE for (SAMRecordPair pr : alnPEList) if (pr.getPEMapQ() == bestMapQ) nBestStratum++; else break; // stop searching for sorted list if (nBestStratum > MAX_BEST) alnPEList.clear(); } // filter alignments with auxiliary filters if (!MAX_SENSITIVITY) filterPEHits(alnPEList, MAX_SEED_MIS, MAX_SEED_INDEL, MAX_ALL_MIS, MAX_ALL_INDEL); // report remaining secondary alignments, up-to MAX_REPORT for (int i = 0; i < alnPEList.size() && (MAX_REPORT == 0 || i < MAX_REPORT); i++) { SAMRecordPair repPair = alnPEList.get(i); if (doUpdateBit) repPair.setNotPrimaryAlignmentFlags(i != 0); int nReport = MAX_REPORT == 0 ? Math.min(alnPEList.size(), MAX_REPORT) : alnPEList.size(); int nFiltered = alnPEList.size(); if (repPair.fwdRecord != null) { repPair.fwdRecord.setAttribute("NH", nReport); repPair.fwdRecord.setAttribute("XN", nFiltered); out.addAlignment(repPair.fwdRecord); } if (repPair.revRecord != null) { repPair.revRecord.setAttribute("NH", nReport); repPair.revRecord.setAttribute("XN", nFiltered); out.addAlignment(repPair.revRecord); } } // reset list alnList.clear(); alnPEList.clear(); } // update if (!ID.equals(prevID)) { prevID = ID; prevRecord = record; } alnList.add(record); } // end while try { in.close(); out.close(); } catch (IOException e) { System.err.println(e.getMessage()); } // Terminate the monitor task and monitor if (verbose > 0) { statusTask.cancel(); statusTask.finish(); processMonitor.cancel(); } }
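The filter above wraps record.getInferredInsertSize() in Math.abs because the SAM template-length field is signed (negative for the rightmost read of a pair), then accumulates a sum and a sum of squares to estimate the fragment-size distribution. A distilled sketch of that estimate, with plain numbers standing in for the htsjdk records:

final class FragmentSizeEstimator {
    public static void main(String[] args) {
        // Signed template lengths as a SAM TLEN field would report them;
        // these stand in for record.getInferredInsertSize().
        long[] tlens = {312, -298, 305, -321, 290};

        long n = 0;
        double sum = 0, sumSq = 0;
        for (long tlen : tlens) {
            double fragLen = Math.abs(tlen); // orientation does not matter for the length
            n++;
            sum += fragLen;
            sumSq += fragLen * fragLen;
        }
        double mean = sum / n;
        double sd = Math.sqrt((n * sumSq - sum * sum) / (n * (n - 1.0)));
        System.out.printf("Estimated fragment size: N(%.1f, %.1f)%n", mean, sd);
    }
}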
From source file:com.heliosapm.tsdblite.metric.Trace.java
@SuppressWarnings("javadoc")
public static void main(String[] args) {
    log("Trace Test");
    Map<String, String> tags = new HashMap<String, String>(4);
    tags.put("host", "localhost");
    tags.put("app", "test");
    tags.put("cpu", "" + 1);
    tags.put("type", "combined");
    final Trace trace = new Trace("sys.cpu", tags, false, 34, -1, System.currentTimeMillis());
    log("toString: " + trace);
    String json = JSON.serializeToString(trace);
    log("JSON: " + json);
    final Trace t = JSON.parseToObject(json, Trace.class);
    log("fromJson: " + t);
    log("=====================================");
    final Trace[] traces = new Trace[Constants.CORES];
    final Random r = new Random(System.currentTimeMillis());
    for (int i = 0; i < Constants.CORES; i++) {
        tags = new HashMap<String, String>(4);
        tags.put("host", "localhost");
        tags.put("app", "test");
        tags.put("cpu", "" + i);
        tags.put("type", "combined");
        traces[i] = new Trace("sys.cpu", tags, false, Math.abs(r.nextInt(100)), -1, System.currentTimeMillis());
        log("toString:" + traces[i]);
    }
    json = JSON.serializeToString(traces);
    log("JSON: " + json);
    Trace[] ts = JSON.parseToObject(json, Trace[].class);
    for (Trace x : ts) {
        log("fromJson: " + x);
    }
}
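A side note on Math.abs(r.nextInt(100)) above: Random.nextInt(bound) already returns a value in [0, bound), so the abs call is redundant there. The pattern is only risky with the no-argument nextInt(), where Math.abs can still return a negative number. A small standalone illustration (not from the Trace class):

import java.util.Random;

final class AbsRandomPitfall {
    public static void main(String[] args) {
        Random r = new Random();
        int bounded = r.nextInt(100);               // always 0..99, abs() not needed
        int unbounded = Math.abs(r.nextInt());      // still negative if nextInt() == Integer.MIN_VALUE
        int safe = r.nextInt() & Integer.MAX_VALUE; // one common way to force a non-negative int
        System.out.println(bounded + " " + unbounded + " " + safe);
    }
}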
From source file:ch.epfl.lsir.xin.test.ItemBasedCFTest.java
/** * @param args/*from w w w. ja va 2 s . c o m*/ */ public static void main(String[] args) throws Exception { // TODO Auto-generated method stub PrintWriter logger = new PrintWriter(".//results//ItemBasedCF"); PropertiesConfiguration config = new PropertiesConfiguration(); config.setFile(new File(".//conf//ItemBasedCF.properties")); try { config.load(); } catch (ConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data..."); DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt"); loader.readSimple(); DataSetNumeric dataset = loader.getDataset(); System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: " + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size()); logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: " + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size()); logger.flush(); double totalMAE = 0; double totalRMSE = 0; double totalPrecision = 0; double totalRecall = 0; double totalMAP = 0; double totalNDCG = 0; double totalMRR = 0; double totalAUC = 0; int F = 5; logger.println(F + "- folder cross validation."); ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>(); for (int i = 0; i < F; i++) { folders.add(new ArrayList<NumericRating>()); } while (dataset.getRatings().size() > 0) { int index = new Random().nextInt(dataset.getRatings().size()); int r = new Random().nextInt(F); folders.get(r).add(dataset.getRatings().get(index)); dataset.getRatings().remove(index); } for (int folder = 1; folder <= F; folder++) { logger.println("Folder: " + folder); System.out.println("Folder: " + folder); ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>(); ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>(); for (int i = 0; i < folders.size(); i++) { if (i == folder - 1)//test data { testRatings.addAll(folders.get(i)); } else {//training data trainRatings.addAll(folders.get(i)); } } //create rating matrix HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>(); HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>(); for (int i = 0; i < dataset.getUserIDs().size(); i++) { userIDIndexMapping.put(dataset.getUserIDs().get(i), i); } for (int i = 0; i < dataset.getItemIDs().size(); i++) { itemIDIndexMapping.put(dataset.getItemIDs().get(i), i); } RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < trainRatings.size(); i++) { trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()), itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue()); } trainRatingMatrix.calculateGlobalAverage(); trainRatingMatrix.calculateItemsMean(); RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < testRatings.size(); i++) { testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()), itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue()); } System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: " + testRatingMatrix.getTotalRatingNumber()); logger.println("Initialize a item based collaborative filtering recommendation model."); ItemBasedCF algo = new 
ItemBasedCF(trainRatingMatrix); algo.setLogger(logger); algo.build();//if read local model, no need to build the model algo.saveModel(".//localModels//" + config.getString("NAME")); logger.println("Save the model."); logger.flush(); //rating prediction accuracy double RMSE = 0; double MAE = 0; double precision = 0; double recall = 0; double map = 0; double ndcg = 0; double mrr = 0; double auc = 0; int count = 0; for (int i = 0; i < testRatings.size(); i++) { NumericRating rating = testRatings.get(i); double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()), itemIDIndexMapping.get(rating.getItemID()), false); if (prediction > algo.getMaxRating()) prediction = algo.getMaxRating(); if (prediction < algo.getMinRating()) prediction = algo.getMinRating(); if (Double.isNaN(prediction)) { System.out.println("no prediction"); continue; } MAE = MAE + Math.abs(rating.getValue() - prediction); RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2); count++; } MAE = MAE / count; RMSE = Math.sqrt(RMSE / count); totalMAE = totalMAE + MAE; totalRMSE = totalRMSE + RMSE; System.out.println("Folder --- MAE: " + MAE + " RMSE: " + RMSE); logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Folder --- MAE: " + MAE + " RMSE: " + RMSE); //ranking accuracy if (algo.getTopN() > 0) { HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>(); for (int i = 0; i < trainRatingMatrix.getRow(); i++) { // ArrayList<ResultUnit> rec = algo.getRecommendationList(i); // results.put(i, rec); ArrayList<ResultUnit> rec = algo.getRecommendationList(i); if (rec == null) continue; int total = testRatingMatrix.getUserRatingNumber(i); if (total == 0)//this user is ignored continue; results.put(i, rec); } RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix, trainRatingMatrix); precision = generator.getPrecisionN(); totalPrecision = totalPrecision + precision; recall = generator.getRecallN(); totalRecall = totalRecall + recall; map = generator.getMAPN(); totalMAP = totalMAP + map; ndcg = generator.getNDCGN(); totalNDCG = totalNDCG + ndcg; mrr = generator.getMRRN(); totalMRR = totalMRR + mrr; auc = generator.getAUC(); totalAUC = totalAUC + auc; System.out.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc); logger.append("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc + "\n"); } } System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F); System.out.println("Precision@N: " + totalPrecision / F); System.out.println("Recall@N: " + totalRecall / F); System.out.println("MAP@N: " + totalMAP / F); System.out.println("MRR@N: " + totalMRR / F); System.out.println("NDCG@N: " + totalNDCG / F); System.out.println("AUC@N: " + totalAUC / F); System.out.println("similarity: " + config.getString("SIMILARITY")); //MAE: 0.7227232762922241 RMSE: 0.9225576790122603 (MovieLens 100K, shrinkage 2500, neighbor size 40, PCC) //MAE: 0.7250636319353241 RMSE: 0.9242305485411567 (MovieLens 100K, shrinkage 25, neighbor size 40, PCC) //MAE: 0.7477213243604459 RMSE: 0.9512195004171138 (MovieLens 100K, shrinkage 2500, neighbor size 40, COSINE) logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + totalPrecision / F + "\n" + "Recall@N: " + 
totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: " + totalMRR / F + "\n" + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F); logger.flush(); logger.close(); }
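The MAE/RMSE accumulation above reappears unchanged in the SVD++, biased MF, MF and user-based CF tests that follow, so here is its core once, as a standalone sketch with hard-coded (actual, predicted) pairs standing in for algo.predict():

final class RatingErrorMetrics {
    public static void main(String[] args) {
        // (actual rating, clamped prediction) pairs for one test fold
        double[][] pairs = { {4, 3.6}, {2, 2.9}, {5, 4.4}, {3, 3.1} };

        double mae = 0, rmse = 0;
        int count = 0;
        for (double[] p : pairs) {
            double error = p[0] - p[1];
            mae += Math.abs(error); // L1 error
            rmse += error * error;  // squared error; written as Math.pow(error, 2) in the tests above
            count++;
        }
        mae /= count;
        rmse = Math.sqrt(rmse / count);
        System.out.println("MAE: " + mae + " RMSE: " + rmse);
    }
}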
From source file:ch.epfl.lsir.xin.test.SVDPPTest.java
/** * @param args//from w w w. j av a2s. c o m */ public static void main(String[] args) throws Exception { // TODO Auto-generated method stub PrintWriter logger = new PrintWriter(".//results//SVDPP"); PropertiesConfiguration config = new PropertiesConfiguration(); config.setFile(new File("conf//SVDPlusPlus.properties")); try { config.load(); } catch (ConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data..."); logger.flush(); DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt"); loader.readSimple(); DataSetNumeric dataset = loader.getDataset(); System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: " + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size()); logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: " + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size()); logger.flush(); double totalMAE = 0; double totalRMSE = 0; double totalPrecision = 0; double totalRecall = 0; double totalMAP = 0; double totalNDCG = 0; double totalMRR = 0; double totalAUC = 0; int F = 5; logger.println(F + "- folder cross validation."); logger.flush(); ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>(); for (int i = 0; i < F; i++) { folders.add(new ArrayList<NumericRating>()); } while (dataset.getRatings().size() > 0) { int index = new Random().nextInt(dataset.getRatings().size()); int r = new Random().nextInt(F); folders.get(r).add(dataset.getRatings().get(index)); dataset.getRatings().remove(index); } for (int folder = 1; folder <= F; folder++) { System.out.println("Folder: " + folder); logger.println("Folder: " + folder); logger.flush(); ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>(); ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>(); for (int i = 0; i < folders.size(); i++) { if (i == folder - 1)//test data { testRatings.addAll(folders.get(i)); } else {//training data trainRatings.addAll(folders.get(i)); } } //create rating matrix HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>(); HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>(); for (int i = 0; i < dataset.getUserIDs().size(); i++) { userIDIndexMapping.put(dataset.getUserIDs().get(i), i); } for (int i = 0; i < dataset.getItemIDs().size(); i++) { itemIDIndexMapping.put(dataset.getItemIDs().get(i), i); } RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < trainRatings.size(); i++) { trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()), itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue()); } RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < testRatings.size(); i++) { if (testRatings.get(i).getValue() < 5) continue; testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()), itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue()); } System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: " + testRatingMatrix.getTotalRatingNumber()); logger.println("Initialize a SVD++ recommendation model."); logger.flush(); SVDPlusPlus algo = new SVDPlusPlus(trainRatingMatrix, 
false, ".//localModels//" + config.getString("NAME")); algo.setLogger(logger); algo.build(); algo.saveModel(".//localModels//" + config.getString("NAME")); logger.println("Save the model."); logger.flush(); //rating prediction accuracy double RMSE = 0; double MAE = 0; double precision = 0; double recall = 0; double map = 0; double ndcg = 0; double mrr = 0; double auc = 0; int count = 0; for (int i = 0; i < testRatings.size(); i++) { NumericRating rating = testRatings.get(i); double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()), itemIDIndexMapping.get(rating.getItemID()), false); if (prediction > algo.getMaxRating()) prediction = algo.getMaxRating(); if (prediction < algo.getMinRating()) prediction = algo.getMinRating(); if (Double.isNaN(prediction)) { System.out.println("no prediction"); continue; } MAE = MAE + Math.abs(rating.getValue() - prediction); RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2); count++; } MAE = MAE / count; RMSE = Math.sqrt(RMSE / count); totalMAE = totalMAE + MAE; totalRMSE = totalRMSE + RMSE; System.out.println("Folder --- MAE: " + MAE + " RMSE: " + RMSE); logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Folder --- MAE: " + MAE + " RMSE: " + RMSE); //ranking accuracy if (algo.getTopN() > 0) { HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>(); for (int i = 0; i < trainRatingMatrix.getRow(); i++) { ArrayList<ResultUnit> rec = algo.getRecommendationList(i); if (rec == null) continue; int total = testRatingMatrix.getUserRatingNumber(i); if (total == 0)//this user is ignored continue; results.put(i, rec); } RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix); precision = generator.getPrecisionN(); totalPrecision = totalPrecision + precision; recall = generator.getRecallN(); totalRecall = totalRecall + recall; map = generator.getMAPN(); totalMAP = totalMAP + map; ndcg = generator.getNDCGN(); totalNDCG = totalNDCG + ndcg; mrr = generator.getMRRN(); totalMRR = totalMRR + mrr; auc = generator.getAUC(); totalAUC = totalAUC + auc; System.out.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc); logger.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc); } logger.flush(); } System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F); System.out.println("Precision@N: " + totalPrecision / F); System.out.println("Recall@N: " + totalRecall / F); System.out.println("MAP@N: " + totalMAP / F); System.out.println("MRR@N: " + totalMRR / F); System.out.println("NDCG@N: " + totalNDCG / F); System.out.println("AUC@N: " + totalAUC / F); logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + totalPrecision / F + "\n" + "Recall@N: " + totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: " + totalMRR / F + "\n" + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F); logger.flush(); logger.close(); }
From source file:ch.epfl.lsir.xin.test.BiasedMFTest.java
/** * @param args/* w w w . java 2 s . c om*/ */ public static void main(String[] args) throws Exception { // TODO Auto-generated method stub PrintWriter logger = new PrintWriter(".//results//BiasedMF"); PropertiesConfiguration config = new PropertiesConfiguration(); config.setFile(new File("conf//biasedMF.properties")); try { config.load(); } catch (ConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data..."); logger.flush(); DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt"); loader.readSimple(); DataSetNumeric dataset = loader.getDataset(); System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: " + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size()); logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: " + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size()); logger.flush(); double totalMAE = 0; double totalRMSE = 0; double totalPrecision = 0; double totalRecall = 0; double totalMAP = 0; double totalNDCG = 0; double totalMRR = 0; double totalAUC = 0; int F = 5; logger.println(F + "- folder cross validation."); logger.flush(); ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>(); for (int i = 0; i < F; i++) { folders.add(new ArrayList<NumericRating>()); } while (dataset.getRatings().size() > 0) { int index = new Random().nextInt(dataset.getRatings().size()); int r = new Random().nextInt(F); folders.get(r).add(dataset.getRatings().get(index)); dataset.getRatings().remove(index); } for (int folder = 1; folder <= F; folder++) { System.out.println("Folder: " + folder); logger.println("Folder: " + folder); logger.flush(); ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>(); ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>(); for (int i = 0; i < folders.size(); i++) { if (i == folder - 1)//test data { testRatings.addAll(folders.get(i)); } else {//training data trainRatings.addAll(folders.get(i)); } } //create rating matrix HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>(); HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>(); for (int i = 0; i < dataset.getUserIDs().size(); i++) { userIDIndexMapping.put(dataset.getUserIDs().get(i), i); } for (int i = 0; i < dataset.getItemIDs().size(); i++) { itemIDIndexMapping.put(dataset.getItemIDs().get(i), i); } RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < trainRatings.size(); i++) { trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()), itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue()); } RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < testRatings.size(); i++) { // if( testRatings.get(i).getValue() < 5 ) // continue; testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()), itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue()); } System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: " + testRatingMatrix.getTotalRatingNumber()); logger.println("Initialize a biased matrix factorization recommendation model."); logger.flush(); BiasedMF algo = new 
BiasedMF(trainRatingMatrix, false, ".//localModels//" + config.getString("NAME")); algo.setLogger(logger); algo.build(); algo.saveModel(".//localModels//" + config.getString("NAME")); logger.println("Save the model."); logger.flush(); //rating prediction accuracy double RMSE = 0; double MAE = 0; double precision = 0; double recall = 0; double map = 0; double ndcg = 0; double mrr = 0; double auc = 0; int count = 0; for (int i = 0; i < testRatings.size(); i++) { NumericRating rating = testRatings.get(i); double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()), itemIDIndexMapping.get(rating.getItemID()), false); if (prediction > algo.getMaxRating()) prediction = algo.getMaxRating(); if (prediction < algo.getMinRating()) prediction = algo.getMinRating(); if (Double.isNaN(prediction)) { System.out.println("no prediction"); continue; } MAE = MAE + Math.abs(rating.getValue() - prediction); RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2); count++; } MAE = MAE / count; RMSE = Math.sqrt(RMSE / count); totalMAE = totalMAE + MAE; totalRMSE = totalRMSE + RMSE; System.out.println("Folder --- MAE: " + MAE + " RMSE: " + RMSE); logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Folder --- MAE: " + MAE + " RMSE: " + RMSE); //ranking accuracy if (algo.getTopN() > 0) { HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>(); for (int i = 0; i < trainRatingMatrix.getRow(); i++) { ArrayList<ResultUnit> rec = algo.getRecommendationList(i); if (rec == null) continue; int total = testRatingMatrix.getUserRatingNumber(i); if (total == 0)//this user is ignored continue; results.put(i, rec); } RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix, trainRatingMatrix); precision = generator.getPrecisionN(); totalPrecision = totalPrecision + precision; recall = generator.getRecallN(); totalRecall = totalRecall + recall; map = generator.getMAPN(); totalMAP = totalMAP + map; ndcg = generator.getNDCGN(); totalNDCG = totalNDCG + ndcg; mrr = generator.getMRRN(); totalMRR = totalMRR + mrr; auc = generator.getAUC(); totalAUC = totalAUC + auc; System.out.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc); logger.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc); } logger.flush(); } System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F); System.out.println("Precision@N: " + totalPrecision / F); System.out.println("Recall@N: " + totalRecall / F); System.out.println("MAP@N: " + totalMAP / F); System.out.println("MRR@N: " + totalMRR / F); System.out.println("NDCG@N: " + totalNDCG / F); System.out.println("AUC@N: " + totalAUC / F); logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + totalPrecision / F + "\n" + "Recall@N: " + totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: " + totalMRR / F + "\n" + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F); logger.flush(); logger.close(); }
From source file:ch.epfl.lsir.xin.test.MFTest.java
/** * @param args//from ww w . j a v a2 s. c o m */ public static void main(String[] args) throws Exception { // TODO Auto-generated method stub PrintWriter logger = new PrintWriter(".//results//MF"); PropertiesConfiguration config = new PropertiesConfiguration(); config.setFile(new File("conf//MF.properties")); try { config.load(); } catch (ConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data..."); logger.flush(); DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt"); loader.readSimple(); DataSetNumeric dataset = loader.getDataset(); System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: " + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size()); logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: " + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size()); logger.flush(); double totalMAE = 0; double totalRMSE = 0; double totalPrecision = 0; double totalRecall = 0; double totalMAP = 0; double totalNDCG = 0; double totalMRR = 0; double totalAUC = 0; int F = 5; logger.println(F + "- folder cross validation."); logger.flush(); ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>(); for (int i = 0; i < F; i++) { folders.add(new ArrayList<NumericRating>()); } while (dataset.getRatings().size() > 0) { int index = new Random().nextInt(dataset.getRatings().size()); int r = new Random().nextInt(F); folders.get(r).add(dataset.getRatings().get(index)); dataset.getRatings().remove(index); } for (int folder = 1; folder <= F; folder++) { System.out.println("Folder: " + folder); logger.println("Folder: " + folder); logger.flush(); ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>(); ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>(); for (int i = 0; i < folders.size(); i++) { if (i == folder - 1)//test data { testRatings.addAll(folders.get(i)); } else {//training data trainRatings.addAll(folders.get(i)); } } //create rating matrix HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>(); HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>(); for (int i = 0; i < dataset.getUserIDs().size(); i++) { userIDIndexMapping.put(dataset.getUserIDs().get(i), i); } for (int i = 0; i < dataset.getItemIDs().size(); i++) { itemIDIndexMapping.put(dataset.getItemIDs().get(i), i); } RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < trainRatings.size(); i++) { trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()), itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue()); } RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < testRatings.size(); i++) { // if( testRatings.get(i).getValue() < 5 ) // continue; testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()), itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue()); } System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: " + testRatingMatrix.getTotalRatingNumber()); logger.println("Initialize a matrix factorization based recommendation model."); logger.flush(); MatrixFactorization algo = new 
MatrixFactorization(trainRatingMatrix, false, ".//localModels//" + config.getString("NAME")); algo.setLogger(logger); algo.build(); algo.saveModel(".//localModels//" + config.getString("NAME")); logger.println("Save the model."); logger.flush(); //rating prediction accuracy double RMSE = 0; double MAE = 0; double precision = 0; double recall = 0; double map = 0; double ndcg = 0; double mrr = 0; double auc = 0; int count = 0; for (int i = 0; i < testRatings.size(); i++) { NumericRating rating = testRatings.get(i); double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()), itemIDIndexMapping.get(rating.getItemID()), false); if (prediction > algo.getMaxRating()) prediction = algo.getMaxRating(); if (prediction < algo.getMinRating()) prediction = algo.getMinRating(); if (Double.isNaN(prediction)) { System.out.println("no prediction"); continue; } MAE = MAE + Math.abs(rating.getValue() - prediction); RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2); count++; } MAE = MAE / count; RMSE = Math.sqrt(RMSE / count); totalMAE = totalMAE + MAE; totalRMSE = totalRMSE + RMSE; System.out.println("Folder --- MAE: " + MAE + " RMSE: " + RMSE); logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Folder --- MAE: " + MAE + " RMSE: " + RMSE); //ranking accuracy if (algo.getTopN() > 0) { HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>(); for (int i = 0; i < trainRatingMatrix.getRow(); i++) { ArrayList<ResultUnit> rec = algo.getRecommendationList(i); if (rec == null) continue; int total = testRatingMatrix.getUserRatingNumber(i); if (total == 0)//this user is ignored continue; results.put(i, rec); // for( Map.Entry<Integer, Double> entry : testRatingMatrix.getRatingMatrix().get(i).entrySet() ) // { // System.out.print( entry.getKey() + "(" + entry.getValue() + ") , "); // } // System.out.println(); // for( int j = 0 ; j < rec.size() ; j++ ) // { // System.out.print(rec.get(j).getItemIndex() + "(" + rec.get(j).getPrediciton() + // ") , "); // } // System.out.println("**********"); } RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix, trainRatingMatrix); precision = generator.getPrecisionN(); totalPrecision = totalPrecision + precision; recall = generator.getRecallN(); totalRecall = totalRecall + recall; map = generator.getMAPN(); totalMAP = totalMAP + map; ndcg = generator.getNDCGN(); totalNDCG = totalNDCG + ndcg; mrr = generator.getMRRN(); totalMRR = totalMRR + mrr; auc = generator.getAUC(); totalAUC = totalAUC + auc; System.out.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc); logger.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc); } logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE + " RMSE: " + RMSE); logger.flush(); } System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F); System.out.println("Precision@N: " + totalPrecision / F); System.out.println("Recall@N: " + totalRecall / F); System.out.println("MAP@N: " + totalMAP / F); System.out.println("MRR@N: " + totalMRR / F); System.out.println("NDCG@N: " + totalNDCG / F); System.out.println("AUC@N: " + totalAUC / F); logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + 
totalPrecision / F + "\n" + "Recall@N: " + totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: " + totalMRR / F + "\n" + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F); logger.flush(); logger.close(); }
From source file:ch.epfl.lsir.xin.test.UserBasedCFTest.java
/** * @param args/*ww w . j ava2 s.c om*/ */ public static void main(String[] args) throws Exception { // TODO Auto-generated method stub PrintWriter logger = new PrintWriter(".//results//UserBasedCF"); PropertiesConfiguration config = new PropertiesConfiguration(); config.setFile(new File(".//conf//UserBasedCF.properties")); try { config.load(); } catch (ConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data..."); DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt"); loader.readSimple(); DataSetNumeric dataset = loader.getDataset(); System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: " + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size()); logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: " + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size()); logger.flush(); double totalMAE = 0; double totalRMSE = 0; double totalPrecision = 0; double totalRecall = 0; double totalMAP = 0; double totalNDCG = 0; double totalMRR = 0; double totalAUC = 0; int F = 5; logger.println(F + "- folder cross validation."); ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>(); for (int i = 0; i < F; i++) { folders.add(new ArrayList<NumericRating>()); } while (dataset.getRatings().size() > 0) { int index = new Random().nextInt(dataset.getRatings().size()); int r = new Random().nextInt(F); folders.get(r).add(dataset.getRatings().get(index)); dataset.getRatings().remove(index); } for (int folder = 1; folder <= F; folder++) { logger.println("Folder: " + folder); System.out.println("Folder: " + folder); ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>(); ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>(); for (int i = 0; i < folders.size(); i++) { if (i == folder - 1)//test data { testRatings.addAll(folders.get(i)); } else {//training data trainRatings.addAll(folders.get(i)); } } //create rating matrix HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>(); HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>(); for (int i = 0; i < dataset.getUserIDs().size(); i++) { userIDIndexMapping.put(dataset.getUserIDs().get(i), i); } for (int i = 0; i < dataset.getItemIDs().size(); i++) { itemIDIndexMapping.put(dataset.getItemIDs().get(i), i); } RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < trainRatings.size(); i++) { trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()), itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue()); } trainRatingMatrix.calculateGlobalAverage(); trainRatingMatrix.calculateUsersMean(); RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < testRatings.size(); i++) { // if( testRatings.get(i).getValue() < 5 ) // continue; testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()), itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue()); } logger.println("Initialize a user based collaborative filtering recommendation model."); UserBasedCF algo = new UserBasedCF(trainRatingMatrix, false, ".//localModels//" + config.getString("NAME")); 
algo.setLogger(logger); algo.build();//if read local model, no need to build the model algo.saveModel(".//localModels//" + config.getString("NAME")); logger.println("Save the model."); System.out.println(trainRatings.size() + " vs. " + testRatings.size()); logger.flush(); //rating prediction accuracy double RMSE = 0; double MAE = 0; double precision = 0; double recall = 0; double map = 0; double ndcg = 0; double mrr = 0; double auc = 0; int count = 0; for (int i = 0; i < testRatings.size(); i++) { NumericRating rating = testRatings.get(i); double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()), itemIDIndexMapping.get(rating.getItemID()), false); if (Double.isNaN(prediction)) { System.out.println("no prediction"); continue; } if (prediction > algo.getMaxRating()) prediction = algo.getMaxRating(); if (prediction < algo.getMinRating()) prediction = algo.getMinRating(); MAE = MAE + Math.abs(rating.getValue() - prediction); RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2); count++; } MAE = MAE / count; RMSE = Math.sqrt(RMSE / count); totalMAE = totalMAE + MAE; totalRMSE = totalRMSE + RMSE; System.out.println("Folder --- MAE: " + MAE + " RMSE: " + RMSE); logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Folder --- MAE: " + MAE + " RMSE: " + RMSE); logger.flush(); //ranking accuracy if (algo.getTopN() > 0) { HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>(); for (int i = 0; i < testRatingMatrix.getRow(); i++) { ArrayList<ResultUnit> rec = algo.getRecommendationList(i); if (rec == null) continue; int total = testRatingMatrix.getUserRatingNumber(i); if (total == 0)//this user is ignored continue; results.put(i, rec); // for( Map.Entry<Integer, Double> entry : testRatingMatrix.getRatingMatrix().get(i).entrySet() ) // { // System.out.print( entry.getKey() + "(" + entry.getValue() + ") , "); // } // System.out.println(); // for( int j = 0 ; j < rec.size() ; j++ ) // { // System.out.print(rec.get(j).getItemIndex() + "(" + rec.get(j).getPrediciton() + // ") , "); // } // System.out.println("**********"); } RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix, trainRatingMatrix); precision = generator.getPrecisionN(); totalPrecision = totalPrecision + precision; recall = generator.getRecallN(); totalRecall = totalRecall + recall; map = generator.getMAPN(); totalMAP = totalMAP + map; ndcg = generator.getNDCGN(); totalNDCG = totalNDCG + ndcg; mrr = generator.getMRRN(); totalMRR = totalMRR + mrr; auc = generator.getAUC(); totalAUC = totalAUC + auc; System.out.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc); logger.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc); } } System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F); System.out.println("Precision@N: " + totalPrecision / F); System.out.println("Recall@N: " + totalRecall / F); System.out.println("MAP@N: " + totalMAP / F); System.out.println("MRR@N: " + totalMRR / F); System.out.println("NDCG@N: " + totalNDCG / F); System.out.println("AUC@N: " + totalAUC / F); // MovieLens100k //MAE: 0.7343907480119425 RMSE: 0.9405808357192891 (MovieLens 100K, shrinkage 25, neighbor size 60, PCC) //MAE: 0.7522376630596646 RMSE: 0.9520931265724659 (MovieLens 100K, no shrinkage , neighbor size 40, COSINE) logger.println(new 
SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + totalPrecision / F + "\n" + "Recall@N: " + totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: " + totalMRR / F + "\n" + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F); logger.flush(); logger.close(); }
From source file:eu.crisis_economics.abm.model.configuration.ExponentialDistributionModelParameterConfiguration.java
/**
 * A lightweight test for this {@link ConfigurationComponent}. This snippet
 * creates a {@link Parameter}{@code <Double>} using an instance of
 * {@link ExponentialDistributionModelParameterConfiguration} and then tests
 * whether {@link Double} values drawn from this {@link Parameter} are
 * strictly positive and have the expected mean.
 */
public static void main(String[] args) {
    final ExponentialDistributionModelParameterConfiguration configuration =
        new ExponentialDistributionModelParameterConfiguration();
    final ModelParameter<Double> distribution = configuration.createInjector()
        .getInstance(Key.get(new TypeLiteral<ModelParameter<Double>>() { }));
    double mean = 0.;
    final int numSamples = 10000000;
    for (int i = 0; i < numSamples; ++i) {
        final double value = distribution.get();
        mean += value;
        Assert.assertTrue(value >= 0.);
    }
    mean /= numSamples;
    Assert.assertTrue(Math.abs(mean - 1.) < 1.e-3);
    System.out.println(mean);
}
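The final assertion above is the usual absolute-tolerance comparison of two doubles. A generic standalone form of it (names are illustrative, not part of the project above):

final class Tolerance {
    /** True when two doubles agree to within an absolute tolerance eps. */
    static boolean approxEquals(double a, double b, double eps) {
        return Math.abs(a - b) < eps;
    }

    public static void main(String[] args) {
        System.out.println(approxEquals(0.9997, 1.0, 1e-3)); // true
        System.out.println(approxEquals(1.01, 1.0, 1e-3));   // false
    }
}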
From source file:graticules2wld.Main.java
/** * @param args/*ww w . ja v a2s . c om*/ * @throws Exception */ public static void main(String[] args) throws Exception { /* parse the command line arguments */ // create the command line parser CommandLineParser parser = new PosixParser(); // create the Options Options options = new Options(); options.addOption("x", "originx", true, "x component of projected coordinates of upper left pixel"); options.addOption("y", "originy", true, "y component of projected coordinates of upper left pixel"); options.addOption("u", "tometers", true, "multiplication factor to get source units into meters"); options.addOption("h", "help", false, "prints this usage page"); options.addOption("d", "debug", false, "prints debugging information to stdout"); double originNorthing = 0; double originEasting = 0; String inputFileName = null; String outputFileName = null; try { // parse the command line arguments CommandLine line = parser.parse(options, args); if (line.hasOption("help")) printUsage(0); // print usage then exit using a non error exit status if (line.hasOption("debug")) debug = true; // these arguments are required if (!line.hasOption("originy") || !line.hasOption("originx")) printUsage(1); originNorthing = Double.parseDouble(line.getOptionValue("originy")); originEasting = Double.parseDouble(line.getOptionValue("originx")); if (line.hasOption("tometers")) unitsToMeters = Double.parseDouble(line.getOptionValue("tometers")); // two args should be left. the input csv file name and the output wld file name. String[] iofiles = line.getArgs(); if (iofiles.length < 2) { printUsage(1); } inputFileName = iofiles[0]; outputFileName = iofiles[1]; } catch (ParseException exp) { System.err.println("Unexpected exception:" + exp.getMessage()); System.exit(1); } // try to open the input file for reading and the output file for writing File graticulesCsvFile; BufferedReader csvReader = null; File wldFile; BufferedWriter wldWriter = null; try { graticulesCsvFile = new File(inputFileName); csvReader = new BufferedReader(new FileReader(graticulesCsvFile)); } catch (IOException exp) { System.err.println("Could not open input file for reading: " + inputFileName); System.exit(1); } try { wldFile = new File(outputFileName); wldWriter = new BufferedWriter(new FileWriter(wldFile)); } catch (IOException exp) { System.err.println("Could not open output file for writing: " + outputFileName); System.exit(1); } // list of lon graticules and lat graticules ArrayList<Graticule> lonGrats = new ArrayList<Graticule>(); ArrayList<Graticule> latGrats = new ArrayList<Graticule>(); // read the source CSV and convert its information into the two ArrayList<Graticule> data structures readCSV(csvReader, lonGrats, latGrats); // we now need to start finding the world file paramaters DescriptiveStatistics stats = new DescriptiveStatistics(); // find theta and phi for (Graticule g : latGrats) { stats.addValue(g.angle()); } double theta = stats.getMean(); // we use the mean of the lat angles as theta if (debug) System.out.println("theta range = " + Math.toDegrees(stats.getMax() - stats.getMin())); stats.clear(); for (Graticule g : lonGrats) { stats.addValue(g.angle()); } double phi = stats.getMean(); // ... 
and the mean of the lon angles for phi if (debug) System.out.println("phi range = " + Math.toDegrees(stats.getMax() - stats.getMin())); stats.clear(); // print these if in debug mode if (debug) { System.out.println("theta = " + Math.toDegrees(theta) + "deg"); System.out.println("phi = " + Math.toDegrees(phi) + "deg"); } // find x and y (distance beteen pixels in map units) Collections.sort(latGrats); Collections.sort(lonGrats); int prevMapValue = 0; //fixme: how to stop warning about not being initilised? Line2D prevGratPixelSys = new Line2D.Double(); boolean first = true; for (Graticule g : latGrats) { if (!first) { int deltaMapValue = Math.abs(g.realValue() - prevMapValue); double deltaPixelValue = (g.l.ptLineDist(prevGratPixelSys.getP1()) + (g.l.ptLineDist(prevGratPixelSys.getP2()))) / 2; double delta = deltaMapValue / deltaPixelValue; stats.addValue(delta); } else { first = false; prevMapValue = g.realValue(); prevGratPixelSys = (Line2D) g.l.clone(); } } double y = stats.getMean(); if (debug) System.out.println("y range = " + (stats.getMax() - stats.getMin())); stats.clear(); first = true; for (Graticule g : lonGrats) { if (!first) { int deltaMapValue = g.realValue() - prevMapValue; double deltaPixelValue = (g.l.ptLineDist(prevGratPixelSys.getP1()) + (g.l.ptLineDist(prevGratPixelSys.getP2()))) / 2; double delta = deltaMapValue / deltaPixelValue; stats.addValue(delta); } else { first = false; prevMapValue = g.realValue(); prevGratPixelSys = (Line2D) g.l.clone(); } } double x = stats.getMean(); if (debug) System.out.println("x range = " + (stats.getMax() - stats.getMin())); stats.clear(); if (debug) { System.out.println("x = " + x); System.out.println("y = " + y); } SimpleRegression regression = new SimpleRegression(); // C, F are translation terms: x, y map coordinates of the center of the upper-left pixel for (Graticule g : latGrats) { // find perp dist to pixel space 0,0 Double perpPixelDist = g.l.ptLineDist(new Point2D.Double(0, 0)); // find the map space distance from this graticule to the center of the 0,0 pixel Double perpMapDist = perpPixelDist * y; // perpMapDist / perpPixelDist = y regression.addData(perpMapDist, g.realValue()); } double F = regression.getIntercept(); regression.clear(); for (Graticule g : lonGrats) { // find perp dist to pixel space 0,0 Double perpPixelDist = g.l.ptLineDist(new Point2D.Double(0, 0)); // find the map space distance from this graticule to the center of the 0,0 pixel Double perpMapDist = perpPixelDist * x; // perpMapDist / perpPixelDist = x regression.addData(perpMapDist, g.realValue()); } double C = regression.getIntercept(); regression.clear(); if (debug) { System.out.println("Upper Left pixel has coordinates " + C + ", " + F); } // convert to meters C *= unitsToMeters; F *= unitsToMeters; // C,F store the projected (in map units) coordinates of the upper left pixel. 
// originNorthing,originEasting is the offset we need to apply to 0,0 to push the offsets into our global coordinate system C = originEasting + C; F = originNorthing + F; // calculate the affine transformation matrix elements double D = -1 * x * unitsToMeters * Math.sin(theta); double A = x * unitsToMeters * Math.cos(theta); double B = y * unitsToMeters * Math.sin(phi); // if should be negative, it'll formed by negative sin double E = -1 * y * unitsToMeters * Math.cos(phi); /* * Line 1: A: pixel size in the x-direction in map units/pixel * Line 2: D: rotation about y-axis * Line 3: B: rotation about x-axis * Line 4: E: pixel size in the y-direction in map units, almost always negative[3] * Line 5: C: x-coordinate of the center of the upper left pixel * Line 6: F: y-coordinate of the center of the upper left pixel */ if (debug) { System.out.println("A = " + A); System.out.println("D = " + D); System.out.println("B = " + B); System.out.println("E = " + E); System.out.println("C = " + C); System.out.println("F = " + F); // write the world file System.out.println(); System.out.println("World File:"); System.out.println(A); System.out.println(D); System.out.println(B); System.out.println(E); System.out.println(C); System.out.println(F); } // write to the .wld file wldWriter.write(A + "\n"); wldWriter.write(D + "\n"); wldWriter.write(B + "\n"); wldWriter.write(E + "\n"); wldWriter.write(C + "\n"); wldWriter.write(F + "\n"); wldWriter.close(); }
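The loops above apply Math.abs to the difference between successive graticule values so the estimate does not depend on sort direction, then divide by the pixel-space separation to get map units per pixel. A distilled sketch with plain numbers standing in for the Graticule and Line2D plumbing:

final class PixelSizeEstimate {
    public static void main(String[] args) {
        // Successive graticule values (map units) and their pixel positions,
        // standing in for g.realValue() and the ptLineDist() distances above.
        int[] mapValues = {5000, 4000, 3000};
        double[] pixelPositions = {110.0, 352.5, 596.0};

        double sum = 0;
        int n = 0;
        for (int i = 1; i < mapValues.length; i++) {
            int deltaMap = Math.abs(mapValues[i] - mapValues[i - 1]);             // sort direction does not matter
            double deltaPixel = Math.abs(pixelPositions[i] - pixelPositions[i - 1]);
            sum += deltaMap / deltaPixel;                                          // map units per pixel
            n++;
        }
        System.out.println("mean pixel size = " + (sum / n) + " map units/pixel");
    }
}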