List of usage examples for java.util.ArrayList.add
public boolean add(E e)
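Before the project examples, a minimal self-contained sketch of the method itself (standard JDK only; the class and variable names are illustrative): add(E) appends the element to the end of the list and, for ArrayList, always returns true, in line with the Collection.add contract.

import java.util.ArrayList;
import java.util.List;

public class ArrayListAddExample {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>();   // empty list, size 0
        boolean changed = names.add("Alice");     // appends at the end of the list
        names.add("Bob");
        System.out.println(changed);              // true (ArrayList.add always returns true)
        System.out.println(names);                // [Alice, Bob]
        System.out.println(names.size());         // 2
    }
}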
From source file:dependencies.DependencyResolving.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // TODO code application logic here
    JSONParser parser = new JSONParser(); // we use JSONParser in order to be able to read from JSON file
    try {
        // here we declare the file reader and define the path to the file dependencies.json
        Object obj = parser.parse(new FileReader(
                "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\dependencies.json"));
        JSONObject project = (JSONObject) obj; // a JSON object containing all the data in the .json file
        JSONArray dependencies = (JSONArray) project.get("dependencies"); // get array of objects with key "dependencies"
        System.out.print("We need to install the following dependencies: ");
        Iterator<String> iterator = dependencies.iterator(); // define an iterator over the array "dependencies"
        while (iterator.hasNext()) {
            System.out.println(iterator.next());
        }
        // on the next line we declare another object, which parses a Parser object and reads from all_packages.json
        Object obj2 = parser.parse(new FileReader(
                "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\all_packages.json"));
        JSONObject tools = (JSONObject) obj2; // a JSON object containing all the data in the file all_packages.json
        for (int i = 0; i < dependencies.size(); i++) {
            if (tools.containsKey(dependencies.get(i))) {
                System.out.println(
                        "In order to install " + dependencies.get(i) + ", we need the following programs:");
                JSONArray temporaryArray = (JSONArray) tools.get(dependencies.get(i)); // a temporary JSON array in which we store the keys and values of the dependencies
                for (i = 0; i < temporaryArray.size(); i++) {
                    System.out.println(temporaryArray.get(i));
                }
                ArrayList<Object> arraysOfJsonData = new ArrayList<Object>(); // an array in which we will store the keys of the objects, after we use the values and won't need them anymore
                for (i = 0; i < temporaryArray.size(); i++) {
                    System.out.println("Installing " + temporaryArray.get(i));
                }
                while (!temporaryArray.isEmpty()) {
                    for (Object element : temporaryArray) {
                        if (tools.containsKey(element)) {
                            JSONArray secondaryArray = (JSONArray) tools.get(element); // a temporary array within the scope of the if-statement
                            if (secondaryArray.size() != 0) {
                                System.out.println("In order to install " + element + ", we need ");
                            }
                            for (i = 0; i < secondaryArray.size(); i++) {
                                System.out.println(secondaryArray.get(i));
                            }
                            for (Object o : secondaryArray) {
                                arraysOfJsonData.add(o);
                                // here we create a file with the installed dependency
                                File file = new File(
                                        "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\installed_modules\\"
                                                + o);
                                if (file.createNewFile()) {
                                    System.out.println(file.getName() + " is installed!");
                                } else {
                                }
                            }
                            secondaryArray.clear();
                        }
                    }
                    temporaryArray.clear();
                    for (i = 0; i < arraysOfJsonData.size(); i++) {
                        temporaryArray.add(arraysOfJsonData.get(i));
                    }
                    arraysOfJsonData.clear();
                }
            }
        }
        Set<String> keys = tools.keySet(); // here we define a set of keys of the objects in all_packages.json
        for (String s : keys) {
            File file = new File(
                    "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\installed_modules\\"
                            + s);
            if (file.createNewFile()) {
                System.out.println(file.getName() + " is installed.");
            } else {
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(DependencyResolving.class.getName()).log(Level.SEVERE, null, ex);
    } catch (ParseException ex) {
        Logger.getLogger(DependencyResolving.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:eu.cognitum.readandwrite.App.java
public static void main(String[] args) {
    try {
        String configFile = 0 == args.length ? "example.properties" : args[0];
        CONFIGURATION = new Properties();
        File f = new File(configFile);
        if (!f.exists()) {
            LOGGER.warning("configuration not found at " + configFile);
            return;
        }
        LOGGER.info("loading configuration file " + f.getAbsoluteFile());
        CONFIGURATION.load(new FileInputStream(f));
        String ip = CONFIGURATION.getProperty(PROP_STORAGE_HOSTNAME);
        String keyspace = CONFIGURATION.getProperty(PROP_STORAGE_KEYSPACE);
        String directory = CONFIGURATION.getProperty(PROP_STORAGE_DIRECTORY);
        // N of articles to be generated.
        int Narticles = 100000;
        // size of the buffer to commit each time
        int commitBufferSize = 100;
        // N of articles to commit before trying reads
        int readStep = 100;
        String currentNamespace = "http://mynamespace#";
        LOGGER.log(Level.INFO, "Generating the rdf...");
        GenerateRdf rdfGenerator = new GenerateRdf(currentNamespace, "tmp.rdf");
        rdfGenerator.generateAndSaveRdf(Narticles);
        LOGGER.log(Level.INFO, "Generated the rdf!");
        ArrayList<SimulateReadAndWrite> simulateAll = new ArrayList<SimulateReadAndWrite>();
        int Ndbs = 0;
        DBS[] chosenDbs = { DBS.NATIVE };
        //DBS[] chosenDbs = DBS.values();
        for (DBS dbs : chosenDbs) {
            SailRepository sr;
            switch (dbs) {
            case NATIVE:
                sr = createNativeStoreConnection(directory);
                break;
            case TITAN:
                sr = createTitanConnection(ip, keyspace);
                break;
            case NEO4J:
                sr = createNeo4jConnection(keyspace);
                break;
            case ORIENT:
                sr = createOrientConnection(keyspace);
                break;
            default:
                sr = null;
                break;
            }
            if (sr == null) {
                throw new Exception("Something wrong while connecting to " + dbs.toString());
            }
            simulateAll.add(new SimulateReadAndWrite(sr, "test" + dbs.toString(), Narticles, readStep,
                    commitBufferSize, dbs.toString(), keyspace, currentNamespace, rdfGenerator));
            simulateAll.get(Ndbs).start();
            Ndbs++;
        }
        int Nfinished = 0;
        int k;
        while (Nfinished != Ndbs) {
            Nfinished = 0;
            k = 0;
            for (DBS dbs : chosenDbs) {
                if (simulateAll.get(k).IsProcessCompleted()) {
                    Nfinished++;
                } else {
                    System.out.println(String.format("Process for db %s is at %.2f", dbs.toString(),
                            simulateAll.get(k).GetProgress()));
                }
                k++;
            }
            Thread.sleep(10000);
        }
    } catch (Exception ex) {
        LOGGER.log(Level.SEVERE, null, ex);
    }
}
From source file:DIA_Umpire_SE.DIA_Umpire_SE.java
/** * @param args the command line arguments DIA_Umpire parameterfile *//*from www. j ava 2 s . c om*/ public static void main(String[] args) throws InterruptedException, FileNotFoundException, ExecutionException, IOException, ParserConfigurationException, DataFormatException, SAXException, Exception { System.out.println( "================================================================================================="); System.out.println( "DIA-Umpire singal extraction analysis (version: " + UmpireInfo.GetInstance().Version + ")"); if (args.length < 2 || args.length > 3) { System.out.println( "command format error, the correct format is: java -jar -Xmx8G DIA_Umpire_SE.jar mzMXL_file diaumpire_se.params"); System.out.println( "To fix DIA setting, use : java -jar -Xmx8G DIA_Umpire_SE.jar mzMXL_file diaumpire_se.params -f"); return; } try { //Define logger level for console ConsoleLogger.SetConsoleLogger(Level.INFO); //Define logger level and file path for text log file ConsoleLogger.SetFileLogger(Level.DEBUG, FilenameUtils.getFullPath(args[0]) + "diaumpire_se.log"); } catch (Exception e) { } boolean Fix = false; boolean Resume = false; if (args.length == 3 && args[2].equals("-f")) { Fix = true; } String parameterfile = args[1]; String MSFilePath = args[0]; Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version); Logger.getRootLogger().info("Parameter file:" + parameterfile); Logger.getRootLogger().info("Spectra file:" + MSFilePath); BufferedReader reader = new BufferedReader(new FileReader(parameterfile)); String line = ""; InstrumentParameter param = new InstrumentParameter(InstrumentParameter.InstrumentType.TOF5600); param.DetermineBGByID = false; param.EstimateBG = true; int NoCPUs = 2; SpectralDataType.DataType dataType = SpectralDataType.DataType.DIA_F_Window; String WindowType = ""; int WindowSize = 25; ArrayList<XYData> WindowList = new ArrayList<>(); boolean ExportPrecursorPeak = false; boolean ExportFragmentPeak = false; //<editor-fold defaultstate="collapsed" desc="Read parameter file"> while ((line = reader.readLine()) != null) { Logger.getRootLogger().info(line); if (!"".equals(line) && !line.startsWith("#")) { //System.out.println(line); if (line.equals("==window setting begin")) { while (!(line = reader.readLine()).equals("==window setting end")) { if (!"".equals(line)) { WindowList.add(new XYData(Float.parseFloat(line.split("\t")[0]), Float.parseFloat(line.split("\t")[1]))); } } continue; } if (line.split("=").length < 2) { continue; } String type = line.split("=")[0].trim(); if (type.startsWith("para.")) { type = type.replace("para.", "SE."); } String value = line.split("=")[1].trim(); switch (type) { case "Thread": { NoCPUs = Integer.parseInt(value); break; } case "ExportPrecursorPeak": { ExportPrecursorPeak = Boolean.parseBoolean(value); break; } case "ExportFragmentPeak": { ExportFragmentPeak = Boolean.parseBoolean(value); break; } //<editor-fold defaultstate="collapsed" desc="instrument parameters"> case "RPmax": { param.PrecursorRank = Integer.parseInt(value); break; } case "RFmax": { param.FragmentRank = Integer.parseInt(value); break; } case "CorrThreshold": { param.CorrThreshold = Float.parseFloat(value); break; } case "DeltaApex": { param.ApexDelta = Float.parseFloat(value); break; } case "RTOverlap": { param.RTOverlapThreshold = Float.parseFloat(value); break; } case "BoostComplementaryIon": { param.BoostComplementaryIon = Boolean.parseBoolean(value); break; } case "AdjustFragIntensity": { param.AdjustFragIntensity = 
Boolean.parseBoolean(value); break; } case "SE.MS1PPM": { param.MS1PPM = Float.parseFloat(value); break; } case "SE.MS2PPM": { param.MS2PPM = Float.parseFloat(value); break; } case "SE.SN": { param.SNThreshold = Float.parseFloat(value); break; } case "SE.MS2SN": { param.MS2SNThreshold = Float.parseFloat(value); break; } case "SE.MinMSIntensity": { param.MinMSIntensity = Float.parseFloat(value); break; } case "SE.MinMSMSIntensity": { param.MinMSMSIntensity = Float.parseFloat(value); break; } case "SE.MinRTRange": { param.MinRTRange = Float.parseFloat(value); break; } case "SE.MaxNoPeakCluster": { param.MaxNoPeakCluster = Integer.parseInt(value); param.MaxMS2NoPeakCluster = Integer.parseInt(value); break; } case "SE.MinNoPeakCluster": { param.MinNoPeakCluster = Integer.parseInt(value); param.MinMS2NoPeakCluster = Integer.parseInt(value); break; } case "SE.MinMS2NoPeakCluster": { param.MinMS2NoPeakCluster = Integer.parseInt(value); break; } case "SE.MaxCurveRTRange": { param.MaxCurveRTRange = Float.parseFloat(value); break; } case "SE.Resolution": { param.Resolution = Integer.parseInt(value); break; } case "SE.RTtol": { param.RTtol = Float.parseFloat(value); break; } case "SE.NoPeakPerMin": { param.NoPeakPerMin = Integer.parseInt(value); break; } case "SE.StartCharge": { param.StartCharge = Integer.parseInt(value); break; } case "SE.EndCharge": { param.EndCharge = Integer.parseInt(value); break; } case "SE.MS2StartCharge": { param.MS2StartCharge = Integer.parseInt(value); break; } case "SE.MS2EndCharge": { param.MS2EndCharge = Integer.parseInt(value); break; } case "SE.NoMissedScan": { param.NoMissedScan = Integer.parseInt(value); break; } case "SE.Denoise": { param.Denoise = Boolean.valueOf(value); break; } case "SE.EstimateBG": { param.EstimateBG = Boolean.valueOf(value); break; } case "SE.RemoveGroupedPeaks": { param.RemoveGroupedPeaks = Boolean.valueOf(value); break; } case "SE.MinFrag": { param.MinFrag = Integer.parseInt(value); break; } case "SE.IsoPattern": { param.IsoPattern = Float.valueOf(value); break; } case "SE.StartRT": { param.startRT = Float.valueOf(value); break; } case "SE.EndRT": { param.endRT = Float.valueOf(value); break; } case "SE.RemoveGroupedPeaksRTOverlap": { param.RemoveGroupedPeaksRTOverlap = Float.valueOf(value); break; } case "SE.RemoveGroupedPeaksCorr": { param.RemoveGroupedPeaksCorr = Float.valueOf(value); break; } case "SE.MinMZ": { param.MinMZ = Float.valueOf(value); break; } case "SE.MinPrecursorMass": { param.MinPrecursorMass = Float.valueOf(value); break; } case "SE.MaxPrecursorMass": { param.MaxPrecursorMass = Float.valueOf(value); break; } case "SE.IsoCorrThreshold": { param.IsoCorrThreshold = Float.valueOf(value); break; } case "SE.MassDefectFilter": { param.MassDefectFilter = Boolean.parseBoolean(value); break; } case "SE.MassDefectOffset": { param.MassDefectOffset = Float.valueOf(value); break; } //</editor-fold>//</editor-fold> case "WindowType": { WindowType = value; switch (WindowType) { case "SWATH": { dataType = SpectralDataType.DataType.DIA_F_Window; break; } case "V_SWATH": { dataType = SpectralDataType.DataType.DIA_V_Window; break; } case "MSX": { dataType = SpectralDataType.DataType.MSX; break; } case "MSE": { dataType = SpectralDataType.DataType.MSe; break; } } break; } case "WindowSize": { WindowSize = Integer.parseInt(value); break; } } } } //</editor-fold> try { File MSFile = new File(MSFilePath); if (MSFile.exists()) { long time = System.currentTimeMillis(); Logger.getRootLogger().info( 
"================================================================================================="); Logger.getRootLogger().info("Processing " + MSFilePath + "...."); //Initialize a DIA file data structure DIAPack DiaFile = new DIAPack(MSFile.getAbsolutePath(), NoCPUs); DiaFile.Resume = Resume; DiaFile.SetDataType(dataType); DiaFile.SetParameter(param); //Set DIA isolation window setting if (dataType == SpectralDataType.DataType.DIA_F_Window) { DiaFile.SetWindowSize(WindowSize); } else if (dataType == SpectralDataType.DataType.DIA_V_Window) { for (XYData window : WindowList) { DiaFile.AddVariableWindow(window); } } DiaFile.SaveDIASetting(); DiaFile.SaveParams(); if (Fix) { DiaFile.FixScanidx(); return; } DiaFile.ExportPrecursorPeak = ExportPrecursorPeak; DiaFile.ExportFragmentPeak = ExportFragmentPeak; Logger.getRootLogger().info("Module A: Signal extraction"); //Start DIA signal extraction process to generate pseudo MS/MS files DiaFile.process(); time = System.currentTimeMillis() - time; Logger.getRootLogger().info(MSFilePath + " processed time:" + String.format("%d hour, %d min, %d sec", TimeUnit.MILLISECONDS.toHours(time), TimeUnit.MILLISECONDS.toMinutes(time) - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(time)), TimeUnit.MILLISECONDS.toSeconds(time) - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(time)))); } else { throw new RuntimeException("file: " + MSFile + "? does not exist!"); } Logger.getRootLogger().info("Job complete"); Logger.getRootLogger().info( "================================================================================================="); } catch (Exception e) { Logger.getRootLogger().error(ExceptionUtils.getStackTrace(e)); throw e; } }
From source file:edu.nyu.vida.data_polygamy.relationship_computation.Relationship.java
/** * @param args/*from w w w .j a v a 2s .co m*/ * @throws ParseException */ @SuppressWarnings({ "deprecation" }) public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException { Options options = new Options(); Option forceOption = new Option("f", "force", false, "force the computation of the relationship " + "even if files already exist"); forceOption.setRequired(false); options.addOption(forceOption); Option scoreOption = new Option("sc", "score", true, "set threhsold for relationship score"); scoreOption.setRequired(false); scoreOption.setArgName("SCORE THRESHOLD"); options.addOption(scoreOption); Option strengthOption = new Option("st", "strength", true, "set threhsold for relationship strength"); strengthOption.setRequired(false); strengthOption.setArgName("STRENGTH THRESHOLD"); options.addOption(strengthOption); Option completeRandomizationOption = new Option("c", "complete-randomization", false, "use complete randomization when performing significance tests"); completeRandomizationOption.setRequired(false); options.addOption(completeRandomizationOption); Option idOption = new Option("id", "ids", false, "output id instead of names for datasets and attributes"); idOption.setRequired(false); options.addOption(idOption); Option g1Option = new Option("g1", "first-group", true, "set first group of datasets"); g1Option.setRequired(true); g1Option.setArgName("FIRST GROUP"); g1Option.setArgs(Option.UNLIMITED_VALUES); options.addOption(g1Option); Option g2Option = new Option("g2", "second-group", true, "set second group of datasets"); g2Option.setRequired(false); g2Option.setArgName("SECOND GROUP"); g2Option.setArgs(Option.UNLIMITED_VALUES); options.addOption(g2Option); Option machineOption = new Option("m", "machine", true, "machine identifier"); machineOption.setRequired(true); machineOption.setArgName("MACHINE"); machineOption.setArgs(1); options.addOption(machineOption); Option nodesOption = new Option("n", "nodes", true, "number of nodes"); nodesOption.setRequired(true); nodesOption.setArgName("NODES"); nodesOption.setArgs(1); options.addOption(nodesOption); Option s3Option = new Option("s3", "s3", false, "data on Amazon S3"); s3Option.setRequired(false); options.addOption(s3Option); Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true, "aws access key id; " + "this is required if the execution is on aws"); awsAccessKeyIdOption.setRequired(false); awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID"); awsAccessKeyIdOption.setArgs(1); options.addOption(awsAccessKeyIdOption); Option awsSecretAccessKeyOption = new Option("aws_key", "aws-id", true, "aws secrect access key; " + "this is required if the execution is on aws"); awsSecretAccessKeyOption.setRequired(false); awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY"); awsSecretAccessKeyOption.setArgs(1); options.addOption(awsSecretAccessKeyOption); Option bucketOption = new Option("b", "s3-bucket", true, "bucket on s3; " + "this is required if the execution is on aws"); bucketOption.setRequired(false); bucketOption.setArgName("S3-BUCKET"); bucketOption.setArgs(1); options.addOption(bucketOption); Option helpOption = new Option("h", "help", false, "display this message"); helpOption.setRequired(false); options.addOption(helpOption); Option removeOption = new Option("r", "remove-not-significant", false, "remove relationships that are not" + "significant from the final output"); removeOption.setRequired(false); options.addOption(removeOption); HelpFormatter formatter = new 
HelpFormatter(); CommandLineParser parser = new PosixParser(); CommandLine cmd = null; try { cmd = parser.parse(options, args); } catch (ParseException e) { formatter.printHelp("hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship", options, true); System.exit(0); } if (cmd.hasOption("h")) { formatter.printHelp("hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship", options, true); System.exit(0); } boolean s3 = cmd.hasOption("s3"); String s3bucket = ""; String awsAccessKeyId = ""; String awsSecretAccessKey = ""; if (s3) { if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) { System.out.println( "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS."); formatter.printHelp( "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship", options, true); System.exit(0); } s3bucket = cmd.getOptionValue("b"); awsAccessKeyId = cmd.getOptionValue("aws_id"); awsSecretAccessKey = cmd.getOptionValue("aws_key"); } boolean snappyCompression = false; boolean bzip2Compression = false; String machine = cmd.getOptionValue("m"); int nbNodes = Integer.parseInt(cmd.getOptionValue("n")); Configuration s3conf = new Configuration(); if (s3) { s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId); s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey); s3conf.set("bucket", s3bucket); } Path path = null; FileSystem fs = FileSystem.get(new Configuration()); ArrayList<String> shortDataset = new ArrayList<String>(); ArrayList<String> firstGroup = new ArrayList<String>(); ArrayList<String> secondGroup = new ArrayList<String>(); HashMap<String, String> datasetAgg = new HashMap<String, String>(); boolean removeNotSignificant = cmd.hasOption("r"); boolean removeExistingFiles = cmd.hasOption("f"); boolean completeRandomization = cmd.hasOption("c"); boolean hasScoreThreshold = cmd.hasOption("sc"); boolean hasStrengthThreshold = cmd.hasOption("st"); boolean outputIds = cmd.hasOption("id"); String scoreThreshold = hasScoreThreshold ? cmd.getOptionValue("sc") : ""; String strengthThreshold = hasStrengthThreshold ? cmd.getOptionValue("st") : ""; // all datasets ArrayList<String> all_datasets = new ArrayList<String>(); if (s3) { path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir); fs = FileSystem.get(path.toUri(), s3conf); } else { path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir); } BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path))); String line = br.readLine(); while (line != null) { all_datasets.add(line.split("\t")[0]); line = br.readLine(); } br.close(); if (s3) fs.close(); String[] all_datasets_array = new String[all_datasets.size()]; all_datasets.toArray(all_datasets_array); String[] firstGroupCmd = cmd.getOptionValues("g1"); String[] secondGroupCmd = cmd.hasOption("g2") ? 
cmd.getOptionValues("g2") : all_datasets_array; addDatasets(firstGroupCmd, firstGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket); addDatasets(secondGroupCmd, secondGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket); if (shortDataset.size() == 0) { System.out.println("No datasets to process."); System.exit(0); } if (firstGroup.isEmpty()) { System.out.println("No indices from datasets in G1."); System.exit(0); } if (secondGroup.isEmpty()) { System.out.println("No indices from datasets in G2."); System.exit(0); } // getting dataset ids String datasetNames = ""; String datasetIds = ""; HashMap<String, String> datasetId = new HashMap<String, String>(); Iterator<String> it = shortDataset.iterator(); while (it.hasNext()) { datasetId.put(it.next(), null); } if (s3) { path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir); fs = FileSystem.get(path.toUri(), s3conf); } else { path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir); } br = new BufferedReader(new InputStreamReader(fs.open(path))); line = br.readLine(); while (line != null) { String[] dt = line.split("\t"); all_datasets.add(dt[0]); if (datasetId.containsKey(dt[0])) { datasetId.put(dt[0], dt[1]); datasetNames += dt[0] + ","; datasetIds += dt[1] + ","; } line = br.readLine(); } br.close(); if (s3) fs.close(); datasetNames = datasetNames.substring(0, datasetNames.length() - 1); datasetIds = datasetIds.substring(0, datasetIds.length() - 1); it = shortDataset.iterator(); while (it.hasNext()) { String dataset = it.next(); if (datasetId.get(dataset) == null) { System.out.println("No dataset id for " + dataset); System.exit(0); } } String firstGroupStr = ""; String secondGroupStr = ""; for (String dataset : firstGroup) { firstGroupStr += datasetId.get(dataset) + ","; } for (String dataset : secondGroup) { secondGroupStr += datasetId.get(dataset) + ","; } firstGroupStr = firstGroupStr.substring(0, firstGroupStr.length() - 1); secondGroupStr = secondGroupStr.substring(0, secondGroupStr.length() - 1); String relationshipsDir = ""; if (outputIds) { relationshipsDir = FrameworkUtils.relationshipsIdsDir; } else { relationshipsDir = FrameworkUtils.relationshipsDir; } FrameworkUtils.createDir(s3bucket + relationshipsDir, s3conf, s3); String random = completeRandomization ? 
"complete" : "restricted"; String indexInputDirs = ""; String noRelationship = ""; HashSet<String> dirs = new HashSet<String>(); String dataset1; String dataset2; String datasetId1; String datasetId2; for (int i = 0; i < firstGroup.size(); i++) { for (int j = 0; j < secondGroup.size(); j++) { if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer .parseInt(datasetId.get(secondGroup.get(j)))) { dataset1 = firstGroup.get(i); dataset2 = secondGroup.get(j); } else { dataset1 = secondGroup.get(j); dataset2 = firstGroup.get(i); } datasetId1 = datasetId.get(dataset1); datasetId2 = datasetId.get(dataset2); if (dataset1.equals(dataset2)) continue; String correlationOutputFileName = s3bucket + relationshipsDir + "/" + dataset1 + "-" + dataset2 + "/"; if (removeExistingFiles) { FrameworkUtils.removeFile(correlationOutputFileName, s3conf, s3); } if (!FrameworkUtils.fileExists(correlationOutputFileName, s3conf, s3)) { dirs.add(s3bucket + FrameworkUtils.indexDir + "/" + dataset1); dirs.add(s3bucket + FrameworkUtils.indexDir + "/" + dataset2); } else { noRelationship += datasetId1 + "-" + datasetId2 + ","; } } } if (dirs.isEmpty()) { System.out.println("All the relationships were already computed."); System.out.println("Use -f in the beginning of the command line to force the computation."); System.exit(0); } for (String dir : dirs) { indexInputDirs += dir + ","; } Configuration conf = new Configuration(); Machine machineConf = new Machine(machine, nbNodes); String jobName = "relationship" + "-" + random; String relationshipOutputDir = s3bucket + relationshipsDir + "/tmp/"; FrameworkUtils.removeFile(relationshipOutputDir, s3conf, s3); for (int i = 0; i < shortDataset.size(); i++) { conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg", datasetAgg.get(shortDataset.get(i))); } for (int i = 0; i < shortDataset.size(); i++) { conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg-size", Integer.toString(datasetAgg.get(shortDataset.get(i)).split(",").length)); } conf.set("dataset-keys", datasetIds); conf.set("dataset-names", datasetNames); conf.set("first-group", firstGroupStr); conf.set("second-group", secondGroupStr); conf.set("complete-random", String.valueOf(completeRandomization)); conf.set("output-ids", String.valueOf(outputIds)); conf.set("complete-random-str", random); conf.set("main-dataset-id", datasetId.get(shortDataset.get(0))); conf.set("remove-not-significant", String.valueOf(removeNotSignificant)); if (noRelationship.length() > 0) { conf.set("no-relationship", noRelationship.substring(0, noRelationship.length() - 1)); } if (hasScoreThreshold) { conf.set("score-threshold", scoreThreshold); } if (hasStrengthThreshold) { conf.set("strength-threshold", strengthThreshold); } conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks())); conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks())); conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1"); conf.set("mapreduce.reduce.shuffle.parallelcopies", "20"); conf.set("mapreduce.input.fileinputformat.split.minsize", "0"); conf.set("mapreduce.task.io.sort.mb", "200"); conf.set("mapreduce.task.io.sort.factor", "100"); conf.set("mapreduce.task.timeout", "2400000"); if (s3) { machineConf.setMachineConfiguration(conf); conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId); conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey); conf.set("bucket", s3bucket); } if (snappyCompression) { conf.set("mapreduce.map.output.compress", "true"); 
conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec"); //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec"); } if (bzip2Compression) { conf.set("mapreduce.map.output.compress", "true"); conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec"); //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec"); } Job job = new Job(conf); job.setJobName(jobName); job.setMapOutputKeyClass(PairAttributeWritable.class); job.setMapOutputValueClass(TopologyTimeSeriesWritable.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setMapperClass(CorrelationMapper.class); job.setReducerClass(CorrelationReducer.class); job.setNumReduceTasks(machineConf.getNumberReduces()); job.setInputFormatClass(SequenceFileInputFormat.class); //job.setOutputFormatClass(TextOutputFormat.class); LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class); FileInputFormat.setInputDirRecursive(job, true); FileInputFormat.setInputPaths(job, indexInputDirs.substring(0, indexInputDirs.length() - 1)); FileOutputFormat.setOutputPath(job, new Path(relationshipOutputDir)); job.setJarByClass(Relationship.class); long start = System.currentTimeMillis(); job.submit(); job.waitForCompletion(true); System.out.println(jobName + "\t" + (System.currentTimeMillis() - start)); // moving files to right place for (int i = 0; i < firstGroup.size(); i++) { for (int j = 0; j < secondGroup.size(); j++) { if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer .parseInt(datasetId.get(secondGroup.get(j)))) { dataset1 = firstGroup.get(i); dataset2 = secondGroup.get(j); } else { dataset1 = secondGroup.get(j); dataset2 = firstGroup.get(i); } if (dataset1.equals(dataset2)) continue; String from = s3bucket + relationshipsDir + "/tmp/" + dataset1 + "-" + dataset2 + "/"; String to = s3bucket + relationshipsDir + "/" + dataset1 + "-" + dataset2 + "/"; FrameworkUtils.renameFile(from, to, s3conf, s3); } } }
From source file:fr.inria.edelweiss.kgdqp.core.CentralizedInferrencing.java
public static void main(String args[]) throws ParseException, EngineException, InterruptedException, IOException { List<String> endpoints = new ArrayList<String>(); String queryPath = null;/*from w ww .j ava2 s . c om*/ boolean rulesSelection = false; File rulesDir = null; File ontDir = null; ///////////////// Graph graph = Graph.create(); QueryProcess exec = QueryProcess.create(graph); Options options = new Options(); Option helpOpt = new Option("h", "help", false, "print this message"); // Option queryOpt = new Option("q", "query", true, "specify the sparql query file"); // Option endpointOpt = new Option("e", "endpoint", true, "a federated sparql endpoint URL"); Option versionOpt = new Option("v", "version", false, "print the version information and exit"); Option rulesOpt = new Option("r", "rulesDir", true, "directory containing the inference rules"); Option ontOpt = new Option("o", "ontologiesDir", true, "directory containing the ontologies for rules selection"); // Option locOpt = new Option("c", "centralized", false, "performs centralized inferences"); Option dataOpt = new Option("l", "load", true, "data file or directory to be loaded"); // Option selOpt = new Option("s", "rulesSelection", false, "if set to true, only the applicable rules are run"); // options.addOption(queryOpt); // options.addOption(endpointOpt); options.addOption(helpOpt); options.addOption(versionOpt); options.addOption(rulesOpt); options.addOption(ontOpt); // options.addOption(selOpt); // options.addOption(locOpt); options.addOption(dataOpt); String header = "Corese/KGRAM rule engine experiment command line interface"; String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr, olivier.corby@inria.fr"; CommandLineParser parser = new BasicParser(); CommandLine cmd = parser.parse(options, args); if (cmd.hasOption("h")) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("kgdqp", header, options, footer, true); System.exit(0); } if (cmd.hasOption("o")) { rulesSelection = true; String ontDirPath = cmd.getOptionValue("o"); ontDir = new File(ontDirPath); if (!ontDir.isDirectory()) { logger.warn(ontDirPath + " is not a valid directory path."); System.exit(0); } } if (!cmd.hasOption("r")) { logger.info("You must specify a path for inference rules directory !"); System.exit(0); } if (cmd.hasOption("l")) { String[] dataPaths = cmd.getOptionValues("l"); for (String path : dataPaths) { Load ld = Load.create(graph); ld.load(path); logger.info("Loaded " + path); } } if (cmd.hasOption("v")) { logger.info("version 3.0.4-SNAPSHOT"); System.exit(0); } String rulesDirPath = cmd.getOptionValue("r"); rulesDir = new File(rulesDirPath); if (!rulesDir.isDirectory()) { logger.warn(rulesDirPath + " is not a valid directory path."); System.exit(0); } // Local rules graph initialization Graph rulesG = Graph.create(); Load ld = Load.create(rulesG); if (rulesSelection) { // Ontology loading if (ontDir.isDirectory()) { for (File o : ontDir.listFiles()) { logger.info("Loading " + o.getAbsolutePath()); ld.load(o.getAbsolutePath()); } } } // Rules loading if (rulesDir.isDirectory()) { for (File r : rulesDir.listFiles()) { logger.info("Loading " + r.getAbsolutePath()); ld.load(r.getAbsolutePath()); } } // Rule engine initialization RuleEngine ruleEngine = RuleEngine.create(graph); ruleEngine.set(exec); ruleEngine.setOptimize(true); ruleEngine.setConstructResult(true); ruleEngine.setTrace(true); StopWatch sw = new StopWatch(); logger.info("Federated graph size : " + graph.size()); logger.info("Rules graph size : " + 
rulesG.size()); // Rule selection logger.info("Rules selection"); QueryProcess localKgram = QueryProcess.create(rulesG); ArrayList<String> applicableRules = new ArrayList<String>(); sw.start(); String rulesSelQuery = ""; if (rulesSelection) { rulesSelQuery = pertinentRulesQuery; } else { rulesSelQuery = allRulesQuery; } Mappings maps = localKgram.query(rulesSelQuery); logger.info("Rules selected in " + sw.getTime() + " ms"); logger.info("Applicable rules : " + maps.size()); // Selected rule loading for (Mapping map : maps) { IDatatype dt = (IDatatype) map.getValue("?res"); String rule = dt.getLabel(); //loading rule in the rule engine // logger.info("Adding rule : "); // System.out.println("-------"); // System.out.println(rule); // System.out.println(""); // if (! rule.toLowerCase().contains("sameas")) { applicableRules.add(rule); ruleEngine.addRule(rule); // } } // Rules application on distributed sparql endpoints logger.info("Rules application (" + applicableRules.size() + " rules)"); ExecutorService threadPool = Executors.newCachedThreadPool(); RuleEngineThread ruleThread = new RuleEngineThread(ruleEngine); sw.reset(); sw.start(); // ruleEngine.process(); threadPool.execute(ruleThread); threadPool.shutdown(); //monitoring loop while (!threadPool.isTerminated()) { // System.out.println("******************************"); // System.out.println(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter)); // System.out.println("Rule engine running for " + sw.getTime() + " ms"); // System.out.println("Federated graph size : " + graph.size()); System.out.println(sw.getTime() + " , " + graph.size()); Thread.sleep(5000); } logger.info("Federated graph size : " + graph.size()); // logger.info(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter)); // TripleFormat f = TripleFormat.create(graph, true); // f.write("/tmp/gAll.ttl"); }
From source file:fr.inria.edelweiss.kgdqp.core.CentralizedInferrencingNoSpin.java
public static void main(String args[]) throws ParseException, EngineException, InterruptedException, IOException, LoadException { List<String> endpoints = new ArrayList<String>(); String queryPath = null;// w w w . ja v a 2 s . c o m boolean rulesSelection = false; File rulesDir = null; File ontDir = null; ///////////////// Graph graph = Graph.create(); QueryProcess exec = QueryProcess.create(graph); Options options = new Options(); Option helpOpt = new Option("h", "help", false, "print this message"); // Option queryOpt = new Option("q", "query", true, "specify the sparql query file"); // Option endpointOpt = new Option("e", "endpoint", true, "a federated sparql endpoint URL"); Option versionOpt = new Option("v", "version", false, "print the version information and exit"); Option rulesOpt = new Option("r", "rulesDir", true, "directory containing the inference rules"); Option ontOpt = new Option("o", "ontologiesDir", true, "directory containing the ontologies for rules selection"); // Option locOpt = new Option("c", "centralized", false, "performs centralized inferences"); Option dataOpt = new Option("l", "load", true, "data file or directory to be loaded"); // Option selOpt = new Option("s", "rulesSelection", false, "if set to true, only the applicable rules are run"); // options.addOption(queryOpt); // options.addOption(endpointOpt); options.addOption(helpOpt); options.addOption(versionOpt); options.addOption(rulesOpt); options.addOption(ontOpt); // options.addOption(selOpt); // options.addOption(locOpt); options.addOption(dataOpt); String header = "Corese/KGRAM rule engine experiment command line interface"; String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr, olivier.corby@inria.fr"; CommandLineParser parser = new BasicParser(); CommandLine cmd = parser.parse(options, args); if (cmd.hasOption("h")) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("kgdqp", header, options, footer, true); System.exit(0); } if (cmd.hasOption("o")) { rulesSelection = true; String ontDirPath = cmd.getOptionValue("o"); ontDir = new File(ontDirPath); if (!ontDir.isDirectory()) { logger.warn(ontDirPath + " is not a valid directory path."); System.exit(0); } } if (!cmd.hasOption("r")) { logger.info("You must specify a path for inference rules directory !"); System.exit(0); } if (cmd.hasOption("l")) { String[] dataPaths = cmd.getOptionValues("l"); for (String path : dataPaths) { Load ld = Load.create(graph); ld.load(path); logger.info("Loaded " + path); } } if (cmd.hasOption("v")) { logger.info("version 3.0.4-SNAPSHOT"); System.exit(0); } String rulesDirPath = cmd.getOptionValue("r"); rulesDir = new File(rulesDirPath); if (!rulesDir.isDirectory()) { logger.warn(rulesDirPath + " is not a valid directory path."); System.exit(0); } // Local rules graph initialization Graph rulesG = Graph.create(); Load ld = Load.create(rulesG); if (rulesSelection) { // Ontology loading if (ontDir.isDirectory()) { for (File o : ontDir.listFiles()) { logger.info("Loading " + o.getAbsolutePath()); ld.load(o.getAbsolutePath()); } } } // Rules loading if (rulesDir.isDirectory()) { for (File r : rulesDir.listFiles()) { if (r.getAbsolutePath().endsWith(".rq")) { logger.info("Loading " + r.getAbsolutePath()); // ld.load(r.getAbsolutePath()); // byte[] encoded = Files.readAllBytes(Paths.get(r.getAbsolutePath())); // String construct = new String(encoded, "UTF-8"); //StandardCharsets.UTF_8); FileInputStream f = new FileInputStream(r); QueryLoad ql = QueryLoad.create(); String construct = ql.read(f); 
f.close(); SPINProcess sp = SPINProcess.create(); String spinConstruct = sp.toSpin(construct); ld.load(new ByteArrayInputStream(spinConstruct.getBytes()), Load.TURTLE_FORMAT); logger.info("Rules graph size : " + rulesG.size()); } } } // Rule engine initialization RuleEngine ruleEngine = RuleEngine.create(graph); ruleEngine.set(exec); ruleEngine.setOptimize(true); ruleEngine.setConstructResult(true); ruleEngine.setTrace(true); StopWatch sw = new StopWatch(); logger.info("Federated graph size : " + graph.size()); logger.info("Rules graph size : " + rulesG.size()); // Rule selection logger.info("Rules selection"); QueryProcess localKgram = QueryProcess.create(rulesG); ArrayList<String> applicableRules = new ArrayList<String>(); sw.start(); String rulesSelQuery = ""; if (rulesSelection) { rulesSelQuery = pertinentRulesQuery; } else { rulesSelQuery = allRulesQuery; } Mappings maps = localKgram.query(rulesSelQuery); logger.info("Rules selected in " + sw.getTime() + " ms"); logger.info("Applicable rules : " + maps.size()); // Selected rule loading for (Mapping map : maps) { IDatatype dt = (IDatatype) map.getValue("?res"); String rule = dt.getLabel(); //loading rule in the rule engine // logger.info("Adding rule : "); // System.out.println("-------"); // System.out.println(rule); // System.out.println(""); // if (! rule.toLowerCase().contains("sameas")) { applicableRules.add(rule); ruleEngine.addRule(rule); // } } // Rules application on distributed sparql endpoints logger.info("Rules application (" + applicableRules.size() + " rules)"); ExecutorService threadPool = Executors.newCachedThreadPool(); RuleEngineThread ruleThread = new RuleEngineThread(ruleEngine); sw.reset(); sw.start(); // ruleEngine.process(); threadPool.execute(ruleThread); threadPool.shutdown(); //monitoring loop while (!threadPool.isTerminated()) { // System.out.println("******************************"); // System.out.println(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter)); // System.out.println("Rule engine running for " + sw.getTime() + " ms"); // System.out.println("Federated graph size : " + graph.size()); System.out.println(sw.getTime() + " , " + graph.size()); Thread.sleep(5000); } logger.info("Federated graph size : " + graph.size()); // logger.info(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter)); // TripleFormat f = TripleFormat.create(graph, true); // f.write("/tmp/gAll.ttl"); }
From source file:edu.cmu.lti.oaqa.annographix.apps.SolrQueryApp.java
public static void main(String[] args) {
    Options options = new Options();
    options.addOption("u", null, true, "Solr URI");
    options.addOption("q", null, true, "Query");
    options.addOption("n", null, true, "Max # of results");
    options.addOption("o", null, true, "An optional TREC-style output file");
    options.addOption("w", null, false, "Do a warm-up query call, before each query");

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();
    BufferedWriter trecOutFile = null;

    try {
        CommandLine cmd = parser.parse(options, args);
        String queryFile = null, solrURI = null;

        if (cmd.hasOption("u")) {
            solrURI = cmd.getOptionValue("u");
        } else {
            Usage("Specify Solr URI");
        }

        SolrServerWrapper solr = new SolrServerWrapper(solrURI);

        if (cmd.hasOption("q")) {
            queryFile = cmd.getOptionValue("q");
        } else {
            Usage("Specify Query file");
        }

        int numRet = 100;
        if (cmd.hasOption("n")) {
            numRet = Integer.parseInt(cmd.getOptionValue("n"));
        }

        if (cmd.hasOption("o")) {
            trecOutFile = new BufferedWriter(new FileWriter(new File(cmd.getOptionValue("o"))));
        }

        List<String> fieldList = new ArrayList<String>();
        fieldList.add(UtilConst.ID_FIELD);
        fieldList.add(UtilConst.SCORE_FIELD);

        double totalTime = 0;
        double retQty = 0;
        ArrayList<Double> queryTimes = new ArrayList<Double>();

        boolean bDoWarmUp = cmd.hasOption("w");
        if (bDoWarmUp) {
            System.out.println("Using a warmup step!");
        }

        int queryQty = 0;
        for (String t : FileUtils.readLines(new File(queryFile))) {
            t = t.trim();
            if (t.isEmpty())
                continue;
            int ind = t.indexOf('|');
            if (ind < 0)
                throw new Exception("Wrong format, line: '" + t + "'");
            String qID = t.substring(0, ind);
            String q = t.substring(ind + 1);

            SolrDocumentList res = null;
            if (bDoWarmUp) {
                res = solr.runQuery(q, fieldList, numRet);
            }

            Long tm1 = System.currentTimeMillis();
            res = solr.runQuery(q, fieldList, numRet);
            Long tm2 = System.currentTimeMillis();
            retQty += res.getNumFound();
            System.out.println(qID + " Obtained: " + res.getNumFound() + " entries in " + (tm2 - tm1) + " ms");
            double delta = (tm2 - tm1);
            totalTime += delta;
            queryTimes.add(delta);
            ++queryQty;

            if (trecOutFile != null) {
                ArrayList<SolrRes> resArr = new ArrayList<SolrRes>();
                for (SolrDocument doc : res) {
                    String id = (String) doc.getFieldValue(UtilConst.ID_FIELD);
                    float score = (Float) doc.getFieldValue(UtilConst.SCORE_FIELD);
                    resArr.add(new SolrRes(id, "", score));
                }
                SolrRes[] results = resArr.toArray(new SolrRes[resArr.size()]);
                Arrays.sort(results);

                SolrEvalUtils.saveTrecResults(qID, results, trecOutFile, TREC_RUN, results.length);
            }
        }

        double devTime = 0, meanTime = totalTime / queryQty;
        for (int i = 0; i < queryQty; ++i) {
            double d = queryTimes.get(i) - meanTime;
            devTime += d * d;
        }
        devTime = Math.sqrt(devTime / (queryQty - 1));
        System.out.println(String.format("Query time, mean/standard dev: %.2f/%.2f (ms)", meanTime, devTime));
        System.out.println(String.format("Avg # of docs returned: %.2f", retQty / queryQty));

        solr.close();
        trecOutFile.close();
    } catch (ParseException e) {
        Usage("Cannot parse arguments");
    } catch (Exception e) {
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }
}
From source file:org.loklak.harvester.TwitterScraper.java
/**
 * Usage: java twitter4j.examples.search.SearchTweets [query]
 *
 * @param args search query
 */
public static void main(String[] args) {
    //wget --no-check-certificate "https://twitter.com/search?q=eifel&src=typd&f=realtime"
    ArrayList<String> filterList = new ArrayList<String>();
    filterList.add("image");
    Timeline[] result = null;
    if (args[0].startsWith("/"))
        result = parse(new File(args[0]), Timeline.Order.CREATED_AT, true, true);
    else
        result = TwitterScraper.search(args[0], filterList, Timeline.Order.CREATED_AT, true, true);
    int all = 0;
    for (int x = 0; x < 2; x++) {
        if (x == 0)
            System.out.println("Timeline[0] - finished to be used:");
        if (x == 1)
            System.out.println("Timeline[1] - messages which are in postprocessing");
        all += result[x].size();
        for (TwitterTweet tweet : result[x]) {
            tweet.waitReady(10000);
            System.out.println(tweet.getCreatedAt().toString() + " from @" + tweet.getScreenName() + " - "
                    + tweet.getText());
        }
    }
    System.out.println("count: " + all);
    System.exit(0);
}
From source file:me.camerongray.teamlocker.server.Server.java
public static void main(String[] args) throws PropertyVetoException, SQLException { ConnectionManager.initialise("localhost", "teamlocker", "teamlocker", "teamlocker"); before((request, response) -> {/* ww w . j a va 2s .co m*/ // Log request StringBuilder sb = new StringBuilder(); sb.append(request.requestMethod()); sb.append(" " + request.url()); sb.append(" " + request.body()); System.out.println(sb); if (request.headers("Authorization") == null) { response.header("WWW-Authenticate", "Basic"); halt(401); } RequestCredentials credentials = new RequestCredentials(request); if (!Auth.checkCredentials(credentials.username, credentials.password)) { ResponseBuilder.errorHalt(response, 401, "Incorrect username/password"); } }); get("/check_auth/", (request, response) -> { return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); get("/users/:userId/", (request, response) -> { DynaBean user = null; try (Database database = new Database(ConnectionManager.getConnection(request))) { if (request.params(":userId").equals("self")) { try { user = database.getUser((new RequestCredentials(request)).getUsername()); } catch (ObjectNotFoundException e) { ResponseBuilder.errorHalt(response, 404, "User not found"); } } else { Auth.enforceAdmin(request, response); try { user = database.getUser(Integer.parseInt(request.params(":userId"))); } catch (NumberFormatException e) { ResponseBuilder.errorHalt(response, 400, "User ID must be a number"); } catch (ObjectNotFoundException e) { ResponseBuilder.errorHalt(response, 404, "User not found"); } } } return ResponseBuilder.build(response, ResponseBuilder.objectOf("user", ResponseBuilder.objectOf("id", (int) user.get("id"), "full_name", (String) user.get("full_name"), "username", (String) user.get("username"), "email", (String) user.get("email"), "auth_hash", (String) user.get("auth_hash"), "encrypted_private_key", (String) user.get("encrypted_private_key"), "public_key", (String) user.get("public_key"), "admin", (boolean) user.get("admin"), "pbkdf2_salt", (String) user.get("pbkdf2_salt"), "aes_iv", (String) user.get("aes_iv")))); }); get("/users/:userId/encrypted_aes_keys/", (request, response) -> { int userId; if (request.params(":userId").equals("self")) { userId = Auth.getCurrentUserId(request); } else { Auth.enforceAdmin(request, response); userId = Integer.parseInt(request.params(":userId")); } List<DynaBean> accountData; try (Database database = new Database(ConnectionManager.getConnection(request))) { accountData = database.getUserAccountData(userId); } ArrayList<JSONObject> aesKeyObjects = new ArrayList<>(); for (DynaBean accountDataItem : accountData) { aesKeyObjects.add(ResponseBuilder.objectOf("account_id", (int) accountDataItem.get("account_id"), "encrypted_aes_key", (String) accountDataItem.get("encrypted_aes_key"))); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("encrypted_aes_keys", ResponseBuilder.fromArrayList(aesKeyObjects))); }); get("/users/:userId/permissions/", (request, response) -> { Auth.enforceAdmin(request, response); List<DynaBean> permissions = new ArrayList<>(); try (Database database = new Database(ConnectionManager.getConnection(request))) { try { permissions = database.getUserPermissions(Integer.parseInt(request.params(":userId"))); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "User ID must be a number"); } } ArrayList<JSONObject> responseObjects = new ArrayList<>(); for (DynaBean permission : permissions) { 
responseObjects.add(ResponseBuilder.objectOf("folder_id", (int) permission.get("folder_id"), "read", (boolean) permission.get("read"), "write", (boolean) permission.get("write"))); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("permissions", ResponseBuilder.fromArrayList(responseObjects))); }); delete("/users/:userId/permissions/", (request, response) -> { Auth.enforceAdmin(request, response); try (Database database = new Database(ConnectionManager.getConnection(request))) { try { database.deleteUserPermissions(Integer.parseInt(request.params(":userId"))); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "User ID must be a number"); } } return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); post("/users/:userId/", (request, response) -> { Auth.enforceAdmin(request, response); int userId = -1; try { userId = Integer.parseInt(request.params(":userId")); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "User ID must be a number"); } JSONObject requestJson = null; try { requestJson = RequestJson.getValidated(request, "postUsers"); } catch (JSONValidationException ex) { // TODO: Friendly error messages for JSONValidationExceptions rather than raw output from validation library ResponseBuilder.errorHalt(response, 400, ex.getMessage()); } try (Database database = new Database(ConnectionManager.getConnection(request))) { boolean usernameExists = true; try { DynaBean user = database.getUser(requestJson.getString("username")); if ((Integer) user.get("id") == userId) { usernameExists = false; } } catch (ObjectNotFoundException ex) { usernameExists = false; } if (usernameExists) { ResponseBuilder.errorHalt(response, 409, "A user with that username already exists"); } database.updateUser(userId, requestJson.getString("username"), requestJson.getString("full_name"), requestJson.getString("email"), requestJson.getBoolean("admin")); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); get("/users/", (request, response) -> { Auth.enforceAdmin(request, response); ArrayList<JSONObject> userObjects = new ArrayList<>(); try (Database database = new Database(ConnectionManager.getConnection(request))) { List<DynaBean> users = database.getAllUsers(); for (DynaBean user : users) { userObjects.add(ResponseBuilder.objectOf("id", (int) user.get("id"), "full_name", (String) user.get("full_name"), "username", (String) user.get("username"), "email", (String) user.get("email"), "auth_hash", (String) user.get("auth_hash"), "encrypted_private_key", (String) user.get("encrypted_private_key"), "public_key", (String) user.get("public_key"), "admin", (boolean) user.get("admin"), "pbkdf2_salt", (String) user.get("pbkdf2_salt"), "aes_iv", (String) user.get("aes_iv"))); } } return ResponseBuilder.build(response, ResponseBuilder.objectOf("users", ResponseBuilder.fromArrayList(userObjects))); }); put("/users/", (request, response) -> { JSONObject requestJson = null; try { requestJson = RequestJson.getValidated(request, "putUsers"); } catch (JSONValidationException ex) { // TODO: Friendly error messages for JSONValidationExceptions rather than raw output from validation library ResponseBuilder.errorHalt(response, 400, ex.getMessage()); } int userId = -1; try (Database database = new Database(ConnectionManager.getConnection(request))) { boolean userExists = true; try { database.getUser(requestJson.getString("username")); } catch (ObjectNotFoundException ex) { userExists = false; } if 
(userExists) { ResponseBuilder.errorHalt(response, 409, "A user with that username already exists"); } userId = database.addUser(requestJson.getString("full_name"), requestJson.getString("username"), requestJson.getString("email"), BCrypt.hashpw(requestJson.getString("auth_key"), BCrypt.gensalt()), requestJson.getString("encrypted_private_key"), requestJson.getString("public_key"), requestJson.getBoolean("admin"), requestJson.getString("pbkdf2_salt"), requestJson.getString("aes_iv")); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("user_id", userId)); }); get("/folders/", (request, response) -> { ArrayList<JSONObject> folderObjects = new ArrayList<>(); try (Database database = new Database(ConnectionManager.getConnection(request))) { List<DynaBean> folders = database.getFolders((int) Auth.getCurrentUser(request).get("id")); for (DynaBean folder : folders) { folderObjects.add(ResponseBuilder.objectOf("id", (int) folder.get("id"), "name", (String) folder.get("name"), "read", (boolean) folder.get("read"), "write", (boolean) folder.get("write"))); } } return ResponseBuilder.build(response, ResponseBuilder.objectOf("folders", ResponseBuilder.fromArrayList(folderObjects))); }); put("/folders/", (request, response) -> { JSONObject requestJson = null; try { requestJson = RequestJson.getValidated(request, "putFolder"); } catch (JSONValidationException ex) { // TODO: Friendly error messages for JSONValidationExceptions rather than raw output from validation library ResponseBuilder.errorHalt(response, 400, ex.getMessage()); } Auth.enforceAdmin(request, response); int folderId = -1; try (Database database = new Database(ConnectionManager.getConnection(request))) { try { database.getFolder(requestJson.getString("name")); ResponseBuilder.errorHalt(response, 409, "A folder with that name already exists"); } catch (ObjectNotFoundException ex) { // We don't care if it doesn't exist, we actually want this exception to be thrown! 
} folderId = database.addFolder(requestJson.getString("name")); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("folder_id", folderId)); }); post("/folders/:folderId/", (request, response) -> { int folderId = -1; try { folderId = Integer.parseInt(request.params(":folderId")); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "Folder ID must be a number"); } Auth.enforceFolderPermission(request, response, folderId, Auth.PERMISSION_WRITE); JSONObject requestJson = null; try { requestJson = RequestJson.getValidated(request, "postFolders"); } catch (JSONValidationException ex) { // TODO: Friendly error messages for JSONValidationExceptions rather than raw output from validation library ResponseBuilder.errorHalt(response, 400, ex.getMessage()); } try (Database database = new Database(ConnectionManager.getConnection(request))) { try { database.updateFolder(folderId, requestJson.getString("name")); } catch (ObjectNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "Folder not found"); } } return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); // TODO - Replace with generic update user method post("/users/self/update_password/", (request, response) -> { JSONObject requestJson = null; try { requestJson = RequestJson.getValidated(request, "postUsersUpdatePassword"); } catch (JSONValidationException ex) { // TODO: Friendly error messages for JSONValidationExceptions rather than raw output from validation library ResponseBuilder.errorHalt(response, 400, ex.getMessage()); } try (Database database = new Database(ConnectionManager.getConnection(request))) { try { database.updateUserPassword(Auth.getCurrentUserId(request), requestJson.getString("encrypted_private_key"), requestJson.getString("aes_iv"), requestJson.getString("pbkdf2_salt"), BCrypt.hashpw(requestJson.getString("auth_key"), BCrypt.gensalt())); } catch (ObjectNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "User not found"); } } return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); delete("/folders/:folderId/", (request, response) -> { int folderId = -1; try { folderId = Integer.parseInt(request.params(":folderId")); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "Folder ID must be a number"); } Auth.enforceFolderPermission(request, response, folderId, Auth.PERMISSION_WRITE); try (Database database = new Database(ConnectionManager.getConnection(request))) { try { database.deleteFolder(folderId); } catch (ObjectNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "Folder not found"); } } return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); get("/folders/:folderId/accounts/", (request, response) -> { int folderId = -1; try { folderId = Integer.parseInt(request.params(":folderId")); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "Folder ID must be a number"); } Auth.enforceFolderPermission(request, response, folderId, Auth.PERMISSION_READ); ArrayList<JSONObject> accountObjects = new ArrayList<>(); try (Database database = new Database(ConnectionManager.getConnection(request))) { List<DynaBean> accounts = new ArrayList<>(); accounts = database.getFolderAccounts(folderId, Auth.getCurrentUserId(request)); for (DynaBean account : accounts) { accountObjects.add(ResponseBuilder.objectOf("id", (int) account.get("account_id"), "account_metadata", (String) account.get("account_metadata"), "encrypted_aes_key", 
(String) account.get("encrypted_aes_key"))); } } return ResponseBuilder.build(response, ResponseBuilder.objectOf("accounts", ResponseBuilder.fromArrayList(accountObjects))); }); get("/folders/:folderId/permissions/", (request, response) -> { Auth.enforceAdmin(request, response); List<DynaBean> permissions = new ArrayList<>(); try (Database database = new Database(ConnectionManager.getConnection(request))) { try { permissions = database.getFolderPermissions(Integer.parseInt(request.params(":folderId"))); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "Folder ID must be a number"); } } ArrayList<JSONObject> responseObjects = new ArrayList<>(); for (DynaBean permission : permissions) { responseObjects.add(ResponseBuilder.objectOf("user_id", (int) permission.get("user_id"), "read", (boolean) permission.get("read"), "write", (boolean) permission.get("write"))); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("permissions", ResponseBuilder.fromArrayList(responseObjects))); }); post("/folders/:folderId/permissions/", (request, response) -> { Auth.enforceAdmin(request, response); int folderId = -1; try { folderId = Integer.parseInt(request.params(":folderId")); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "Folder ID must be a number"); } JSONObject requestJson = RequestJson.getValidated(request, "postFoldersPermissions"); try (Database database = new Database(ConnectionManager.getConnection(request))) { try { database.getFolder(folderId); } catch (ObjectNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "Folder not found!"); } TransactionInterface transaction; try { transaction = new Transaction(database.getWrappedConnection()); } catch (ExistingOpenTransactionException ex) { transaction = new NullTransaction(); } try { database.deleteFolderPermissions(folderId); JSONArray permissions = requestJson.getJSONArray("permissions"); for (int i = 0; i < permissions.length(); i++) { JSONObject permission = permissions.getJSONObject(i); int userId = permission.getInt("user_id"); boolean read = permission.getBoolean("read"); boolean write = permission.getBoolean("write"); try { DynaBean user = database.getUser(userId); if ((boolean) user.get("admin")) { transaction.rollback(); ResponseBuilder.errorHalt(response, 400, "Trying to set permissions " + "for an administrator, administrators already have full permission."); } } catch (ObjectNotFoundException ex) { transaction.rollback(); ResponseBuilder.errorHalt(response, 404, "User not found!"); } if (write && !read) { transaction.rollback(); ResponseBuilder.errorHalt(response, 400, "Users must be able " + "to read a folder if they are to write to it"); } if (!(write || read)) { database.deleteAccountDataForFolder(folderId, userId); } else { database.addPermission(folderId, userId, read, write); } } } catch (SQLException ex) { transaction.rollback(); ResponseBuilder.errorHalt(response, 500, "Error updating permissions - " + ex); } transaction.commit(); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); get("/folders/:folderId/public_keys/", (request, response) -> { int folderId = -1; try { folderId = Integer.parseInt(request.params(":folderId")); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "Folder ID must be a number"); } Auth.enforceFolderPermission(request, response, folderId, Auth.PERMISSION_WRITE); List<DynaBean> users; try (Database database = new 
Database(ConnectionManager.getConnection(request))) { users = database.getFolderUsers(folderId); } ArrayList<JSONObject> publicKeyObjects = new ArrayList<>(); for (DynaBean user : users) { publicKeyObjects.add(ResponseBuilder.objectOf("user_id", (int) user.get("id"), "public_key", (String) user.get("public_key"))); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("public_keys", ResponseBuilder.fromArrayList(publicKeyObjects))); }); get("/accounts/:accountId/", (request, response) -> { int accountId = -1; try { accountId = Integer.parseInt(request.params(":accountId")); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "Account ID must be a number"); } Auth.enforceAccountPermission(request, response, accountId, Auth.PERMISSION_READ); DynaBean account = null; try (Database database = new Database(ConnectionManager.getConnection(request))) { try { account = database.getAccountData(accountId, Auth.getCurrentUserId(request)); } catch (ObjectNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "Account not found"); } } return ResponseBuilder.build(response, ResponseBuilder.objectOf("account", ResponseBuilder.objectOf("account_metadata", (String) account.get("account_metadata"), "encrypted_aes_key", (String) account.get("encrypted_aes_key")))); }); delete("/accounts/:accountId/", (request, response) -> { int accountId = -1; try { accountId = Integer.parseInt(request.params(":accountId")); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "Account ID must be a number"); } Auth.enforceAccountPermission(request, response, accountId, Auth.PERMISSION_WRITE); try (Database database = new Database(ConnectionManager.getConnection(request))) { try { database.deleteAccount(accountId); } catch (ObjectNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "Account not found"); } } return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); post("/accounts/:accountId/", (request, response) -> { JSONObject requestJson = null; try { requestJson = RequestJson.getValidated(request, "postAccountsSingle"); } catch (JSONValidationException ex) { // TODO: Friendly error messages for JSONValidationExceptions rather than raw output from validation library ResponseBuilder.errorHalt(response, 400, "JSON Validation Error - " + ex.getMessage()); } int accountId = -1; try { accountId = Integer.parseInt(request.params(":accountId")); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "Account ID must be a number"); } Auth.enforceAccountPermission(request, response, accountId, Auth.PERMISSION_WRITE); try (Database database = new Database(ConnectionManager.getConnection(request))) { try { database.getAccount(accountId); } catch (ObjectNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "Account not found"); } try { database.getFolder(requestJson.getInt("folder_id")); } catch (ObjectNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "Folder not found"); } TransactionInterface transaction; try { transaction = new Transaction(database.getWrappedConnection()); } catch (ExistingOpenTransactionException ex) { transaction = new NullTransaction(); } try { database.updateAccount(accountId, requestJson.getInt("folder_id")); JSONArray accountDataItems = requestJson.getJSONArray("encrypted_account_data"); for (int i = 0; i < accountDataItems.length(); i++) { JSONObject accountDataItem = accountDataItems.getJSONObject(i); int userId = 
accountDataItem.getInt("user_id"); database.deleteAccountData(accountId, userId); database.addAccountDataItem(accountId, userId, accountDataItem.getString("account_metadata"), accountDataItem.getString("password"), accountDataItem.getString("encrypted_aes_key")); } } catch (SQLException ex) { transaction.rollback(); ResponseBuilder.errorHalt(response, 500, "Error saving account - " + ex); } catch (ObjectNotFoundException ex) { transaction.rollback(); ResponseBuilder.errorHalt(response, 404, "Error saving account - Object Not Found"); } transaction.commit(); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); post("/accounts/", (request, response) -> { JSONObject requestJson = RequestJson.getValidated(request, "postAccountsBatch"); JSONArray accounts = requestJson.getJSONArray("accounts"); try (Database database = new Database(ConnectionManager.getConnection(request))) { TransactionInterface transaction; try { transaction = new Transaction(database.getWrappedConnection()); } catch (ExistingOpenTransactionException ex) { transaction = new NullTransaction(); } try { for (int i = 0; i < accounts.length(); i++) { JSONObject account = accounts.getJSONObject(i); int accountId = account.getInt("account_id"); try { if (!Auth.getAccountPermission(request, response, accountId, Auth.PERMISSION_WRITE)) { transaction.rollback(); ResponseBuilder.errorHalt(response, 403, "You do not have write permission for account " + accountId); } } catch (ObjectNotFoundException ex) { transaction.rollback(); ex.printStackTrace(); ResponseBuilder.errorHalt(response, 404, "Account not found"); } JSONArray accountDataItems = account.getJSONArray("encrypted_account_data"); for (int j = 0; j < accountDataItems.length(); j++) { JSONObject accountDataItem = accountDataItems.getJSONObject(j); int userId = accountDataItem.getInt("user_id"); database.deleteAccountData(accountId, userId); database.addAccountDataItem(accountId, userId, accountDataItem.getString("account_metadata"), accountDataItem.getString("password"), accountDataItem.getString("encrypted_aes_key")); } } } catch (SQLException ex) { transaction.rollback(); ResponseBuilder.errorHalt(response, 500, "Error saving accounts - " + ex); } transaction.commit(); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); put("/accounts/", (request, response) -> { JSONObject requestJson = null; try { requestJson = RequestJson.getValidated(request, "putAccounts"); } catch (JSONValidationException ex) { // TODO: Friendly error messages for JSONValidationExceptions rather than raw output from validation library ResponseBuilder.errorHalt(response, 400, ex.getMessage()); } Auth.enforceFolderPermission(request, response, requestJson.getInt("folder_id"), Auth.PERMISSION_WRITE); int accountId = -1; WrappedConnection connection = ConnectionManager.getConnection(request); try (Database database = new Database(connection)) { try { database.getFolder(requestJson.getInt("folder_id")); } catch (ObjectNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "Folder not found"); } TransactionInterface transaction; try { transaction = new Transaction(connection); } catch (ExistingOpenTransactionException ex) { transaction = new NullTransaction(); Logger.getLogger(Server.class.getName()).log(Level.SEVERE, null, ex); } try { accountId = database.addAccount(requestJson.getInt("folder_id")); JSONArray accountDataItems = requestJson.getJSONArray("encrypted_account_data"); for (int i = 0; i < accountDataItems.length(); 
i++) { JSONObject accountDataItem = accountDataItems.getJSONObject(i); database.addAccountDataItem(accountId, accountDataItem.getInt("user_id"), accountDataItem.getString("account_metadata"), accountDataItem.getString("password"), accountDataItem.getString("encrypted_aes_key")); } } catch (SQLException ex) { transaction.rollback(); ResponseBuilder.errorHalt(response, 500, "Error adding account - " + ex); } transaction.commit(); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("account_id", accountId)); }); get("/accounts/:accountId/password/", (request, response) -> { int accountId = -1; try { accountId = Integer.parseInt(request.params(":accountId")); } catch (NumberFormatException ex) { ResponseBuilder.errorHalt(response, 400, "Account ID must be a number"); } Auth.enforceAccountPermission(request, response, accountId, Auth.PERMISSION_READ); DynaBean account = null; try (Database database = new Database(ConnectionManager.getConnection(request))) { try { account = database.getAccountData(accountId, Auth.getCurrentUserId(request)); } catch (ObjectNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "Account not found"); } } return ResponseBuilder.build(response, ResponseBuilder.objectOf("password", ResponseBuilder.objectOf("encrypted_password", (String) account.get("password"), "encrypted_aes_key", (String) account.get("encrypted_aes_key")))); }); put("/transaction/", (request, response) -> { Transaction transaction = TransactionStore.getTransaction(); return ResponseBuilder.build(response, ResponseBuilder.objectOf("transaction_id", transaction.getId())); }); post("/transaction/:transactionId/commit/", (request, response) -> { String transactionId = request.params(":transactionId"); try { Transaction transaction = TransactionStore.getTransaction(transactionId); transaction.commit(); transaction.getWrappedConnection().getConnection().close(); TransactionStore.forgetTransaction(transactionId); } catch (TransactionNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "Transaction not found!"); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); post("/transaction/:transactionId/rollback/", (request, response) -> { String transactionId = request.params(":transactionId"); try { Transaction transaction = TransactionStore.getTransaction(transactionId); transaction.rollback(); transaction.getWrappedConnection().getConnection().close(); TransactionStore.forgetTransaction(transactionId); } catch (TransactionNotFoundException ex) { ResponseBuilder.errorHalt(response, 404, "Transaction not found!"); } return ResponseBuilder.build(response, ResponseBuilder.objectOf("success", true)); }); exception(Exception.class, (e, request, response) -> { if (e.getClass().equals(TransactionNotFoundException.class)) { response.status(404); response.body(ResponseBuilder.objectOf("error", true, "message", "Transaction not found", "type", "transaction_not_found").toString()); } else { System.out.println("An unhandled exception occurred!"); System.out.println(e.toString()); e.printStackTrace(); response.status(500); response.type("application/json"); response.body(ResponseBuilder .objectOf("error", true, "message", "An unhandled server error occurred! - " + e.toString()) .toString()); } }); //TODO - Disable this in production! // spark.debug.DebugScreen.enableDebugScreen(); }
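In this listing, ArrayList.add is used in several handlers (folder accounts, permissions, public keys) to accumulate one JSONObject per database row before the list is handed to ResponseBuilder.fromArrayList for serialisation. A minimal, self-contained sketch of that pattern follows; the AccountRow class and the plain org.json calls are stand-ins for the project's DynaBean rows and ResponseBuilder helpers, so treat those names as illustrative assumptions rather than the API used above.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.json.JSONArray;
import org.json.JSONObject;

public class AccountListSketch {

    // Hypothetical stand-in for a database row; the listing above uses Commons BeanUtils DynaBean.
    static final class AccountRow {
        final int id;
        final String metadata;
        final String encryptedAesKey;

        AccountRow(int id, String metadata, String encryptedAesKey) {
            this.id = id;
            this.metadata = metadata;
            this.encryptedAesKey = encryptedAesKey;
        }
    }

    public static void main(String[] args) {
        List<AccountRow> rows = Arrays.asList(
                new AccountRow(1, "{\"name\":\"mail\"}", "a1b2"),
                new AccountRow(2, "{\"name\":\"bank\"}", "c3d4"));

        // Same shape as the route handlers above: build one JSONObject per row
        // and collect them with ArrayList.add before wrapping them in the response.
        ArrayList<JSONObject> accountObjects = new ArrayList<>();
        for (AccountRow row : rows) {
            JSONObject obj = new JSONObject();
            obj.put("id", row.id);
            obj.put("account_metadata", row.metadata);
            obj.put("encrypted_aes_key", row.encryptedAesKey);
            accountObjects.add(obj);
        }

        // The project funnels the list through ResponseBuilder.fromArrayList(...);
        // here that step is approximated with org.json's JSONArray(Collection) constructor.
        JSONArray accounts = new JSONArray(accountObjects);
        System.out.println(new JSONObject().put("accounts", accounts));
    }
}

Collecting into an ArrayList<JSONObject> first leaves room to filter or post-process rows in the handler before serialisation, which is presumably why the listing does not write straight into a JSON array.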
From source file:edu.oregonstate.eecs.mcplan.ml.GaussianMixtureModel.java
/**
 * @param args
 */
public static void main(final String[] args) {
    final RandomGenerator rng = new MersenneTwister(42);
    final ArrayList<double[]> data = new ArrayList<double[]>();

    // This data displays some problems with singular covariance estimates,
    // perhaps due to "multicollinearity" in the data.
    //        for( int x = -1; x <= 1; ++x ) {
    //            for( int y = -1; y <= 1; ++y ) {
    //                data.add( new double[] { x, y } );
    //                data.add( new double[] { x + 10, y + 10} );
    //                data.add( new double[] { x + 20, y + 20} );
    //                data.add( new double[] { x + 30, y + 30} );
    //            }
    //        }

    final int nsamples = 1000;
    final double[][] mu = new double[][] { new double[] { 0, 0 }, new double[] { 5, 0 },
            new double[] { 0, 5 }, new double[] { 5, 5 } };
    final double[][] Sigma = new double[][] { new double[] { 1, 0 }, new double[] { 0, 1 } };
    final MultivariateNormalDistribution[] p = new MultivariateNormalDistribution[4];
    for (int i = 0; i < 4; ++i) {
        p[i] = new MultivariateNormalDistribution(rng, mu[i], Sigma);
    }
    for (int i = 0; i < nsamples; ++i) {
        final int c = rng.nextInt(4);
        final double[] x = p[c].sample();
        data.add(x);
    }

    // Perturb data
    //        for( final double[] x : data ) {
    //            for( int i = 0; i < x.length; ++i ) {
    //                final double r = rng.nextGaussian() / 1.0;
    //                x[i] += r;
    //            }
    //        }

    double best_bic = Double.MAX_VALUE;
    int best_k = 0;
    for (int k = 1; k <= 6; ++k) {
        System.out.println("*** k = " + k);
        final GaussianMixtureModel gmm = new GaussianMixtureModel(k,
                data.toArray(new double[data.size()][]), 10e-5, rng);
        gmm.run();
        for (int i = 0; i < gmm.mu().length; ++i) {
            System.out.println("Center " + i + ": " + gmm.mu()[i]);
        }
        final double bic = ScoreFunctions.bic(data.size(), gmm.nparameters(), gmm.logLikelihood());
        System.out.println("BIC = " + bic);
        System.out.println("ll = " + gmm.logLikelihood());
        gmm.debug();
        if (bic < best_bic) {
            best_bic = bic;
            best_k = k;
        }
    }
    System.out.println("Best model: k = " + best_k);
}
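Here ArrayList.add collects the sampled double[] points, and data.toArray(new double[data.size()][]) hands them to the model as a double[][]. The sketch below isolates that collection-and-conversion step; it uses java.util.Random in place of the Commons Math MersenneTwister and MultivariateNormalDistribution classes used above, so the noise model is a simplifying assumption, not the example's actual numerics.

import java.util.ArrayList;
import java.util.Random;

public class SampleCollectionSketch {

    public static void main(String[] args) {
        final Random rng = new Random(42);
        final double[][] mu = { { 0, 0 }, { 5, 0 }, { 0, 5 }, { 5, 5 } };

        // Accumulate samples with ArrayList.add, as the GMM driver does,
        // but draw simple isotropic Gaussian noise instead of using Commons Math.
        final ArrayList<double[]> data = new ArrayList<double[]>();
        final int nsamples = 1000;
        for (int i = 0; i < nsamples; ++i) {
            final double[] center = mu[rng.nextInt(mu.length)];
            final double[] x = new double[] {
                    center[0] + rng.nextGaussian(),
                    center[1] + rng.nextGaussian() };
            data.add(x);
        }

        // The model consumes a double[][]; ArrayList.toArray performs the conversion.
        final double[][] points = data.toArray(new double[data.size()][]);
        System.out.println("Collected " + points.length + " samples, first = ["
                + points[0][0] + ", " + points[0][1] + "]");
    }
}

Because ArrayList grows on demand, the driver can keep appending samples without knowing nsamples in advance; the single toArray call at the end is what fixes the double[][] shape the mixture model expects.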