List of usage examples for the java.io.BufferedWriter constructor
public BufferedWriter(Writer out)
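All of the examples below follow the same shape: wrap a slower character sink in a BufferedWriter so that many small write calls coalesce into few large ones. A minimal sketch of that baseline pattern (the file name is a hypothetical placeholder):

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class HelloBufferedWriter {
    public static void main(String[] args) throws IOException {
        // try-with-resources flushes and closes the writer automatically
        try (BufferedWriter out = new BufferedWriter(new FileWriter("hello.txt"))) {
            out.write("hello");
            out.newLine(); // platform line separator, unlike a hard-coded '\n'
        }
    }
}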
From source file:com.vmware.photon.controller.core.Main.java
public static void main(String[] args) throws Throwable {
    try {
        LoggingFactory.bootstrap();
        logger.info("args: " + Arrays.toString(args));
        ArgumentParser parser = ArgumentParsers.newArgumentParser("PhotonControllerCore").defaultHelp(true)
                .description("Photon Controller Core");
        parser.addArgument("config-file").help("photon controller configuration file");
        parser.addArgument("--manual").type(Boolean.class).setDefault(false)
                .help("If true, create default deployment.");
        Namespace namespace = parser.parseArgsOrFail(args);
        PhotonControllerConfig photonControllerConfig = getPhotonControllerConfig(namespace);
        DeployerConfig deployerConfig = photonControllerConfig.getDeployerConfig();
        new LoggingFactory(photonControllerConfig.getLogging(), "photon-controller-core").configure();
        SSLContext sslContext;
        if (deployerConfig.getDeployerContext().isAuthEnabled()) {
            sslContext = SSLContext.getInstance(KeyStoreUtils.THRIFT_PROTOCOL);
            TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            KeyStore keyStore = KeyStore.getInstance("JKS");
            InputStream in = FileUtils
                    .openInputStream(new File(deployerConfig.getDeployerContext().getKeyStorePath()));
            keyStore.load(in, deployerConfig.getDeployerContext().getKeyStorePassword().toCharArray());
            tmf.init(keyStore);
            sslContext.init(null, tmf.getTrustManagers(), null);
        } else {
            KeyStoreUtils.generateKeys("/thrift/");
            sslContext = KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL);
        }
        ThriftModule thriftModule = new ThriftModule(sslContext);
        PhotonControllerXenonHost xenonHost = startXenonHost(photonControllerConfig, thriftModule,
                deployerConfig, sslContext);
        if ((Boolean) namespace.get("manual")) {
            DefaultDeployment.createDefaultDeployment(photonControllerConfig.getXenonConfig().getPeerNodes(),
                    deployerConfig, xenonHost);
        }
        // Creating a temp configuration file for apife with modification to some named sections
        // in photon-controller-config so that it can match the Configuration class of dropwizard.
        File apiFeTempConfig = File.createTempFile("apiFeTempConfig", ".tmp");
        File source = new File(args[0]);
        FileInputStream fis = new FileInputStream(source);
        BufferedReader in = new BufferedReader(new InputStreamReader(fis));
        FileWriter fstream = new FileWriter(apiFeTempConfig, true);
        BufferedWriter out = new BufferedWriter(fstream);
        String aLine = null;
        while ((aLine = in.readLine()) != null) {
            if (aLine.equals("apife:")) {
                aLine = aLine.replace("apife:", "server:");
            }
            out.write(aLine);
            out.newLine();
        }
        in.close();
        out.close();
        // This approach can be simplified once the apife container is gone, but for the
        // time being it expects the first arg to be the string "server".
        String[] apiFeArgs = new String[2];
        apiFeArgs[0] = "server";
        apiFeArgs[1] = apiFeTempConfig.getAbsolutePath();
        ApiFeService.setupApiFeConfigurationForServerCommand(apiFeArgs);
        ApiFeService.addServiceHost(xenonHost);
        ApiFeService.setSSLContext(sslContext);
        ApiFeService apiFeService = new ApiFeService();
        apiFeService.run(apiFeArgs);
        apiFeTempConfig.deleteOnExit();
        LocalApiClient localApiClient = apiFeService.getInjector().getInstance(LocalApiClient.class);
        xenonHost.setApiClient(localApiClient);
        // in the non-auth enabled scenario we need to be able to accept any self-signed certificate
        if (!deployerConfig.getDeployerContext().isAuthEnabled()) {
            KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL);
        }
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                logger.info("Shutting down");
                xenonHost.stop();
                logger.info("Done");
                LoggingFactory.detachAndStop();
            }
        });
    } catch (Exception e) {
        logger.error("Failed to start photon controller ", e);
        throw e;
    }
}
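Distilled from the example above: read a file line by line, rewrite matching lines, and push each line through a BufferedWriter. A minimal sketch under assumed, hypothetical file names (the original derives its paths from program arguments and a temp file):

import java.io.*;

public class RenameSection {
    public static void main(String[] args) throws IOException {
        try (BufferedReader in = new BufferedReader(new FileReader("photon-controller.yml"));
             BufferedWriter out = new BufferedWriter(new FileWriter("apife-config.yml"))) {
            String line;
            while ((line = in.readLine()) != null) {
                // rewrite the section header, copy everything else through
                out.write(line.equals("apife:") ? "server:" : line);
                out.newLine();
            }
        }
    }
}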
From source file:apps.LuceneQuery.java
public static void main(String[] args) {
    Options options = new Options();
    options.addOption("d", null, true, "index directory");
    options.addOption("i", null, true, "input file");
    options.addOption("s", null, true, "stop word file");
    options.addOption("n", null, true, "max # of results");
    options.addOption("o", null, true, "a TREC-style output file");
    options.addOption("r", null, true, "an optional QREL file, if specified, "
            + "we save results only for queries for which we find at least one relevant entry.");
    options.addOption("prob", null, true, "question sampling probability");
    options.addOption("max_query_qty", null, true, "a maximum number of queries to run");
    options.addOption("bm25_b", null, true, "BM25 parameter: b");
    options.addOption("bm25_k1", null, true, "BM25 parameter: k1");
    options.addOption("bm25fixed", null, false, "use the fixed BM25 similarity");
    options.addOption("seed", null, true, "random seed");
    Joiner commaJoin = Joiner.on(',');
    Joiner spaceJoin = Joiner.on(' ');
    options.addOption("source_type", null, true,
            "query source type: " + commaJoin.join(SourceFactory.getQuerySourceList()));
    CommandLineParser parser = new org.apache.commons.cli.GnuParser();
    QrelReader qrels = null;
    try {
        CommandLine cmd = parser.parse(options, args);
        String indexDir = null;
        if (cmd.hasOption("d")) {
            indexDir = cmd.getOptionValue("d");
        } else {
            Usage("Specify 'index directory'", options);
        }
        String inputFileName = null;
        if (cmd.hasOption("i")) {
            inputFileName = cmd.getOptionValue("i");
        } else {
            Usage("Specify 'input file'", options);
        }
        DictNoComments stopWords = null;
        if (cmd.hasOption("s")) {
            String stopWordFileName = cmd.getOptionValue("s");
            stopWords = new DictNoComments(new File(stopWordFileName), true /* lowercasing */);
            System.out.println("Using the stopword file: " + stopWordFileName);
        }
        String sourceName = cmd.getOptionValue("source_type");
        if (sourceName == null)
            Usage("Specify document source type", options);
        int numRet = 100;
        if (cmd.hasOption("n")) {
            numRet = Integer.parseInt(cmd.getOptionValue("n"));
            System.out.println("Retrieving at most " + numRet + " candidate entries.");
        }
        String trecOutFileName = null;
        if (cmd.hasOption("o")) {
            trecOutFileName = cmd.getOptionValue("o");
        } else {
            Usage("Specify 'a TREC-style output file'", options);
        }
        double fProb = 1.0f;
        if (cmd.hasOption("prob")) {
            try {
                fProb = Double.parseDouble(cmd.getOptionValue("prob"));
            } catch (NumberFormatException e) {
                Usage("Wrong format for 'question sampling probability'", options);
            }
        }
        if (fProb <= 0 || fProb > 1) {
            Usage("Question sampling probability should be >0 and <=1", options);
        }
        System.out.println("Sample the following fraction of questions: " + fProb);
        float bm25_k1 = UtilConst.BM25_K1_DEFAULT, bm25_b = UtilConst.BM25_B_DEFAULT;
        if (cmd.hasOption("bm25_k1")) {
            try {
                bm25_k1 = Float.parseFloat(cmd.getOptionValue("bm25_k1"));
            } catch (NumberFormatException e) {
                Usage("Wrong format for 'bm25_k1'", options);
            }
        }
        if (cmd.hasOption("bm25_b")) {
            try {
                bm25_b = Float.parseFloat(cmd.getOptionValue("bm25_b"));
            } catch (NumberFormatException e) {
                Usage("Wrong format for 'bm25_b'", options);
            }
        }
        long seed = 0;
        String tmpl = cmd.getOptionValue("seed");
        if (tmpl != null)
            seed = Long.parseLong(tmpl);
        System.out.println("Using seed: " + seed);
        Random randGen = new Random(seed);
        System.out.println(String.format("BM25 parameters k1=%f b=%f ", bm25_k1, bm25_b));
        boolean useFixedBM25 = cmd.hasOption("bm25fixed");
        EnglishAnalyzer analyzer = new EnglishAnalyzer();
        Similarity similarity = null;
        if (useFixedBM25) {
            System.out.println(String.format("Using fixed BM25Similarity, k1=%f b=%f", bm25_k1, bm25_b));
            similarity = new BM25SimilarityFix(bm25_k1, bm25_b);
        } else {
            System.out.println(String.format("Using Lucene BM25Similarity, k1=%f b=%f", bm25_k1, bm25_b));
            similarity = new BM25Similarity(bm25_k1, bm25_b);
        }
        int maxQueryQty = Integer.MAX_VALUE;
        if (cmd.hasOption("max_query_qty")) {
            try {
                maxQueryQty = Integer.parseInt(cmd.getOptionValue("max_query_qty"));
            } catch (NumberFormatException e) {
                Usage("Wrong format for 'max_query_qty'", options);
            }
        }
        System.out.println(String.format("Executing at most %d queries", maxQueryQty));
        if (cmd.hasOption("r")) {
            String qrelFile = cmd.getOptionValue("r");
            System.out.println("Using the qrel file: '" + qrelFile
                    + "', queries not returning a relevant entry will be ignored.");
            qrels = new QrelReader(qrelFile);
        }
        System.out.println(String.format("Using indexing directory %s", indexDir));
        LuceneCandidateProvider candProvider = new LuceneCandidateProvider(indexDir, analyzer, similarity);
        TextCleaner textCleaner = new TextCleaner(stopWords);
        QuerySource inpQuerySource = SourceFactory.createQuerySource(sourceName, inputFileName);
        QueryEntry inpQuery = null;
        BufferedWriter trecOutFile = new BufferedWriter(new FileWriter(new File(trecOutFileName)));
        int questNum = 0, questQty = 0;
        long totalTimeMS = 0;
        while ((inpQuery = inpQuerySource.next()) != null) {
            if (questQty >= maxQueryQty)
                break;
            ++questNum;
            String queryID = inpQuery.mQueryId;
            if (randGen.nextDouble() <= fProb) {
                ++questQty;
                String tokQuery = spaceJoin.join(textCleaner.cleanUp(inpQuery.mQueryText));
                String query = TextCleaner.luceneSafeCleanUp(tokQuery).trim();
                ResEntry[] results = null;
                if (query.isEmpty()) {
                    results = new ResEntry[0];
                    System.out.println(String.format("WARNING, empty query id = '%s'", inpQuery.mQueryId));
                } else {
                    try {
                        long start = System.currentTimeMillis();
                        results = candProvider.getCandidates(questNum, query, numRet);
                        long end = System.currentTimeMillis();
                        long searchTimeMS = end - start;
                        totalTimeMS += searchTimeMS;
                        System.out.println(String.format(
                                "Obtained results for the query # %d (answered %d queries), queryID %s the search took %d ms, we asked for max %d entries got %d",
                                questNum, questQty, queryID, searchTimeMS, numRet, results.length));
                    } catch (ParseException e) {
                        e.printStackTrace();
                        System.err.println("Error parsing query: " + query + " orig question is :" + inpQuery.mQueryText);
                        System.exit(1);
                    }
                }
                boolean bSave = true;
                if (qrels != null) {
                    boolean bOk = false;
                    for (ResEntry r : results) {
                        String label = qrels.get(queryID, r.mDocId);
                        if (candProvider.isRelevLabel(label, 1)) {
                            bOk = true;
                            break;
                        }
                    }
                    if (!bOk)
                        bSave = false;
                }
                // System.out.println(String.format("Ranking results the query # %d queryId='%s' save results? %b",
                //         questNum, queryID, bSave));
                if (bSave) {
                    saveTrecResults(queryID, results, trecOutFile, TREC_RUN, numRet);
                }
            }
            if (questNum % 1000 == 0)
                System.out.println(String.format("Processed %d questions", questNum));
        }
        System.out.println(String.format("Processed %d questions, the search took %f ms on average", questQty,
                (float) totalTimeMS / questQty));
        trecOutFile.close();
    } catch (ParseException e) {
        e.printStackTrace();
        Usage("Cannot parse arguments: " + e, options);
    } catch (Exception e) {
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }
}
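The TREC output writer above stays open across thousands of queries, which is where buffering pays off. A sketch of the same idea using the NIO factory method instead of wrapping a FileWriter by hand (path and record contents are placeholders, not from the original source):

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

public class TrecRunWriter {
    public static void main(String[] args) throws IOException {
        try (BufferedWriter out = Files.newBufferedWriter(Paths.get("run.trec"), StandardCharsets.UTF_8)) {
            for (int rank = 1; rank <= 100; rank++) {
                // one buffered write per result line instead of one syscall each
                out.write("q1 Q0 doc" + rank + " " + rank + " " + (1.0 / rank) + " myrun");
                out.newLine();
            }
        }
    }
}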
From source file:de.morbz.osmpoispbf.Scanner.java
public static void main(String[] args) { System.out.println("OsmPoisPbf " + VERSION + " started"); // Get input file if (args.length < 1) { System.out.println("Error: Please provide an input file"); System.exit(-1);// w ww.ja va2s . c om } String inputFile = args[args.length - 1]; // Get output file String outputFile; int index = inputFile.indexOf('.'); if (index != -1) { outputFile = inputFile.substring(0, index); } else { outputFile = inputFile; } outputFile += ".csv"; // Setup CLI parameters options = new Options(); options.addOption("ff", "filterFile", true, "The file that is used to filter categories"); options.addOption("of", "outputFile", true, "The output CSV file to be written"); options.addOption("rt", "requiredTags", true, "Comma separated list of tags that are required [name]"); options.addOption("ut", "undesiredTags", true, "Comma separated list of tags=value combinations that should be filtered [key=value]"); options.addOption("ot", "outputTags", true, "Comma separated list of tags that are exported [name]"); options.addOption("ph", "printHeader", false, "If flag is set, the `outputTags` are printed as first line in the output file."); options.addOption("r", "relations", false, "Parse relations"); options.addOption("nw", "noWays", false, "Don't parse ways"); options.addOption("nn", "noNodes", false, "Don't parse nodes"); options.addOption("u", "allowUnclosedWays", false, "Allow ways that aren't closed"); options.addOption("d", "decimals", true, "Number of decimal places of coordinates [7]"); options.addOption("s", "separator", true, "Separator character for CSV [|]"); options.addOption("v", "verbose", false, "Print all found POIs"); options.addOption("h", "help", false, "Print this help"); // Parse parameters CommandLine line = null; try { line = (new DefaultParser()).parse(options, args); } catch (ParseException exp) { System.err.println(exp.getMessage()); printHelp(); System.exit(-1); } // Help if (line.hasOption("help")) { printHelp(); System.exit(0); } // Get filter file String filterFile = null; if (line.hasOption("filterFile")) { filterFile = line.getOptionValue("filterFile"); } // Get output file if (line.hasOption("outputFile")) { outputFile = line.getOptionValue("outputFile"); } // Check files if (inputFile.equals(outputFile)) { System.out.println("Error: Input and output files are the same"); System.exit(-1); } File file = new File(inputFile); if (!file.exists()) { System.out.println("Error: Input file doesn't exist"); System.exit(-1); } // Check OSM entity types boolean parseNodes = true; boolean parseWays = true; boolean parseRelations = false; if (line.hasOption("noNodes")) { parseNodes = false; } if (line.hasOption("noWays")) { parseWays = false; } if (line.hasOption("relations")) { parseRelations = true; } // Unclosed ways allowed? 
if (line.hasOption("allowUnclosedWays")) { onlyClosedWays = false; } // Get CSV separator char separator = '|'; if (line.hasOption("separator")) { String arg = line.getOptionValue("separator"); if (arg.length() != 1) { System.out.println("Error: The CSV separator has to be exactly 1 character"); System.exit(-1); } separator = arg.charAt(0); } Poi.setSeparator(separator); // Set decimals int decimals = 7; // OSM default if (line.hasOption("decimals")) { String arg = line.getOptionValue("decimals"); try { int dec = Integer.valueOf(arg); if (dec < 0) { System.out.println("Error: Decimals must not be less than 0"); System.exit(-1); } else { decimals = dec; } } catch (NumberFormatException ex) { System.out.println("Error: Decimals have to be a number"); System.exit(-1); } } Poi.setDecimals(decimals); // Verbose mode? if (line.hasOption("verbose")) { printPois = true; } // Required tags if (line.hasOption("requiredTags")) { String arg = line.getOptionValue("requiredTags"); requiredTags = arg.split(","); } // Undesired tags if (line.hasOption("undesiredTags")) { String arg = line.getOptionValue("undesiredTags"); undesiredTags = new HashMap<>(); for (String undesired : arg.split(",")) { String[] keyVal = undesired.split("="); if (keyVal.length != 2) { System.out.println("Error: Undesired Tags have to formated like tag=value"); System.exit(-1); } if (!undesiredTags.containsKey(keyVal[0])) { undesiredTags.put(keyVal[0], new HashSet<>(1)); } undesiredTags.get(keyVal[0]).add(keyVal[1]); } } // Output tags if (line.hasOption("outputTags")) { String arg = line.getOptionValue("outputTags"); outputTags = arg.split(","); } // Get filter rules FilterFileParser parser = new FilterFileParser(filterFile); filters = parser.parse(); if (filters == null) { System.exit(-1); } // Setup CSV output try { writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputFile), "UTF8")); } catch (IOException e) { System.out.println("Error: Output file error"); System.exit(-1); } // Print Header if (line.hasOption("printHeader")) { String header = "category" + separator + "osm_id" + separator + "lat" + separator + "lon"; for (int i = 0; i < outputTags.length; i++) { header += separator + outputTags[i]; } try { writer.write(header + "\n"); } catch (IOException e) { System.out.println("Error: Output file write error"); System.exit(-1); } } // Setup OSMonaut EntityFilter filter = new EntityFilter(parseNodes, parseWays, parseRelations); Osmonaut naut = new Osmonaut(inputFile, filter, false); // Start watch StopWatch stopWatch = new StopWatch(); stopWatch.start(); // Start OSMonaut String finalSeparator = String.valueOf(separator); naut.scan(new IOsmonautReceiver() { boolean entityNeeded; @Override public boolean needsEntity(EntityType type, Tags tags) { // Are there any tags? 
if (tags.size() == 0) { return false; } // Check required tags for (String tag : requiredTags) { if (!tags.hasKey(tag)) { return false; } } entityNeeded = getCategory(tags, filters) != null; if (undesiredTags != null && entityNeeded) { for (String key : undesiredTags.keySet()) { if (tags.hasKey(key)) { for (String val : undesiredTags.get(key)) { if (tags.hasKeyValue(key, val)) { return false; } } } } } return entityNeeded; } @Override public void foundEntity(Entity entity) { // Check if way is closed if (onlyClosedWays && entity.getEntityType() == EntityType.WAY) { if (!((Way) entity).isClosed()) { return; } } // Get category Tags tags = entity.getTags(); String cat = getCategory(tags, filters); if (cat == null) { return; } // Get center LatLon center = entity.getCenter(); if (center == null) { return; } // Make OSM-ID String type = ""; switch (entity.getEntityType()) { case NODE: type = "node"; break; case WAY: type = "way"; break; case RELATION: type = "relation"; break; } String id = String.valueOf(entity.getId()); // Make output tags String[] values = new String[outputTags.length]; for (int i = 0; i < outputTags.length; i++) { String key = outputTags[i]; if (tags.hasKey(key)) { values[i] = tags.get(key).replaceAll(finalSeparator, "").replaceAll("\"", ""); } } // Make POI poisFound++; Poi poi = new Poi(values, cat, center, type, id); // Output if (printPois && System.currentTimeMillis() > lastMillis + 40) { printPoisFound(); lastMillis = System.currentTimeMillis(); } // Write to file try { writer.write(poi.toCsv() + "\n"); } catch (IOException e) { System.out.println("Error: Output file write error"); System.exit(-1); } } }); // Close writer try { writer.close(); } catch (IOException e) { System.out.println("Error: Output file close error"); System.exit(-1); } // Output results stopWatch.stop(); printPoisFound(); System.out.println(); System.out.println("Elapsed time in milliseconds: " + stopWatch.getElapsedTime()); // Quit System.exit(0); }
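The CSV setup above pins the output encoding by wrapping a FileOutputStream in an OutputStreamWriter. A minimal sketch of the same chain with a charset constant instead of the stringly-typed "UTF8" name (the output path is a hypothetical placeholder):

import java.io.*;
import java.nio.charset.StandardCharsets;

public class CsvOut {
    public static void main(String[] args) throws IOException {
        // StandardCharsets.UTF_8 cannot be misspelled and throws no UnsupportedEncodingException
        try (BufferedWriter writer = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream("pois.csv"), StandardCharsets.UTF_8))) {
            writer.write("category|osm_id|lat|lon\n");
        }
    }
}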
From source file:gobblin.runtime.util.JobStateToJsonConverter.java
@SuppressWarnings("all") public static void main(String[] args) throws Exception { Option sysConfigOption = Option.builder("sc").argName("system configuration file") .desc("Gobblin system configuration file").longOpt("sysconfig").hasArgs().build(); Option storeUrlOption = Option.builder("u").argName("gobblin state store URL") .desc("Gobblin state store root path URL").longOpt("storeurl").hasArgs().required().build(); Option jobNameOption = Option.builder("n").argName("gobblin job name").desc("Gobblin job name") .longOpt("name").hasArgs().required().build(); Option jobIdOption = Option.builder("i").argName("gobblin job id").desc("Gobblin job id").longOpt("id") .hasArgs().build();// www. j a va 2 s .c om Option convertAllOption = Option.builder("a") .desc("Whether to convert all past job states of the given job").longOpt("all").build(); Option keepConfigOption = Option.builder("kc").desc("Whether to keep all configuration properties") .longOpt("keepConfig").build(); Option outputToFile = Option.builder("t").argName("output file name").desc("Output file name") .longOpt("toFile").hasArgs().build(); Options options = new Options(); options.addOption(sysConfigOption); options.addOption(storeUrlOption); options.addOption(jobNameOption); options.addOption(jobIdOption); options.addOption(convertAllOption); options.addOption(keepConfigOption); options.addOption(outputToFile); CommandLine cmd = null; try { CommandLineParser parser = new DefaultParser(); cmd = parser.parse(options, args); } catch (ParseException pe) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("JobStateToJsonConverter", options); System.exit(1); } Properties sysConfig = new Properties(); if (cmd.hasOption(sysConfigOption.getLongOpt())) { sysConfig = JobConfigurationUtils.fileToProperties(cmd.getOptionValue(sysConfigOption.getLongOpt())); } JobStateToJsonConverter converter = new JobStateToJsonConverter(sysConfig, cmd.getOptionValue('u'), cmd.hasOption("kc")); StringWriter stringWriter = new StringWriter(); if (cmd.hasOption('i')) { converter.convert(cmd.getOptionValue('n'), cmd.getOptionValue('i'), stringWriter); } else { if (cmd.hasOption('a')) { converter.convertAll(cmd.getOptionValue('n'), stringWriter); } else { converter.convert(cmd.getOptionValue('n'), stringWriter); } } if (cmd.hasOption('t')) { Closer closer = Closer.create(); try { FileOutputStream fileOutputStream = closer.register(new FileOutputStream(cmd.getOptionValue('t'))); OutputStreamWriter outputStreamWriter = closer.register( new OutputStreamWriter(fileOutputStream, ConfigurationKeys.DEFAULT_CHARSET_ENCODING)); BufferedWriter bufferedWriter = closer.register(new BufferedWriter(outputStreamWriter)); bufferedWriter.write(stringWriter.toString()); } catch (Throwable t) { throw closer.rethrow(t); } finally { closer.close(); } } else { System.out.println(stringWriter.toString()); } }
From source file:fr.tpt.s3.mcdag.bench.MainBench.java
public static void main(String[] args) throws IOException, InterruptedException {
    // Command line options
    Options options = new Options();
    Option input = new Option("i", "input", true, "MC-DAG XML models");
    input.setRequired(true);
    input.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(input);
    Option output = new Option("o", "output", true, "Folder where results have to be written.");
    output.setRequired(true);
    options.addOption(output);
    Option uUti = new Option("u", "utilization", true, "Utilization.");
    uUti.setRequired(true);
    options.addOption(uUti);
    Option output2 = new Option("ot", "output-total", true, "File where total results are being written");
    output2.setRequired(true);
    options.addOption(output2);
    Option oCores = new Option("c", "cores", true, "Cores given to the test");
    oCores.setRequired(true);
    options.addOption(oCores);
    Option oLvls = new Option("l", "levels", true, "Levels tested for the system");
    oLvls.setRequired(true);
    options.addOption(oLvls);
    Option jobs = new Option("j", "jobs", true, "Number of threads to be launched.");
    jobs.setRequired(false);
    options.addOption(jobs);
    Option debug = new Option("d", "debug", false, "Debug logs.");
    debug.setRequired(false);
    options.addOption(debug);
    /*
     * Parsing of the command line
     */
    CommandLineParser parser = new DefaultParser();
    HelpFormatter formatter = new HelpFormatter();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        formatter.printHelp("Benchmarks MultiDAG", options);
        System.exit(1);
        return;
    }
    String inputFilePath[] = cmd.getOptionValues("input");
    String outputFilePath = cmd.getOptionValue("output");
    String outputFilePathTotal = cmd.getOptionValue("output-total");
    double utilization = Double.parseDouble(cmd.getOptionValue("utilization"));
    boolean boolDebug = cmd.hasOption("debug");
    int nbLvls = Integer.parseInt(cmd.getOptionValue("levels"));
    int nbJobs = 1;
    int nbFiles = inputFilePath.length;
    if (cmd.hasOption("jobs"))
        nbJobs = Integer.parseInt(cmd.getOptionValue("jobs"));
    int nbCores = Integer.parseInt(cmd.getOptionValue("cores"));
    /*
     * While files need to be allocated
     * run the tests in the pool of threads
     */
    // For dual-criticality systems we call a specific thread
    if (nbLvls == 2) {
        System.out.println(">>>>>>>>>>>>>>>>>>>>> NB levels " + nbLvls);
        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; FSched (%); FPreempts; FAct; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();
        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadDualCriticality bt2 = new BenchThreadDualCriticality(inputFilePath[i_files2], outFile,
                    nbCores, boolDebug);
            executor2.execute(bt2);
            i_files2++;
        }
        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
        int fedTotal = 0;
        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int fedPreempts = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int fedActiv = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;
        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        @SuppressWarnings("resource")
        Scanner line = new Scanner(f);
        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;
                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            fedTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            fedPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            fedActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 11) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 12) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 13) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }
        // Write percentage
        double fedPerc = (double) fedTotal / nbFiles;
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;
        double fedPercPreempts = (double) fedPreempts / fedActiv;
        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;
        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + fedPerc + "; " + fedPreempts
                + "; " + fedActiv + "; " + fedPercPreempts + "; " + laxPerc + "; " + laxPreempts + "; " + laxActiv
                + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts + "; " + edfActiv + "; "
                + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; " + hybridActiv + "; "
                + hybridPercPreempts + "\n");
        wOutput.close();
    } else if (nbLvls > 2) {
        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();
        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadNLevels bt2 = new BenchThreadNLevels(inputFilePath[i_files2], outFile, nbCores, boolDebug);
            executor2.execute(bt2);
            i_files2++;
        }
        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;
        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        @SuppressWarnings("resource")
        Scanner line = new Scanner(f);
        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;
                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }
        // Write percentage
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;
        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;
        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + laxPerc + "; " + laxPreempts
                + "; " + laxActiv + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts + "; " + edfActiv
                + "; " + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; " + hybridActiv + "; "
                + hybridPercPreempts + "\n");
        wOutput.close();
    } else {
        System.err.println("Wrong number of levels");
        System.exit(-1);
    }
    System.out.println("[BENCH Main] Done benchmarking U = " + utilization + " Levels " + nbLvls);
}
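Both branches above append exactly one summary row per run through new BufferedWriter(new FileWriter(path, true)). A minimal sketch of that append pattern (the file name and row are hypothetical placeholders):

import java.io.*;

public class AppendSummaryRow {
    public static void main(String[] args) throws IOException {
        // true => append: repeated runs accumulate rows instead of truncating the file
        try (Writer w = new BufferedWriter(new FileWriter("totals.csv", true))) {
            w.write(Thread.currentThread().getName() + "; 0.85; 0.92\n");
        }
    }
}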
From source file:apps.quantification.LearnQuantificationSVMLight.java
public static void main(String[] args) throws IOException {
    String cmdLineSyntax = LearnQuantificationSVMLight.class.getName()
            + " [OPTIONS] <path to svm_light_learn> <path to svm_light_classify> <trainingIndexDirectory> <outputDirectory>";
    Options options = new Options();
    OptionBuilder.withArgName("f");
    OptionBuilder.withDescription("Number of folds");
    OptionBuilder.withLongOpt("f");
    OptionBuilder.isRequired(true);
    OptionBuilder.hasArg();
    options.addOption(OptionBuilder.create());
    OptionBuilder.withArgName("c");
    OptionBuilder.withDescription("The c value for svm_light (default 1)");
    OptionBuilder.withLongOpt("c");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg();
    options.addOption(OptionBuilder.create());
    OptionBuilder.withArgName("k");
    OptionBuilder.withDescription("Kernel type (default 0: linear, 1: polynomial, 2: RBF, 3: sigmoid)");
    OptionBuilder.withLongOpt("k");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg();
    options.addOption(OptionBuilder.create());
    OptionBuilder.withArgName("t");
    OptionBuilder.withDescription("Path for temporary files");
    OptionBuilder.withLongOpt("t");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg();
    options.addOption(OptionBuilder.create());
    OptionBuilder.withArgName("v");
    OptionBuilder.withDescription("Verbose output");
    OptionBuilder.withLongOpt("v");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg(false);
    options.addOption(OptionBuilder.create());
    OptionBuilder.withArgName("s");
    OptionBuilder.withDescription("Don't delete temporary training file in svm_light format (default: delete)");
    OptionBuilder.withLongOpt("s");
    OptionBuilder.isRequired(false);
    OptionBuilder.hasArg(false);
    options.addOption(OptionBuilder.create());
    SvmLightLearnerCustomizer classificationLearnerCustomizer = null;
    SvmLightClassifierCustomizer classificationCustomizer = null;
    int folds = -1;
    GnuParser parser = new GnuParser();
    String[] remainingArgs = null;
    try {
        CommandLine line = parser.parse(options, args);
        remainingArgs = line.getArgs();
        classificationLearnerCustomizer = new SvmLightLearnerCustomizer(remainingArgs[0]);
        classificationCustomizer = new SvmLightClassifierCustomizer(remainingArgs[1]);
        folds = Integer.parseInt(line.getOptionValue("f"));
        if (line.hasOption("c"))
            classificationLearnerCustomizer.setC(Float.parseFloat(line.getOptionValue("c")));
        if (line.hasOption("k")) {
            System.out.println("Kernel type: " + line.getOptionValue("k"));
            classificationLearnerCustomizer.setKernelType(Integer.parseInt(line.getOptionValue("k")));
        }
        if (line.hasOption("v"))
            classificationLearnerCustomizer.printSvmLightOutput(true);
        if (line.hasOption("s"))
            classificationLearnerCustomizer.setDeleteTrainingFiles(false);
        if (line.hasOption("t")) {
            classificationLearnerCustomizer.setTempPath(line.getOptionValue("t"));
            classificationCustomizer.setTempPath(line.getOptionValue("t"));
        }
    } catch (Exception exp) {
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(cmdLineSyntax, options);
        System.exit(-1);
    }
    assert (classificationLearnerCustomizer != null);
    if (remainingArgs.length != 4) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(cmdLineSyntax, options);
        System.exit(-1);
    }
    String indexFile = remainingArgs[2];
    File file = new File(indexFile);
    String indexName = file.getName();
    String indexPath = file.getParent();
    String outputPath = remainingArgs[3];
    SvmLightLearner classificationLearner = new SvmLightLearner();
    classificationLearner.setRuntimeCustomizer(classificationLearnerCustomizer);
    FileSystemStorageManager fssm = new FileSystemStorageManager(indexPath, false);
    fssm.open();
    IIndex training = TroveReadWriteHelper.readIndex(fssm, indexName, TroveContentDBType.Full,
            TroveClassificationDBType.Full);
    final TextualProgressBar progressBar = new TextualProgressBar("Learning the quantifiers");
    IOperationStatusListener status = new IOperationStatusListener() {
        @Override
        public void operationStatus(double percentage) {
            progressBar.signal((int) percentage);
        }
    };
    QuantificationLearner quantificationLearner = new QuantificationLearner(folds, classificationLearner,
            classificationLearnerCustomizer, classificationCustomizer, ClassificationMode.PER_CATEGORY,
            new LogisticFunction(), status);
    IQuantifier[] quantifiers = quantificationLearner.learn(training);
    File executableFile = new File(classificationLearnerCustomizer.getSvmLightLearnPath());
    IDataManager classifierDataManager = new SvmLightDataManager(new SvmLightClassifierCustomizer(
            executableFile.getParentFile().getAbsolutePath() + Os.pathSeparator() + "svm_light_classify"));
    String description = "_SVMLight_C-" + classificationLearnerCustomizer.getC() + "_K-"
            + classificationLearnerCustomizer.getKernelType();
    if (classificationLearnerCustomizer.getAdditionalParameters().length() > 0)
        description += "_" + classificationLearnerCustomizer.getAdditionalParameters();
    String quantifierPrefix = indexName + "_Quantifier-" + folds + description;
    FileSystemStorageManager fssmo = new FileSystemStorageManager(
            outputPath + File.separatorChar + quantifierPrefix, true);
    fssmo.open();
    QuantificationLearner.write(quantifiers, fssmo, classifierDataManager);
    fssmo.close();
    BufferedWriter bfs = new BufferedWriter(
            new FileWriter(outputPath + File.separatorChar + quantifierPrefix + "_rates.txt"));
    TShortDoubleHashMap simpleTPRs = quantificationLearner.getSimpleTPRs();
    TShortDoubleHashMap simpleFPRs = quantificationLearner.getSimpleFPRs();
    TShortDoubleHashMap scaledTPRs = quantificationLearner.getScaledTPRs();
    TShortDoubleHashMap scaledFPRs = quantificationLearner.getScaledFPRs();
    ContingencyTableSet contingencyTableSet = quantificationLearner.getContingencyTableSet();
    short[] cats = simpleTPRs.keys();
    for (int i = 0; i < cats.length; ++i) {
        short cat = cats[i];
        String catName = training.getCategoryDB().getCategoryName(cat);
        ContingencyTable contingencyTable = contingencyTableSet.getCategoryContingencyTable(cat);
        double simpleTPR = simpleTPRs.get(cat);
        double simpleFPR = simpleFPRs.get(cat);
        double scaledTPR = scaledTPRs.get(cat);
        double scaledFPR = scaledFPRs.get(cat);
        String line = quantifierPrefix + "\ttrain\tsimple\t" + catName + "\t" + cat + "\t"
                + contingencyTable.tp() + "\t" + contingencyTable.fp() + "\t" + contingencyTable.fn() + "\t"
                + contingencyTable.tn() + "\t" + simpleTPR + "\t" + simpleFPR + "\n";
        bfs.write(line);
        line = quantifierPrefix + "\ttrain\tscaled\t" + catName + "\t" + cat + "\t" + contingencyTable.tp()
                + "\t" + contingencyTable.fp() + "\t" + contingencyTable.fn() + "\t" + contingencyTable.tn()
                + "\t" + scaledTPR + "\t" + scaledFPR + "\n";
        bfs.write(line);
    }
    bfs.close();
}
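The rates file above assembles each tab-separated record into a single String ending in \n and hands it to one write call. A compact sketch of that record-per-write pattern (file name and values are placeholders, not from the original source):

import java.io.*;

public class RatesFileSketch {
    public static void main(String[] args) throws IOException {
        try (BufferedWriter bfs = new BufferedWriter(new FileWriter("rates.txt"))) {
            double tpr = 0.91, fpr = 0.07;
            // tab-separated fields, one record per line, one buffered write per record
            bfs.write("quantifier\ttrain\tsimple\tcatName\t" + tpr + "\t" + fpr + "\n");
        }
    }
}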
From source file:discovery.compression.kdd2011.ratio.RatioCompressionReport.java
public static void main(String[] args) throws GraphReadingException, IOException, java.text.ParseException {
    opts.addOption("r", true, "Goal compression ratio");
    // opts.addOption("a", true,
    //         "Algorithm used for compression. The default and only currently available option is \"greedy\"");
    // opts.addOption("cost-output", true, "Output file for costs, default is costs.txt");
    // opts.addOption("cost-format", true, "Output format for ");
    opts.addOption("ctype", true, "Connectivity type: global or local, default is global.");
    opts.addOption("connectivity", false,
            "enables output for connectivity. Connectivity info will be written to connectivity.txt");
    opts.addOption("output_bmg", true, "Write bmg file with groups to given file.");
    opts.addOption("algorithm", true, "Algorithm to use, one of: greedy random1 random2 bruteforce slowgreedy");
    opts.addOption("hop2", false, "Only try to merge nodes that have common neighbors");
    opts.addOption("kmedoids", false, "Enables output for kmedoids clustering");
    opts.addOption("kmedoids_k", true, "Number of clusters to be used in kmedoids. Default is 3");
    opts.addOption("kmedoids_output", true,
            "Output file for kmedoid clusters. Default is clusters.txt. This file will be overwritten.");
    opts.addOption("norefresh", false, "Use old style merging: all connectivities are not refreshed when merging");
    opts.addOption("edge_attribute", true, "Attribute from bmgraph used as edge weight");
    opts.addOption("only_times", false, "Only write times.txt");
    // opts.addOption("no_metrics", false,
    //         "Exit after compression, don't calculate any metrics or produce output bmg for the compression.");
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(opts, args);
    } catch (ParseException e) {
        e.printStackTrace();
        System.exit(0);
    }
    boolean connectivity = false;
    double ratio = 0;
    boolean hop2 = cmd.hasOption("hop2");
    RatioCompression compression = new GreedyRatioCompression(hop2);
    if (cmd.hasOption("connectivity"))
        connectivity = true;
    ConnectivityType ctype = ConnectivityType.GLOBAL;
    CompressionMergeModel mergeModel = new PathAverageMergeModel();
    if (cmd.hasOption("ctype")) {
        String ctypeStr = cmd.getOptionValue("ctype");
        if (ctypeStr.equals("local")) {
            ctype = ConnectivityType.LOCAL;
            mergeModel = new EdgeAverageMergeModel();
        } else if (ctypeStr.equals("global")) {
            ctype = ConnectivityType.GLOBAL;
            mergeModel = new PathAverageMergeModel();
        } else {
            System.out.println(PROGRAM_NAME + ": unknown connectivity type " + ctypeStr);
            printHelp();
        }
    }
    if (cmd.hasOption("norefresh"))
        mergeModel = new PathAverageMergeModelNorefresh();
    if (cmd.hasOption("algorithm")) {
        String alg = cmd.getOptionValue("algorithm");
        if (alg.equals("greedy")) {
            compression = new GreedyRatioCompression(hop2);
        } else if (alg.equals("random1")) {
            compression = new RandomRatioCompression(hop2);
        } else if (alg.equals("random2")) {
            compression = new SmartRandomRatioCompression(hop2);
        } else if (alg.equals("bruteforce")) {
            compression = new BruteForceCompression(hop2, ctype == ConnectivityType.LOCAL);
        } else if (alg.equals("slowgreedy")) {
            compression = new SlowGreedyRatioCompression(hop2);
        } else {
            System.out.println("algorithm must be one of: greedy random1 random2 bruteforce slowgreedy");
            printHelp();
        }
    }
    compression.setMergeModel(mergeModel);
    if (cmd.hasOption("r")) {
        ratio = Double.parseDouble(cmd.getOptionValue("r"));
    } else {
        System.out.println(PROGRAM_NAME + ": compression ratio not defined");
        printHelp();
    }
    if (cmd.hasOption("help")) {
        printHelp();
    }
    String infile = null;
    if (cmd.getArgs().length != 0) {
        infile = cmd.getArgs()[0];
    } else {
        printHelp();
    }
    boolean kmedoids = false;
    int kmedoidsK = 3;
    String kmedoidsOutput = "clusters.txt";
    if (cmd.hasOption("kmedoids"))
        kmedoids = true;
    if (cmd.hasOption("kmedoids_k"))
        kmedoidsK = Integer.parseInt(cmd.getOptionValue("kmedoids_k"));
    if (cmd.hasOption("kmedoids_output"))
        kmedoidsOutput = cmd.getOptionValue("kmedoids_output");
    String edgeAttrib = "goodness";
    if (cmd.hasOption("edge_attribute"))
        edgeAttrib = cmd.getOptionValue("edge_attribute");
    // This program should directly use bmgraph-java to read and
    // DefaultGraph should have a constructor that takes a BMGraph as an argument.
    // VisualGraph vg = new VisualGraph(infile, edgeAttrib, false);
    // System.out.println("vg read");
    // SimpleVisualGraph origSG = new SimpleVisualGraph(vg);
    BMGraph bmg = BMGraphUtils.readBMGraph(infile);
    int origN = bmg.getNodes().size();
    // for (int i = 0; i < origN; i++)
    //     System.out.println(i + "=" + origSG.getVisualNode(i));
    System.out.println("bmgraph read");
    BMNode[] i2n = new BMNode[origN];
    HashMap<BMNode, Integer> n2i = new HashMap<BMNode, Integer>();
    {
        int pi = 0;
        for (BMNode nod : bmg.getNodes()) {
            n2i.put(nod, pi);
            i2n[pi++] = nod;
        }
    }
    DefaultGraph dg = new DefaultGraph();
    for (BMEdge e : bmg.getEdges()) {
        dg.addEdge(n2i.get(e.getSource()), n2i.get(e.getTarget()), Double.parseDouble(e.get(edgeAttrib)));
    }
    DefaultGraph origDG = dg.copy();
    System.out.println("inputs read");
    RatioCompression nopCompressor = new RatioCompression.DefaultRatioCompression();
    ResultGraph nopResult = nopCompressor.compressGraph(dg, 1);
    long start = System.currentTimeMillis();
    ResultGraph result = compression.compressGraph(dg, ratio);
    long timeSpent = System.currentTimeMillis() - start;
    double seconds = timeSpent * 0.001;
    BufferedWriter timesWriter = new BufferedWriter(new FileWriter("times.txt", true));
    timesWriter.append("" + seconds + "\n");
    timesWriter.close();
    if (cmd.hasOption("only_times")) {
        System.out.println("Compression done, exiting.");
        System.exit(0);
    }
    BufferedWriter costsWriter = new BufferedWriter(new FileWriter("costs.txt", true));
    costsWriter.append("" + nopResult.getCompressorCosts() + " " + result.getCompressorCosts() + "\n");
    costsWriter.close();
    double[][] origProb;
    double[][] compProb;
    int[] group = new int[origN];
    for (int i = 0; i < result.partition.size(); i++)
        for (int x : result.partition.get(i))
            group[x] = i;
    if (ctype == ConnectivityType.LOCAL) {
        origProb = new double[origN][origN];
        compProb = new double[origN][origN];
        DefaultGraph g = result.uncompressedGraph();
        for (int i = 0; i < origN; i++) {
            for (int j = 0; j < origN; j++) {
                origProb[i][j] = dg.getEdgeWeight(i, j);
                compProb[i][j] = g.getEdgeWeight(i, j);
            }
        }
        System.out.println("Writing edge-dissimilarity");
    } else {
        origProb = ProbDijkstra.getProbMatrix(origDG);
        compProb = new double[origN][origN];
        System.out.println("nodeCount = " + result.graph.getNodeCount());
        double[][] ccProb = ProbDijkstra.getProbMatrix(result.graph);
        System.out.println("ccProb.length = " + ccProb.length);
        System.out.println("ccProb[0].length = " + ccProb[0].length);
        for (int i = 0; i < origN; i++) {
            for (int j = 0; j < origN; j++) {
                if (group[i] == group[j])
                    compProb[i][j] = result.graph.getEdgeWeight(group[i], group[j]);
                else {
                    int gj = group[j];
                    int gi = group[i];
                    compProb[i][j] = ccProb[group[i]][group[j]];
                }
            }
        }
        System.out.println("Writing best-path-dissimilarity");
        // compProb = ProbDijkstra.getProbMatrix(result.uncompressedGraph());
    }
    {
        BufferedWriter connWr = null;
        if (connectivity) {
            connWr = new BufferedWriter(new FileWriter("connectivity.txt", true));
        }
        double totalDiff = 0;
        for (int i = 0; i < origN; i++) {
            for (int j = i + 1; j < origN; j++) {
                double diff = Math.abs(origProb[i][j] - compProb[i][j]);
                // VisualNode ni = origSG.getVisualNode(i);
                // VisualNode nj = origSG.getVisualNode(j);
                BMNode ni = i2n[i];
                BMNode nj = i2n[j];
                if (connectivity)
                    connWr.append(ni + "\t" + nj + "\t" + origProb[i][j] + "\t" + compProb[i][j] + "\t" + diff + "\n");
                totalDiff += diff * diff;
            }
        }
        if (connectivity) {
            connWr.append("\n");
            connWr.close();
        }
        totalDiff = Math.sqrt(totalDiff);
        BufferedWriter dissWr = new BufferedWriter(new FileWriter("dissimilarity.txt", true));
        dissWr.append("" + totalDiff + "\n");
        dissWr.close();
    }
    if (cmd.hasOption("output_bmg")) {
        BMGraph outgraph = new BMGraph();
        String outputfile = cmd.getOptionValue("output_bmg");
        HashMap<Integer, BMNode> nodes = new HashMap<Integer, BMNode>();
        for (int i = 0; i < result.partition.size(); i++) {
            ArrayList<Integer> g = result.partition.get(i);
            if (g.size() == 0)
                continue;
            BMNode node = new BMNode("Supernode_" + i);
            HashMap<String, String> attributes = new HashMap<String, String>();
            StringBuffer contents = new StringBuffer();
            for (int x : g)
                contents.append(i2n[x] + ",");
            contents.delete(contents.length() - 1, contents.length());
            attributes.put("nodes", contents.toString());
            attributes.put("self-edge", "" + result.graph.getEdgeWeight(i, i));
            node.setAttributes(attributes);
            nodes.put(i, node);
            outgraph.ensureHasNode(node);
        }
        for (int i = 0; i < result.partition.size(); i++) {
            if (result.partition.get(i).size() == 0)
                continue;
            for (int x : result.graph.getNeighbors(i)) {
                if (x < i)
                    continue;
                BMNode from = nodes.get(i);
                BMNode to = nodes.get(x);
                if (from == null || to == null) {
                    System.out.println(from + "->" + to);
                    System.out.println(i + "->" + x);
                    System.out.println("");
                }
                BMEdge e = new BMEdge(nodes.get(i), nodes.get(x), "notype");
                e.setAttributes(new HashMap<String, String>());
                e.put("goodness", "" + result.graph.getEdgeWeight(i, x));
                outgraph.ensureHasEdge(e);
            }
        }
        BMGraphUtils.writeBMGraph(outgraph, outputfile);
    }
    // k medoids!
    if (kmedoids) {
        // KMedoidsResult clustersOrig = KMedoids.runKMedoids(origProb, kmedoidsK);
        if (ctype == ConnectivityType.LOCAL) {
            compProb = ProbDijkstra.getProbMatrix(result.uncompressedGraph());
        }
        // KMedoidsResult compClusters = KMedoids.runKMedoids(ProbDijkstra.getProbMatrix(result.graph), kmedoidsK);
        KMedoidsResult clustersComp = KMedoids.runKMedoids(compProb, kmedoidsK);
        BufferedWriter bw = new BufferedWriter(new FileWriter(kmedoidsOutput));
        for (int i = 0; i < origN; i++) {
            int g = group[i];
            // bw.append(origSG.getVisualNode(i).getBMNode() + " " + compClusters.clusters[g] + "\n");
            bw.append(i2n[i] + " " + clustersComp.clusters[i] + "\n");
        }
        bw.close();
    }
    System.exit(0);
}
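times.txt, costs.txt, and dissimilarity.txt above all follow the same open, append one line, close pattern. A small helper makes the repetition explicit; this is a sketch, not part of the original source:

import java.io.*;

public class StatsLog {
    // Append a single line to a per-metric log file, as the report code does repeatedly.
    static void appendLine(String fileName, String line) throws IOException {
        try (BufferedWriter w = new BufferedWriter(new FileWriter(fileName, true))) {
            w.write(line);
            w.write("\n");
        }
    }

    public static void main(String[] args) throws IOException {
        appendLine("times.txt", "12.345");
        appendLine("costs.txt", "100.0 42.0");
    }
}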
From source file:biomine.nodeimportancecompression.ImportanceCompressionReport.java
public static void main(String[] args) throws IOException, java.text.ParseException { opts.addOption("algorithm", true, "Used algorithm for compression. Possible values are 'brute-force', " + "'brute-force-edges','brute-force-merges','randomized','randomized-merges'," + "'randomized-edges'," + "'fast-brute-force'," + "'fast-brute-force-merges','fast-brute-force-merge-edges'. Default is 'brute-force'."); opts.addOption("query", true, "Query nodes ids, separated by comma."); opts.addOption("queryfile", true, "Read query nodes from file."); opts.addOption("ratio", true, "Goal ratio"); opts.addOption("importancefile", true, "Read importances straight from file"); opts.addOption("keepedges", false, "Don't remove edges during merges"); opts.addOption("connectivity", false, "Compute and output connectivities in edge oriented case"); opts.addOption("paths", false, "Do path oriented compression"); opts.addOption("edges", false, "Do edge oriented compression"); // opts.addOption( "a", double sigma = 1.0; CommandLineParser parser = new PosixParser(); CommandLine cmd = null;// ww w . ja v a 2s . c o m try { cmd = parser.parse(opts, args); } catch (ParseException e) { e.printStackTrace(); System.exit(0); } String queryStr = cmd.getOptionValue("query"); String[] queryNodeIDs = {}; double[] queryNodeIMP = {}; if (queryStr != null) { queryNodeIDs = queryStr.split(","); queryNodeIMP = new double[queryNodeIDs.length]; for (int i = 0; i < queryNodeIDs.length; i++) { String s = queryNodeIDs[i]; String[] es = s.split("="); queryNodeIMP[i] = 1; if (es.length == 2) { queryNodeIDs[i] = es[0]; queryNodeIMP[i] = Double.parseDouble(es[1]); } else if (es.length > 2) { System.out.println("Too many '=' in querynode specification: " + s); } } } String queryFile = cmd.getOptionValue("queryfile"); Map<String, Double> queryNodes = Collections.EMPTY_MAP; if (queryFile != null) { File in = new File(queryFile); BufferedReader read = new BufferedReader(new FileReader(in)); queryNodes = readMap(read); read.close(); } String impfile = cmd.getOptionValue("importancefile"); Map<String, Double> importances = null; if (impfile != null) { File in = new File(impfile); BufferedReader read = new BufferedReader(new FileReader(in)); importances = readMap(read); read.close(); } String algoStr = cmd.getOptionValue("algorithm"); CompressionAlgorithm algo = null; if (algoStr == null || algoStr.equals("brute-force")) { algo = new BruteForceCompression(); } else if (algoStr.equals("brute-force-edges")) { algo = new BruteForceCompressionOnlyEdges(); } else if (algoStr.equals("brute-force-merges")) { algo = new BruteForceCompressionOnlyMerges(); } else if (algoStr.equals("fast-brute-force-merges")) { //algo = new FastBruteForceCompressionOnlyMerges(); algo = new FastBruteForceCompression(true, false); } else if (algoStr.equals("fast-brute-force-edges")) { algo = new FastBruteForceCompression(false, true); //algo = new FastBruteForceCompressionOnlyEdges(); } else if (algoStr.equals("fast-brute-force")) { algo = new FastBruteForceCompression(true, true); } else if (algoStr.equals("randomized-edges")) { algo = new RandomizedCompressionOnlyEdges(); //modified } else if (algoStr.equals("randomized")) { algo = new RandomizedCompression(); } else if (algoStr.equals("randomized-merges")) { algo = new RandomizedCompressionOnlyMerges(); } else { System.out.println("Unsupported algorithm: " + algoStr); printHelp(); } String ratioStr = cmd.getOptionValue("ratio"); double ratio = 0; if (ratioStr != null) { ratio = Double.parseDouble(ratioStr); } else { 
System.out.println("Goal ratio not specified"); printHelp(); } String infile = null; if (cmd.getArgs().length != 0) { infile = cmd.getArgs()[0]; } else { printHelp(); } BMGraph bmg = BMGraphUtils.readBMGraph(new File(infile)); HashMap<BMNode, Double> queryBMNodes = new HashMap<BMNode, Double>(); for (String id : queryNodes.keySet()) { queryBMNodes.put(bmg.getNode(id), queryNodes.get(id)); } long startMillis = System.currentTimeMillis(); ImportanceGraphWrapper wrap = QueryImportance.queryImportanceGraph(bmg, queryBMNodes); if (importances != null) { for (String id : importances.keySet()) { wrap.setImportance(bmg.getNode(id), importances.get(id)); } } ImportanceMerger merger = null; if (cmd.hasOption("edges")) { merger = new ImportanceMergerEdges(wrap.getImportanceGraph()); } else if (cmd.hasOption("paths")) { merger = new ImportanceMergerPaths(wrap.getImportanceGraph()); } else { System.out.println("Specify either 'paths' or 'edges'."); System.exit(1); } if (cmd.hasOption("keepedges")) { merger.setKeepEdges(true); } algo.compress(merger, ratio); long endMillis = System.currentTimeMillis(); // write importance { BufferedWriter wr = new BufferedWriter(new FileWriter("importance.txt", false)); for (BMNode nod : bmg.getNodes()) { wr.write(nod + " " + wrap.getImportance(nod) + "\n"); } wr.close(); } // write sum of all pairs of node importance added by Fang /* { BufferedWriter wr = new BufferedWriter(new FileWriter("sum_of_all_pairs_importance.txt", true)); ImportanceGraph orig = wrap.getImportanceGraph(); double sum = 0; for (int i = 0; i <= orig.getMaxNodeId(); i++) { for (int j = i+1; j <= orig.getMaxNodeId(); j++) { sum = sum+ wrap.getImportance(i)* wrap.getImportance(j); } } wr.write(""+sum); wr.write("\n"); wr.close(); } */ // write uncompressed edges { BufferedWriter wr = new BufferedWriter(new FileWriter("edges.txt", false)); ImportanceGraph orig = wrap.getImportanceGraph(); ImportanceGraph ucom = merger.getUncompressedGraph(); for (int i = 0; i <= orig.getMaxNodeId(); i++) { String iname = wrap.intToNode(i).toString(); HashSet<Integer> ne = new HashSet<Integer>(); ne.addAll(orig.getNeighbors(i)); ne.addAll(ucom.getNeighbors(i)); for (int j : ne) { if (i < j) continue; String jname = wrap.intToNode(j).toString(); double a = orig.getEdgeWeight(i, j); double b = ucom.getEdgeWeight(i, j); wr.write(iname + " " + jname + " " + a + " " + b + " " + Math.abs(a - b)); wr.write("\n"); } } wr.close(); } // write distance { // BufferedWriter wr = new BufferedWriter(new // FileWriter("distance.txt",false)); BufferedWriter wr = new BufferedWriter(new FileWriter("distance.txt", true)); //modified by Fang ImportanceGraph orig = wrap.getImportanceGraph(); ImportanceGraph ucom = merger.getUncompressedGraph(); double error = 0; for (int i = 0; i <= orig.getMaxNodeId(); i++) { HashSet<Integer> ne = new HashSet<Integer>(); ne.addAll(orig.getNeighbors(i)); ne.addAll(ucom.getNeighbors(i)); for (int j : ne) { if (i <= j) continue; double a = orig.getEdgeWeight(i, j); double b = ucom.getEdgeWeight(i, j); error += (a - b) * (a - b) * wrap.getImportance(i) * wrap.getImportance(j); // modify by Fang: multiply imp(u)imp(v) } } error = Math.sqrt(error); //////////error = Math.sqrt(error / 2); // modified by Fang: the error of each // edge is counted twice wr.write("" + error); wr.write("\n"); wr.close(); } // write sizes { ImportanceGraph orig = wrap.getImportanceGraph(); ImportanceGraph comp = merger.getCurrentGraph(); // BufferedWriter wr = new BufferedWriter(new // FileWriter("sizes.txt",false)); BufferedWriter wr 
= new BufferedWriter(new FileWriter("sizes.txt", true)); //modified by Fang wr.write(orig.getNodeCount() + " " + orig.getEdgeCount() + " " + comp.getNodeCount() + " " + comp.getEdgeCount()); wr.write("\n"); wr.close(); } //write time { System.out.println("writing time"); BufferedWriter wr = new BufferedWriter(new FileWriter("time.txt", true)); //modified by Fang double secs = (endMillis - startMillis) * 0.001; wr.write("" + secs + "\n"); wr.close(); } //write change of connectivity for edge-oriented case // added by Fang { if (cmd.hasOption("connectivity")) { BufferedWriter wr = new BufferedWriter(new FileWriter("connectivity.txt", true)); ImportanceGraph orig = wrap.getImportanceGraph(); ImportanceGraph ucom = merger.getUncompressedGraph(); double diff = 0; for (int i = 0; i <= orig.getMaxNodeId(); i++) { ProbDijkstra pdori = new ProbDijkstra(orig, i); ProbDijkstra pducom = new ProbDijkstra(ucom, i); for (int j = i + 1; j <= orig.getMaxNodeId(); j++) { double oriconn = pdori.getProbTo(j); double ucomconn = pducom.getProbTo(j); diff = diff + (oriconn - ucomconn) * (oriconn - ucomconn) * wrap.getImportance(i) * wrap.getImportance(j); } } diff = Math.sqrt(diff); wr.write("" + diff); wr.write("\n"); wr.close(); } } //write output graph { BMGraph output = bmg;//new BMGraph(bmg); int no = 0; BMNode[] nodes = new BMNode[merger.getGroups().size()]; for (ArrayList<Integer> gr : merger.getGroups()) { BMNode bmgroup = new BMNode("Group", "" + (no + 1)); bmgroup.setAttributes(new HashMap<String, String>()); bmgroup.put("autoedges", "0"); nodes[no] = bmgroup; no++; if (gr.size() == 0) continue; for (int x : gr) { BMNode nod = output.getNode(wrap.intToNode(x).toString()); BMEdge belongs = new BMEdge(nod, bmgroup, "belongs_to"); output.ensureHasEdge(belongs); } output.ensureHasNode(bmgroup); } for (int i = 0; i < nodes.length; i++) { for (int x : merger.getCurrentGraph().getNeighbors(i)) { if (x == i) { nodes[x].put("selfedge", "" + merger.getCurrentGraph().getEdgeWeight(i, x)); //ge.put("goodness", ""+merger.getCurrentGraph().getEdgeWeight(i, x)); continue; } BMEdge ge = new BMEdge(nodes[x], nodes[i], "groupedge"); ge.setAttributes(new HashMap<String, String>()); ge.put("goodness", "" + merger.getCurrentGraph().getEdgeWeight(i, x)); output.ensureHasEdge(ge); } } System.out.println(output.getGroupNodes()); BMGraphUtils.writeBMGraph(output, "output.bmg"); } }
From source file:microbiosima.SelectiveMicrobiosima.java
/**
 * @param args the command line arguments
 * @throws java.io.FileNotFoundException
 * @throws java.io.UnsupportedEncodingException
 */
public static void main(String[] args) throws FileNotFoundException, UnsupportedEncodingException {
    int populationSize = 500;
    int microSize = 1000;
    int numberOfSpecies = 150;
    int numberOfGeneration = 10000;
    int Ngene = 10;
    int numberOfObservation = 100;
    int numberOfReplication = 10;
    double Ngenepm = 5;
    double pctEnv = 0;
    double pctPool = 0;
    double msCoeff = 1;
    double hsCoeff = 1;
    boolean HMS_or_TMS = true;

    Options options = new Options();
    Option help = new Option("h", "help", false, "print this message");
    Option version = new Option("v", "version", false, "print the version information and exit");
    options.addOption(help);
    options.addOption(version);
    options.addOption(Option.builder("o").longOpt("obs").hasArg().argName("OBS")
            .desc("Number of generations between observations [default: 100]").build());
    options.addOption(Option.builder("r").longOpt("rep").hasArg().argName("REP")
            .desc("Number of replications [default: 10]").build());
    Builder C = Option.builder("c").longOpt("config").numberOfArgs(6)
            .argName("Pop Micro Spec Gen Ngene Ngenepm")
            .desc("Six parameters, in the following order: "
                    + "(1) population size, (2) microbe size, (3) number of species, "
                    + "(4) number of generations, (5) number of total traits, "
                    + "(6) number of traits per microbe"
                    + " [default: 500 1000 150 10000 10 5]");
    options.addOption(C.build());

    HelpFormatter formatter = new HelpFormatter();
    String syntax = "microbiosima pctEnv pctPool";
    String header = "\nSimulates the evolutionary and ecological dynamics of microbiomes within a population of hosts.\n\n"
            + "required arguments:\n"
            + "  pctEnv      Percentage of environmental acquisition\n"
            + "  pctPool     Percentage of pooled environmental component\n"
            + "  msCoeff     Parameter related to microbe selection strength\n"
            + "  hsCoeff     Parameter related to host selection strength\n"
            + "  HMS_or_TMS  String HMS or TMS to specify host-mediated or trait-mediated microbe selection\n"
            + "\noptional arguments:\n";
    String footer = "\n";
    formatter.setWidth(80);

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
        String[] pct_config = cmd.getArgs();
        if (cmd.hasOption("h") || args.length == 0) {
            formatter.printHelp(syntax, header, options, footer, true);
            System.exit(0);
        }
        if (cmd.hasOption("v")) {
            System.out.println("Microbiosima " + VERSION);
            System.exit(0);
        }
        if (pct_config.length != 5) {
            System.out.println("ERROR! Required exactly five arguments for pct_env, pct_pool, msCoeff, hsCoeff and HMS_or_TMS. Got "
                    + pct_config.length + ": " + Arrays.toString(pct_config));
            formatter.printHelp(syntax, header, options, footer, true);
            System.exit(3);
        } else {
            pctEnv = Double.parseDouble(pct_config[0]);
            pctPool = Double.parseDouble(pct_config[1]);
            msCoeff = Double.parseDouble(pct_config[2]);
            hsCoeff = Double.parseDouble(pct_config[3]);
            if (pct_config[4].equals("HMS"))
                HMS_or_TMS = true;
            if (pct_config[4].equals("TMS"))
                HMS_or_TMS = false;
            if (pctEnv < 0 || pctEnv > 1) {
                System.out.println("ERROR: pctEnv (percentage of environmental acquisition) must be between 0 and 1 (pctEnv="
                        + pctEnv + ")! EXIT");
                System.exit(3);
            }
            if (pctPool < 0 || pctPool > 1) {
                System.out.println("ERROR: pctPool (percentage of pooled environmental component) must be between 0 and 1 (pctPool="
                        + pctPool + ")! EXIT");
                System.exit(3);
            }
            if (msCoeff < 1) {
                System.out.println("ERROR: msCoeff (parameter related to microbe selection strength) must not be less than 1 (msCoeff="
                        + msCoeff + ")! EXIT");
                System.exit(3);
            }
            if (hsCoeff < 1) {
                System.out.println("ERROR: hsCoeff (parameter related to host selection strength) must not be less than 1 (hsCoeff="
                        + hsCoeff + ")! EXIT");
                System.exit(3);
            }
            if (!(pct_config[4].equals("HMS") || pct_config[4].equals("TMS"))) {
                System.out.println("ERROR: HMS_or_TMS (parameter specifying host-mediated or trait-mediated selection) must be either 'HMS' or 'TMS' (HMS_or_TMS="
                        + pct_config[4] + ")! EXIT");
                System.exit(3);
            }
        }
        if (cmd.hasOption("config")) {
            String[] configs = cmd.getOptionValues("config");
            populationSize = Integer.parseInt(configs[0]);
            microSize = Integer.parseInt(configs[1]);
            numberOfSpecies = Integer.parseInt(configs[2]);
            numberOfGeneration = Integer.parseInt(configs[3]);
            Ngene = Integer.parseInt(configs[4]);
            Ngenepm = Double.parseDouble(configs[5]);
            if (Ngenepm > Ngene) {
                System.out.println("ERROR: number of traits per microbe must not be greater than number of total traits! EXIT");
                System.exit(3);
            }
        }
        if (cmd.hasOption("obs")) {
            numberOfObservation = Integer.parseInt(cmd.getOptionValue("obs"));
        }
        if (cmd.hasOption("rep")) {
            numberOfReplication = Integer.parseInt(cmd.getOptionValue("rep"));
        }
    } catch (ParseException e) {
        e.printStackTrace();
        System.exit(3);
    }

    StringBuilder sb = new StringBuilder();
    sb.append("Configuration Summary:")
            .append("\n\tPopulation size: ").append(populationSize)
            .append("\n\tMicrobe size: ").append(microSize)
            .append("\n\tNumber of species: ").append(numberOfSpecies)
            .append("\n\tNumber of generations: ").append(numberOfGeneration)
            .append("\n\tNumber of generations between observations: ").append(numberOfObservation)
            .append("\n\tNumber of replications: ").append(numberOfReplication)
            .append("\n\tNumber of total traits: ").append(Ngene)
            .append("\n\tNumber of traits per microbe: ").append(Ngenepm)
            .append("\n");
    System.out.println(sb.toString());

    double[] environment = new double[numberOfSpecies];
    for (int i = 0; i < numberOfSpecies; i++) {
        environment[i] = 1 / (double) numberOfSpecies;
    }
    int[] fitnessToHost = new int[Ngene];
    int[] fitnessToMicrobe = new int[Ngene];

    for (int rep = 0; rep < numberOfReplication; rep++) {
        String prefix = "" + (rep + 1) + "_";
        String sufix;
        if (HMS_or_TMS)
            sufix = "_E" + pctEnv + "_P" + pctPool + "_HS" + hsCoeff + "_HMS" + msCoeff + ".txt";
        else
            sufix = "_E" + pctEnv + "_P" + pctPool + "_HS" + hsCoeff + "_TMS" + msCoeff + ".txt";
        System.out.println("Output 12 result files in the format of: " + prefix + "[****]" + sufix);
        try {
            PrintWriter file1 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "gamma_diversity" + sufix)));
            PrintWriter file2 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "alpha_diversity" + sufix)));
            PrintWriter file3 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "beta_diversity" + sufix)));
            PrintWriter file4 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "sum" + sufix)));
            PrintWriter file5 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "inter_generation_distance" + sufix)));
            PrintWriter file6 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "environment_population_distance" + sufix)));
            PrintWriter file7 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "host_fitness" + sufix)));
            PrintWriter file8 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "cos_theta" + sufix)));
            PrintWriter file9 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "host_fitness_distribution" + sufix)));
            PrintWriter file10 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "microbiome_fitness_distribution" + sufix)));
            PrintWriter file11 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "bacteria_contents" + sufix)));
            PrintWriter file12 = new PrintWriter(
                    new BufferedWriter(new FileWriter(prefix + "individual_bacteria_contents" + sufix)));
            for (int i = 0; i < Ngene; i++) {
                fitnessToMicrobe[i] = MathUtil.getNextInt(2) - 1;
                fitnessToHost[i] = MathUtil.getNextInt(2) - 1;
            }
            MathUtil.setSeed(rep % numberOfReplication);
            SelectiveSpeciesRegistry ssr = new SelectiveSpeciesRegistry(numberOfSpecies, Ngene, Ngenepm,
                    msCoeff, fitnessToHost, fitnessToMicrobe);
            MathUtil.setSeed();
            SelectivePopulation population = new SelectivePopulation(microSize, environment, populationSize,
                    pctEnv, pctPool, 0, 0, ssr, hsCoeff, HMS_or_TMS);
            while (population.getNumberOfGeneration() < numberOfGeneration) {
                population.sumSpecies();
                if (population.getNumberOfGeneration() % numberOfObservation == 0) {
                    // file1.print(population.gammaDiversity(false));
                    // file2.print(population.alphaDiversity(false));
                    // file1.print("\t");
                    // file2.print("\t");
                    file1.println(population.gammaDiversity(true));
                    file2.println(population.alphaDiversity(true));
                    // file3.print(population.betaDiversity(true));
                    // file3.print("\t");
                    file3.println(population.BrayCurtis(true));
                    file4.println(population.printOut());
                    file5.println(population.interGenerationDistance());
                    file6.println(population.environmentPopulationDistance());
                    file7.print(population.averageHostFitness());
                    file7.print("\t");
                    file7.println(population.varianceHostFitness());
                    file8.println(population.cosOfMH());
                    file9.println(population.printOutHFitness());
                    file10.println(population.printOutMFitness());
                    file11.println(population.printBacteriaContents());
                }
                population.getNextGen();
            }
            for (SelectiveIndividual host : population.getIndividuals()) {
                file12.println(host.printBacteriaContents());
            }
            file1.close();
            file2.close();
            file3.close();
            file4.close();
            file5.close();
            file6.close();
            file7.close();
            file8.close();
            file9.close();
            file10.close();
            file11.close();
            file12.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
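The PrintWriter-over-BufferedWriter-over-FileWriter stack used above is the classic way to combine buffered output with the convenient print/println API. A minimal sketch of the same pattern, reduced to a single writer with a hypothetical file name; it is not part of the original source, and it uses try-with-resources so the writer chain is closed (and the buffer flushed) even if a write fails:

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;

public class BufferedPrintWriterSketch {
    public static void main(String[] args) {
        // Hypothetical name standing in for prefix + "gamma_diversity" + sufix above
        String outputFile = "1_gamma_diversity_E0.5_P0.1_HS1.0_HMS1.0.txt";
        // PrintWriter adds println(); BufferedWriter batches small writes;
        // try-with-resources closes the whole chain on exit
        try (PrintWriter out = new PrintWriter(
                new BufferedWriter(new FileWriter(outputFile)))) {
            out.println(0.42); // placeholder value for a per-generation statistic
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

Had the original example used this form, the twelve explicit close() calls would be unnecessary, and the writers would still be closed when an exception escapes the simulation loop.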
From source file:Main.java
public static String writeXMLToFile(String resString, String fileName) {
    // try-with-resources ensures the writer is closed even if write() throws
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(fileName)))) {
        writer.write(resString);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    return fileName;
}
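Because this method writes XML, note that FileWriter uses the platform-default charset, which can silently disagree with the encoding declared in the document's XML prolog. A sketch of an alternative, not part of the original example, using Files.newBufferedWriter with an explicit UTF-8 charset:

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

public class WriteXmlSketch {
    // Hypothetical variant of writeXMLToFile: the bytes on disk are
    // guaranteed to be UTF-8, matching a typical <?xml ... encoding="UTF-8"?> prolog
    public static String writeXmlToFile(String resString, String fileName) {
        try (BufferedWriter writer = Files.newBufferedWriter(
                Paths.get(fileName), StandardCharsets.UTF_8)) {
            writer.write(resString);
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
        return fileName;
    }
}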