List of usage examples for java.io.FileWriter
public FileWriter(File file, Charset charset) throws IOException
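Note that the examples below predate this overload and use the older FileWriter(String/File[, boolean append]) constructors, which write in the platform default encoding. A minimal sketch of the Charset overload itself (available since Java 11), with a hypothetical output path:

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class FileWriterCharsetExample {
    public static void main(String[] args) throws IOException {
        File file = new File("example.txt"); // hypothetical path
        // Writes UTF-8 regardless of the platform default encoding.
        try (FileWriter writer = new FileWriter(file, StandardCharsets.UTF_8)) {
            writer.write("line written with an explicit Charset\n");
        }
    }
}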
From source file:DruidResponseTime.java
public static void main(String[] args) throws Exception {
    try (CloseableHttpClient client = HttpClients.createDefault()) {
        HttpPost post = new HttpPost("http://localhost:8082/druid/v2/?pretty");
        post.addHeader("content-type", "application/json");
        CloseableHttpResponse res;
        if (STORE_RESULT) {
            File dir = new File(RESULT_DIR);
            if (!dir.exists()) {
                dir.mkdirs();
            }
        }
        int length;
        // Make sure all segments are online
        System.out.println("Test if number of records is " + RECORD_NUMBER);
        post.setEntity(new StringEntity("{" + "\"queryType\":\"timeseries\","
                + "\"dataSource\":\"tpch_lineitem\"," + "\"intervals\":[\"1992-01-01/1999-01-01\"],"
                + "\"granularity\":\"all\","
                + "\"aggregations\":[{\"type\":\"count\",\"name\":\"count\"}]}"));
        while (true) {
            System.out.print('*');
            res = client.execute(post);
            boolean valid;
            try (BufferedInputStream in = new BufferedInputStream(res.getEntity().getContent())) {
                length = in.read(BYTE_BUFFER);
                valid = new String(BYTE_BUFFER, 0, length, "UTF-8").contains("\"count\" : 6001215");
            }
            res.close();
            if (valid) {
                break;
            } else {
                Thread.sleep(5000);
            }
        }
        System.out.println("Number of Records Test Passed");
        for (int i = 0; i < QUERIES.length; i++) {
            System.out.println("--------------------------------------------------------------------------------");
            System.out.println("Start running query: " + QUERIES[i]);
            try (BufferedReader reader = new BufferedReader(
                    new FileReader(QUERY_FILE_DIR + File.separator + i + ".json"))) {
                length = reader.read(CHAR_BUFFER);
                post.setEntity(new StringEntity(new String(CHAR_BUFFER, 0, length)));
            }
            // Warm-up rounds
            System.out.println("Run " + WARMUP_ROUND + " times to warm up cache...");
            for (int j = 0; j < WARMUP_ROUND; j++) {
                res = client.execute(post);
                res.close();
                System.out.print('*');
            }
            System.out.println();
            // Test rounds
            int[] time = new int[TEST_ROUND];
            int totalTime = 0;
            System.out.println("Run " + TEST_ROUND + " times to get average time...");
            for (int j = 0; j < TEST_ROUND; j++) {
                long startTime = System.currentTimeMillis();
                res = client.execute(post);
                long endTime = System.currentTimeMillis();
                if (STORE_RESULT && j == 0) {
                    try (BufferedInputStream in = new BufferedInputStream(res.getEntity().getContent());
                            BufferedWriter writer = new BufferedWriter(
                                    new FileWriter(RESULT_DIR + File.separator + i + ".json", false))) {
                        while ((length = in.read(BYTE_BUFFER)) > 0) {
                            writer.write(new String(BYTE_BUFFER, 0, length, "UTF-8"));
                        }
                    }
                }
                res.close();
                time[j] = (int) (endTime - startTime);
                totalTime += time[j];
                System.out.print(time[j] + "ms ");
            }
            System.out.println();
            // Process results
            double avgTime = (double) totalTime / TEST_ROUND;
            double stdDev = 0;
            for (int temp : time) {
                stdDev += (temp - avgTime) * (temp - avgTime) / TEST_ROUND;
            }
            stdDev = Math.sqrt(stdDev);
            System.out.println("The average response time for the query is: " + avgTime + "ms");
            System.out.println("The standard deviation is: " + stdDev);
        }
    }
}
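The FileWriter idiom in this example is new FileWriter(path, false) wrapped in a BufferedWriter, both opened in a try-with-resources so they are closed even if a read fails; the explicit false selects overwrite rather than append. A minimal sketch of that idiom in isolation, with hypothetical names:

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class OverwriteResultFile {
    public static void main(String[] args) throws IOException {
        // false = truncate any existing file (this is also FileWriter's default)
        try (BufferedWriter writer = new BufferedWriter(new FileWriter("result.json", false))) {
            writer.write("{\"count\": 6001215}");
        }
    }
}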
From source file:de.citec.sc.matoll.process.Matoll_CreateMax.java
public static void main(String[] args) throws IOException, ParserConfigurationException, SAXException,
        InstantiationException, IllegalAccessException, ClassNotFoundException, Exception {
    String directory;
    String gold_standard_lexicon;
    String output_lexicon;
    String configFile;
    Language language;
    String output;
    Stopwords stopwords = new Stopwords();
    HashMap<String, Double> maxima;
    maxima = new HashMap<String, Double>();
    if (args.length < 3) {
        System.out.print("Usage: Matoll --mode=train/test <DIRECTORY> <CONFIG>\n");
        return;
    }
    // Classifier classifier;
    directory = args[1];
    configFile = args[2];
    final Config config = new Config();
    config.loadFromFile(configFile);
    gold_standard_lexicon = config.getGoldStandardLexicon();
    String model_file = config.getModel();
    output_lexicon = config.getOutputLexicon();
    output = config.getOutput();
    language = config.getLanguage();
    LexiconLoader loader = new LexiconLoader();
    Lexicon gold = loader.loadFromFile(gold_standard_lexicon);
    Set<String> uris = new HashSet<>();
    // Map<Integer,String> sentence_list = new HashMap<>();
    Map<Integer, Set<Integer>> mapping_words_sentences = new HashMap<>();
    // consider only properties
    for (LexicalEntry entry : gold.getEntries()) {
        try {
            for (Sense sense : entry.getSenseBehaviours().keySet()) {
                String tmp_uri = sense.getReference().getURI().replace("http://dbpedia.org/ontology/", "");
                if (!Character.isUpperCase(tmp_uri.charAt(0))) {
                    uris.add(sense.getReference().getURI());
                }
            }
        } catch (Exception e) {
        }
    }
    ModelPreprocessor preprocessor = new ModelPreprocessor(language);
    preprocessor.setCoreferenceResolution(false);
    Set<String> dep = new HashSet<>();
    dep.add("prep");
    dep.add("appos");
    dep.add("nn");
    dep.add("dobj");
    dep.add("pobj");
    dep.add("num");
    preprocessor.setDEP(dep);
    List<File> list_files = new ArrayList<>();
    if (config.getFiles().isEmpty()) {
        File folder = new File(directory);
        File[] files = folder.listFiles();
        for (File file : files) {
            if (file.toString().contains(".ttl"))
                list_files.add(file);
        }
    } else {
        list_files.addAll(config.getFiles());
    }
    System.out.println(list_files.size());
    int sentence_counter = 0;
    Map<String, Set<Integer>> bag_words_uri = new HashMap<>();
    Map<String, Integer> mapping_word_id = new HashMap<>();
    for (File file : list_files) {
        Model model = RDFDataMgr.loadModel(file.toString());
        for (Model sentence : getSentences(model)) {
            String reference = getReference(sentence);
            reference = reference.replace("http://dbpedia/", "http://dbpedia.org/");
            if (uris.contains(reference)) {
                sentence_counter += 1;
                Set<Integer> words_ids = getBagOfWords(sentence, stopwords, mapping_word_id);
                // TODO: add sentence preprocessing
                String obj = getObject(sentence);
                String subj = getSubject(sentence);
                preprocessor.preprocess(sentence, subj, obj, language);
                // TODO: also return marker if object or subject of property
                // (in SPARQL this has to be optional of course)
                String parsed_sentence = getParsedSentence(sentence);
                try (FileWriter fw = new FileWriter("mapping_sentences_to_ids_goldstandard.tsv", true);
                        BufferedWriter bw = new BufferedWriter(fw);
                        PrintWriter out = new PrintWriter(bw)) {
                    out.println(sentence_counter + "\t" + parsed_sentence);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                for (Integer word_id : words_ids) {
                    if (mapping_words_sentences.containsKey(word_id)) {
                        Set<Integer> tmp_set = mapping_words_sentences.get(word_id);
                        tmp_set.add(sentence_counter);
                        mapping_words_sentences.put(word_id, tmp_set);
                    } else {
                        Set<Integer> tmp_set = new HashSet<>();
                        tmp_set.add(sentence_counter);
                        mapping_words_sentences.put(word_id, tmp_set);
                    }
                }
                if (bag_words_uri.containsKey(reference)) {
                    Set<Integer> tmp = bag_words_uri.get(reference);
                    for (Integer w : words_ids) {
                        tmp.add(w);
                    }
                    bag_words_uri.put(reference, tmp);
                } else {
                    Set<Integer> tmp = new HashSet<>();
                    for (Integer w : words_ids) {
                        tmp.add(w);
                    }
                    bag_words_uri.put(reference, tmp);
                }
            }
        }
        model.close();
    }
    PrintWriter writer = new PrintWriter("bag_of_words_only_goldstandard.tsv");
    StringBuilder string_builder = new StringBuilder();
    for (String r : bag_words_uri.keySet()) {
        string_builder.append(r);
        for (Integer i : bag_words_uri.get(r)) {
            string_builder.append("\t");
            string_builder.append(i);
        }
        string_builder.append("\n");
    }
    writer.write(string_builder.toString());
    writer.close();
    writer = new PrintWriter("mapping_words_to_sentenceids_goldstandard.tsv");
    string_builder = new StringBuilder();
    for (Integer w : mapping_words_sentences.keySet()) {
        string_builder.append(w);
        for (int i : mapping_words_sentences.get(w)) {
            string_builder.append("\t");
            string_builder.append(i);
        }
        string_builder.append("\n");
    }
    writer.write(string_builder.toString());
    writer.close();
}
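This example appends one TSV line per sentence by opening the file with new FileWriter(path, true) and chaining it through BufferedWriter into PrintWriter; the try-with-resources closes the three writers in reverse order of declaration. A minimal sketch of the append-a-line idiom, with hypothetical file and content:

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;

public class AppendTsvLine {
    public static void main(String[] args) {
        try (FileWriter fw = new FileWriter("mapping.tsv", true); // true = append
                BufferedWriter bw = new BufferedWriter(fw);
                PrintWriter out = new PrintWriter(bw)) {
            out.println(42 + "\t" + "a parsed sentence");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

Reopening the file for every sentence is simple but slow; hoisting the writer out of the loop would avoid repeated open/close system calls.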
From source file:diffhunter.DiffHunter.java
/**
 * @param args the command line arguments
 * @throws org.apache.commons.cli.ParseException
 * @throws java.io.IOException
 */
public static void main(String[] args) throws ParseException, IOException {
    /*args = new String[] { "-i", "-b",
            "J:\\VishalData\\additional\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted.bed", "-r",
            "J:\\VishalData\\additional\\mouse_mm9.txt", "-o", "J:\\VishalData" };*/
    /*args = new String[] { "-c", "-r", "J:\\VishalData\\additional\\mouse_mm9.txt", "-1",
            "J:\\VishalData\\Ptbp2_Adult_testis_CLIP_mm9_plus_strand_sorted_BDB", "-2",
            "J:\\VishalData\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted_BDB", "-w", "200", "-s", "50",
            "-o", "J:\\VishalData" };*/
    Options options = new Options();
    options.addOption("i", "index", false, "Indexing BED files.");
    options.addOption("b", "bed", true, "bed file to be indexed");
    options.addOption("o", "output", true, "Folder that the index/comparison file will be created.");
    options.addOption("r", "reference", true, "Reference annotation file to be used for indexing");
    options.addOption("c", "compare", false, "Finding differences between two conditions");
    options.addOption("1", "first", true, "First sample index location");
    options.addOption("2", "second", true, "Second sample index location");
    options.addOption("w", "window", true, "Length of window for identifying differences");
    options.addOption("s", "sliding", true, "Length of sliding");
    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);
    boolean indexing = false;
    boolean comparing = false;
    if (cmd.hasOption("i")) {
        indexing = true;
    } else if (cmd.hasOption("c")) {
        comparing = true;
    } else {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("diffhunter", options);
        return;
    }
    // Indexing is selected
    if (indexing == true) {
        // Since indexing is true, the user has to provide a file for indexing.
        if (!(cmd.hasOption("o") || cmd.hasOption("r") || cmd.hasOption("b"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }
        String bedfile_ = cmd.getOptionValue("b");
        String reference_file = cmd.getOptionValue("r");
        String folder_loc = cmd.getOptionValue("o");
        String sample_name = FilenameUtils.getBaseName(bedfile_);
        try (Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(
                Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString(), true, sample_name)) {
            Indexer indexing_ = new Indexer(reference_file);
            indexing_.Make_Index(B2, bedfile_,
                    Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString());
            B2.close();
        }
    } else if (comparing == true) {
        if (!(cmd.hasOption("o") || cmd.hasOption("w") || cmd.hasOption("s") || cmd.hasOption("1")
                || cmd.hasOption("2"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }
        String folder_loc = cmd.getOptionValue("o");
        int window_ = Integer.parseInt(cmd.getOptionValue("w"));
        int slide_ = Integer.parseInt(cmd.getOptionValue("s"));
        String first = cmd.getOptionValue("1").replace("_BDB", "");
        String second = cmd.getOptionValue("2").replace("_BDB", "");
        String reference_file = cmd.getOptionValue("r");
        String sample_name_first = FilenameUtils.getBaseName(first);
        String sample_name_second = FilenameUtils.getBaseName(second);
        Database B1 = BerkeleyDB_Box.Get_BerkeleyDB(first + "_BDB", false, sample_name_first);
        Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(second + "_BDB", false, sample_name_second);
        List<String> first_condition_genes = Files
                .lines(Paths.get(first + "_BDB", sample_name_first + ".txt").toAbsolutePath())
                .collect(Collectors.toList());
        List<String> second_condition_genes = Files
                .lines(Paths.get(second + "_BDB", sample_name_second + ".txt").toAbsolutePath())
                .collect(Collectors.toList());
        System.out.println("First and second condition are loaded!!!");
        List<String> intersection_ = new ArrayList<>(first_condition_genes);
        intersection_.retainAll(second_condition_genes);
        BufferedWriter output = new BufferedWriter(
                new FileWriter(Paths.get(folder_loc, "differences_" + window_ + "_s" + slide_ + "_c" + ".txt")
                        .toAbsolutePath().toString(), false));
        List<Result_Window> final_results = Collections.synchronizedList(new ArrayList<>());
        Worker_New worker_class = new Worker_New();
        worker_class.Read_Reference(reference_file);
        while (!intersection_.isEmpty()) {
            List<String> selected_genes = new ArrayList<>();
            // Process genes in batches of at most 10,000, as in the commented-out
            // original; the published code compared intersection_.size() to itself,
            // which always selected the whole list in one pass.
            int batchSize = Math.min(intersection_.size(), 10000);
            selected_genes.addAll(intersection_.subList(0, batchSize));
            intersection_.removeAll(selected_genes);
            IntStream.range(0, selected_genes.size()).parallel().forEach(i -> {
                System.out.println(selected_genes.get(i) + "\tprocessing......");
                String gene_of_interest = selected_genes.get(i);
                int start = worker_class.dic_genes.get(gene_of_interest).start_loc;
                int end = worker_class.dic_genes.get(gene_of_interest).end_loc;
                Map<Integer, Integer> first_ = Collections.EMPTY_MAP;
                try {
                    first_ = BerkeleyDB_Box.Get_Coord_Read(B1, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }
                Map<Integer, Integer> second_ = Collections.EMPTY_MAP;
                try {
                    second_ = BerkeleyDB_Box.Get_Coord_Read(B2, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }
                List<Window> top_windows_first = worker_class.Get_Top_Windows(window_, first_, slide_);
                List<Window> top_windows_second = worker_class.Get_Top_Windows(window_, second_, slide_);
                if (top_windows_first.isEmpty() && top_windows_second.isEmpty()) {
                    return;
                }
                List<Result_Window> res_temp = new Worker_New().Get_Significant_Windows(gene_of_interest, start,
                        end, top_windows_first, top_windows_second, second_, first_, sample_name_first,
                        sample_name_second, 0.01);
                if (!res_temp.isEmpty()) {
                    final_results.addAll(res_temp);
                }
            });
            List<Double> pvals = new ArrayList<>();
            for (int i = 0; i < final_results.size(); i++) {
                pvals.add(final_results.get(i).p_value);
            }
            List<Double> qvals = MultipleTestCorrection.benjaminiHochberg(pvals);
            System.out.println("Writing to file...");
            output.append("Gene_Symbol\tContributing_Sample\tStart\tEnd\tOddsRatio\tp_Value\tFDR");
            output.newLine();
            for (int i = 0; i < final_results.size(); i++) {
                Result_Window item = final_results.get(i);
                output.append(item.associated_gene_symbol + "\t" + item.contributing_windows + "\t"
                        + item.start_loc + "\t" + item.end_loc + "\t" + item.oddsratio_ + "\t" + item.p_value
                        + "\t" + qvals.get(i));
                output.newLine();
            }
            final_results.clear();
        }
        output.close();
    }
    System.out.println("Done.");
}
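Here the output file name is built with java.nio.file.Paths and then handed to the java.io FileWriter as a plain string, again with an explicit false for overwrite. A minimal sketch of mixing the two APIs, with hypothetical path components:

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Paths;

public class PathDerivedName {
    public static void main(String[] args) throws IOException {
        // Build the name with java.nio, write with java.io.
        String name = Paths.get("out", "differences_200_s50_c.txt").toAbsolutePath().toString();
        try (BufferedWriter output = new BufferedWriter(new FileWriter(name, false))) {
            output.write("Gene_Symbol\tStart\tEnd");
            output.newLine();
        }
    }
}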
From source file:com.kappaware.logtrawler.Main.java
@SuppressWarnings("static-access")
static public void main(String[] argv) throws Throwable {
    Config config;
    Options options = new Options();
    options.addOption(OptionBuilder.hasArg().withArgName("configFile").withLongOpt("config-file")
            .withDescription("JSON configuration file").create("c"));
    options.addOption(OptionBuilder.hasArg().withArgName("folder").withLongOpt("folder")
            .withDescription("Folder to monitor").create("f"));
    options.addOption(OptionBuilder.hasArg().withArgName("exclusion").withLongOpt("exclusion")
            .withDescription("Exclusion regex").create("x"));
    options.addOption(OptionBuilder.hasArg().withArgName("adminEndpoint").withLongOpt("admin-endpoint")
            .withDescription("Endpoint for admin REST").create("e"));
    options.addOption(OptionBuilder.hasArg().withArgName("outputFlow").withLongOpt("output-flow")
            .withDescription("Target to post result on").create("o"));
    options.addOption(OptionBuilder.hasArg().withArgName("hostname").withLongOpt("hostname")
            .withDescription("This hostname").create("h"));
    options.addOption(OptionBuilder.withLongOpt("displayDot").withDescription("Display Dot").create("d"));
    options.addOption(OptionBuilder.hasArg().withArgName("mimeType").withLongOpt("mime-type")
            .withDescription("Valid MIME type").create("m"));
    options.addOption(OptionBuilder.hasArg().withArgName("allowedAdmin").withLongOpt("allowedAdmin")
            .withDescription("Allowed admin network").create("a"));
    options.addOption(OptionBuilder.hasArg().withArgName("configFile").withLongOpt("gen-config-file")
            .withDescription("Generate JSON configuration file").create("g"));
    options.addOption(OptionBuilder.hasArg().withArgName("maxBatchSize").withLongOpt("max-batch-size")
            .withDescription("Max JSON batch (array) size").create("b"));
    CommandLineParser clParser = new BasicParser();
    CommandLine line;
    String configFile = null;
    try {
        // parse the command line arguments
        line = clParser.parse(options, argv);
        if (line.hasOption("c")) {
            configFile = line.getOptionValue("c");
            config = Json.fromJson(Config.class,
                    new BufferedReader(new InputStreamReader(new FileInputStream(configFile))));
        } else {
            config = new Config();
        }
        if (line.hasOption("f")) {
            String[] fs = line.getOptionValues("f");
            // Get the first agent (create it if needed)
            if (config.getAgents() == null || config.getAgents().size() == 0) {
                Config.Agent agent = new Config.Agent("default");
                config.addAgent(agent);
            }
            Config.Agent agent = config.getAgents().iterator().next();
            for (String f : fs) {
                agent.addFolder(new Config.Agent.Folder(f, false));
            }
        }
        if (line.hasOption("e")) {
            String e = line.getOptionValue("e");
            config.setAdminEndpoint(e);
        }
        if (line.hasOption("o")) {
            String[] es = line.getOptionValues("o");
            if (config.getAgents() != null) {
                for (Agent agent : config.getAgents()) {
                    for (String s : es) {
                        agent.addOuputFlow(s);
                    }
                }
            }
        }
        if (line.hasOption("h")) {
            String e = line.getOptionValue("h");
            config.setHostname(e);
        }
        if (line.hasOption("x")) {
            if (config.getAgents() != null) {
                for (Agent agent : config.getAgents()) {
                    if (agent.getFolders() != null) {
                        for (Folder folder : agent.getFolders()) {
                            String[] exs = line.getOptionValues("x");
                            for (String ex : exs) {
                                folder.addExcludedPath(ex);
                            }
                        }
                    }
                }
            }
        }
        if (line.hasOption("m")) {
            if (config.getAgents() != null) {
                for (Agent agent : config.getAgents()) {
                    String[] exs = line.getOptionValues("m");
                    for (String ex : exs) {
                        agent.addLogMimeType(ex);
                    }
                }
            }
        }
        if (line.hasOption("a")) {
            String[] exs = line.getOptionValues("a");
            for (String ex : exs) {
                config.addAdminAllowedNetwork(ex);
            }
        }
        if (line.hasOption("d")) {
            config.setDisplayDot(true);
        }
        if (line.hasOption("b")) {
            Integer i = getIntegerParameter(line, "b");
            if (config.getAgents() != null) {
                for (Agent agent : config.getAgents()) {
                    agent.setOutputMaxBatchSize(i);
                }
            }
        }
        config.setDefault();
        if (line.hasOption("g")) {
            String fileName = line.getOptionValue("g");
            PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(fileName, false)));
            out.println(Json.toJson(config, true));
            out.flush();
            out.close();
            System.exit(0);
        }
    } catch (ParseException exp) {
        // oops, something went wrong
        usage(options, exp.getMessage());
        return;
    }
    try {
        // Check config
        if (config.getAgents() == null || config.getAgents().size() < 1) {
            throw new ConfigurationException("At least one folder to monitor must be provided!");
        }
        Map<String, AgentHandler> agentHandlerByName = new HashMap<String, AgentHandler>();
        for (Config.Agent agent : config.getAgents()) {
            agentHandlerByName.put(agent.getName(), new AgentHandler(agent));
        }
        if (!Utils.isNullOrEmpty(config.getAdminEndpoint())) {
            new AdminServer(config, agentHandlerByName);
        }
    } catch (ConfigurationException e) {
        log.error(e.toString());
        System.exit(1);
    } catch (Throwable t) {
        log.error("Error in main", t);
        System.exit(2);
    }
}
From source file:edu.usc.qufd.Main.java
/**
 * The main method.
 *
 * @param args the arguments
 * @throws IOException Signals that an I/O exception has occurred.
 */
public static void main(String[] args) throws IOException {
    if (parseInputs(args) == false) {
        System.exit(-1); // The input files do not exist
    }
    /*
     * Parsing inputs: fabric & qasm file
     */
    PrintWriter outputFile;
    RandomAccessFile raf = null;
    String latencyPlaceHolder = null;
    if (RuntimeConfig.OUTPUT_TO_FILE) {
        latencyPlaceHolder = "Total Latency: " + Long.MAX_VALUE + " us" + System.lineSeparator();
        raf = new RandomAccessFile(outputFileAddr, "rws");
        // removing the old values in the file
        raf.setLength(0);
        // writing a placeholder for the total latency
        raf.writeBytes(latencyPlaceHolder);
        raf.close();
        outputFile = new PrintWriter(new BufferedWriter(new FileWriter(outputFileAddr, true)), true);
    } else {
        // writing to stdout
        outputFile = new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.out)), true);
    }
    /* parsing the input */
    layout = LayoutParser.parse(pmdFileAddr);
    qasm = QASMParser.QASMParser(qasmFileAddr, layout);
    long totalLatency = qufd(outputFile);
    if (RuntimeConfig.OUTPUT_TO_FILE) {
        outputFile.close();
        // Overwriting the placeholder with the actual latency
        String latencyActual = "Total Latency: " + totalLatency + " " + layout.getTimeUnit();
        latencyActual = StringUtils.rightPad(latencyActual,
                latencyPlaceHolder.length() - System.lineSeparator().length());
        raf = new RandomAccessFile(outputFileAddr, "rws");
        // Writing to the top of the file
        raf.seek(0);
        // writing the actual total latency at the top of the output file
        raf.writeBytes(latencyActual + System.lineSeparator());
        raf.close();
    } else {
        outputFile.flush();
        System.out.println("Total Latency: " + totalLatency + " " + layout.getTimeUnit());
    }
    if (RuntimeConfig.VERBOSE) {
        System.out.println("Done.");
    }
    outputFile.close();
}
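The pattern worth noting here: the file is first truncated and seeded with a fixed-width placeholder line via RandomAccessFile, then reopened with new FileWriter(path, true) so the PrintWriter appends after the placeholder; at the end the placeholder is overwritten in place with a value padded to the same width. A minimal sketch of that header-patching trick, with hypothetical names:

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.RandomAccessFile;

public class PatchHeaderExample {
    public static void main(String[] args) throws IOException {
        String path = "report.txt"; // hypothetical output file
        String placeholder = "Total: " + Long.MAX_VALUE + System.lineSeparator();
        try (RandomAccessFile raf = new RandomAccessFile(path, "rws")) {
            raf.setLength(0);            // discard old contents
            raf.writeBytes(placeholder); // reserve space for the final value
        }
        try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(path, true)), true)) {
            out.println("body line 1");  // appended after the placeholder
        }
        // Patch the first line; pad so we never write past the reserved width.
        String actual = "Total: 12345";
        int width = placeholder.length() - System.lineSeparator().length();
        actual = String.format("%-" + width + "s", actual);
        try (RandomAccessFile raf = new RandomAccessFile(path, "rws")) {
            raf.seek(0);
            raf.writeBytes(actual + System.lineSeparator());
        }
    }
}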
From source file:examples.mail.IMAPExportMbox.java
public static void main(String[] args) throws IOException {
    int connect_timeout = CONNECT_TIMEOUT;
    int read_timeout = READ_TIMEOUT;

    int argIdx = 0;
    String eol = EOL_DEFAULT;
    boolean printHash = false;
    boolean printMarker = false;
    int retryWaitSecs = 0;

    for (argIdx = 0; argIdx < args.length; argIdx++) {
        if (args[argIdx].equals("-c")) {
            connect_timeout = Integer.parseInt(args[++argIdx]);
        } else if (args[argIdx].equals("-r")) {
            read_timeout = Integer.parseInt(args[++argIdx]);
        } else if (args[argIdx].equals("-R")) {
            retryWaitSecs = Integer.parseInt(args[++argIdx]);
        } else if (args[argIdx].equals("-LF")) {
            eol = LF;
        } else if (args[argIdx].equals("-CRLF")) {
            eol = CRLF;
        } else if (args[argIdx].equals("-.")) {
            printHash = true;
        } else if (args[argIdx].equals("-X")) {
            printMarker = true;
        } else {
            break;
        }
    }

    final int argCount = args.length - argIdx;

    if (argCount < 2) {
        System.err.println("Usage: IMAPExportMbox [-LF|-CRLF] [-c n] [-r n] [-R n] [-.] [-X]"
                + " imap[s]://user:password@host[:port]/folder/path [+|-]<mboxfile> [sequence-set] [itemnames]");
        System.err.println(
                "\t-LF | -CRLF set end-of-line to LF or CRLF (default is the line.separator system property)");
        System.err.println("\t-c connect timeout in seconds (default 10)");
        System.err.println("\t-r read timeout in seconds (default 10)");
        System.err.println("\t-R temporary failure retry wait in seconds (default 0; i.e. disabled)");
        System.err.println("\t-. print a . for each complete message received");
        System.err.println("\t-X print the X-IMAP line for each complete message received");
        System.err.println(
                "\tthe mboxfile is where the messages are stored; use '-' to write to standard output.");
        System.err.println(
                "\tPrefix filename with '+' to append to the file. Prefix with '-' to allow overwrite.");
        System.err.println(
                "\ta sequence-set is a list of numbers/number ranges e.g. 1,2,3-10,20:* - default 1:*");
        System.err.println("\titemnames are the message data item name(s) e.g. BODY.PEEK[HEADER.FIELDS (SUBJECT)]"
                + " or a macro e.g. ALL - default (INTERNALDATE BODY.PEEK[])");
        System.exit(1);
    }

    final URI uri = URI.create(args[argIdx++]);
    final String file = args[argIdx++];
    String sequenceSet = argCount > 2 ? args[argIdx++] : "1:*";
    final String itemNames;
    // Handle 0, 1 or multiple item names
    if (argCount > 3) {
        if (argCount > 4) {
            StringBuilder sb = new StringBuilder();
            sb.append("(");
            for (int i = 4; i <= argCount; i++) {
                if (i > 4) {
                    sb.append(" ");
                }
                sb.append(args[argIdx++]);
            }
            sb.append(")");
            itemNames = sb.toString();
        } else {
            itemNames = args[argIdx++];
        }
    } else {
        itemNames = "(INTERNALDATE BODY.PEEK[])";
    }

    final boolean checkSequence = sequenceSet.matches("\\d+:(\\d+|\\*)"); // are we expecting a sequence?
    final MboxListener chunkListener;
    if (file.equals("-")) {
        chunkListener = null;
    } else if (file.startsWith("+")) {
        final File mbox = new File(file.substring(1));
        System.out.println("Appending to file " + mbox);
        chunkListener = new MboxListener(new BufferedWriter(new FileWriter(mbox, true)), eol, printHash,
                printMarker, checkSequence);
    } else if (file.startsWith("-")) {
        final File mbox = new File(file.substring(1));
        System.out.println("Writing to file " + mbox);
        chunkListener = new MboxListener(new BufferedWriter(new FileWriter(mbox, false)), eol, printHash,
                printMarker, checkSequence);
    } else {
        final File mbox = new File(file);
        if (mbox.exists()) {
            throw new IOException("mailbox file: " + mbox + " already exists!");
        }
        System.out.println("Creating file " + mbox);
        chunkListener = new MboxListener(new BufferedWriter(new FileWriter(mbox)), eol, printHash, printMarker,
                checkSequence);
    }

    String path = uri.getPath();
    if (path == null || path.length() < 1) {
        throw new IllegalArgumentException("Invalid folderPath: '" + path + "'");
    }
    String folder = path.substring(1); // skip the leading /

    // suppress login details
    final PrintCommandListener listener = new PrintCommandListener(System.out, true) {
        @Override
        public void protocolReplyReceived(ProtocolCommandEvent event) {
            if (event.getReplyCode() != IMAPReply.PARTIAL) { // This is dealt with by the chunk listener
                super.protocolReplyReceived(event);
            }
        }
    };

    // Connect and login
    final IMAPClient imap = IMAPUtils.imapLogin(uri, connect_timeout * 1000, listener);

    String maxIndexInFolder = null;

    try {
        imap.setSoTimeout(read_timeout * 1000);

        if (!imap.select(folder)) {
            throw new IOException("Could not select folder: " + folder);
        }

        for (String line : imap.getReplyStrings()) {
            maxIndexInFolder = matches(line, PATEXISTS, 1);
            if (maxIndexInFolder != null) {
                break;
            }
        }

        if (chunkListener != null) {
            imap.setChunkListener(chunkListener);
        } // else the command listener displays the full output without processing

        while (true) {
            boolean ok = imap.fetch(sequenceSet, itemNames);
            // If the fetch failed, can we retry?
            if (!ok && retryWaitSecs > 0 && chunkListener != null && checkSequence) {
                final String replyString = imap.getReplyString(); // includes EOL
                if (startsWith(replyString, PATTEMPFAIL)) {
                    System.err.println("Temporary error detected, will retry in " + retryWaitSecs + "seconds");
                    sequenceSet = (chunkListener.lastSeq + 1) + ":*";
                    try {
                        Thread.sleep(retryWaitSecs * 1000);
                    } catch (InterruptedException e) {
                        // ignored
                    }
                } else {
                    throw new IOException(
                            "FETCH " + sequenceSet + " " + itemNames + " failed with " + replyString);
                }
            } else {
                break;
            }
        }

    } catch (IOException ioe) {
        String count = chunkListener == null ? "?" : Integer.toString(chunkListener.total);
        System.err.println("FETCH " + sequenceSet + " " + itemNames + " failed after processing " + count
                + " complete messages ");
        if (chunkListener != null) {
            System.err.println("Last complete response seen: " + chunkListener.lastFetched);
        }
        throw ioe;
    } finally {
        if (printHash) {
            System.err.println();
        }
        if (chunkListener != null) {
            chunkListener.close();
            final Iterator<String> missingIds = chunkListener.missingIds.iterator();
            if (missingIds.hasNext()) {
                StringBuilder sb = new StringBuilder();
                for (;;) {
                    sb.append(missingIds.next());
                    if (!missingIds.hasNext()) {
                        break;
                    }
                    sb.append(",");
                }
                System.err.println("*** Missing ids: " + sb.toString());
            }
        }
        imap.logout();
        imap.disconnect();
    }
    if (chunkListener != null) {
        System.out.println("Processed " + chunkListener.total + " messages.");
    }
    if (maxIndexInFolder != null) {
        System.out.println("Folder contained " + maxIndexInFolder + " messages.");
    }
}
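Three FileWriter constructors appear in this example, selected by a filename prefix: '+' appends (new FileWriter(mbox, true)), '-' overwrites (new FileWriter(mbox, false)), and no prefix uses the single-argument constructor, but only after an explicit exists() check, since FileWriter itself silently truncates existing files. A minimal sketch of that create-only guard, with a hypothetical file name:

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

public class CreateOnlyExample {
    public static void main(String[] args) throws IOException {
        File mbox = new File("inbox.mbox"); // hypothetical
        if (mbox.exists()) {
            throw new IOException("mailbox file: " + mbox + " already exists!");
        }
        try (FileWriter writer = new FileWriter(mbox)) { // same as append = false
            writer.write("From ...\n");
        }
    }
}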
From source file:com.vmware.photon.controller.core.Main.java
public static void main(String[] args) throws Throwable {
    try {
        LoggingFactory.bootstrap();
        logger.info("args: " + Arrays.toString(args));

        ArgumentParser parser = ArgumentParsers.newArgumentParser("PhotonControllerCore").defaultHelp(true)
                .description("Photon Controller Core");
        parser.addArgument("config-file").help("photon controller configuration file");
        parser.addArgument("--manual").type(Boolean.class).setDefault(false)
                .help("If true, create default deployment.");

        Namespace namespace = parser.parseArgsOrFail(args);

        PhotonControllerConfig photonControllerConfig = getPhotonControllerConfig(namespace);
        DeployerConfig deployerConfig = photonControllerConfig.getDeployerConfig();

        new LoggingFactory(photonControllerConfig.getLogging(), "photon-controller-core").configure();

        SSLContext sslContext;
        if (deployerConfig.getDeployerContext().isAuthEnabled()) {
            sslContext = SSLContext.getInstance(KeyStoreUtils.THRIFT_PROTOCOL);
            TrustManagerFactory tmf = null;
            tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            KeyStore keyStore = KeyStore.getInstance("JKS");
            InputStream in = FileUtils
                    .openInputStream(new File(deployerConfig.getDeployerContext().getKeyStorePath()));
            keyStore.load(in, deployerConfig.getDeployerContext().getKeyStorePassword().toCharArray());
            tmf.init(keyStore);
            sslContext.init(null, tmf.getTrustManagers(), null);
        } else {
            KeyStoreUtils.generateKeys("/thrift/");
            sslContext = KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL);
        }

        ThriftModule thriftModule = new ThriftModule(sslContext);
        PhotonControllerXenonHost xenonHost = startXenonHost(photonControllerConfig, thriftModule,
                deployerConfig, sslContext);

        if ((Boolean) namespace.get("manual")) {
            DefaultDeployment.createDefaultDeployment(photonControllerConfig.getXenonConfig().getPeerNodes(),
                    deployerConfig, xenonHost);
        }

        // Creating a temp configuration file for apife with modifications to some named sections
        // in photon-controller-config so that it can match the Configuration class of dropwizard.
        File apiFeTempConfig = File.createTempFile("apiFeTempConfig", ".tmp");
        File source = new File(args[0]);
        FileInputStream fis = new FileInputStream(source);
        BufferedReader in = new BufferedReader(new InputStreamReader(fis));

        FileWriter fstream = new FileWriter(apiFeTempConfig, true);
        BufferedWriter out = new BufferedWriter(fstream);

        String aLine = null;
        while ((aLine = in.readLine()) != null) {
            if (aLine.equals("apife:")) {
                aLine = aLine.replace("apife:", "server:");
            }
            out.write(aLine);
            out.newLine();
        }
        in.close();
        out.close();

        // This approach can be simplified once the apife container is gone, but for the time being
        // it expects the first arg to be the string "server".
        String[] apiFeArgs = new String[2];
        apiFeArgs[0] = "server";
        apiFeArgs[1] = apiFeTempConfig.getAbsolutePath();
        ApiFeService.setupApiFeConfigurationForServerCommand(apiFeArgs);
        ApiFeService.addServiceHost(xenonHost);
        ApiFeService.setSSLContext(sslContext);

        ApiFeService apiFeService = new ApiFeService();
        apiFeService.run(apiFeArgs);
        apiFeTempConfig.deleteOnExit();

        LocalApiClient localApiClient = apiFeService.getInjector().getInstance(LocalApiClient.class);
        xenonHost.setApiClient(localApiClient);

        // in the non-auth enabled scenario we need to be able to accept any self-signed certificate
        if (!deployerConfig.getDeployerContext().isAuthEnabled()) {
            KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL);
        }

        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                logger.info("Shutting down");
                xenonHost.stop();
                logger.info("Done");
                LoggingFactory.detachAndStop();
            }
        });
    } catch (Exception e) {
        logger.error("Failed to start photon controller ", e);
        throw e;
    }
}
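FileWriter is used here for a line-by-line filter copy: read each line of the source config, rewrite the "apife:" section header, and write the result to a temp file. Opening a fresh temp file from File.createTempFile with append = true is effectively the same as overwrite, because the file starts empty. A minimal sketch of the filter-copy idiom, with hypothetical names:

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;

public class FilterCopy {
    public static void main(String[] args) throws IOException {
        File tmp = File.createTempFile("apiFeTempConfig", ".tmp");
        try (BufferedReader in = new BufferedReader(new FileReader("photon-controller.yml")); // hypothetical source
                BufferedWriter out = new BufferedWriter(new FileWriter(tmp, true))) {
            String line;
            while ((line = in.readLine()) != null) {
                // Rename the section header, pass everything else through.
                out.write(line.equals("apife:") ? "server:" : line);
                out.newLine();
            }
        }
    }
}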
From source file:br.edu.ufcg.lsd.oursim.ui.CLI.java
/**
 * Example:
 *
 * <pre>
 * java -jar oursim.jar -w resources/trace_filtrado_primeiros_1000_jobs.txt -m resources/hostinfo_sdsc.dat -synthetic_av -o oursim_trace.txt
 * -w resources/trace_filtrado_primeiros_1000_jobs.txt -s persistent -nr 20 -md resources/hostinfo_sdsc.dat -av resources/disponibilidade.txt -o oursim_trace.txt
 * -w resources/new_iosup_workload.txt -s persistent -pd resources/iosup_site_description.txt -wt iosup -nr 1 -synthetic_av -o oursim_trace.txt
 * -w resources/new_workload.txt -s persistent -pd resources/marcus_site_description.txt -wt marcus -nr 20 -d -o oursim_trace.txt
 * 1 ms + 1 day = 2678400 seconds
 * </pre>
 *
 * @param args
 * @throws FileNotFoundException
 */
public static void main(String[] args) throws IOException {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    List<Closeable> closeables = new ArrayList<Closeable>();
    CommandLine cmd = parseCommandLine(args, prepareOptions(), HELP, USAGE, EXECUTION_LINE);

    File outputFile = (File) cmd.getOptionObject(OUTPUT);
    PrintOutput printOutput = new PrintOutput(outputFile, false);
    JobEventDispatcher.getInstance().addListener(printOutput);
    closeables.add(printOutput);

    if (cmd.hasOption(EXTRACT_REMOTE_WORKLOAD)) {
        File remoteWorkloadFile = (File) cmd.getOptionObject(EXTRACT_REMOTE_WORKLOAD);
        Output remoteWorkloadExtractor = new RemoteTasksExtractorOutput(remoteWorkloadFile);
        closeables.add(remoteWorkloadExtractor);
        JobEventDispatcher.getInstance().addListener(remoteWorkloadExtractor);
    }

    Grid grid = prepareGrid(cmd);
    ComputingElementEventCounter computingElementEventCounter = prepareOutputAccounting(cmd,
            cmd.hasOption(VERBOSE));
    Input<? extends AvailabilityRecord> availability = defineAvailability(cmd, grid.getMapOfPeers());
    prepareOptionalOutputFiles(cmd, grid, (SyntheticAvailabilityCharacterizationAbstract) availability,
            closeables);

    long timeOfFirstSubmission = cmd.getOptionValue(WORKLOAD_TYPE).equals("gwa")
            ? GWAFormat.extractSubmissionTimeFromFirstJob(cmd.getOptionValue(WORKLOAD))
            : 0;
    Workload workload = defineWorkloadType(cmd, cmd.getOptionValue(WORKLOAD), grid.getMapOfPeers(),
            timeOfFirstSubmission);
    JobSchedulerPolicy jobScheduler = defineScheduler(cmd, grid.getListOfPeers());
    OurSim oursim = new OurSim(EventQueue.getInstance(), grid, jobScheduler, workload, availability);
    oursim.setActiveEntity(new ActiveEntityImp());

    if (cmd.hasOption(HALT_SIMULATION)) {
        oursim.addHaltEvent(((Number) cmd.getOptionObject(HALT_SIMULATION)).longValue());
    }

    oursim.start();

    for (Closeable c : closeables) {
        c.close();
    }

    EventQueue.getInstance().clear();

    // append summary metrics to the end of the output file
    FileWriter fw = new FileWriter(cmd.getOptionValue(OUTPUT), true);
    closeables.add(fw);
    stopWatch.stop();
    fw.write("# Simulation duration:" + stopWatch + ".\n");

    double utilization = grid.getUtilization();
    double realUtilization = grid.getTrueUtilization();
    int numberOfResourcesByPeer = Integer.parseInt(cmd.getOptionValue(NUM_RESOURCES_BY_PEER, "0"));
    fw.write(formatSummaryStatistics(computingElementEventCounter, "NA", "NA", false, grid.getPeers().size(),
            numberOfResourcesByPeer, utilization, realUtilization, stopWatch.getTime()) + "\n");
    fw.close();
    System.out.println(getSummaryStatistics(computingElementEventCounter, "NA", "NA", false,
            grid.getPeers().size(), numberOfResourcesByPeer, utilization, realUtilization, stopWatch.getTime()));
}
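After the simulation's own writers are closed, the example reopens the same output file with new FileWriter(path, true) to append summary metrics as a footer, a simple way to add trailer lines without keeping the main writer open for the whole run. A minimal sketch of the append-a-footer idiom, with hypothetical content:

import java.io.FileWriter;
import java.io.IOException;

public class AppendFooter {
    public static void main(String[] args) throws IOException {
        // Append two summary lines to an already-written trace file.
        try (FileWriter fw = new FileWriter("oursim_trace.txt", true)) { // hypothetical file
            fw.write("# Simulation duration: 00:01:23.456.\n");
            fw.write("# peers=20 utilization=0.85\n");
        }
    }
}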
From source file:fr.tpt.s3.mcdag.bench.MainBench.java
public static void main(String[] args) throws IOException, InterruptedException {
    // Command line options
    Options options = new Options();

    Option input = new Option("i", "input", true, "MC-DAG XML models");
    input.setRequired(true);
    input.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(input);

    Option output = new Option("o", "output", true, "Folder where results have to be written.");
    output.setRequired(true);
    options.addOption(output);

    Option uUti = new Option("u", "utilization", true, "Utilization.");
    uUti.setRequired(true);
    options.addOption(uUti);

    Option output2 = new Option("ot", "output-total", true, "File where total results are being written");
    output2.setRequired(true);
    options.addOption(output2);

    Option oCores = new Option("c", "cores", true, "Cores given to the test");
    oCores.setRequired(true);
    options.addOption(oCores);

    Option oLvls = new Option("l", "levels", true, "Levels tested for the system");
    oLvls.setRequired(true);
    options.addOption(oLvls);

    Option jobs = new Option("j", "jobs", true, "Number of threads to be launched.");
    jobs.setRequired(false);
    options.addOption(jobs);

    Option debug = new Option("d", "debug", false, "Debug logs.");
    debug.setRequired(false);
    options.addOption(debug);

    /*
     * Parsing of the command line
     */
    CommandLineParser parser = new DefaultParser();
    HelpFormatter formatter = new HelpFormatter();
    CommandLine cmd;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        formatter.printHelp("Benchmarks MultiDAG", options);
        System.exit(1);
        return;
    }

    String[] inputFilePath = cmd.getOptionValues("input");
    String outputFilePath = cmd.getOptionValue("output");
    String outputFilePathTotal = cmd.getOptionValue("output-total");
    double utilization = Double.parseDouble(cmd.getOptionValue("utilization"));
    boolean boolDebug = cmd.hasOption("debug");
    int nbLvls = Integer.parseInt(cmd.getOptionValue("levels"));
    int nbJobs = 1;
    int nbFiles = inputFilePath.length;

    if (cmd.hasOption("jobs"))
        nbJobs = Integer.parseInt(cmd.getOptionValue("jobs"));

    int nbCores = Integer.parseInt(cmd.getOptionValue("cores"));

    /*
     * While files need to be allocated
     * run the tests in the pool of threads
     */

    // For dual-criticality systems we call a specific thread
    if (nbLvls == 2) {
        System.out.println(">>>>>>>>>>>>>>>>>>>>> NB levels " + nbLvls);
        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; FSched (%); FPreempts; FAct; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();

        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadDualCriticality bt2 = new BenchThreadDualCriticality(inputFilePath[i_files2], outFile,
                    nbCores, boolDebug);
            executor2.execute(bt2);
            i_files2++;
        }

        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        int fedTotal = 0;
        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int fedPreempts = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int fedActiv = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;

        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        @SuppressWarnings("resource")
        Scanner line = new Scanner(f);

        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;
                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            fedTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            fedPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            fedActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 11) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 12) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 13) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }

        // Write percentages
        double fedPerc = (double) fedTotal / nbFiles;
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;

        double fedPercPreempts = (double) fedPreempts / fedActiv;
        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;

        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + fedPerc + "; " + fedPreempts
                + "; " + fedActiv + "; " + fedPercPreempts + "; " + laxPerc + "; " + laxPreempts + "; " + laxActiv
                + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts + "; " + edfActiv + "; "
                + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; " + hybridActiv + "; "
                + hybridPercPreempts + "\n");
        wOutput.close();

    } else if (nbLvls > 2) {
        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();

        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadNLevels bt2 = new BenchThreadNLevels(inputFilePath[i_files2], outFile, nbCores,
                    boolDebug);
            executor2.execute(bt2);
            i_files2++;
        }

        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;

        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        @SuppressWarnings("resource")
        Scanner line = new Scanner(f);

        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;
                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }

        // Write percentages
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;

        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;

        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + laxPerc + "; " + laxPreempts
                + "; " + laxActiv + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts + "; " + edfActiv
                + "; " + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; " + hybridActiv + "; "
                + hybridPercPreempts + "\n");
        wOutput.close();
    } else {
        System.err.println("Wrong number of levels");
        System.exit(-1);
    }
    System.out.println("[BENCH Main] Done benchmarking U = " + utilization + " Levels " + nbLvls);
}
From source file:discovery.compression.kdd2011.ratio.RatioCompressionReport.java
public static void main(String[] args) throws GraphReadingException, IOException, java.text.ParseException {
    opts.addOption("r", true, "Goal compression ratio");
    // opts.addOption("a", true,
    //         "Algorithm used for compression. The default and only currently available option is \"greedy\"");
    // opts.addOption("cost-output", true, "Output file for costs, default is costs.txt");
    // opts.addOption("cost-format", true, "Output format for ");
    opts.addOption("ctype", true, "Connectivity type: global or local, default is global.");
    opts.addOption("connectivity", false,
            "enables output for connectivity. Connectivity info will be written to connectivity.txt");
    opts.addOption("output_bmg", true, "Write bmg file with groups to given file.");
    opts.addOption("algorithm", true, "Algorithm to use, one of: greedy random1 random2 bruteforce slowgreedy");
    opts.addOption("hop2", false, "Only try to merge nodes that have common neighbors");
    opts.addOption("kmedoids", false, "Enables output for kmedoids clustering");
    opts.addOption("kmedoids_k", true, "Number of clusters to be used in kmedoids. Default is 3");
    opts.addOption("kmedoids_output", true,
            "Output file for kmedoid clusters. Default is clusters.txt. This file will be overwritten.");
    opts.addOption("norefresh", false,
            "Use old style merging: all connectivities are not refreshed when merging");
    opts.addOption("edge_attribute", true, "Attribute from bmgraph used as edge weight");
    opts.addOption("only_times", false, "Only write times.txt");
    // opts.addOption("no_metrics", false,
    //         "Exit after compression, don't calculate any metrics or produce output bmg for the compression.");

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(opts, args);
    } catch (ParseException e) {
        e.printStackTrace();
        System.exit(0);
    }

    boolean connectivity = false;
    double ratio = 0;
    boolean hop2 = cmd.hasOption("hop2");

    RatioCompression compression = new GreedyRatioCompression(hop2);

    if (cmd.hasOption("connectivity"))
        connectivity = true;

    ConnectivityType ctype = ConnectivityType.GLOBAL;
    CompressionMergeModel mergeModel = new PathAverageMergeModel();

    if (cmd.hasOption("ctype")) {
        String ctypeStr = cmd.getOptionValue("ctype");
        if (ctypeStr.equals("local")) {
            ctype = ConnectivityType.LOCAL;
            mergeModel = new EdgeAverageMergeModel();
        } else if (ctypeStr.equals("global")) {
            ctype = ConnectivityType.GLOBAL;
            mergeModel = new PathAverageMergeModel();
        } else {
            System.out.println(PROGRAM_NAME + ": unknown connectivity type " + ctypeStr);
            printHelp();
        }
    }

    if (cmd.hasOption("norefresh"))
        mergeModel = new PathAverageMergeModelNorefresh();

    if (cmd.hasOption("algorithm")) {
        String alg = cmd.getOptionValue("algorithm");
        if (alg.equals("greedy")) {
            compression = new GreedyRatioCompression(hop2);
        } else if (alg.equals("random1")) {
            compression = new RandomRatioCompression(hop2);
        } else if (alg.equals("random2")) {
            compression = new SmartRandomRatioCompression(hop2);
        } else if (alg.equals("bruteforce")) {
            compression = new BruteForceCompression(hop2, ctype == ConnectivityType.LOCAL);
        } else if (alg.equals("slowgreedy")) {
            compression = new SlowGreedyRatioCompression(hop2);
        } else {
            System.out.println("algorithm must be one of: greedy random1 random2 bruteforce slowgreedy");
            printHelp();
        }
    }

    compression.setMergeModel(mergeModel);

    if (cmd.hasOption("r")) {
        ratio = Double.parseDouble(cmd.getOptionValue("r"));
    } else {
        System.out.println(PROGRAM_NAME + ": compression ratio not defined");
        printHelp();
    }

    if (cmd.hasOption("help")) {
        printHelp();
    }

    String infile = null;
    if (cmd.getArgs().length != 0) {
        infile = cmd.getArgs()[0];
    } else {
        printHelp();
    }

    boolean kmedoids = false;
    int kmedoidsK = 3;
    String kmedoidsOutput = "clusters.txt";
    if (cmd.hasOption("kmedoids"))
        kmedoids = true;
    if (cmd.hasOption("kmedoids_k"))
        kmedoidsK = Integer.parseInt(cmd.getOptionValue("kmedoids_k"));
    if (cmd.hasOption("kmedoids_output"))
        kmedoidsOutput = cmd.getOptionValue("kmedoids_output");

    String edgeAttrib = "goodness";
    if (cmd.hasOption("edge_attribute"))
        edgeAttrib = cmd.getOptionValue("edge_attribute");

    // This program should directly use bmgraph-java to read, and
    // DefaultGraph should have a constructor that takes a BMGraph as an argument.
    // VisualGraph vg = new VisualGraph(infile, edgeAttrib, false);
    // SimpleVisualGraph origSG = new SimpleVisualGraph(vg);
    BMGraph bmg = BMGraphUtils.readBMGraph(infile);

    int origN = bmg.getNodes().size();
    System.out.println("bmgraph read");

    BMNode[] i2n = new BMNode[origN];
    HashMap<BMNode, Integer> n2i = new HashMap<BMNode, Integer>();
    {
        int pi = 0;
        for (BMNode nod : bmg.getNodes()) {
            n2i.put(nod, pi);
            i2n[pi++] = nod;
        }
    }

    DefaultGraph dg = new DefaultGraph();
    for (BMEdge e : bmg.getEdges()) {
        dg.addEdge(n2i.get(e.getSource()), n2i.get(e.getTarget()), Double.parseDouble(e.get(edgeAttrib)));
    }

    DefaultGraph origDG = dg.copy();

    System.out.println("inputs read");
    RatioCompression nopCompressor = new RatioCompression.DefaultRatioCompression();
    ResultGraph nopResult = nopCompressor.compressGraph(dg, 1);

    long start = System.currentTimeMillis();
    ResultGraph result = compression.compressGraph(dg, ratio);
    long timeSpent = System.currentTimeMillis() - start;
    double seconds = timeSpent * 0.001;

    BufferedWriter timesWriter = new BufferedWriter(new FileWriter("times.txt", true));
    timesWriter.append("" + seconds + "\n");
    timesWriter.close();

    if (cmd.hasOption("only_times")) {
        System.out.println("Compression done, exiting.");
        System.exit(0);
    }

    BufferedWriter costsWriter = new BufferedWriter(new FileWriter("costs.txt", true));
    costsWriter.append("" + nopResult.getCompressorCosts() + " " + result.getCompressorCosts() + "\n");
    costsWriter.close();

    double[][] origProb;
    double[][] compProb;
    int[] group = new int[origN];

    for (int i = 0; i < result.partition.size(); i++)
        for (int x : result.partition.get(i))
            group[x] = i;

    if (ctype == ConnectivityType.LOCAL) {
        origProb = new double[origN][origN];
        compProb = new double[origN][origN];
        DefaultGraph g = result.uncompressedGraph();
        for (int i = 0; i < origN; i++) {
            for (int j = 0; j < origN; j++) {
                origProb[i][j] = dg.getEdgeWeight(i, j);
                compProb[i][j] = g.getEdgeWeight(i, j);
            }
        }
        System.out.println("Writing edge-dissimilarity");
    } else {
        origProb = ProbDijkstra.getProbMatrix(origDG);
        compProb = new double[origN][origN];

        System.out.println("nodeCount = " + result.graph.getNodeCount());
        double[][] ccProb = ProbDijkstra.getProbMatrix(result.graph);
        System.out.println("ccProb.length = " + ccProb.length);
        System.out.println("ccProb[0].length = " + ccProb[0].length);

        for (int i = 0; i < origN; i++) {
            for (int j = 0; j < origN; j++) {
                if (group[i] == group[j])
                    compProb[i][j] = result.graph.getEdgeWeight(group[i], group[j]);
                else
                    compProb[i][j] = ccProb[group[i]][group[j]];
            }
        }
        System.out.println("Writing best-path-dissimilarity");
        // compProb = ProbDijkstra.getProbMatrix(result.uncompressedGraph());
    }

    {
        BufferedWriter connWr = null;
        if (connectivity) {
            connWr = new BufferedWriter(new FileWriter("connectivity.txt", true));
        }
        double totalDiff = 0;

        for (int i = 0; i < origN; i++) {
            for (int j = i + 1; j < origN; j++) {
                double diff = Math.abs(origProb[i][j] - compProb[i][j]);
                BMNode ni = i2n[i];
                BMNode nj = i2n[j];
                if (connectivity)
                    connWr.append(ni + "\t" + nj + "\t" + origProb[i][j] + "\t" + compProb[i][j] + "\t" + diff
                            + "\n");
                totalDiff += diff * diff;
            }
        }

        if (connectivity) {
            connWr.append("\n");
            connWr.close();
        }

        totalDiff = Math.sqrt(totalDiff);
        BufferedWriter dissWr = new BufferedWriter(new FileWriter("dissimilarity.txt", true));
        dissWr.append("" + totalDiff + "\n");
        dissWr.close();
    }

    if (cmd.hasOption("output_bmg")) {
        BMGraph outgraph = new BMGraph();
        String outputfile = cmd.getOptionValue("output_bmg");
        HashMap<Integer, BMNode> nodes = new HashMap<Integer, BMNode>();

        for (int i = 0; i < result.partition.size(); i++) {
            ArrayList<Integer> g = result.partition.get(i);
            if (g.size() == 0)
                continue;
            BMNode node = new BMNode("Supernode_" + i);
            HashMap<String, String> attributes = new HashMap<String, String>();
            StringBuffer contents = new StringBuffer();
            for (int x : g)
                contents.append(i2n[x] + ",");
            contents.delete(contents.length() - 1, contents.length());

            attributes.put("nodes", contents.toString());
            attributes.put("self-edge", "" + result.graph.getEdgeWeight(i, i));
            node.setAttributes(attributes);
            nodes.put(i, node);
            outgraph.ensureHasNode(node);
        }

        for (int i = 0; i < result.partition.size(); i++) {
            if (result.partition.get(i).size() == 0)
                continue;
            for (int x : result.graph.getNeighbors(i)) {
                if (x < i)
                    continue;
                BMNode from = nodes.get(i);
                BMNode to = nodes.get(x);
                if (from == null || to == null) {
                    System.out.println(from + "->" + to);
                    System.out.println(i + "->" + x);
                    System.out.println("");
                }
                BMEdge e = new BMEdge(nodes.get(i), nodes.get(x), "notype");
                e.setAttributes(new HashMap<String, String>());
                e.put("goodness", "" + result.graph.getEdgeWeight(i, x));
                outgraph.ensureHasEdge(e);
            }
        }
        BMGraphUtils.writeBMGraph(outgraph, outputfile);
    }

    // k-medoids!
    if (kmedoids) {
        // KMedoidsResult clustersOrig = KMedoids.runKMedoids(origProb, kmedoidsK);
        if (ctype == ConnectivityType.LOCAL) {
            compProb = ProbDijkstra.getProbMatrix(result.uncompressedGraph());
        }
        // KMedoidsResult compClusters = KMedoids.runKMedoids(ProbDijkstra.getProbMatrix(result.graph), kmedoidsK);
        KMedoidsResult clustersComp = KMedoids.runKMedoids(compProb, kmedoidsK);

        BufferedWriter bw = new BufferedWriter(new FileWriter(kmedoidsOutput));
        for (int i = 0; i < origN; i++) {
            bw.append(i2n[i] + " " + clustersComp.clusters[i] + "\n");
        }
        bw.close();
    }

    System.exit(0);
}