List of usage examples for java.lang.String.join
public static String join(CharSequence delimiter, CharSequence... elements)
public static String join(CharSequence delimiter, Iterable<? extends CharSequence> elements)
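Both overloads were introduced in Java 8. A minimal, self-contained sketch of each (not taken from any of the projects below):

    import java.util.Arrays;
    import java.util.List;

    public class StringJoinDemo {
        public static void main(String[] args) {
            // Varargs overload: join individual CharSequence values.
            String csv = String.join(",", "a", "b", "c");               // "a,b,c"

            // Iterable overload: join any Iterable of CharSequence, e.g. a List<String>.
            List<String> parts = Arrays.asList("usr", "local", "bin");
            String path = String.join("/", parts);                      // "usr/local/bin"

            System.out.println(csv);
            System.out.println(path);
        }
    }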
From source file:com.iveely.computing.Program.java
    /**
     * @param args the command line arguments
     * @throws java.io.IOException
     */
    public static void main(String[] args) throws IOException {
        if (args != null && args.length > 0) {
            logger.info("start computing with arguments:" + String.join(",", args));
            String type = args[0].toLowerCase(Locale.CHINESE);
            switch (type) {
            case "master":
                launchMaster();
                return;
            case "slave":
                if (args.length == 4) {
                    ConfigWrapper.get().getSlave().setPort(Integer.parseInt(args[1]));
                    ConfigWrapper.get().getSlave().setSlot(Integer.parseInt(args[2]));
                    ConfigWrapper.get().getSlave().setSlotCount(Integer.parseInt(args[3]));
                }
                launchSlave();
                return;
            case "supervisor":
                launchSupervisor();
                return;
            case "console":
                launchConsole();
                return;
            }
        }
        logger.error("arguments error, example [master | supervisor | slave | console]");
        System.out.println("press any key to exit...");
        new BufferedReader(new InputStreamReader(System.in)).readLine();
    }
From source file:alfio.config.SpringBootLauncher.java
    /**
     * Entry point for spring boot
     * @param args original arguments
     */
    public static void main(String[] args) {
        Thread.setDefaultUncaughtExceptionHandler(new DefaultExceptionHandler());
        String profiles = System.getProperty("spring.profiles.active", "");
        SpringApplication application = new SpringApplication(SpringBootInitializer.class,
                RepositoryConfiguration.class, DataSourceConfiguration.class, WebSecurityConfig.class,
                MvcConfiguration.class);
        List<String> additionalProfiles = new ArrayList<>();
        additionalProfiles.add(Initializer.PROFILE_SPRING_BOOT);
        if ("true".equals(System.getenv("ALFIO_LOG_STDOUT_ONLY"))) {
            // -> will load application-stdout.properties on top to override the logger configuration
            additionalProfiles.add("stdout");
        }
        if ("true".equals(System.getenv("ALFIO_DEMO_ENABLED"))) {
            additionalProfiles.add(Initializer.PROFILE_DEMO);
        }
        if ("true".equals(System.getenv("ALFIO_JDBC_SESSION_ENABLED"))) {
            additionalProfiles.add(Initializer.PROFILE_JDBC_SESSION);
        }
        application.setAdditionalProfiles(additionalProfiles.toArray(new String[additionalProfiles.size()]));
        ConfigurableApplicationContext applicationContext = application.run(args);
        ConfigurableEnvironment environment = applicationContext.getEnvironment();
        log.info("profiles: requested {}, active {}", profiles,
                String.join(", ", (CharSequence[]) environment.getActiveProfiles()));
        if ("true".equals(System.getProperty("startDBManager"))) {
            launchHsqlGUI();
        }
    }
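The cast to CharSequence[] in the log statement above is not strictly required: Java arrays are covariant, so the String[] returned by getActiveProfiles() can be passed to the CharSequence... overload directly. A small stand-alone check with an illustrative array (not from alf.io):

    String[] activeProfiles = { "spring-boot", "stdout", "demo" };  // stand-in for environment.getActiveProfiles()
    String a = String.join(", ", activeProfiles);
    String b = String.join(", ", (CharSequence[]) activeProfiles);
    System.out.println(a.equals(b));  // true; both calls resolve to the varargs overload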
From source file:com.datastax.sparql.ConsoleCompiler.java
    public static void main(final String[] args) throws IOException {
        //args = "/examples/modern1.sparql";
        final Options options = new Options();
        options.addOption("f", "file", true, "a file that contains a SPARQL query");
        options.addOption("g", "graph", true,
                "the graph that's used to execute the query [classic|modern|crew|kryo file]");
        // TODO: add an OLAP option (perhaps: "--olap spark"?)
        final CommandLineParser parser = new DefaultParser();
        final CommandLine commandLine;
        try {
            commandLine = parser.parse(options, args);
        } catch (ParseException e) {
            System.out.println(e.getMessage());
            printHelp(1);
            return;
        }
        final InputStream inputStream = commandLine.hasOption("file")
                ? new FileInputStream(commandLine.getOptionValue("file"))
                : System.in;
        final BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
        final StringBuilder queryBuilder = new StringBuilder();
        if (!reader.ready()) {
            printHelp(1);
        }
        String line;
        while (null != (line = reader.readLine())) {
            queryBuilder.append(System.lineSeparator()).append(line);
        }
        final String queryString = queryBuilder.toString();
        final Graph graph;
        if (commandLine.hasOption("graph")) {
            switch (commandLine.getOptionValue("graph").toLowerCase()) {
            case "classic":
                graph = TinkerFactory.createClassic();
                break;
            case "modern":
                graph = TinkerFactory.createModern();
                System.out.println("Modern Graph Created");
                break;
            case "crew":
                graph = TinkerFactory.createTheCrew();
                break;
            default:
                graph = TinkerGraph.open();
                System.out.println("Graph Created");
                long startTime = System.nanoTime();
                graph.io(IoCore.gryo()).readGraph(commandLine.getOptionValue("graph"));
                long endTime = System.nanoTime();
                System.out.println("Time taken to load graph from kryo file: "
                        + (endTime - startTime) / 1000000 + " milliseconds");
                break;
            }
        } else {
            graph = TinkerFactory.createModern();
        }
        final Traversal<Vertex, ?> traversal = SparqlToGremlinCompiler.convertToGremlinTraversal(graph, queryString);
        printWithHeadline("SPARQL Query", queryString);
        printWithHeadline("Traversal (prior execution)", traversal);
        Bytecode traversalByteCode = traversal.asAdmin().getBytecode();
        //JavaTranslator.of(graph.traversal()).translate(traversalByteCode);
        System.out.println("the Byte Code : " + traversalByteCode.toString());
        printWithHeadline("Result", String.join(System.lineSeparator(), JavaTranslator.of(graph.traversal())
                .translate(traversalByteCode).toStream().map(Object::toString).collect(Collectors.toList())));
        printWithHeadline("Traversal (after execution)", traversal);
    }
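The String.join call near the end stitches the traversal results into one multi-line block, one result per line. Distilled to a stand-alone sketch with illustrative values:

    List<String> results = Arrays.asList("v[1]", "v[2]", "v[6]");
    String report = String.join(System.lineSeparator(), results);
    System.out.println(report);
    // v[1]
    // v[2]
    // v[6]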
From source file:com.twentyn.chemicalClassifier.Runner.java
    public static void main(String[] args) throws Exception {
        BufferedReader reader = new BufferedReader(new FileReader(args[0]));
        BufferedWriter writer = new BufferedWriter(new FileWriter(args[1]));
        try {
            Oscar oscar = new Oscar();
            String line = null;
            /* NOTE: this is exactly the wrong way to write a TSV reader. Caveat emptor.
             * See http://tburette.github.io/blog/2014/05/25/so-you-want-to-write-your-own-CSV-code/
             * and then use org.apache.commons.csv.CSVParser instead. */
            while ((line = reader.readLine()) != null) {
                // TSV means split on tabs! Nothing else will do.
                List<String> fields = Arrays.asList(line.split("\t"));
                // Choke if our invariants aren't satisfied. We expect every line to have a name and an InChI.
                if (fields.size() != 2) {
                    throw new RuntimeException(
                            String.format("Found malformed line (all lines must have two fields): %s", line));
                }
                String name = fields.get(1);
                List<ResolvedNamedEntity> entities = oscar.findAndResolveNamedEntities(name);
                System.out.println("**********");
                System.out.println("Name: " + name);
                List<String> outputFields = new ArrayList<>(fields.size() + 1);
                outputFields.addAll(fields);
                if (entities.size() == 0) {
                    System.out.println("No match");
                    outputFields.add("noMatch");
                } else if (entities.size() == 1) {
                    ResolvedNamedEntity entity = entities.get(0);
                    NamedEntity ne = entity.getNamedEntity();
                    if (ne.getStart() != 0 || ne.getEnd() != name.length()) {
                        System.out.println("Partial match");
                        printEntity(entity);
                        outputFields.add("partialMatch");
                    } else {
                        System.out.println("Exact match");
                        printEntity(entity);
                        outputFields.add("exactMatch");
                        List<ChemicalStructure> structures = entity.getChemicalStructures(FormatType.STD_INCHI);
                        for (ChemicalStructure s : structures) {
                            outputFields.add(s.getValue());
                        }
                    }
                } else {
                    // Multiple matches found!
                    System.out.println("Multiple matches");
                    for (ResolvedNamedEntity e : entities) {
                        printEntity(e);
                    }
                    outputFields.add("multipleMatches");
                }
                writer.write(String.join("\t", outputFields));
                writer.newLine();
            }
        } finally {
            writer.flush();
            writer.close();
        }
    }
From source file:edu.cmu.lti.oaqa.knn4qa.apps.ExtractDataAndQueryAsSparseVectors.java
    public static void main(String[] args) {
        String optKeys[] = { CommonParams.MAX_NUM_QUERY_PARAM, MAX_NUM_DATA_PARAM, CommonParams.MEMINDEX_PARAM,
                IN_QUERIES_PARAM, OUT_QUERIES_PARAM, OUT_DATA_PARAM, TEXT_FIELD_PARAM, TEST_QTY_PARAM, };
        String optDescs[] = { CommonParams.MAX_NUM_QUERY_DESC, MAX_NUM_DATA_DESC, CommonParams.MEMINDEX_DESC,
                IN_QUERIES_DESC, OUT_QUERIES_DESC, OUT_DATA_DESC, TEXT_FIELD_DESC, TEST_QTY_DESC };
        boolean hasArg[] = { true, true, true, true, true, true, true, true };
        ParamHelper prmHlp = null;
        try {
            prmHlp = new ParamHelper(args, optKeys, optDescs, hasArg);
            CommandLine cmd = prmHlp.getCommandLine();
            Options opt = prmHlp.getOptions();
            int maxNumQuery = Integer.MAX_VALUE;
            String tmpn = cmd.getOptionValue(CommonParams.MAX_NUM_QUERY_PARAM);
            if (tmpn != null) {
                try {
                    maxNumQuery = Integer.parseInt(tmpn);
                } catch (NumberFormatException e) {
                    UsageSpecify(CommonParams.MAX_NUM_QUERY_PARAM, opt);
                }
            }
            int maxNumData = Integer.MAX_VALUE;
            tmpn = cmd.getOptionValue(MAX_NUM_DATA_PARAM);
            if (tmpn != null) {
                try {
                    maxNumData = Integer.parseInt(tmpn);
                } catch (NumberFormatException e) {
                    UsageSpecify(MAX_NUM_DATA_PARAM, opt);
                }
            }
            String memIndexPref = cmd.getOptionValue(CommonParams.MEMINDEX_PARAM);
            if (null == memIndexPref) {
                UsageSpecify(CommonParams.MEMINDEX_PARAM, opt);
            }
            String textField = cmd.getOptionValue(TEXT_FIELD_PARAM);
            if (null == textField) {
                UsageSpecify(TEXT_FIELD_PARAM, opt);
            }
            textField = textField.toLowerCase();
            int fieldId = -1;
            for (int i = 0; i < FeatureExtractor.mFieldNames.length; ++i)
                if (FeatureExtractor.mFieldNames[i].compareToIgnoreCase(textField) == 0) {
                    fieldId = i;
                    break;
                }
            if (-1 == fieldId) {
                Usage("Wrong field index, should be one of the following: "
                        + String.join(",", FeatureExtractor.mFieldNames), opt);
            }
            InMemForwardIndex indx = new InMemForwardIndex(
                    FeatureExtractor.indexFileName(memIndexPref, FeatureExtractor.mFieldNames[fieldId]));
            BM25SimilarityLucene bm25simil = new BM25SimilarityLucene(FeatureExtractor.BM25_K1,
                    FeatureExtractor.BM25_B, indx);
            String inQueryFile = cmd.getOptionValue(IN_QUERIES_PARAM);
            String outQueryFile = cmd.getOptionValue(OUT_QUERIES_PARAM);
            if ((inQueryFile == null) != (outQueryFile == null)) {
                Usage("You should either specify both " + IN_QUERIES_PARAM + " and " + OUT_QUERIES_PARAM
                        + " or none of them", opt);
            }
            String outDataFile = cmd.getOptionValue(OUT_DATA_PARAM);
            tmpn = cmd.getOptionValue(TEST_QTY_PARAM);
            int testQty = 0;
            if (tmpn != null) {
                try {
                    testQty = Integer.parseInt(tmpn);
                } catch (NumberFormatException e) {
                    UsageSpecify(TEST_QTY_PARAM, opt);
                }
            }
            ArrayList<DocEntry> testDocEntries = new ArrayList<DocEntry>();
            ArrayList<DocEntry> testQueryEntries = new ArrayList<DocEntry>();
            ArrayList<TrulySparseVector> testDocVectors = new ArrayList<TrulySparseVector>();
            ArrayList<TrulySparseVector> testQueryVectors = new ArrayList<TrulySparseVector>();
            if (outDataFile != null) {
                BufferedWriter out = new BufferedWriter(
                        new OutputStreamWriter(CompressUtils.createOutputStream(outDataFile)));
                ArrayList<DocEntryExt> docEntries = indx.getDocEntries();
                for (int id = 0; id < Math.min(maxNumData, docEntries.size()); ++id) {
                    DocEntry e = docEntries.get(id).mDocEntry;
                    TrulySparseVector v = bm25simil.getDocSparseVector(e, false);
                    if (id < testQty) {
                        testDocEntries.add(e);
                        testDocVectors.add(v);
                    }
                    outputVector(out, v);
                }
                out.close();
            }
            Splitter splitOnSpace = Splitter.on(' ').trimResults().omitEmptyStrings();
            if (outQueryFile != null) {
                BufferedReader inpText = new BufferedReader(
                        new InputStreamReader(CompressUtils.createInputStream(inQueryFile)));
                BufferedWriter out = new BufferedWriter(
                        new OutputStreamWriter(CompressUtils.createOutputStream(outQueryFile)));
                String queryText = XmlHelper.readNextXMLIndexEntry(inpText);
                for (int queryQty = 0; queryText != null && queryQty < maxNumQuery;
                        queryText = XmlHelper.readNextXMLIndexEntry(inpText), queryQty++) {
                    Map<String, String> queryFields = null;
                    // 1. Parse a query
                    try {
                        queryFields = XmlHelper.parseXMLIndexEntry(queryText);
                    } catch (Exception e) {
                        System.err.println("Parsing error, offending QUERY:\n" + queryText);
                        throw new Exception("Parsing error.");
                    }
                    String fieldText = queryFields.get(FeatureExtractor.mFieldsSOLR[fieldId]);
                    if (fieldText == null) {
                        fieldText = "";
                    }
                    ArrayList<String> tmpa = new ArrayList<String>();
                    for (String s : splitOnSpace.split(fieldText))
                        tmpa.add(s);
                    DocEntry e = indx.createDocEntry(tmpa.toArray(new String[tmpa.size()]));
                    TrulySparseVector v = bm25simil.getDocSparseVector(e, true);
                    if (queryQty < testQty) {
                        testQueryEntries.add(e);
                        testQueryVectors.add(v);
                    }
                    outputVector(out, v);
                }
                out.close();
            }
            int testedQty = 0, diffQty = 0;
            // Now let's do some testing
            for (int iq = 0; iq < testQueryEntries.size(); ++iq) {
                DocEntry queryEntry = testQueryEntries.get(iq);
                TrulySparseVector queryVector = testQueryVectors.get(iq);
                for (int id = 0; id < testDocEntries.size(); ++id) {
                    DocEntry docEntry = testDocEntries.get(id);
                    TrulySparseVector docVector = testDocVectors.get(id);
                    float val1 = bm25simil.compute(queryEntry, docEntry);
                    float val2 = TrulySparseVector.scalarProduct(queryVector, docVector);
                    ++testedQty;
                    if (Math.abs(val1 - val2) > 1e5) {
                        System.err.println(
                                String.format("Potential mismatch BM25=%f <-> scalar product=%f", val1, val2));
                        ++diffQty;
                    }
                }
            }
            if (testedQty > 0)
                System.out.println(String.format("Tested %d Mismatched %d", testedQty, diffQty));
        } catch (ParseException e) {
            Usage("Cannot parse arguments: " + e, prmHlp != null ? prmHlp.getOptions() : null);
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
            System.err.println("Terminating due to an exception: " + e);
            System.exit(1);
        }
    }
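Here String.join only appears in the usage message, turning the array of valid field names into a comma-separated list. Distilled, with an illustrative array standing in for FeatureExtractor.mFieldNames:

    String[] fieldNames = { "text", "text_unlemm", "bigram" };  // hypothetical field names
    String msg = "Wrong field index, should be one of the following: " + String.join(",", fieldNames);
    // Wrong field index, should be one of the following: text,text_unlemm,bigram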
From source file:de.unisb.cs.st.javalanche.rhino.coverage.CoberturaParser.java
    private static void summarizePriorization(FailureMatrix fm, List<PriorizationResult> prioritizedAdditional,
            String prioritizationType) {
        Collections.reverse(prioritizedAdditional);
        List<String> testList = new ArrayList<String>();
        int totalFailures = fm.getNumberOfFailures();
        int count = 0;
        logger.info("Result for: " + prioritizationType);
        StringBuilder sb = new StringBuilder();
        for (PriorizationResult prioritizationResult : prioritizedAdditional) {
            count++;
            testList.add(prioritizationResult.getTestName());
            int detectedFailures = fm.getDetectedFailures(testList);
            System.out.println(count + " " + prioritizationResult.getTestName() + " ("
                    + prioritizationResult.getInfo() + ") - " + detectedFailures + " out of " + totalFailures
                    + " failures");
            String join = Join.join(",", new Object[] { count, prioritizationResult.getTestName(),
                    detectedFailures, totalFailures, "\"" + prioritizationResult.getInfo() + "\"" });
            sb.append(join).append('\n');
        }
        Io.writeFile(sb.toString(), new File(prioritizationType + ".csv"));
    }
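Note that this listing calls a Join utility on an Object[] rather than java.lang.String.join, because the CSV row mixes ints and Strings and String.join only accepts CharSequence elements. A JDK-only way to build the same kind of row is java.util.StringJoiner; the values below are illustrative:

    int count = 1, detectedFailures = 3, totalFailures = 10;   // hypothetical values
    String testName = "testFoo", info = "additional";

    StringJoiner row = new StringJoiner(",");
    row.add(Integer.toString(count))
       .add(testName)
       .add(Integer.toString(detectedFailures))
       .add(Integer.toString(totalFailures))
       .add("\"" + info + "\"");
    String csvRow = row.toString();  // 1,testFoo,3,10,"additional"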
From source file:com.act.lcms.db.io.ExportStandardIonResultsFromDB.java
    public static void main(String[] args) throws Exception {
        Options opts = new Options();
        for (Option.Builder b : OPTION_BUILDERS) {
            opts.addOption(b.build());
        }
        CommandLine cl = null;
        try {
            CommandLineParser parser = new DefaultParser();
            cl = parser.parse(opts, args);
        } catch (ParseException e) {
            System.err.format("Argument parsing failed: %s\n", e.getMessage());
            HELP_FORMATTER.printHelp(ExportStandardIonResultsFromDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                    null, true);
            System.exit(1);
        }
        if (cl.hasOption("help")) {
            HELP_FORMATTER.printHelp(ExportStandardIonResultsFromDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                    null, true);
            return;
        }
        try (DB db = DB.openDBFromCLI(cl)) {
            List<String> chemicalNames = new ArrayList<>();
            if (cl.hasOption(OPTION_CONSTRUCT)) {
                // Extract the chemicals in the pathway and their product masses, then look up info on those chemicals
                List<Pair<ChemicalAssociatedWithPathway, Double>> productMasses = Utils
                        .extractMassesForChemicalsAssociatedWithConstruct(db, cl.getOptionValue(OPTION_CONSTRUCT));
                for (Pair<ChemicalAssociatedWithPathway, Double> pair : productMasses) {
                    chemicalNames.add(pair.getLeft().getChemical());
                }
            }
            if (cl.hasOption(OPTION_CHEMICALS)) {
                chemicalNames.addAll(Arrays.asList(cl.getOptionValues(OPTION_CHEMICALS)));
            }
            if (chemicalNames.size() == 0) {
                System.err.format("No chemicals can be found from the input query.\n");
                System.exit(-1);
            }
            List<String> standardIonHeaderFields = new ArrayList<String>() {
                {
                    add(STANDARD_ION_HEADER_FIELDS.CHEMICAL.name());
                    add(STANDARD_ION_HEADER_FIELDS.BEST_ION_FROM_ALGO.name());
                    add(STANDARD_ION_HEADER_FIELDS.MANUAL_PICK.name());
                    add(STANDARD_ION_HEADER_FIELDS.AUTHOR.name());
                    add(STANDARD_ION_HEADER_FIELDS.DIAGNOSTIC_PLOTS.name());
                    add(STANDARD_ION_HEADER_FIELDS.NOTE.name());
                }
            };
            String outAnalysis;
            if (cl.hasOption(OPTION_OUTPUT_PREFIX)) {
                outAnalysis = cl.getOptionValue(OPTION_OUTPUT_PREFIX) + "." + TSV_FORMAT;
            } else {
                outAnalysis = String.join("-", chemicalNames) + "." + TSV_FORMAT;
            }
            File lcmsDir = new File(cl.getOptionValue(OPTION_DIRECTORY));
            if (!lcmsDir.isDirectory()) {
                System.err.format("File at %s is not a directory\n", lcmsDir.getAbsolutePath());
                HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                        null, true);
                System.exit(1);
            }
            String plottingDirectory = cl.getOptionValue(OPTION_PLOTTING_DIR);
            TSVWriter<String, String> resultsWriter = new TSVWriter<>(standardIonHeaderFields);
            resultsWriter.open(new File(outAnalysis));
            // For each chemical, create a TSV row and a corresponding diagnostic plot
            for (String chemicalName : chemicalNames) {
                List<String> graphLabels = new ArrayList<>();
                List<Double> yMaxList = new ArrayList<>();
                String outData = plottingDirectory + "/" + chemicalName + ".data";
                String outImg = plottingDirectory + "/" + chemicalName + ".pdf";
                // For each diagnostic plot, open a new file stream.
                try (FileOutputStream fos = new FileOutputStream(outData)) {
                    List<StandardIonResult> getResultByChemicalName = StandardIonResult.getByChemicalName(db,
                            chemicalName);
                    if (getResultByChemicalName != null && getResultByChemicalName.size() > 0) {
                        // PART 1: Get the best metlin ion across all standard ion results for a given chemical
                        String bestGlobalMetlinIon = AnalysisHelper
                                .scoreAndReturnBestMetlinIonFromStandardIonResults(getResultByChemicalName,
                                        new HashMap<>(), true, true);
                        // PART 2: Plot all the graphs related to the chemical. The plots are structured as follows:
                        //
                        // Page 1: All graphs (water, MeOH, Yeast) for Global ion picked (best ion among ALL
                        //         standard ion runs for the given chemical) by the algorithm
                        // Page 2: All graphs for M+H
                        // Page 3: All graphs for Local ions picked (best ion within a SINGLE standard ion run)
                        //         + negative controls for Yeast.
                        //
                        // Each page is demarcated by a blank graph.

                        // Arrange results based on media
                        Map<String, List<StandardIonResult>> categories = StandardIonResult
                                .categorizeListOfStandardWellsByMedia(db, getResultByChemicalName);
                        // This set contains all the best metlin ions corresponding to all the standard ion runs.
                        Set<String> bestLocalIons = new HashSet<>();
                        bestLocalIons.add(bestGlobalMetlinIon);
                        bestLocalIons.add(DEFAULT_ION);
                        for (StandardIonResult result : getResultByChemicalName) {
                            bestLocalIons.add(result.getBestMetlinIon());
                        }
                        // We sort the best local ions as follows:
                        // 1) Global best ion spectra 2) M+H spectra 3) Local best ion spectra
                        List<String> bestLocalIonsArray = new ArrayList<>(bestLocalIons);
                        Collections.sort(bestLocalIonsArray, new Comparator<String>() {
                            @Override
                            public int compare(String o1, String o2) {
                                if (o1.equals(bestGlobalMetlinIon) && !o2.equals(bestGlobalMetlinIon)) {
                                    return -1;
                                } else if (o1.equals(DEFAULT_ION) && !o2.equals(bestGlobalMetlinIon)) {
                                    return -1;
                                } else {
                                    return 1;
                                }
                            }
                        });
                        // This variable stores the index of the array at which all the remaining spectra are
                        // contained in one page. This happens right after the M+H ion spectra.
                        Integer combineAllSpectraIntoPageThreeFromIndex = 0;
                        for (int i = 0; i < bestLocalIonsArray.size(); i++) {
                            if (bestLocalIonsArray.get(i).equals(DEFAULT_ION)) {
                                combineAllSpectraIntoPageThreeFromIndex = i + 1;
                            }
                        }
                        for (int i = 0; i < bestLocalIonsArray.size(); i++) {
                            String ion = bestLocalIonsArray.get(i);
                            for (Map.Entry<String, List<StandardIonResult>> mediaToListOfIonResults : categories
                                    .entrySet()) {
                                for (StandardIonResult result : mediaToListOfIonResults.getValue()) {
                                    // For every standard ion result, we plot the best global metlin ion and M+H.
                                    // These plots are in pages 1 and 2. For all of page 3 (aka miscellaneous
                                    // spectra), we only plot the best local ion corresponding to its spectra and
                                    // not some other graph's spectra. In the below condition, we reach the page 3
                                    // case with not the same best ion as the spectra, in which case we just
                                    // continue and not draw anything on the page.
                                    if (i >= combineAllSpectraIntoPageThreeFromIndex
                                            && !(result.getBestMetlinIon().equals(ion))) {
                                        continue;
                                    }
                                    StandardWell positiveWell = StandardWell.getInstance().getById(db,
                                            result.getStandardWellId());
                                    String positiveControlChemical = positiveWell.getChemical();
                                    ScanData<StandardWell> encapsulatedDataForPositiveControl = AnalysisHelper
                                            .getScanDataForWell(db, lcmsDir, positiveWell, positiveControlChemical,
                                                    positiveControlChemical);
                                    Set<String> singletonSet = Collections.singleton(ion);
                                    String additionalInfo = generateAdditionalLabelInformation(positiveWell,
                                            result, ion);
                                    List<String> labels = AnalysisHelper
                                            .writeScanData(fos, lcmsDir, MAX_INTENSITY,
                                                    encapsulatedDataForPositiveControl, false, false, singletonSet)
                                            .stream().map(label -> label + additionalInfo)
                                            .collect(Collectors.toList());
                                    yMaxList.add(encapsulatedDataForPositiveControl.getMs1ScanResults()
                                            .getMaxIntensityForIon(ion));
                                    List<String> negativeLabels = null;
                                    // Only do the negative control in the miscellaneous page (page 3) and if the
                                    // well is in yeast media.
                                    if (mediaToListOfIonResults.getKey()
                                            .equals(StandardWell.MEDIA_TYPE.YEAST.name())
                                            && (i >= combineAllSpectraIntoPageThreeFromIndex
                                                    && (result.getBestMetlinIon().equals(ion)))) {
                                        // TODO: Change the representative negative well to one that displays the
                                        // highest noise in the future. For now, we just use the first index among
                                        // the negative wells.
                                        int representativeIndex = 0;
                                        StandardWell representativeNegativeControlWell = StandardWell.getInstance()
                                                .getById(db, result.getNegativeWellIds().get(representativeIndex));
                                        ScanData encapsulatedDataForNegativeControl = AnalysisHelper
                                                .getScanDataForWell(db, lcmsDir, representativeNegativeControlWell,
                                                        positiveWell.getChemical(),
                                                        representativeNegativeControlWell.getChemical());
                                        String negativePlateAdditionalInfo = generateAdditionalLabelInformation(
                                                representativeNegativeControlWell, null, null);
                                        negativeLabels = AnalysisHelper
                                                .writeScanData(fos, lcmsDir, MAX_INTENSITY,
                                                        encapsulatedDataForNegativeControl, false, false,
                                                        singletonSet)
                                                .stream().map(label -> label + negativePlateAdditionalInfo)
                                                .collect(Collectors.toList());
                                        yMaxList.add(encapsulatedDataForNegativeControl.getMs1ScanResults()
                                                .getMaxIntensityForIon(ion));
                                    }
                                    graphLabels.addAll(labels);
                                    if (negativeLabels != null) {
                                        graphLabels.addAll(negativeLabels);
                                    }
                                }
                            }
                            // Add a blank graph to demarcate pages.
                            if (i < combineAllSpectraIntoPageThreeFromIndex) {
                                graphLabels.addAll(AnalysisHelper.writeScanData(fos, lcmsDir, 0.0, BLANK_SCAN,
                                        false, false, new HashSet<>()));
                                yMaxList.add(0.0d);
                            }
                        }
                        // We need to pass the yMax values as an array to the Gnuplotter.
                        Double fontScale = null;
                        if (cl.hasOption(FONT_SCALE)) {
                            try {
                                fontScale = Double.parseDouble(cl.getOptionValue(FONT_SCALE));
                            } catch (IllegalArgumentException e) {
                                System.err.format("Argument for font-scale must be a floating point number.\n");
                                System.exit(1);
                            }
                        }
                        Double[] yMaxes = yMaxList.toArray(new Double[yMaxList.size()]);
                        Gnuplotter plotter = fontScale == null ? new Gnuplotter() : new Gnuplotter(fontScale);
                        plotter.plot2D(outData, outImg, graphLabels.toArray(new String[graphLabels.size()]),
                                "time", null, "intensity", "pdf", null, null, yMaxes, outImg + ".gnuplot");
                        Map<String, String> row = new HashMap<>();
                        row.put(STANDARD_ION_HEADER_FIELDS.CHEMICAL.name(), chemicalName);
                        row.put(STANDARD_ION_HEADER_FIELDS.BEST_ION_FROM_ALGO.name(), bestGlobalMetlinIon);
                        row.put(STANDARD_ION_HEADER_FIELDS.DIAGNOSTIC_PLOTS.name(), outImg);
                        resultsWriter.append(row);
                        resultsWriter.flush();
                    }
                }
            }
            resultsWriter.flush();
            resultsWriter.close();
        }
    }
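The single String.join call in this long listing builds the default output file name by joining the requested chemical names with hyphens. Distilled, with illustrative values:

    List<String> chemicalNames = Arrays.asList("caffeine", "glucose");
    String tsvFormat = "tsv";  // stand-in for the TSV_FORMAT constant
    String outAnalysis = String.join("-", chemicalNames) + "." + tsvFormat;  // "caffeine-glucose.tsv"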
From source file:it.infn.mw.iam.util.ssh.RSAPublicKeyUtils.java
    public static String getFormattedMD5Fingerprint(String key) {
        return String.join(":", buildMD5Fingerprint(key).split("(?<=\\G..)"));
    }
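The regex "(?<=\\G..)" splits a string after every two characters (\G anchors at the end of the previous match), so a hex digest becomes colon-separated byte pairs. A stand-alone illustration with a made-up digest, since buildMD5Fingerprint is not shown in the listing:

    String hexDigest = "79e6c972090f4861";          // hypothetical value
    String[] pairs = hexDigest.split("(?<=\\G..)"); // ["79", "e6", "c9", "72", "09", "0f", "48", "61"]
    String fingerprint = String.join(":", pairs);   // "79:e6:c9:72:09:0f:48:61"
    System.out.println(fingerprint);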
From source file:com.yahoo.gondola.container.RegistryClients.java
    private static String getZookeeperConnectionString(Config config) {
        return String.join(",", config.getList("registry_zookeeper.servers"));
    }
From source file:com.haulmont.cuba.web.gui.FileUploadTypesHelper.java
    public static String convertToMIME(String[] types, String separator) {
        for (int i = 0; i < types.length; i++) {
            types[i] = FileTypesHelper.getMIMEType(types[i]);
        }
        return String.join(separator, types);
    }
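A stand-alone sketch of the same pattern (map each element in place, then join), using a plain Map in place of CUBA's FileTypesHelper, which is not shown here. The lookup table and default value are illustrative, and Map.of requires Java 9+:

    import java.util.Map;

    public class MimeJoinDemo {
        // Illustrative extension-to-MIME table; FileTypesHelper.getMIMEType is assumed to behave similarly.
        private static final Map<String, String> MIME_BY_EXTENSION = Map.of(
                ".png", "image/png",
                ".pdf", "application/pdf",
                ".txt", "text/plain");

        public static String convertToMime(String[] extensions, String separator) {
            for (int i = 0; i < extensions.length; i++) {
                extensions[i] = MIME_BY_EXTENSION.getOrDefault(extensions[i], "application/octet-stream");
            }
            return String.join(separator, extensions);
        }

        public static void main(String[] args) {
            System.out.println(convertToMime(new String[] { ".png", ".pdf" }, ","));
            // prints: image/png,application/pdf
        }
    }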