List of usage examples for the java.io.FileWriter constructor
public FileWriter(FileDescriptor fd)
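None of the examples below actually calls this overload; they all pass a file name or a File. As a minimal sketch of the FileDescriptor constructor (the output file name here is purely illustrative), a writer can be attached to a descriptor obtained from an already-open stream:

import java.io.FileDescriptor;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;

public class FileDescriptorWriterExample {
    public static void main(String[] args) throws IOException {
        // Open a stream first; its FileDescriptor refers to the already-open file.
        try (FileOutputStream fos = new FileOutputStream("example-output.txt")) {
            FileDescriptor fd = fos.getFD();
            // FileWriter(FileDescriptor) writes through the existing descriptor.
            FileWriter writer = new FileWriter(fd);
            writer.write("written through a FileDescriptor");
            // Flush rather than close, so the descriptor owned by the stream
            // stays valid until try-with-resources closes it.
            writer.flush();
        }
    }
}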
From source file:FileCompressor.java
public static void main(String[] args) throws IOException {
    String file = "D:\\XJad.rar.txt";
    BufferedReader reader = new BufferedReader(new FileReader(file));
    BufferedWriter writer = new BufferedWriter(new FileWriter(file + "_out.txt"));
    StringBuilder content = new StringBuilder();
    String tmp;
    while ((tmp = reader.readLine()) != null) {
        content.append(tmp);
        content.append(System.getProperty("line.separator"));
    }
    FileCompressor f = new FileCompressor();
    writer.write(f.compress(content.toString()));
    writer.close();
    reader.close();
    reader = new BufferedReader(new FileReader(file + "_out.txt"));
    StringBuilder content2 = new StringBuilder();
    while ((tmp = reader.readLine()) != null) {
        content2.append(tmp);
        content2.append(System.getProperty("line.separator"));
    }
    String decompressed = f.decompress(content2.toString());
    String c = content.toString();
    System.out.println(decompressed.equals(c));
}
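The same read-compress-verify flow can also be written with try-with-resources, so the FileWriter and the readers are closed even if compress or decompress throws. This is only a sketch, assuming the FileCompressor class from the example above:

public static void main(String[] args) throws IOException {
    String file = "D:\\XJad.rar.txt";
    FileCompressor f = new FileCompressor();
    StringBuilder content = new StringBuilder();
    // Read the input and write the compressed form; both resources close automatically.
    try (BufferedReader reader = new BufferedReader(new FileReader(file));
            BufferedWriter writer = new BufferedWriter(new FileWriter(file + "_out.txt"))) {
        String tmp;
        while ((tmp = reader.readLine()) != null) {
            content.append(tmp).append(System.lineSeparator());
        }
        writer.write(f.compress(content.toString()));
    }
    // Read the compressed file back and check the round trip.
    StringBuilder content2 = new StringBuilder();
    try (BufferedReader reader = new BufferedReader(new FileReader(file + "_out.txt"))) {
        String tmp;
        while ((tmp = reader.readLine()) != null) {
            content2.append(tmp).append(System.lineSeparator());
        }
    }
    System.out.println(f.decompress(content2.toString()).equals(content.toString()));
}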
From source file:com.twentyn.patentScorer.ScoreMerger.java
public static void main(String[] args) throws Exception { System.out.println("Starting up..."); System.out.flush();/*from w w w . j a v a2s. com*/ Options opts = new Options(); opts.addOption(Option.builder("h").longOpt("help").desc("Print this help message and exit").build()); opts.addOption(Option.builder("r").longOpt("results").required().hasArg() .desc("A directory of search results to read").build()); opts.addOption(Option.builder("s").longOpt("scores").required().hasArg() .desc("A directory of patent classification scores to read").build()); opts.addOption(Option.builder("o").longOpt("output").required().hasArg() .desc("The output file where results will be written.").build()); HelpFormatter helpFormatter = new HelpFormatter(); CommandLineParser cmdLineParser = new DefaultParser(); CommandLine cmdLine = null; try { cmdLine = cmdLineParser.parse(opts, args); } catch (ParseException e) { System.out.println("Caught exception when parsing command line: " + e.getMessage()); helpFormatter.printHelp("DocumentIndexer", opts); System.exit(1); } if (cmdLine.hasOption("help")) { helpFormatter.printHelp("DocumentIndexer", opts); System.exit(0); } File scoresDirectory = new File(cmdLine.getOptionValue("scores")); if (cmdLine.getOptionValue("scores") == null || !scoresDirectory.isDirectory()) { LOGGER.error("Not a directory of score files: " + cmdLine.getOptionValue("scores")); } File resultsDirectory = new File(cmdLine.getOptionValue("results")); if (cmdLine.getOptionValue("results") == null || !resultsDirectory.isDirectory()) { LOGGER.error("Not a directory of results files: " + cmdLine.getOptionValue("results")); } FileWriter outputWriter = new FileWriter(cmdLine.getOptionValue("output")); ObjectMapper objectMapper = new ObjectMapper(); objectMapper.enable(SerializationFeature.INDENT_OUTPUT); objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); FilenameFilter jsonFilter = new FilenameFilter() { public final Pattern JSON_PATTERN = Pattern.compile("\\.json$"); public boolean accept(File dir, String name) { return JSON_PATTERN.matcher(name).find(); } }; Map<String, PatentScorer.ClassificationResult> scores = new HashMap<>(); LOGGER.info("Reading scores from directory at " + scoresDirectory.getAbsolutePath()); for (File scoreFile : scoresDirectory.listFiles(jsonFilter)) { BufferedReader reader = new BufferedReader(new FileReader(scoreFile)); int count = 0; String line; while ((line = reader.readLine()) != null) { PatentScorer.ClassificationResult res = objectMapper.readValue(line, PatentScorer.ClassificationResult.class); scores.put(res.docId, res); count++; } LOGGER.info("Read " + count + " scores from " + scoreFile.getAbsolutePath()); } Map<String, List<DocumentSearch.SearchResult>> synonymsToResults = new HashMap<>(); Map<String, List<DocumentSearch.SearchResult>> inchisToResults = new HashMap<>(); LOGGER.info("Reading results from directory at " + resultsDirectory); // With help from http://stackoverflow.com/questions/6846244/jackson-and-generic-type-reference. 
JavaType resultsType = objectMapper.getTypeFactory().constructCollectionType(List.class, DocumentSearch.SearchResult.class); List<File> resultsFiles = Arrays.asList(resultsDirectory.listFiles(jsonFilter)); Collections.sort(resultsFiles, new Comparator<File>() { @Override public int compare(File o1, File o2) { return o1.getName().compareTo(o2.getName()); } }); for (File resultsFile : resultsFiles) { BufferedReader reader = new BufferedReader(new FileReader(resultsFile)); CharBuffer buffer = CharBuffer.allocate(Long.valueOf(resultsFile.length()).intValue()); int bytesRead = reader.read(buffer); LOGGER.info("Read " + bytesRead + " bytes from " + resultsFile.getName() + " (length is " + resultsFile.length() + ")"); List<DocumentSearch.SearchResult> results = objectMapper.readValue(new CharArrayReader(buffer.array()), resultsType); LOGGER.info("Read " + results.size() + " results from " + resultsFile.getAbsolutePath()); int count = 0; for (DocumentSearch.SearchResult sres : results) { for (DocumentSearch.ResultDocument resDoc : sres.getResults()) { String docId = resDoc.getDocId(); PatentScorer.ClassificationResult classificationResult = scores.get(docId); if (classificationResult == null) { LOGGER.warn("No classification result found for " + docId); } else { resDoc.setClassifierScore(classificationResult.getScore()); } } if (!synonymsToResults.containsKey(sres.getSynonym())) { synonymsToResults.put(sres.getSynonym(), new ArrayList<DocumentSearch.SearchResult>()); } synonymsToResults.get(sres.getSynonym()).add(sres); count++; if (count % 1000 == 0) { LOGGER.info("Processed " + count + " search result documents"); } } } Comparator<DocumentSearch.ResultDocument> resultDocumentComparator = new Comparator<DocumentSearch.ResultDocument>() { @Override public int compare(DocumentSearch.ResultDocument o1, DocumentSearch.ResultDocument o2) { int cmp = o2.getClassifierScore().compareTo(o1.getClassifierScore()); if (cmp != 0) { return cmp; } cmp = o2.getScore().compareTo(o1.getScore()); return cmp; } }; for (Map.Entry<String, List<DocumentSearch.SearchResult>> entry : synonymsToResults.entrySet()) { DocumentSearch.SearchResult newSearchRes = null; // Merge all result documents into a single search result. 
for (DocumentSearch.SearchResult sr : entry.getValue()) { if (newSearchRes == null) { newSearchRes = sr; } else { newSearchRes.getResults().addAll(sr.getResults()); } } if (newSearchRes == null || newSearchRes.getResults() == null) { LOGGER.error("Search results for " + entry.getKey() + " are null."); continue; } Collections.sort(newSearchRes.getResults(), resultDocumentComparator); if (!inchisToResults.containsKey(newSearchRes.getInchi())) { inchisToResults.put(newSearchRes.getInchi(), new ArrayList<DocumentSearch.SearchResult>()); } inchisToResults.get(newSearchRes.getInchi()).add(newSearchRes); } List<String> sortedKeys = new ArrayList<String>(inchisToResults.keySet()); Collections.sort(sortedKeys); List<GroupedInchiResults> orderedResults = new ArrayList<>(sortedKeys.size()); Comparator<DocumentSearch.SearchResult> synonymSorter = new Comparator<DocumentSearch.SearchResult>() { @Override public int compare(DocumentSearch.SearchResult o1, DocumentSearch.SearchResult o2) { return o1.getSynonym().compareTo(o2.getSynonym()); } }; for (String inchi : sortedKeys) { List<DocumentSearch.SearchResult> res = inchisToResults.get(inchi); Collections.sort(res, synonymSorter); orderedResults.add(new GroupedInchiResults(inchi, res)); } objectMapper.writerWithView(Object.class).writeValue(outputWriter, orderedResults); outputWriter.close(); }
From source file:com.linkedin.pinotdruidbenchmark.DruidResponseTime.java
public static void main(String[] args) throws Exception {
    if (args.length != 4 && args.length != 5) {
        System.err.println(
                "4 or 5 arguments required: QUERY_DIR, RESOURCE_URL, WARM_UP_ROUNDS, TEST_ROUNDS, RESULT_DIR (optional).");
        return;
    }

    File queryDir = new File(args[0]);
    String resourceUrl = args[1];
    int warmUpRounds = Integer.parseInt(args[2]);
    int testRounds = Integer.parseInt(args[3]);
    File resultDir;
    if (args.length == 4) {
        resultDir = null;
    } else {
        resultDir = new File(args[4]);
        if (!resultDir.exists()) {
            if (!resultDir.mkdirs()) {
                throw new RuntimeException("Failed to create result directory: " + resultDir);
            }
        }
    }

    File[] queryFiles = queryDir.listFiles();
    assert queryFiles != null;
    Arrays.sort(queryFiles);

    try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
        HttpPost httpPost = new HttpPost(resourceUrl);
        httpPost.addHeader("content-type", "application/json");

        for (File queryFile : queryFiles) {
            StringBuilder stringBuilder = new StringBuilder();
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(queryFile))) {
                int length;
                while ((length = bufferedReader.read(CHAR_BUFFER)) > 0) {
                    stringBuilder.append(new String(CHAR_BUFFER, 0, length));
                }
            }
            String query = stringBuilder.toString();
            httpPost.setEntity(new StringEntity(query));

            System.out.println("--------------------------------------------------------------------------------");
            System.out.println("Running query: " + query);
            System.out.println("--------------------------------------------------------------------------------");

            // Warm-up rounds
            System.out.println("Run " + warmUpRounds + " times to warm up...");
            for (int i = 0; i < warmUpRounds; i++) {
                CloseableHttpResponse httpResponse = httpClient.execute(httpPost);
                httpResponse.close();
                System.out.print('*');
            }
            System.out.println();

            // Test rounds
            System.out.println("Run " + testRounds + " times to get response time statistics...");
            long[] responseTimes = new long[testRounds];
            long totalResponseTime = 0L;
            for (int i = 0; i < testRounds; i++) {
                long startTime = System.currentTimeMillis();
                CloseableHttpResponse httpResponse = httpClient.execute(httpPost);
                httpResponse.close();
                long responseTime = System.currentTimeMillis() - startTime;
                responseTimes[i] = responseTime;
                totalResponseTime += responseTime;
                System.out.print(responseTime + "ms ");
            }
            System.out.println();

            // Store result.
            if (resultDir != null) {
                File resultFile = new File(resultDir, queryFile.getName() + ".result");
                CloseableHttpResponse httpResponse = httpClient.execute(httpPost);
                try (BufferedInputStream bufferedInputStream = new BufferedInputStream(
                        httpResponse.getEntity().getContent());
                        BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(resultFile))) {
                    int length;
                    while ((length = bufferedInputStream.read(BYTE_BUFFER)) > 0) {
                        bufferedWriter.write(new String(BYTE_BUFFER, 0, length));
                    }
                }
                httpResponse.close();
            }

            // Process response times.
            double averageResponseTime = (double) totalResponseTime / testRounds;
            double temp = 0;
            for (long responseTime : responseTimes) {
                temp += (responseTime - averageResponseTime) * (responseTime - averageResponseTime);
            }
            double standardDeviation = Math.sqrt(temp / testRounds);
            System.out.println("Average response time: " + averageResponseTime + "ms");
            System.out.println("Standard deviation: " + standardDeviation);
        }
    }
}
From source file:com.github.fritaly.graphml4j.samples.GradleDependenciesWithGroups.java
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(String.format("%s <output-file>", GradleDependenciesWithGroups.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);
        graphWriter = new GraphMLWriter(fileWriter);

        // Customize the rendering of nodes
        final NodeStyle nodeStyle = graphWriter.getNodeStyle();
        nodeStyle.setWidth(250.0f);
        nodeStyle.setHeight(50.0f);

        graphWriter.setNodeStyle(nodeStyle);

        // The dependency graph has been generated by Gradle with the
        // command "gradle dependencies". The output of this command has
        // been saved to a text file which will be parsed to rebuild the
        // dependency graph
        reader = new InputStreamReader(
                GradleDependenciesWithGroups.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the artifacts per depth inside the dependency
        // graph (the topmost dependency is the first one in the stack)
        final Stack<Artifact> stack = new Stack<Artifact>();

        final Map<String, Set<Artifact>> artifactsByGroup = new HashMap<String, Set<Artifact>>();

        // List of parent/child relationships between artifacts
        final List<Relationship> relationships = new ArrayList<Relationship>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the
            // graph. The depth can be inferred from the indentation used by
            // Gradle. Each level of depth adds 5 more characters of
            // indentation
            final int initialLength = line.length();

            // Remove the strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Remove unnecessary artifacts
            while (depth <= stack.size()) {
                stack.pop();
            }

            // Create an artifact from the dependency (group, artifact,
            // version) tuple
            final Artifact artifact = createArtifact(line);

            stack.push(artifact);

            if (stack.size() > 1) {
                // Store the artifact and its parent
                relationships.add(new Relationship(stack.get(stack.size() - 2), artifact));
            }

            if (!artifactsByGroup.containsKey(artifact.group)) {
                artifactsByGroup.put(artifact.group, new HashSet<Artifact>());
            }

            artifactsByGroup.get(artifact.group).add(artifact);
        }

        // Open the graph
        graphWriter.graph();

        final Map<Artifact, String> nodeIdsByArtifact = new HashMap<Artifact, String>();

        // Loop over the groups and generate the associated nodes
        for (String group : artifactsByGroup.keySet()) {
            graphWriter.group(group, true);

            for (Artifact artifact : artifactsByGroup.get(group)) {
                final String nodeId = graphWriter.node(artifact.getLabel());

                nodeIdsByArtifact.put(artifact, nodeId);
            }

            graphWriter.closeGroup();
        }

        // Generate the edges
        for (Relationship relationship : relationships) {
            final String parentId = nodeIdsByArtifact.get(relationship.parent);
            final String childId = nodeIdsByArtifact.get(relationship.child);

            graphWriter.edge(parentId, childId);
        }

        // Close the graph
        graphWriter.closeGraph();

        System.out.println("Done");
    } finally {
        // Calling GraphMLWriter.close() is necessary to dispose the underlying resources
        graphWriter.close();
        fileWriter.close();
        lineReader.close();
        reader.close();
    }
}
From source file:com.twentyn.patentScorer.PatentScorer.java
public static void main(String[] args) throws Exception { System.out.println("Starting up..."); System.out.flush();/* ww w . j av a 2s. c om*/ Options opts = new Options(); opts.addOption(Option.builder("i").longOpt("input").hasArg().required() .desc("Input file or directory to score").build()); opts.addOption(Option.builder("o").longOpt("output").hasArg().required() .desc("Output file to which to write score JSON").build()); opts.addOption(Option.builder("h").longOpt("help").desc("Print this help message and exit").build()); opts.addOption(Option.builder("v").longOpt("verbose").desc("Print verbose log output").build()); HelpFormatter helpFormatter = new HelpFormatter(); CommandLineParser cmdLineParser = new DefaultParser(); CommandLine cmdLine = null; try { cmdLine = cmdLineParser.parse(opts, args); } catch (ParseException e) { System.out.println("Caught exception when parsing command line: " + e.getMessage()); helpFormatter.printHelp("DocumentIndexer", opts); System.exit(1); } if (cmdLine.hasOption("help")) { helpFormatter.printHelp("DocumentIndexer", opts); System.exit(0); } if (cmdLine.hasOption("verbose")) { // With help from http://stackoverflow.com/questions/23434252/programmatically-change-log-level-in-log4j2 LoggerContext ctx = (LoggerContext) LogManager.getContext(false); Configuration ctxConfig = ctx.getConfiguration(); LoggerConfig logConfig = ctxConfig.getLoggerConfig(LogManager.ROOT_LOGGER_NAME); logConfig.setLevel(Level.DEBUG); ctx.updateLoggers(); LOGGER.debug("Verbose logging enabled"); } String inputFileOrDir = cmdLine.getOptionValue("input"); File splitFileOrDir = new File(inputFileOrDir); if (!(splitFileOrDir.exists())) { LOGGER.error("Unable to find directory at " + inputFileOrDir); System.exit(1); } try (FileWriter writer = new FileWriter(cmdLine.getOptionValue("output"))) { PatentScorer scorer = new PatentScorer(PatentModel.getModel(), writer); PatentCorpusReader corpusReader = new PatentCorpusReader(scorer, splitFileOrDir); corpusReader.readPatentCorpus(); } }
From source file:be.ugent.maf.cellmissy.gui.controller.MSDGenerator.java
public static void main(String[] args) {
    // get the application context
    ApplicationContext context = ApplicationContextProvider.getInstance().getApplicationContext();
    // get the services we need
    ExperimentService experimentService = (ExperimentService) context.getBean("experimentService");
    ProjectService projectService = (ProjectService) context.getBean("projectService");
    WellService wellService = (WellService) context.getBean("wellService");
    SingleCellConditionPreProcessor singleCellConditionPreProcessor = (SingleCellConditionPreProcessor) context
            .getBean("singleCellConditionPreProcessor");
    SingleCellConditionOperator singleCellConditionOperator = (SingleCellConditionOperator) context
            .getBean("singleCellConditionOperator");
    // get all the experiments from DB
    Project project = projectService.findById(4L);
    List<Experiment> experiments = experimentService.findExperimentsByProjectId(project.getProjectid());
    // root folder
    File folder = new File("C:\\Users\\Paola\\Desktop\\benchmark\\cellmissy");
    for (Experiment experiment : experiments) {
        if (experiment.getExperimentNumber() == 1) {
            List<List<TrackDataHolder>> biologicalConditions = new ArrayList<>();
            double instrumentConversionFactor = experiment.getInstrument().getConversionFactor();
            double magnificationValue = experiment.getMagnification().getMagnificationValue();
            double conversionFactor = instrumentConversionFactor * magnificationValue / 10;
            // fetch the migration data
            System.out.println("fetching data for project: " + project + ", experiment: " + experiment + " ...");
            for (PlateCondition plateCondition : experiment.getPlateConditionList()) {
                List<Well> wells = new ArrayList<>();
                for (Well well : plateCondition.getWellList()) {
                    Well fetchedWell = wellService.fetchMigrationData(well.getWellid());
                    wells.add(fetchedWell);
                }
                plateCondition.setWellList(wells);
            }
            for (PlateCondition plateCondition : experiment.getPlateConditionList()) {
                // create a new object to hold pre-processing results
                SingleCellConditionDataHolder singleCellConditionDataHolder = new SingleCellConditionDataHolder(
                        plateCondition);
                System.out.println("****************computations started for condition: " + plateCondition);
                // do the computations
                singleCellConditionPreProcessor.generateDataHolders(singleCellConditionDataHolder);
                singleCellConditionPreProcessor.generateDataStructure(singleCellConditionDataHolder);
                singleCellConditionPreProcessor.preProcessStepsAndCells(singleCellConditionDataHolder,
                        conversionFactor, experiment.getExperimentInterval());
                singleCellConditionPreProcessor.generateRawTrackCoordinatesMatrix(singleCellConditionDataHolder);
                singleCellConditionPreProcessor.generateShiftedTrackCoordinatesMatrix(singleCellConditionDataHolder);
                singleCellConditionOperator.operateOnStepsAndCells(singleCellConditionDataHolder);
                List<TrackDataHolder> trackDataHolders = singleCellConditionDataHolder.getTrackDataHolders();
                biologicalConditions.add(trackDataHolders);
            }
            try (BufferedWriter bufferedWriter = new BufferedWriter(
                    new FileWriter(new File(folder, "bench_msd.txt")))) {
                // header of the file
                bufferedWriter.append("traj_id" + " " + "t_lag" + " " + "msd");
                bufferedWriter.newLine();
                for (List<TrackDataHolder> conditionTracks : biologicalConditions) {
                    for (TrackDataHolder trackDataHolder : conditionTracks) {
                        StepCentricDataHolder stepCentricDataHolder = trackDataHolder.getStepCentricDataHolder();
                        double[][] msd = stepCentricDataHolder.getMSD();
                        for (int i = 0; i < msd.length; i++) {
                            bufferedWriter.append("" + stepCentricDataHolder.getTrack().getTrackid());
                            bufferedWriter.append(" ");
                            bufferedWriter.append("" + msd[i][0]);
                            bufferedWriter.append(" ");
                            bufferedWriter.append("" + msd[i][1]);
                            bufferedWriter.newLine();
                        }
                    }
                }
                System.out.println("txt file succ. created!");
            } catch (IOException ex) {
            }
        }
    }
}
From source file:SentiWordNetDemoCode.java
public static void main(String[] args) throws IOException {
    if (args.length < 2) {
        System.err.println("Usage: java SentiWordNetDemoCode <pathToSentiWordNetFile> <outputFile>");
        return;
    }

    String pathToSWN = args[0];
    SentiWordNetDemoCode sentiwordnet = new SentiWordNetDemoCode(pathToSWN);

    JSONObject js = new JSONObject(sentiwordnet.dictionary);
    FileWriter csv = null;
    try {
        csv = new FileWriter(args[1]);
        csv.write(js.toJSONString());
    } catch (Exception e) {
        System.out.println("error");
        e.printStackTrace();
    } finally {
        if (csv != null) {
            csv.close();
        }
    }

    // System.out.println("good#a " + sentiwordnet.extract("good", "a"));
    // System.out.println("bad#a " + sentiwordnet.extract("bad", "a"));
    // System.out.println("blue#a " + sentiwordnet.extract("blue", "a"));
    // System.out.println("blue#n " + sentiwordnet.extract("blue", "n"));
}
From source file:com.act.lcms.db.io.report.IonAnalysisInterchangeModelOperations.java
public static void main(String[] args) throws IOException {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        LOGGER.error("Argument parsing failed: %s", e.getMessage());
        HELP_FORMATTER.printHelp(IonAnalysisInterchangeModelOperations.class.getCanonicalName(), HELP_MESSAGE,
                opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(IonAnalysisInterchangeModelOperations.class.getCanonicalName(), HELP_MESSAGE,
                opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption(OPTION_LOG_DISTRIBUTION)) {
        IonAnalysisInterchangeModel model = new IonAnalysisInterchangeModel();
        model.loadResultsFromFile(new File(cl.getOptionValue(OPTION_INPUT_FILE)));
        Map<Pair<Double, Double>, Integer> rangeToCount = model
                .computeLogFrequencyDistributionOfMoleculeCountToMetric(
                        IonAnalysisInterchangeModel.METRIC.valueOf(cl.getOptionValue(OPTION_LOG_DISTRIBUTION)));

        // Write to the file named by the output option (not to the option constant itself).
        try (BufferedWriter predictionWriter = new BufferedWriter(
                new FileWriter(new File(cl.getOptionValue(OPTION_OUTPUT_FILE))))) {
            for (Map.Entry<Pair<Double, Double>, Integer> entry : rangeToCount.entrySet()) {
                String value = String.format("%f,%d", entry.getKey().getLeft(), entry.getValue());
                predictionWriter.write(value);
                predictionWriter.newLine();
            }
        }
    }
}
From source file:com.jaeksoft.searchlib.util.StringUtils.java
public static void main(String[] args) throws IOException {
    List<String> lines = FileUtils.readLines(new File(args[0]));
    FileWriter fw = new FileWriter(new File(args[1]));
    PrintWriter pw = new PrintWriter(fw);
    for (String line : lines)
        pw.println(StringEscapeUtils.unescapeHtml(line));
    pw.close();
    fw.close();
}
From source file:com.act.lcms.db.io.ExportPlateCompositionFromDB.java
public static void main(String[] args) throws Exception {
    Options opts = new Options();
    opts.addOption(Option.builder("b").argName("barcode").desc("The barcode of the plate to print").hasArg()
            .longOpt("barcode").build());
    opts.addOption(Option.builder("n").argName("name").desc("The name of the plate to print").hasArg()
            .longOpt("name").build());
    opts.addOption(Option.builder("o").argName("output file").desc(
            "An output file to which to write this plate's composition table (writes to stdout if omitted)")
            .hasArg().longOpt("output-file").build());

    // DB connection options.
    opts.addOption(Option.builder().argName("database url")
            .desc("The url to use when connecting to the LCMS db").hasArg().longOpt("db-url").build());
    opts.addOption(Option.builder("u").argName("database user").desc("The LCMS DB user").hasArg()
            .longOpt("db-user").build());
    opts.addOption(Option.builder("p").argName("database password").desc("The LCMS DB password").hasArg()
            .longOpt("db-pass").build());
    opts.addOption(Option.builder("H").argName("database host")
            .desc(String.format("The LCMS DB host (default = %s)", DB.DEFAULT_HOST)).hasArg().longOpt("db-host")
            .build());
    opts.addOption(Option.builder("P").argName("database port")
            .desc(String.format("The LCMS DB port (default = %d)", DB.DEFAULT_PORT)).hasArg().longOpt("db-port")
            .build());
    opts.addOption(Option.builder("N").argName("database name")
            .desc(String.format("The LCMS DB name (default = %s)", DB.DEFAULT_DB_NAME)).hasArg()
            .longOpt("db-name").build());

    // Everybody needs a little help from their friends.
    opts.addOption(Option.builder("h").argName("help").desc("Prints this help message").longOpt("help").build());

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        return;
    }

    if (!cl.hasOption("b") && !cl.hasOption("n")) {
        System.err.format("Must specify either plate barcode or plate name.");
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    DB db = null;
    try {
        if (cl.hasOption("db-url")) {
            db = new DB().connectToDB(cl.getOptionValue("db-url"));
        } else {
            Integer port = null;
            if (cl.getOptionValue("P") != null) {
                port = Integer.parseInt(cl.getOptionValue("P"));
            }
            db = new DB().connectToDB(cl.getOptionValue("H"), port, cl.getOptionValue("N"),
                    cl.getOptionValue("u"), cl.getOptionValue("p"));
        }

        Writer writer = null;
        if (cl.hasOption("o")) {
            writer = new FileWriter(cl.getOptionValue("o"));
        } else {
            writer = new OutputStreamWriter(System.out);
        }

        PlateCompositionWriter cw = new PlateCompositionWriter();
        if (cl.hasOption("b")) {
            cw.writePlateCompositionByBarcode(db, cl.getOptionValue("b"), writer);
        } else if (cl.hasOption("n")) {
            cw.writePlateCompositionByName(db, cl.getOptionValue("n"), writer);
        }

        writer.close();
    } finally {
        if (db != null) {
            db.close();
        }
    }
}