List of usage examples for java.lang.String.replace
public String replace(CharSequence target, CharSequence replacement)
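Before the individual examples, a minimal sketch of this overload's contract: both arguments are treated as literal character sequences (unlike replaceAll, no regular expressions are involved), every occurrence of the target is replaced, and a new String is returned because strings are immutable. The class name below is just for illustration.

public class ReplaceDemo {
    public static void main(String[] args) {
        String path = "a.b.c";
        // The target is matched literally, so "." is a plain dot here, not a regex wildcard.
        System.out.println(path.replace(".", "/")); // a/b/c
        // replace returns a new String; the original is left unchanged.
        System.out.println(path); // a.b.c
    }
}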
From source file:com.milaboratory.mitcr.cli.Main.java
public static void main(String[] args) { int o = 0; BuildInformation buildInformation = BuildInformationProvider.get(); final boolean isProduction = "default".equals(buildInformation.scmBranch); // buildInformation.version != null && buildInformation.version.lastIndexOf("SNAPSHOT") < 0; orderingMap.put(PARAMETERS_SET_OPTION, o++); orderingMap.put(SPECIES_OPTION, o++); orderingMap.put(GENE_OPTION, o++); orderingMap.put(ERROR_CORECTION_LEVEL_OPTION, o++); orderingMap.put(QUALITY_THRESHOLD_OPTION, o++); orderingMap.put(AVERAGE_QUALITY_OPTION, o++); orderingMap.put(LQ_OPTION, o++); orderingMap.put(CLUSTERIZATION_OPTION, o++); orderingMap.put(INCLUDE_CYS_PHE_OPTION, o++); orderingMap.put(LIMIT_OPTION, o++); orderingMap.put(EXPORT_OPTION, o++); orderingMap.put(REPORT_OPTION, o++); orderingMap.put(REPORTING_LEVEL_OPTION, o++); orderingMap.put(PHRED33_OPTION, o++); orderingMap.put(PHRED64_OPTION, o++); orderingMap.put(THREADS_OPTION, o++); orderingMap.put(COMPRESSED_OPTION, o++); orderingMap.put(PRINT_HELP_OPTION, o++); orderingMap.put(PRINT_VERSION_OPTION, o++); orderingMap.put(PRINT_DEBUG_OPTION, o++); options.addOption(OptionBuilder.withArgName("preset name").hasArg() .withDescription("preset of pipeline parameters to use").create(PARAMETERS_SET_OPTION)); options.addOption(OptionBuilder.withArgName("species").hasArg() .withDescription("overrides species ['hs' for Homo sapiens, 'mm' for Mus musculus] " + "(default for built-in presets is 'hs')") .create(SPECIES_OPTION)); options.addOption(OptionBuilder.withArgName("gene").hasArg() .withDescription("overrides gene: TRB or TRA (default value for built-in parameter sets is TRB)") .create(GENE_OPTION)); options.addOption(OptionBuilder.withArgName("0|1|2").hasArg() .withDescription( "overrides error correction level (0 = don't correct errors, 1 = correct sequencing " + "errors only (see -" + QUALITY_THRESHOLD_OPTION + " and -" + LQ_OPTION + " options for details), " + "2 = also correct PCR errors (see -" + CLUSTERIZATION_OPTION + " option)") .create(ERROR_CORECTION_LEVEL_OPTION)); options.addOption(OptionBuilder.withArgName("value").hasArg().withDescription( "overrides quality threshold value for segment alignment and bad quality sequences " + "correction algorithms. 0 tells the program not to process quality information. (default is 25)") .create(QUALITY_THRESHOLD_OPTION)); if (!isProduction) options.addOption(OptionBuilder.hasArg(false) .withDescription("use this option to output average instead of " + "maximal quality for CDR3 nucleotide sequences. (Experimental option, use with caution.)") .create(AVERAGE_QUALITY_OPTION)); options.addOption(OptionBuilder.withArgName("map | drop").hasArg() .withDescription("overrides low quality CDR3s processing strategy (drop = filter off, " + "map = map onto clonotypes created from the high quality CDR3s). This option makes no difference if " + "quality threshold (-" + QUALITY_THRESHOLD_OPTION + " option) is set to 0, or error correction " + "level (-" + ERROR_CORECTION_LEVEL_OPTION + ") is 0.") .create(LQ_OPTION)); options.addOption(OptionBuilder.withArgName("smd | ete").hasArg() .withDescription("overrides the PCR error correction algorithm: smd = \"save my diversity\", " + "ete = \"eliminate these errors\". 
Default value for built-in parameters is ete.") .create(CLUSTERIZATION_OPTION)); options.addOption(OptionBuilder.withArgName("0|1").hasArg() .withDescription("overrides whether to include bounding Cys & Phe into CDR3 sequence") .create(INCLUDE_CYS_PHE_OPTION)); options.addOption( OptionBuilder.withArgName("# of reads").hasArg() .withDescription("limits the number of input sequencing reads, use this parameter to " + "normalize several datasets or to have a glance at the data") .create(LIMIT_OPTION)); options.addOption(OptionBuilder.withArgName("new name").hasArg() .withDescription("use this option to export presets to a local xml file").create(EXPORT_OPTION)); options.addOption(OptionBuilder.withArgName("file name").hasArg() .withDescription("use this option to write analysis report (summary) to file") .create(REPORT_OPTION)); options.addOption(OptionBuilder.withArgName("1|2|3").hasArg(true) .withDescription("output detalization level (1 = simple, 2 = medium, 3 = full, this format " + "could be deserialized using mitcr API). Affects only tab-delimited output. Default value is 3.") .create(REPORTING_LEVEL_OPTION)); options.addOption(OptionBuilder.hasArg(false).withDescription( "add this option if input file is in old illumina format with 64 byte offset for quality " + "string (MiTCR will try to automatically detect file format if one of the \"-phredXX\" options is not provided)") .create(PHRED64_OPTION)); options.addOption(OptionBuilder.hasArg(false) .withDescription("add this option if input file is in Phred+33 format for quality values " + "(MiTCR will try to automatically detect file format if one of the \"-phredXX\" options is not provided)") .create(PHRED33_OPTION)); options.addOption(OptionBuilder.withArgName("threads").hasArg() .withDescription( "specifies the number of CDR3 extraction threads (default = number of available CPU cores)") .create(THREADS_OPTION)); if (!isProduction) options.addOption(OptionBuilder.hasArg(false) .withDescription("use compressed data structures for storing individual " + "clone segments statistics (from which arises the clone segment information). This option reduces required " + "amount of memory, but introduces small stochastic errors into the algorithm which determines clone " + "segments. 
(Experimental option, use with caution.)") .create(COMPRESSED_OPTION)); options.addOption( OptionBuilder.hasArg(false).withDescription("print this message").create(PRINT_HELP_OPTION)); options.addOption(OptionBuilder.hasArg(false).withDescription("print version information") .create(PRINT_VERSION_OPTION)); options.addOption(OptionBuilder.hasArg(false) .withDescription("print additional information about analysis process").create(PRINT_DEBUG_OPTION)); PosixParser parser = new PosixParser(); try { long input_limit = -1; int threads = Runtime.getRuntime().availableProcessors(); int reporting_level = 3; int ec_level = 2; CommandLine cl = parser.parse(options, args, true); if (cl.hasOption(PRINT_HELP_OPTION)) { printHelp(); return; } boolean averageQuality = cl.hasOption(AVERAGE_QUALITY_OPTION), compressedAggregators = cl.hasOption(COMPRESSED_OPTION); if (cl.hasOption(PRINT_VERSION_OPTION)) { System.out.println("MiTCR by MiLaboratory, version: " + buildInformation.version); System.out.println("Branch: " + buildInformation.scmBranch); System.out.println("Built: " + buildInformation.buildDate + ", " + buildInformation.jdk + " JDK, " + "build machine: " + buildInformation.builtBy); System.out.println("SCM changeset: " + buildInformation.scmChangeset + " (" + buildInformation.scmDate.replace("\"", "") + ")"); return; } //Normal execution String paramName = cl.getOptionValue(PARAMETERS_SET_OPTION); if (paramName == null) { err.println("No parameters set is specified."); return; } Parameters params = ParametersIO.getParameters(paramName); if (params == null) { err.println("No parameters set found with name '" + paramName + "'."); return; } String value; if ((value = cl.getOptionValue(THREADS_OPTION)) != null) threads = Integer.decode(value); if ((value = cl.getOptionValue(REPORTING_LEVEL_OPTION)) != null) reporting_level = Integer.decode(value); if ((value = cl.getOptionValue(LIMIT_OPTION)) != null) input_limit = Long.decode(value); if ((value = cl.getOptionValue(GENE_OPTION)) != null) params.setGene(Gene.fromXML(value)); if ((value = cl.getOptionValue(SPECIES_OPTION)) != null) params.setSpecies(Species.getFromShortName(value)); if ((value = cl.getOptionValue(INCLUDE_CYS_PHE_OPTION)) != null) { if (value.equals("1")) params.getCDR3ExtractorParameters().setIncludeCysPhe(true); else if (value.equals("0")) params.getCDR3ExtractorParameters().setIncludeCysPhe(false); else { err.println("Illegal value for -" + INCLUDE_CYS_PHE_OPTION + " parameter."); return; } } if ((value = cl.getOptionValue(ERROR_CORECTION_LEVEL_OPTION)) != null) { int v = Integer.decode(value); ec_level = v; if (v == 0) { params.setCloneGeneratorParameters(new BasicCloneGeneratorParameters()); params.setClusterizationType(CloneClusterizationType.None); } else if (v == 1) { params.setCloneGeneratorParameters(new LQMappingCloneGeneratorParameters()); params.setClusterizationType(CloneClusterizationType.None); } else if (v == 2) { params.setCloneGeneratorParameters(new LQMappingCloneGeneratorParameters()); params.setClusterizationType(CloneClusterizationType.OneMismatch, .1f); } else throw new RuntimeException("This (" + v + ") error correction level is not supported."); } if ((value = cl.getOptionValue(QUALITY_THRESHOLD_OPTION)) != null) { int v = Integer.decode(value); if (v == 0) params.setQualityInterpretationStrategy(new DummyQualityInterpretationStrategy()); else params.setQualityInterpretationStrategy(new IlluminaQualityInterpretationStrategy((byte) v)); } if ((value = cl.getOptionValue(LQ_OPTION)) != null) if (ec_level > 0) switch 
(value) { case "map": params.setCloneGeneratorParameters(new LQMappingCloneGeneratorParameters( ((BasicCloneGeneratorParameters) params.getCloneGeneratorParameters()) .getSegmentInformationAggregationFactor(), 3, true)); break; case "drop": params.setCloneGeneratorParameters(new LQFilteringOffCloneGeneratorParameters( ((BasicCloneGeneratorParameters) params.getCloneGeneratorParameters()) .getSegmentInformationAggregationFactor())); break; default: throw new RuntimeException("Wrong value for -" + LQ_OPTION + " option."); } if ((value = cl.getOptionValue(CLUSTERIZATION_OPTION)) != null) if (ec_level > 1) // == 2 switch (value) { case "smd": params.setClusterizationType(CloneClusterizationType.V2D1J2T3Explicit); break; case "ete": params.setClusterizationType(CloneClusterizationType.OneMismatch); break; default: throw new RuntimeException("Wrong value for -" + CLUSTERIZATION_OPTION + " option."); } ((BasicCloneGeneratorParameters) params.getCloneGeneratorParameters()) .setAccumulatorType(AccumulatorType.get(compressedAggregators, averageQuality)); if ((value = cl.getOptionValue(EXPORT_OPTION)) != null) { //Exporting parameters ParametersIO.exportParameters(params, value); return; } String[] offArgs = cl.getArgs(); if (offArgs.length == 0) { err.println("Input file not specified."); return; } else if (offArgs.length == 1) { err.println("Output file not specified."); return; } else if (offArgs.length > 2) { err.println("Unrecognized argument."); return; } String inputFileName = offArgs[0]; String outputFileName = offArgs[1]; File input = new File(inputFileName); if (!input.exists()) { err.println("Input file not found."); return; } //TODO This also done inside SFastqReader constructor CompressionType compressionType = CompressionType.None; if (inputFileName.endsWith(".gz")) compressionType = CompressionType.GZIP; QualityFormat format = null; // If variable remains null file format will be detected automatically if (cl.hasOption(PHRED33_OPTION)) format = QualityFormat.Phred33; if (cl.hasOption(PHRED64_OPTION)) if (format == null) format = QualityFormat.Phred64; else { err.println( "Options: -" + PHRED33_OPTION + " and -" + PHRED64_OPTION + " are mutually exclusive"); return; } SFastqReader reads = format == null ? 
new SFastqReader(input, compressionType) : new SFastqReader(input, format, compressionType); OutputPort<SSequencingRead> inputToPipeline = reads; if (input_limit >= 0) inputToPipeline = new CountLimitingOutputPort<>(inputToPipeline, input_limit); SegmentLibrary library = DefaultSegmentLibrary.load(); AnalysisStatisticsAggregator statisticsAggregator = new AnalysisStatisticsAggregator(); FullPipeline pipeline = new FullPipeline(inputToPipeline, params, false, library); pipeline.setThreads(threads); pipeline.setAnalysisListener(statisticsAggregator); new Thread(new SmartProgressReporter(pipeline, err)).start(); // Printing status to the standard error stream pipeline.run(); if (cl.hasOption(PRINT_DEBUG_OPTION)) { err.println("Memory = " + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory())); err.println("Clusterization: " + pipeline.getQC().getReadsClusterized() + "% of reads, " + pipeline.getQC().getClonesClusterized() + " % clones"); } CloneSetClustered cloneSet = pipeline.getResult(); if ((value = cl.getOptionValue(REPORT_OPTION)) != null) { File file = new File(value); TablePrintStreamAdapter table; if (file.exists()) table = new TablePrintStreamAdapter(new FileOutputStream(file, true)); else { table = new TablePrintStreamAdapter(file); ReportExporter.printHeader(table); } //CloneSetQualityControl qc = new CloneSetQualityControl(library, params.getSpecies(), params.getGene(), cloneSet); ReportExporter.printRow(table, inputFileName, outputFileName, pipeline.getQC(), statisticsAggregator); table.close(); } if (outputFileName.endsWith(".cls")) ClsExporter.export(pipeline, outputFileName.replace(".cls", "") + " " + new Date().toString(), input.getName(), outputFileName); else { //Dry run if (outputFileName.startsWith("-")) return; ExportDetalizationLevel detalization = ExportDetalizationLevel.fromLevel(reporting_level); CompressionType compressionType1 = CompressionType.None; if (outputFileName.endsWith(".gz")) compressionType1 = CompressionType.GZIP; CloneSetIO.exportCloneSet(outputFileName, cloneSet, detalization, params, input.getAbsolutePath(), compressionType1); } } catch (ParseException | RuntimeException | IOException e) { err.println("Error occurred in the analysis pipeline."); err.println(); e.printStackTrace(); //printHelp(); } }
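In the long example above, replace appears twice: buildInformation.scmDate.replace("\"", "") strips the double quotes around the SCM date before it is printed, and outputFileName.replace(".cls", "") drops the .cls suffix when building the export title. A small sketch of just those two calls, with made-up input values:

String scmDate = "\"2012-09-12 14:03:21\""; // hypothetical quoted date string
System.out.println(scmDate.replace("\"", "")); // 2012-09-12 14:03:21
String outputFileName = "sample.cls"; // hypothetical output file name
String title = outputFileName.replace(".cls", "") + " " + new java.util.Date();
System.out.println(title); // e.g. "sample Tue Mar 05 ..."

Note that replace removes every occurrence of ".cls", not only a trailing extension, which is harmless here only because the name is already known to end with .cls.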
From source file:io.apicurio.studio.tools.release.ReleaseTool.java
/** * Main method. * @param args */ public static void main(String[] args) throws Exception { Options options = new Options(); options.addOption("n", "release-name", true, "The name of the new release."); options.addOption("p", "prerelease", false, "Indicate that this is a pre-release."); options.addOption("t", "release-tag", true, "The tag name of the new release."); options.addOption("o", "previous-tag", true, "The tag name of the previous release."); options.addOption("g", "github-pat", true, "The GitHub PAT (for authentication/authorization)."); options.addOption("a", "artifact", true, "The binary release artifact (full path)."); options.addOption("d", "output-directory", true, "Where to store output file(s)."); CommandLineParser parser = new DefaultParser(); CommandLine cmd = parser.parse(options, args); if (!cmd.hasOption("n") || !cmd.hasOption("t") || !cmd.hasOption("o") || !cmd.hasOption("g") || !cmd.hasOption("a")) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("release-studio", options); System.exit(1); } // Arguments (command line) String releaseName = cmd.getOptionValue("n"); boolean isPrerelease = cmd.hasOption("p"); String releaseTag = cmd.getOptionValue("t"); String oldReleaseTag = cmd.getOptionValue("o"); String githubPAT = cmd.getOptionValue("g"); String artifact = cmd.getOptionValue("a"); File outputDir = new File(""); if (cmd.hasOption("d")) { outputDir = new File(cmd.getOptionValue("d")); if (!outputDir.exists()) { outputDir.mkdirs(); } } File releaseArtifactFile = new File(artifact); File releaseArtifactSigFile = new File(artifact + ".asc"); String releaseArtifact = releaseArtifactFile.getName(); String releaseArtifactSig = releaseArtifactSigFile.getName(); if (!releaseArtifactFile.isFile()) { System.err.println("Missing file: " + releaseArtifactFile.getAbsolutePath()); System.exit(1); } if (!releaseArtifactSigFile.isFile()) { System.err.println("Missing file: " + releaseArtifactSigFile.getAbsolutePath()); System.exit(1); } System.out.println("========================================="); System.out.println("Creating Release: " + releaseTag); System.out.println("Previous Release: " + oldReleaseTag); System.out.println(" Name: " + releaseName); System.out.println(" Artifact: " + releaseArtifact); System.out.println(" Pre-Release: " + isPrerelease); System.out.println("========================================="); String releaseNotes = ""; // Step #1 - Generate Release Notes // * Grab info about the previous release (extract publish date) // * Query all Issues for ones closed since that date // * Generate Release Notes from the resulting Issues try { System.out.println("Getting info about release " + oldReleaseTag); HttpResponse<JsonNode> response = Unirest .get("https://api.github.com/repos/apicurio/apicurio-studio/releases/tags/v" + oldReleaseTag) .header("Accept", "application/json").header("Authorization", "token " + githubPAT).asJson(); if (response.getStatus() != 200) { throw new Exception("Failed to get old release info: " + response.getStatusText()); } JsonNode body = response.getBody(); String publishedDate = body.getObject().getString("published_at"); if (publishedDate == null) { throw new Exception("Could not find Published Date for previous release " + oldReleaseTag); } System.out.println("Release " + oldReleaseTag + " was published on " + publishedDate); List<JSONObject> issues = getIssuesForRelease(publishedDate, githubPAT); System.out.println("Found " + issues.size() + " issues closed in release " + 
releaseTag); System.out.println("Generating Release Notes"); releaseNotes = generateReleaseNotes(releaseName, releaseTag, issues); System.out.println("------------ Release Notes --------------"); System.out.println(releaseNotes); System.out.println("-----------------------------------------"); } catch (Exception e) { e.printStackTrace(); System.exit(1); } String assetUploadUrl = null; // Step #2 - Create a GitHub Release try { System.out.println("\nCreating GitHub Release " + releaseTag); JSONObject body = new JSONObject(); body.put("tag_name", "v" + releaseTag); body.put("name", releaseName); body.put("body", releaseNotes); body.put("prerelease", isPrerelease); HttpResponse<JsonNode> response = Unirest .post("https://api.github.com/repos/apicurio/apicurio-studio/releases") .header("Accept", "application/json").header("Content-Type", "application/json") .header("Authorization", "token " + githubPAT).body(body).asJson(); if (response.getStatus() != 201) { throw new Exception("Failed to create release in GitHub: " + response.getStatusText()); } assetUploadUrl = response.getBody().getObject().getString("upload_url"); if (assetUploadUrl == null || assetUploadUrl.trim().isEmpty()) { throw new Exception("Failed to get Asset Upload URL for newly created release!"); } } catch (Exception e) { e.printStackTrace(); System.exit(1); } // Step #3 - Upload Release Artifact (zip file) System.out.println("\nUploading Quickstart Artifact: " + releaseArtifact); try { String artifactUploadUrl = createUploadUrl(assetUploadUrl, releaseArtifact); byte[] artifactData = loadArtifactData(releaseArtifactFile); System.out.println("Uploading artifact asset: " + artifactUploadUrl); HttpResponse<JsonNode> response = Unirest.post(artifactUploadUrl).header("Accept", "application/json") .header("Content-Type", "application/zip").header("Authorization", "token " + githubPAT) .body(artifactData).asJson(); if (response.getStatus() != 201) { throw new Exception("Failed to upload asset: " + releaseArtifact, new Exception(response.getStatus() + "::" + response.getStatusText())); } Thread.sleep(1000); artifactUploadUrl = createUploadUrl(assetUploadUrl, releaseArtifactSig); artifactData = loadArtifactData(releaseArtifactSigFile); System.out.println("Uploading artifact asset: " + artifactUploadUrl); response = Unirest.post(artifactUploadUrl).header("Accept", "application/json") .header("Content-Type", "text/plain").header("Authorization", "token " + githubPAT) .body(artifactData).asJson(); if (response.getStatus() != 201) { throw new Exception("Failed to upload asset: " + releaseArtifactSig, new Exception(response.getStatus() + "::" + response.getStatusText())); } } catch (Exception e) { e.printStackTrace(); System.exit(1); } Thread.sleep(1000); // Step #4 - Download Latest Release JSON for inclusion in the project web site try { System.out.println("Getting info about the release."); HttpResponse<JsonNode> response = Unirest .get("https://api.github.com/repos/apicurio/apicurio-studio/releases/latest") .header("Accept", "application/json").asJson(); if (response.getStatus() != 200) { throw new Exception("Failed to get release info: " + response.getStatusText()); } JsonNode body = response.getBody(); String publishedDate = body.getObject().getString("published_at"); if (publishedDate == null) { throw new Exception("Could not find Published Date for release."); } String fname = publishedDate.replace(':', '-'); File outFile = new File(outputDir, fname + ".json"); System.out.println("Writing latest release info to: " + 
outFile.getAbsolutePath()); String output = body.getObject().toString(4); try (FileOutputStream fos = new FileOutputStream(outFile)) { fos.write(output.getBytes("UTF-8")); fos.flush(); } System.out.println("Release info successfully written."); } catch (Exception e) { e.printStackTrace(); System.exit(1); } System.out.println("========================================="); System.out.println("All Done!"); System.out.println("========================================="); }
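The only replace call in this example, publishedDate.replace(':', '-'), uses the replace(char, char) overload rather than the CharSequence one listed at the top of the page; it makes the ISO-8601 timestamp safe to use as a file name. A sketch with a hypothetical timestamp:

String publishedDate = "2019-03-07T18:42:00Z"; // hypothetical "published_at" value
String fname = publishedDate.replace(':', '-'); // colons are not allowed in Windows file names
System.out.println(fname + ".json"); // 2019-03-07T18-42-00Z.json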
From source file:Main.java
public static String encodeUri(String v) { return v.replace(" ", "%20"); }
From source file:Main.java
static String httpDecode(String s) { s = s.replace("\r\n", "<br/>"); return Html.fromHtml(s).toString(); }
From source file:Main.java
public static String formatString(String str) { return str.replace("'", "''"); }
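This is the usual single-quote doubling applied before embedding a value in a SQL string literal. A short usage sketch, assuming the method above is in scope (the query and values are made up); for real applications a parameterized query is the safer choice.

String name = "O'Brien";
String sql = "SELECT * FROM users WHERE name = '" + formatString(name) + "'";
System.out.println(sql); // SELECT * FROM users WHERE name = 'O''Brien'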
From source file:Main.java
private static String unescapeHtml(String str) { return str.replace("&quot;", "\\\""); }
From source file:Main.java
public static String removeUnUseChars(String str) { return str.replace("\\", "").replace("\"{", "{").replace("}\"", "}"); }
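The chained calls are hard to read through the escaping: the first drops every backslash, and the next two strip the quotes that wrap an embedded JSON object. A hypothetical before/after, assuming the method above is in scope:

// A JSON object that was serialized as an escaped string value:
String wrapped = "\"{\\\"id\\\":1}\""; // holds: "{\"id\":1}"
System.out.println(removeUnUseChars(wrapped)); // {"id":1}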
From source file:Main.java
public static String trimString(String str) { return str.replace("]", "").replace("[", "").trim(); }
From source file:Main.java
public static String replaceBareLfWithCrlf(String str) { return str.replace("\r", "").replace("\n", "\r\n"); }
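The two calls work together: stripping existing carriage returns first lets the second call safely turn every remaining line feed into CRLF, so mixed or bare line endings all come out as CRLF (the form protocols such as SMTP expect). A quick sketch with a made-up mixed-ending input, assuming the method above is in scope:

String mixed = "a\nb\r\nc"; // a bare LF followed by a CRLF
String normalized = replaceBareLfWithCrlf(mixed); // "a\r\nb\r\nc"
System.out.println(normalized.replace("\r\n", "[CRLF]")); // a[CRLF]b[CRLF]c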
From source file:Main.java
public static String breakLine(String text) { return text.replace("\\n", System.getProperty("line.separator")); }
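Here the target "\\n" is the two-character sequence backslash + n, not a newline, so this converts escaped newlines (as found in serialized text or property files, for example) into real platform line separators; actual newline characters in the input are untouched. A small sketch, assuming the method above is in scope:

String serialized = "first\\nsecond"; // holds: first\nsecond (a backslash followed by 'n')
System.out.println(breakLine(serialized)); // prints "first" and "second" on separate lines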