Usage examples for java.util.List.get(int index)
E get(int index);
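Before the real-world examples below, here is a minimal self-contained sketch (not taken from any of the source files that follow; class and variable names are illustrative) of what get(int index) does: it returns the element at a zero-based position and throws IndexOutOfBoundsException when the index is outside 0..size()-1.

import java.util.ArrayList;
import java.util.List;

public class ListGetBasics {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>();
        names.add("alpha");
        names.add("beta");
        names.add("gamma");

        String first = names.get(0);                // "alpha"
        String last = names.get(names.size() - 1);  // "gamma"
        System.out.println(first + ", " + last);

        // Accessing an index >= size() (or < 0) throws IndexOutOfBoundsException:
        // names.get(3); // would throw
    }
}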
From source file:com.era7.bioinfo.annotation.AutomaticQualityControl.java
public static void main(String[] args) {
    if (args.length != 4) {
        System.out.println("This program expects four parameters: \n"
                + "1. Gene annotation XML filename \n"
                + "2. Reference protein set (.fasta)\n"
                + "3. Output TXT filename\n"
                + "4. Initial Blast XML results filename (the one used at the very beginning of the semiautomatic annotation process)\n");
    } else {
        BufferedWriter outBuff = null;
        try {
            File inFile = new File(args[0]);
            File fastaFile = new File(args[1]);
            File outFile = new File(args[2]);
            File blastFile = new File(args[3]);

            // First, load all the data from the blast XML file
            BufferedReader buffReader = new BufferedReader(new FileReader(blastFile));
            StringBuilder stBuilder = new StringBuilder();
            String line = null;
            while ((line = buffReader.readLine()) != null) {
                stBuilder.append(line);
            }
            buffReader.close();
            System.out.println("Creating blastoutput...");
            BlastOutput blastOutput = new BlastOutput(stBuilder.toString());
            System.out.println("BlastOutput created! :)");
            stBuilder.delete(0, stBuilder.length());

            HashMap<String, String> blastProteinsMap = new HashMap<String, String>();
            ArrayList<Iteration> iterations = blastOutput.getBlastOutputIterations();
            for (Iteration iteration : iterations) {
                blastProteinsMap.put(iteration.getQueryDef().split("\\|")[1].trim(), iteration.toString());
            }
            // freeing some memory
            blastOutput = null;

            //------------------------------------------------------------------------
            // Initializing writer for output file
            outBuff = new BufferedWriter(new FileWriter(outFile));

            // reading gene annotation xml file.....
            buffReader = new BufferedReader(new FileReader(inFile));
            stBuilder = new StringBuilder();
            line = null;
            while ((line = buffReader.readLine()) != null) {
                stBuilder.append(line);
            }
            buffReader.close();
            XMLElement genesXML = new XMLElement(stBuilder.toString());
            // freeing some memory I don't need anymore
            stBuilder.delete(0, stBuilder.length());

            // reading file with the reference proteins set
            ArrayList<String> proteinsReferenceSet = new ArrayList<String>();
            buffReader = new BufferedReader(new FileReader(fastaFile));
            while ((line = buffReader.readLine()) != null) {
                if (line.charAt(0) == '>') {
                    proteinsReferenceSet.add(line.split("\\|")[1]);
                }
            }
            buffReader.close();

            Element pGenes = genesXML.asJDomElement().getChild(PredictedGenes.TAG_NAME);
            List<Element> contigs = pGenes.getChildren(ContigXML.TAG_NAME);
            System.out.println("There are " + contigs.size() + " contigs to be checked... ");
            outBuff.write("There are " + contigs.size() + " contigs to be checked... \n");
            outBuff.write("Proteins reference set: \n");
            for (String st : proteinsReferenceSet) {
                outBuff.write(st + ",");
            }
            outBuff.write("\n");

            for (Element elem : contigs) {
                ContigXML contig = new ContigXML(elem);

                // write the id of the contig currently being checked
                outBuff.write("Checking contig: " + contig.getId() + "\n");
                outBuff.flush();

                List<XMLElement> geneList = contig.getChildrenWith(PredictedGene.TAG_NAME);
                System.out.println("geneList.size() = " + geneList.size());
                int numeroDeGenesParaAnalizar = geneList.size() / FACTOR;
                if (numeroDeGenesParaAnalizar == 0) {
                    numeroDeGenesParaAnalizar++;
                }
                ArrayList<Integer> indicesUtilizados = new ArrayList<Integer>();
                outBuff.write("\nThe contig has " + geneList.size() + " predicted genes, let's analyze: "
                        + numeroDeGenesParaAnalizar + "\n");

                for (int j = 0; j < numeroDeGenesParaAnalizar; j++) {
                    int geneIndex;
                    boolean geneIsDismissed = false;
                    do {
                        geneIsDismissed = false;
                        geneIndex = (int) Math.round(Math.floor(Math.random() * geneList.size()));
                        PredictedGene tempGene = new PredictedGene(geneList.get(geneIndex).asJDomElement());
                        if (tempGene.getStatus().equals(PredictedGene.STATUS_DISMISSED)) {
                            geneIsDismissed = true;
                        }
                    } while (indicesUtilizados.contains(new Integer(geneIndex)) && geneIsDismissed);
                    indicesUtilizados.add(geneIndex);
                    System.out.println("geneIndex = " + geneIndex);

                    // Now get the gene corresponding to that index and run the quality control on it
                    PredictedGene gene = new PredictedGene(geneList.get(geneIndex).asJDomElement());
                    outBuff.write("\nAnalyzing gene with id: " + gene.getId() + " , annotation uniprot id: "
                            + gene.getAnnotationUniprotId() + "\n");
                    outBuff.write("eValue: " + gene.getEvalue() + "\n");

                    //-------------- BLAST HTTP POST REQUEST ----------------------
                    PostMethod post = new PostMethod(BLAST_URL);
                    post.addParameter("program", "blastx");
                    post.addParameter("sequence", gene.getSequence());
                    post.addParameter("database", "uniprotkb");
                    post.addParameter("email", "ppareja@era7.com");
                    post.addParameter("exp", "1e-10");
                    post.addParameter("stype", "dna");

                    // execute the POST
                    HttpClient client = new HttpClient();
                    int status = client.executeMethod(post);
                    System.out.println("status post = " + status);

                    InputStream inStream = post.getResponseBodyAsStream();
                    String fileName = "jobid.txt";
                    FileOutputStream outStream = new FileOutputStream(new File(fileName));
                    byte[] buffer = new byte[1024];
                    int len;
                    while ((len = inStream.read(buffer)) != -1) {
                        outStream.write(buffer, 0, len);
                    }
                    outStream.close();

                    // Once the file is created I just have to read one line in order to extract the job id
                    buffReader = new BufferedReader(new FileReader(new File(fileName)));
                    String jobId = buffReader.readLine();
                    buffReader.close();
                    System.out.println("jobId = " + jobId);

                    //-------------- HTTP CHECK JOB STATUS REQUEST ----------------------
                    GetMethod get = new GetMethod(CHECK_JOB_STATUS_URL + jobId);
                    String jobStatus = "";
                    do {
                        try {
                            Thread.sleep(1000); // sleep for 1000 ms
                        } catch (InterruptedException ie) {
                            // If this thread was interrupted by another thread
                        }
                        status = client.executeMethod(get);
                        //System.out.println("status get = " + status);
                        inStream = get.getResponseBodyAsStream();
                        fileName = "jobStatus.txt";
                        outStream = new FileOutputStream(new File(fileName));
                        while ((len = inStream.read(buffer)) != -1) {
                            outStream.write(buffer, 0, len);
                        }
                        outStream.close();
                        // Once the file is created I just have to read one line in order to extract the job status
                        buffReader = new BufferedReader(new FileReader(new File(fileName)));
                        jobStatus = buffReader.readLine();
                        //System.out.println("jobStatus = " + jobStatus);
                        buffReader.close();
                    } while (!jobStatus.equals(FINISHED_JOB_STATUS));
                    // Once I'm here the blast should've already finished

                    //-------------- JOB RESULTS HTTP REQUEST ----------------------
                    get = new GetMethod(JOB_RESULT_URL + jobId + "/out");
                    status = client.executeMethod(get);
                    System.out.println("status get = " + status);
                    inStream = get.getResponseBodyAsStream();
                    fileName = "jobResults.txt";
                    outStream = new FileOutputStream(new File(fileName));
                    while ((len = inStream.read(buffer)) != -1) {
                        outStream.write(buffer, 0, len);
                    }
                    outStream.close();

                    //-------- parsing the blast results file -----
                    TreeSet<GeneEValuePair> featuresBlast = new TreeSet<GeneEValuePair>();
                    buffReader = new BufferedReader(new FileReader(new File(fileName)));
                    while ((line = buffReader.readLine()) != null) {
                        if (line.length() > 3) {
                            String prefix = line.substring(0, 3);
                            if (prefix.equals("TR:") || prefix.equals("SP:")) {
                                String[] columns = line.split(" ");
                                String id = columns[1];
                                //System.out.println("id = " + id);
                                String e = "";
                                String[] arraySt = line.split("\\.\\.\\.");
                                if (arraySt.length > 1) {
                                    arraySt = arraySt[1].trim().split(" ");
                                    int contador = 0;
                                    for (int k = 0; k < arraySt.length && contador <= 2; k++) {
                                        String string = arraySt[k];
                                        if (!string.equals("")) {
                                            contador++;
                                            if (contador == 2) {
                                                e = string;
                                            }
                                        }
                                    }
                                } else {
                                    // Number before e-
                                    String[] arr = arraySt[0].split("e-")[0].split(" ");
                                    String numeroAntesE = arr[arr.length - 1];
                                    String numeroDespuesE = arraySt[0].split("e-")[1].split(" ")[0];
                                    e = numeroAntesE + "e-" + numeroDespuesE;
                                }
                                double eValue = Double.parseDouble(e);
                                //System.out.println("eValue = " + eValue);
                                GeneEValuePair g = new GeneEValuePair(id, eValue);
                                featuresBlast.add(g);
                            }
                        }
                    }

                    GeneEValuePair currentGeneEValuePair = new GeneEValuePair(gene.getAnnotationUniprotId(),
                            gene.getEvalue());
                    System.out.println("currentGeneEValuePair.id = " + currentGeneEValuePair.id);
                    System.out.println("currentGeneEValuePair.eValue = " + currentGeneEValuePair.eValue);
                    boolean blastContainsGene = false;
                    for (GeneEValuePair geneEValuePair : featuresBlast) {
                        if (geneEValuePair.id.equals(currentGeneEValuePair.id)) {
                            blastContainsGene = true;
                            // use the e-value it has in the wu-blast result so the two can be compared
                            currentGeneEValuePair.eValue = geneEValuePair.eValue;
                            break;
                        }
                    }

                    if (blastContainsGene) {
                        outBuff.write("The protein was found in the WU-BLAST result.. \n");
                        // Once we know it is in the blast result, check whether it is the best one
                        GeneEValuePair first = featuresBlast.first();
                        outBuff.write("Protein with best eValue according to the WU-BLAST result: " + first.id
                                + " , " + first.eValue + "\n");
                        if (first.id.equals(currentGeneEValuePair.id)) {
                            outBuff.write("Proteins with best eValue match up \n");
                        } else {
                            if (first.eValue == currentGeneEValuePair.eValue) {
                                outBuff.write(
                                        "The one with best eValue is not the same protein but has the same eValue \n");
                            } else if (first.eValue > currentGeneEValuePair.eValue) {
                                outBuff.write(
                                        "The one with best eValue is not the same protein but has a worse eValue :) \n");
                            } else {
                                outBuff.write(
                                        "The best protein from BLAST has an eValue smaller than ours, checking if it's part of the reference set...\n");
                                //System.exit(-1);
                                if (proteinsReferenceSet.contains(first.id)) {
                                    // The protein is in the reference set and that shouldn't happen
                                    outBuff.write(
                                            "The protein was found on the reference set, checking if it belongs to the same contig...\n");
                                    String iterationSt = blastProteinsMap.get(gene.getAnnotationUniprotId());
                                    if (iterationSt != null) {
                                        outBuff.write(
                                                "The protein was found in the BLAST used at the beginning of the annotation process.\n");
                                        Iteration iteration = new Iteration(iterationSt);
                                        ArrayList<Hit> hits = iteration.getIterationHits();
                                        boolean contigFound = false;
                                        Hit errorHit = null;
                                        for (Hit hit : hits) {
                                            if (hit.getHitDef().indexOf(contig.getId()) >= 0) {
                                                contigFound = true;
                                                errorHit = hit;
                                                break;
                                            }
                                        }
                                        if (contigFound) {
                                            outBuff.write(
                                                    "ERROR: A hit from the same contig was find in the Blast file: \n"
                                                            + errorHit.toString() + "\n");
                                        } else {
                                            outBuff.write("There is no hit with the same contig! :)\n");
                                        }
                                    } else {
                                        outBuff.write(
                                                "The protein is NOT in the BLAST used at the beginning of the annotation process.\n");
                                    }
                                } else {
                                    // The protein was not found on the reference set so everything's ok
                                    outBuff.write(
                                            "The protein was not found on the reference, everything's ok :)\n");
                                }
                            }
                        }
                    } else {
                        outBuff.write("The protein was NOT found on the WU-BLAST !! :( \n");
                        //System.exit(-1);
                    }
                }
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            try {
                // closing output file
                outBuff.close();
            } catch (IOException ex) {
                Logger.getLogger(AutomaticQualityControl.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }
}
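The get(int index) calls that matter in this example are geneList.get(geneIndex), where geneIndex is a random position into the list of predicted genes. Below is a stripped-down sketch of that random-pick pattern; it uses plain strings instead of the project's PredictedGene/XMLElement types, so the names and data are illustrative only.

import java.util.ArrayList;
import java.util.List;

public class RandomPickExample {
    public static void main(String[] args) {
        List<String> genes = new ArrayList<>();
        genes.add("gene-A");
        genes.add("gene-B");
        genes.add("gene-C");

        // Pick a random valid index and fetch the element with List.get(int)
        int index = (int) Math.floor(Math.random() * genes.size());
        String picked = genes.get(index); // index is always in [0, size() - 1]
        System.out.println("picked = " + picked);
    }
}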
From source file:com.music.tools.ScaleTester.java
public static void main(String[] args) {
    System.out.println(
            "Usage: java ScaleTester <fundamental frequency> <chromatic scale size> <scale size> <use ET>");
    final AudioFormat af = new AudioFormat(sampleRate, 16, 1, true, true);
    try {
        fundamentalFreq = getArgument(args, 0, FUNDAMENTAL_FREQUENCY, Double.class);
        int pitchesInChromaticScale = getArgument(args, 1, CHROMATIC_SCALE_SILZE, Integer.class);

        List<Double> harmonicFrequencies = new ArrayList<>();
        List<String> ratios = new ArrayList<>();
        Set<Double> frequencies = new HashSet<Double>();
        frequencies.add(fundamentalFreq);
        int octaveMultiplier = 2;
        for (int i = 2; i < 100; i++) {
            // Exclude the 7th harmonic TODO exclude the 11th as well?
            // http://www.phy.mtu.edu/~suits/badnote.html
            if (i % 7 == 0) {
                continue;
            }
            double actualFreq = fundamentalFreq * i;
            double closestTonicRatio = actualFreq / (fundamentalFreq * octaveMultiplier);
            if (closestTonicRatio < 1 || closestTonicRatio > 2) {
                octaveMultiplier *= 2;
            }
            double closestTonic = actualFreq - actualFreq % (fundamentalFreq * octaveMultiplier);
            double normalizedFreq = fundamentalFreq * (actualFreq / closestTonic);
            harmonicFrequencies.add(actualFreq);
            frequencies.add(normalizedFreq);
            if (frequencies.size() == pitchesInChromaticScale) {
                break;
            }
        }
        System.out.println("Harmonic (overtone) frequencies: " + harmonicFrequencies);
        System.out.println("Transposed harmonic frequencies: " + frequencies);

        List<Double> chromaticScale = new ArrayList<>(frequencies);
        Collections.sort(chromaticScale);

        // find the "perfect" interval (e.g. perfect fifth)
        int perfectIntervalIndex = 0;
        int idx = 0;
        for (Iterator<Double> it = chromaticScale.iterator(); it.hasNext();) {
            Double noteFreq = it.next();
            long[] fraction = findCommonFraction(noteFreq / fundamentalFreq);
            fractionCache.put(noteFreq, fraction);
            if (fraction[0] == 3 && fraction[1] == 2) {
                perfectIntervalIndex = idx;
                System.out.println("Perfect interval (3/2) idx: " + perfectIntervalIndex);
            }
            idx++;
            ratios.add(Arrays.toString(fraction));
        }
        System.out.println("Ratios to fundemental frequency: " + ratios);

        if (getBooleanArgument(args, 4, USE_ET)) {
            chromaticScale = temper(chromaticScale);
        }
        System.out.println();
        System.out.println("Chromatic scale: " + chromaticScale);

        Set<Double> scaleSet = new HashSet<Double>();
        scaleSet.add(chromaticScale.get(0));
        idx = 0;
        List<Double> orderedInCircle = new ArrayList<>();
        // now go around the circle of perfect intervals and put the notes in order
        while (orderedInCircle.size() < chromaticScale.size()) {
            orderedInCircle.add(chromaticScale.get(idx));
            idx += perfectIntervalIndex;
            idx = idx % chromaticScale.size();
        }
        System.out.println("Pitches Ordered in circle of perfect intervals: " + orderedInCircle);

        List<Double> scale = new ArrayList<Double>(scaleSet);
        int currentIdxInCircle = orderedInCircle.size() - 1; // start with the last note in the circle
        int scaleSize = getArgument(args, 3, SCALE_SIZE, Integer.class);
        while (scale.size() < scaleSize) {
            double pitch = orderedInCircle.get(currentIdxInCircle % orderedInCircle.size());
            if (!scale.contains(pitch)) {
                scale.add(pitch);
            }
            currentIdxInCircle++;
        }
        Collections.sort(scale);
        System.out.println("Scale: " + scale);

        SourceDataLine line = AudioSystem.getSourceDataLine(af);
        line.open(af);
        line.start();
        Double[] scaleFrequencies = scale.toArray(new Double[scale.size()]);
        // first play the whole scale
        WaveMelodyGenerator.playScale(line, scaleFrequencies);
        // then generate a random melody in the scale
        WaveMelodyGenerator.playMelody(line, scaleFrequencies);
        line.drain();
        line.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
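Here chromaticScale.get(idx) and orderedInCircle.get(currentIdxInCircle % orderedInCircle.size()) walk a list with a wrapping index. A minimal sketch of that cyclic-access pattern (list contents, stride, and names are illustrative, not from the project):

import java.util.Arrays;
import java.util.List;

public class CircularGetExample {
    public static void main(String[] args) {
        List<String> notes = Arrays.asList("C", "G", "D", "A", "E");

        // Step through the list with a fixed stride, wrapping with % so get()
        // never sees an index outside [0, size() - 1]
        int idx = 0;
        int stride = 3;
        for (int step = 0; step < notes.size(); step++) {
            System.out.println(notes.get(idx));
            idx = (idx + stride) % notes.size();
        }
    }
}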
From source file:com.act.lcms.db.io.ExportStandardIonResultsFromDB.java
public static void main(String[] args) throws Exception { Options opts = new Options(); for (Option.Builder b : OPTION_BUILDERS) { opts.addOption(b.build());/*from w ww.j a va 2 s. co m*/ } CommandLine cl = null; try { CommandLineParser parser = new DefaultParser(); cl = parser.parse(opts, args); } catch (ParseException e) { System.err.format("Argument parsing failed: %s\n", e.getMessage()); HELP_FORMATTER.printHelp(ExportStandardIonResultsFromDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); System.exit(1); } if (cl.hasOption("help")) { HELP_FORMATTER.printHelp(ExportStandardIonResultsFromDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); return; } try (DB db = DB.openDBFromCLI(cl)) { List<String> chemicalNames = new ArrayList<>(); if (cl.hasOption(OPTION_CONSTRUCT)) { // Extract the chemicals in the pathway and their product masses, then look up info on those chemicals List<Pair<ChemicalAssociatedWithPathway, Double>> productMasses = Utils .extractMassesForChemicalsAssociatedWithConstruct(db, cl.getOptionValue(OPTION_CONSTRUCT)); for (Pair<ChemicalAssociatedWithPathway, Double> pair : productMasses) { chemicalNames.add(pair.getLeft().getChemical()); } } if (cl.hasOption(OPTION_CHEMICALS)) { chemicalNames.addAll(Arrays.asList(cl.getOptionValues(OPTION_CHEMICALS))); } if (chemicalNames.size() == 0) { System.err.format("No chemicals can be found from the input query.\n"); System.exit(-1); } List<String> standardIonHeaderFields = new ArrayList<String>() { { add(STANDARD_ION_HEADER_FIELDS.CHEMICAL.name()); add(STANDARD_ION_HEADER_FIELDS.BEST_ION_FROM_ALGO.name()); add(STANDARD_ION_HEADER_FIELDS.MANUAL_PICK.name()); add(STANDARD_ION_HEADER_FIELDS.AUTHOR.name()); add(STANDARD_ION_HEADER_FIELDS.DIAGNOSTIC_PLOTS.name()); add(STANDARD_ION_HEADER_FIELDS.NOTE.name()); } }; String outAnalysis; if (cl.hasOption(OPTION_OUTPUT_PREFIX)) { outAnalysis = cl.getOptionValue(OPTION_OUTPUT_PREFIX) + "." + TSV_FORMAT; } else { outAnalysis = String.join("-", chemicalNames) + "." + TSV_FORMAT; } File lcmsDir = new File(cl.getOptionValue(OPTION_DIRECTORY)); if (!lcmsDir.isDirectory()) { System.err.format("File at %s is not a directory\n", lcmsDir.getAbsolutePath()); HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); System.exit(1); } String plottingDirectory = cl.getOptionValue(OPTION_PLOTTING_DIR); TSVWriter<String, String> resultsWriter = new TSVWriter<>(standardIonHeaderFields); resultsWriter.open(new File(outAnalysis)); // For each chemical, create a TSV row and a corresponding diagnostic plot for (String chemicalName : chemicalNames) { List<String> graphLabels = new ArrayList<>(); List<Double> yMaxList = new ArrayList<>(); String outData = plottingDirectory + "/" + chemicalName + ".data"; String outImg = plottingDirectory + "/" + chemicalName + ".pdf"; // For each diagnostic plot, open a new file stream. try (FileOutputStream fos = new FileOutputStream(outData)) { List<StandardIonResult> getResultByChemicalName = StandardIonResult.getByChemicalName(db, chemicalName); if (getResultByChemicalName != null && getResultByChemicalName.size() > 0) { // PART 1: Get the best metlin ion across all standard ion results for a given chemical String bestGlobalMetlinIon = AnalysisHelper .scoreAndReturnBestMetlinIonFromStandardIonResults(getResultByChemicalName, new HashMap<>(), true, true); // PART 2: Plot all the graphs related to the chemical. 
The plots are structured as follows: // // Page 1: All graphs (water, MeOH, Yeast) for Global ion picked (best ion among ALL standard ion runs for // the given chemical) by the algorithm // Page 2: All graphs for M+H // Page 3: All graphs for Local ions picked (best ion within a SINGLE standard ion run) + negative controls // for Yeast. // // Each page is demarcated by a blank graph. // Arrange results based on media Map<String, List<StandardIonResult>> categories = StandardIonResult .categorizeListOfStandardWellsByMedia(db, getResultByChemicalName); // This set contains all the best metlin ions corresponding to all the standard ion runs. Set<String> bestLocalIons = new HashSet<>(); bestLocalIons.add(bestGlobalMetlinIon); bestLocalIons.add(DEFAULT_ION); for (StandardIonResult result : getResultByChemicalName) { bestLocalIons.add(result.getBestMetlinIon()); } // We sort the best local ions are follows: // 1) Global best ion spectra 2) M+H spectra 3) Local best ion spectra List<String> bestLocalIonsArray = new ArrayList<>(bestLocalIons); Collections.sort(bestLocalIonsArray, new Comparator<String>() { @Override public int compare(String o1, String o2) { if (o1.equals(bestGlobalMetlinIon) && !o2.equals(bestGlobalMetlinIon)) { return -1; } else if (o1.equals(DEFAULT_ION) && !o2.equals(bestGlobalMetlinIon)) { return -1; } else { return 1; } } }); // This variable stores the index of the array at which all the remaining spectra are contained in one // page. This happens right after the M+H ion spectra. Integer combineAllSpectraIntoPageThreeFromIndex = 0; for (int i = 0; i < bestLocalIonsArray.size(); i++) { if (bestLocalIonsArray.get(i).equals(DEFAULT_ION)) { combineAllSpectraIntoPageThreeFromIndex = i + 1; } } for (int i = 0; i < bestLocalIonsArray.size(); i++) { String ion = bestLocalIonsArray.get(i); for (Map.Entry<String, List<StandardIonResult>> mediaToListOfIonResults : categories .entrySet()) { for (StandardIonResult result : mediaToListOfIonResults.getValue()) { // For every standard ion result, we plot the best global metlin ion and M+H. These plots are in the // pages 1 and 2. For all page 3 (aka miscellaneous spectra), we only plot the best local ion // corresponding to it's spectra and not some other graph's spectra. In the below condition, // we reach the page 3 case with not the same best ion as the spectra, in which case we just continue // and not draw anything on the page. if (i >= combineAllSpectraIntoPageThreeFromIndex && !(result.getBestMetlinIon().equals(ion))) { continue; } StandardWell positiveWell = StandardWell.getInstance().getById(db, result.getStandardWellId()); String positiveControlChemical = positiveWell.getChemical(); ScanData<StandardWell> encapsulatedDataForPositiveControl = AnalysisHelper .getScanDataForWell(db, lcmsDir, positiveWell, positiveControlChemical, positiveControlChemical); Set<String> singletonSet = Collections.singleton(ion); String additionalInfo = generateAdditionalLabelInformation(positiveWell, result, ion); List<String> labels = AnalysisHelper .writeScanData(fos, lcmsDir, MAX_INTENSITY, encapsulatedDataForPositiveControl, false, false, singletonSet) .stream().map(label -> label + additionalInfo) .collect(Collectors.toList()); yMaxList.add(encapsulatedDataForPositiveControl.getMs1ScanResults() .getMaxIntensityForIon(ion)); List<String> negativeLabels = null; // Only do the negative control in the miscellaneous page (page 3) and if the well is in yeast media. 
if (mediaToListOfIonResults.getKey() .equals(StandardWell.MEDIA_TYPE.YEAST.name()) && (i >= combineAllSpectraIntoPageThreeFromIndex && (result.getBestMetlinIon().equals(ion)))) { //TODO: Change the representative negative well to one that displays the highest noise in the future. // For now, we just use the first index among the negative wells. int representativeIndex = 0; StandardWell representativeNegativeControlWell = StandardWell.getInstance() .getById(db, result.getNegativeWellIds().get(representativeIndex)); ScanData encapsulatedDataForNegativeControl = AnalysisHelper .getScanDataForWell(db, lcmsDir, representativeNegativeControlWell, positiveWell.getChemical(), representativeNegativeControlWell.getChemical()); String negativePlateAdditionalInfo = generateAdditionalLabelInformation( representativeNegativeControlWell, null, null); negativeLabels = AnalysisHelper.writeScanData(fos, lcmsDir, MAX_INTENSITY, encapsulatedDataForNegativeControl, false, false, singletonSet) .stream().map(label -> label + negativePlateAdditionalInfo) .collect(Collectors.toList()); yMaxList.add(encapsulatedDataForNegativeControl.getMs1ScanResults() .getMaxIntensityForIon(ion)); } graphLabels.addAll(labels); if (negativeLabels != null) { graphLabels.addAll(negativeLabels); } } } // Add a blank graph to demarcate pages. if (i < combineAllSpectraIntoPageThreeFromIndex) { graphLabels.addAll(AnalysisHelper.writeScanData(fos, lcmsDir, 0.0, BLANK_SCAN, false, false, new HashSet<>())); yMaxList.add(0.0d); } } // We need to pass the yMax values as an array to the Gnuplotter. Double fontScale = null; if (cl.hasOption(FONT_SCALE)) { try { fontScale = Double.parseDouble(cl.getOptionValue(FONT_SCALE)); } catch (IllegalArgumentException e) { System.err.format("Argument for font-scale must be a floating point number.\n"); System.exit(1); } } Double[] yMaxes = yMaxList.toArray(new Double[yMaxList.size()]); Gnuplotter plotter = fontScale == null ? new Gnuplotter() : new Gnuplotter(fontScale); plotter.plot2D(outData, outImg, graphLabels.toArray(new String[graphLabels.size()]), "time", null, "intensity", "pdf", null, null, yMaxes, outImg + ".gnuplot"); Map<String, String> row = new HashMap<>(); row.put(STANDARD_ION_HEADER_FIELDS.CHEMICAL.name(), chemicalName); row.put(STANDARD_ION_HEADER_FIELDS.BEST_ION_FROM_ALGO.name(), bestGlobalMetlinIon); row.put(STANDARD_ION_HEADER_FIELDS.DIAGNOSTIC_PLOTS.name(), outImg); resultsWriter.append(row); resultsWriter.flush(); } } } resultsWriter.flush(); resultsWriter.close(); } }
From source file:edu.msu.cme.rdp.probematch.cli.PrimerMatch.java
public static void main(String[] args) throws Exception {
    PrintStream out = new PrintStream(System.out);
    int maxDist = Integer.MAX_VALUE;

    try {
        CommandLine line = new PosixParser().parse(options, args);

        if (line.hasOption("outFile")) {
            out = new PrintStream(new File(line.getOptionValue("outFile")));
        }
        if (line.hasOption("maxDist")) {
            maxDist = Integer.valueOf(line.getOptionValue("maxDist"));
        }

        args = line.getArgs();
        if (args.length != 2) {
            throw new Exception("Unexpected number of command line arguments");
        }
    } catch (Exception e) {
        System.err.println("Error: " + e.getMessage());
        new HelpFormatter().printHelp("PrimerMatch <primer_list | primer_file> <seq_file>", options);
        return;
    }

    List<PatternBitMask64> primers = new ArrayList();
    if (new File(args[0]).exists()) {
        File primerFile = new File(args[0]);
        SequenceFormat seqformat = SeqUtils.guessFileFormat(primerFile);

        if (seqformat.equals(SequenceFormat.FASTA)) {
            SequenceReader reader = new SequenceReader(primerFile);
            Sequence seq;
            while ((seq = reader.readNextSequence()) != null) {
                primers.add(new PatternBitMask64(seq.getSeqString(), true, seq.getSeqName()));
            }
            reader.close();
        } else {
            BufferedReader reader = new BufferedReader(new FileReader(args[0]));
            String line;
            while ((line = reader.readLine()) != null) {
                line = line.trim();
                if (!line.equals("")) {
                    primers.add(new PatternBitMask64(line, true));
                }
            }
            reader.close();
        }
    } else {
        for (String primer : args[0].split(",")) {
            primers.add(new PatternBitMask64(primer, true));
        }
    }

    SeqReader seqReader = new SequenceReader(new File(args[1]));
    Sequence seq;
    String primerRegion;

    out.println("#seqname\tdesc\tprimer_index\tprimer_name\tposition\tmismatches\tseq_primer_region");
    while ((seq = seqReader.readNextSequence()) != null) {
        for (int index = 0; index < primers.size(); index++) {
            PatternBitMask64 primer = primers.get(index);
            BitVector64Result results = BitVector64.process(seq.getSeqString().toCharArray(), primer, maxDist);

            for (BitVector64Match result : results.getResults()) {
                primerRegion = seq.getSeqString().substring(
                        Math.max(0, result.getPosition() - primer.getPatternLength()), result.getPosition());

                if (result.getPosition() < primer.getPatternLength()) {
                    for (int pad = result.getPosition(); pad < primer.getPatternLength(); pad++) {
                        primerRegion = "x" + primerRegion;
                    }
                }

                out.println(seq.getSeqName() + "\t" + seq.getDesc() + "\t" + (index + 1) + "\t"
                        + primer.getPrimerName() + "\t" + result.getPosition() + "\t" + result.getScore() + "\t"
                        + primerRegion);
            }
        }
    }
    out.close();
    seqReader.close();
}
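This example uses the classic indexed loop, primers.get(index), because the 1-based position is printed next to the element. The same pattern in isolation (names and data are illustrative):

import java.util.Arrays;
import java.util.List;

public class IndexedLoopExample {
    public static void main(String[] args) {
        List<String> primers = Arrays.asList("ACGT", "TTGACA", "TATAAT");

        // Iterate by index when the position (index + 1 here) is needed alongside the element
        for (int index = 0; index < primers.size(); index++) {
            String primer = primers.get(index);
            System.out.println((index + 1) + "\t" + primer);
        }
    }
}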
From source file:com.px100systems.data.utility.RestoreUtility.java
public static void main(String[] args) {
    if (args.length < 3) {
        System.err.println("Usage: java -cp ... com.px100systems.data.utility.RestoreUtility "
                + "<springXmlConfigFile> <persisterBeanName> <backupDirectory> [compare]");
        return;
    }

    FileSystemXmlApplicationContext ctx = new FileSystemXmlApplicationContext("file:" + args[0]);
    try {
        PersistenceProvider persister = ctx.getBean(args[1], PersistenceProvider.class);

        File directory = new File(args[2]);
        if (!directory.isDirectory()) {
            System.err.println(directory.getName() + " is not a directory");
            return;
        }

        List<File> files = new ArrayList<File>();
        //noinspection ConstantConditions
        for (File file : directory.listFiles())
            if (BackupFile.isBackup(file))
                files.add(file);

        if (files.isEmpty()) {
            System.err.println(directory.getName() + " directory has no backup files");
            return;
        }

        if (args.length == 4 && args[3].equalsIgnoreCase("compare")) {
            final Map<String, Map<Long, RawRecord>> units = new HashMap<String, Map<Long, RawRecord>>();

            for (String storage : persister.storage()) {
                System.out.println("Storage " + storage);
                persister.loadByStorage(storage, new PersistenceProvider.LoadCallback() {
                    @Override
                    public void process(RawRecord record) {
                        Map<Long, RawRecord> unitList = units.get(record.getUnitName());
                        if (unitList == null) {
                            unitList = new HashMap<Long, RawRecord>();
                            units.put(record.getUnitName(), unitList);
                        }
                        unitList.put(record.getId(), record);
                    }
                });

                for (final Map.Entry<String, Map<Long, RawRecord>> unit : units.entrySet()) {
                    BackupFile file = null;
                    for (int i = 0, n = files.size(); i < n; i++)
                        if (BackupFile.isBackup(files.get(i), unit.getKey())) {
                            file = new BackupFile(files.get(i));
                            files.remove(i);
                            break;
                        }
                    if (file == null)
                        throw new RuntimeException("Could not find backup file for unit " + unit.getKey());

                    final Long[] count = new Long[] { 0L };
                    file.read(new PersistenceProvider.LoadCallback() {
                        @Override
                        public void process(RawRecord record) {
                            RawRecord r = unit.getValue().get(record.getId());
                            if (r == null)
                                throw new RuntimeException("Could not find persisted record " + record.getId()
                                        + " for unit " + unit.getKey());
                            if (!r.equals(record))
                                throw new RuntimeException(
                                        "Record " + record.getId() + " mismatch for unit " + unit.getKey());
                            count[0] = count[0] + 1;
                        }
                    });

                    if (count[0] != unit.getValue().size())
                        throw new RuntimeException("Extra persisted records for unit " + unit.getKey());
                    System.out.println(" Unit " + unit.getKey() + ": OK");
                }

                units.clear();
            }

            if (!files.isEmpty()) {
                System.err.println("Extra backups: ");
                for (File file : files)
                    System.err.println(" " + file.getName());
            }
        } else {
            persister.init();
            for (File file : files) {
                InMemoryDatabase.readBackupFile(file, persister);
                System.out.println("Loaded " + file.getName());
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        ctx.close();
    }
}
From source file:com.adobe.aem.demomachine.RegExp.java
public static void main(String[] args) throws IOException {
    String fileName = null;
    String regExp = null;
    String position = null;
    String value = "n/a";
    List<String> allMatches = new ArrayList<String>();

    // Command line options for this tool
    Options options = new Options();
    options.addOption("f", true, "Filename");
    options.addOption("r", true, "RegExp");
    options.addOption("p", true, "Position");
    CommandLineParser parser = new BasicParser();
    try {
        CommandLine cmd = parser.parse(options, args);
        if (cmd.hasOption("f")) {
            fileName = cmd.getOptionValue("f");
        }
        if (cmd.hasOption("r")) {
            regExp = cmd.getOptionValue("r");
        }
        if (cmd.hasOption("p")) {
            position = cmd.getOptionValue("p");
        }
        if (fileName == null || regExp == null || position == null) {
            System.out.println("Command line parameters: -f fileName -r regExp -p position");
            System.exit(-1);
        }
    } catch (ParseException ex) {
        logger.error(ex.getMessage());
    }

    String content = readFile(fileName, Charset.defaultCharset());
    if (content != null) {
        Matcher m = Pattern.compile(regExp).matcher(content);
        while (m.find()) {
            String group = m.group();
            int pos = group.indexOf(".zip");
            if (pos > 0) {
                group = group.substring(0, pos);
            }
            logger.debug("RegExp: " + m.group() + " found returning " + group);
            allMatches.add(group);
        }
        if (allMatches.size() > 0) {
            if (position.equals("first")) {
                value = allMatches.get(0);
            }
            if (position.equals("last")) {
                value = allMatches.get(allMatches.size() - 1);
            }
        }
    }
    System.out.println(value);
}
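This example collects all regex matches into a list and then selects the first or last one with allMatches.get(0) and allMatches.get(allMatches.size() - 1). The same first/last pattern in isolation (data is illustrative):

import java.util.Arrays;
import java.util.List;

public class FirstLastExample {
    public static void main(String[] args) {
        List<String> matches = Arrays.asList("build-1.zip", "build-2.zip", "build-3.zip");

        if (!matches.isEmpty()) {
            String first = matches.get(0);                  // first match
            String last = matches.get(matches.size() - 1);  // last match
            System.out.println("first=" + first + " last=" + last);
        }
    }
}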
From source file:com.cloudera.oryx.app.traffic.TrafficUtil.java
public static void main(String[] args) throws Exception {
    if (args.length < 3) {
        System.err.println("usage: TrafficUtil [hosts] [requestIntervalMS] [threads] [... other args]");
        return;
    }

    String[] hostStrings = COMMA.split(args[0]);
    Preconditions.checkArgument(hostStrings.length >= 1);
    int requestIntervalMS = Integer.parseInt(args[1]);
    Preconditions.checkArgument(requestIntervalMS >= 0);
    int numThreads = Integer.parseInt(args[2]);
    Preconditions.checkArgument(numThreads >= 1);

    String[] otherArgs = new String[args.length - 3];
    System.arraycopy(args, 3, otherArgs, 0, otherArgs.length);

    List<URI> hosts = Arrays.stream(hostStrings).map(URI::create).collect(Collectors.toList());

    int perClientRequestIntervalMS = numThreads * requestIntervalMS;

    Endpoints alsEndpoints = new Endpoints(ALSEndpoint.buildALSEndpoints());
    AtomicLong requestCount = new AtomicLong();
    AtomicLong serverErrorCount = new AtomicLong();
    AtomicLong clientErrorCount = new AtomicLong();
    AtomicLong exceptionCount = new AtomicLong();

    long start = System.currentTimeMillis();
    ExecUtils.doInParallel(numThreads, numThreads, true, i -> {
        RandomGenerator random = RandomManager.getRandom(Integer.toString(i).hashCode() ^ System.nanoTime());

        ExponentialDistribution msBetweenRequests;
        if (perClientRequestIntervalMS > 0) {
            msBetweenRequests = new ExponentialDistribution(random, perClientRequestIntervalMS);
        } else {
            msBetweenRequests = null;
        }

        ClientConfig clientConfig = new ClientConfig();
        PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager();
        connectionManager.setMaxTotal(numThreads);
        connectionManager.setDefaultMaxPerRoute(numThreads);
        clientConfig.property(ApacheClientProperties.CONNECTION_MANAGER, connectionManager);
        clientConfig.connectorProvider(new ApacheConnectorProvider());
        Client client = ClientBuilder.newClient(clientConfig);

        try {
            while (true) {
                try {
                    WebTarget target = client.target("http://" + hosts.get(random.nextInt(hosts.size())));
                    Endpoint endpoint = alsEndpoints.chooseEndpoint(random);
                    Invocation invocation = endpoint.makeInvocation(target, otherArgs, random);

                    long startTime = System.currentTimeMillis();
                    Response response = invocation.invoke();
                    try {
                        response.readEntity(String.class);
                    } finally {
                        response.close();
                    }
                    long elapsedMS = System.currentTimeMillis() - startTime;

                    int statusCode = response.getStatusInfo().getStatusCode();
                    if (statusCode >= 400) {
                        if (statusCode >= 500) {
                            serverErrorCount.incrementAndGet();
                        } else {
                            clientErrorCount.incrementAndGet();
                        }
                    }

                    endpoint.recordTiming(elapsedMS);

                    if (requestCount.incrementAndGet() % 10000 == 0) {
                        long elapsed = System.currentTimeMillis() - start;
                        log.info("{}ms:\t{} requests\t({} client errors\t{} server errors\t{} exceptions)",
                                elapsed, requestCount.get(), clientErrorCount.get(), serverErrorCount.get(),
                                exceptionCount.get());
                        for (Endpoint e : alsEndpoints.getEndpoints()) {
                            log.info("{}", e);
                        }
                    }

                    if (msBetweenRequests != null) {
                        int desiredElapsedMS = (int) Math.round(msBetweenRequests.sample());
                        if (elapsedMS < desiredElapsedMS) {
                            Thread.sleep(desiredElapsedMS - elapsedMS);
                        }
                    }
                } catch (Exception e) {
                    exceptionCount.incrementAndGet();
                    log.warn("{}", e.getMessage());
                }
            }
        } finally {
            client.close();
        }
    });
}
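Here hosts.get(random.nextInt(hosts.size())) picks a random list element; nextInt(size()) always yields a valid index. A minimal sketch of that pattern, using java.util.Random in place of the Commons Math RandomGenerator used above (names and host strings are illustrative):

import java.util.Arrays;
import java.util.List;
import java.util.Random;

public class RandomElementExample {
    public static void main(String[] args) {
        List<String> hosts = Arrays.asList("host-a:8080", "host-b:8080", "host-c:8080");
        Random random = new Random();

        // nextInt(size) returns a value in [0, size - 1], so get() never goes out of bounds
        String host = hosts.get(random.nextInt(hosts.size()));
        System.out.println("sending request to " + host);
    }
}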
From source file:DcmQR.java
@SuppressWarnings("unchecked") public static void main(String[] args) { CommandLine cl = parse(args);//from w ww . ja v a 2s . c o m DcmQR dcmqr = new DcmQR(); final List<String> argList = cl.getArgList(); String remoteAE = argList.get(0); String[] calledAETAddress = split(remoteAE, '@'); dcmqr.setCalledAET(calledAETAddress[0], cl.hasOption("reuseassoc")); if (calledAETAddress[1] == null) { dcmqr.setRemoteHost("127.0.0.1"); dcmqr.setRemotePort(104); } else { String[] hostPort = split(calledAETAddress[1], ':'); dcmqr.setRemoteHost(hostPort[0]); dcmqr.setRemotePort(toPort(hostPort[1])); } if (cl.hasOption("L")) { String localAE = cl.getOptionValue("L"); String[] localPort = split(localAE, ':'); if (localPort[1] != null) { dcmqr.setLocalPort(toPort(localPort[1])); } String[] callingAETHost = split(localPort[0], '@'); dcmqr.setCalling(callingAETHost[0]); if (callingAETHost[1] != null) { dcmqr.setLocalHost(callingAETHost[1]); } } if (cl.hasOption("username")) { String username = cl.getOptionValue("username"); UserIdentity userId; if (cl.hasOption("passcode")) { String passcode = cl.getOptionValue("passcode"); userId = new UserIdentity.UsernamePasscode(username, passcode.toCharArray()); } else { userId = new UserIdentity.Username(username); } userId.setPositiveResponseRequested(cl.hasOption("uidnegrsp")); dcmqr.setUserIdentity(userId); } if (cl.hasOption("connectTO")) dcmqr.setConnectTimeout(parseInt(cl.getOptionValue("connectTO"), "illegal argument of option -connectTO", 1, Integer.MAX_VALUE)); if (cl.hasOption("reaper")) dcmqr.setAssociationReaperPeriod(parseInt(cl.getOptionValue("reaper"), "illegal argument of option -reaper", 1, Integer.MAX_VALUE)); if (cl.hasOption("cfindrspTO")) dcmqr.setDimseRspTimeout(parseInt(cl.getOptionValue("cfindrspTO"), "illegal argument of option -cfindrspTO", 1, Integer.MAX_VALUE)); if (cl.hasOption("cmoverspTO")) dcmqr.setRetrieveRspTimeout(parseInt(cl.getOptionValue("cmoverspTO"), "illegal argument of option -cmoverspTO", 1, Integer.MAX_VALUE)); if (cl.hasOption("cgetrspTO")) dcmqr.setRetrieveRspTimeout(parseInt(cl.getOptionValue("cgetrspTO"), "illegal argument of option -cgetrspTO", 1, Integer.MAX_VALUE)); if (cl.hasOption("acceptTO")) dcmqr.setAcceptTimeout(parseInt(cl.getOptionValue("acceptTO"), "illegal argument of option -acceptTO", 1, Integer.MAX_VALUE)); if (cl.hasOption("releaseTO")) dcmqr.setReleaseTimeout(parseInt(cl.getOptionValue("releaseTO"), "illegal argument of option -releaseTO", 1, Integer.MAX_VALUE)); if (cl.hasOption("soclosedelay")) dcmqr.setSocketCloseDelay(parseInt(cl.getOptionValue("soclosedelay"), "illegal argument of option -soclosedelay", 1, 10000)); if (cl.hasOption("rcvpdulen")) dcmqr.setMaxPDULengthReceive( parseInt(cl.getOptionValue("rcvpdulen"), "illegal argument of option -rcvpdulen", 1, 10000) * KB); if (cl.hasOption("sndpdulen")) dcmqr.setMaxPDULengthSend( parseInt(cl.getOptionValue("sndpdulen"), "illegal argument of option -sndpdulen", 1, 10000) * KB); if (cl.hasOption("sosndbuf")) dcmqr.setSendBufferSize( parseInt(cl.getOptionValue("sosndbuf"), "illegal argument of option -sosndbuf", 1, 10000) * KB); if (cl.hasOption("sorcvbuf")) dcmqr.setReceiveBufferSize( parseInt(cl.getOptionValue("sorcvbuf"), "illegal argument of option -sorcvbuf", 1, 10000) * KB); if (cl.hasOption("filebuf")) dcmqr.setFileBufferSize( parseInt(cl.getOptionValue("filebuf"), "illegal argument of option -filebuf", 1, 10000) * KB); dcmqr.setPackPDV(!cl.hasOption("pdv1")); dcmqr.setTcpNoDelay(!cl.hasOption("tcpdelay")); 
dcmqr.setMaxOpsInvoked(cl.hasOption("async") ? parseInt(cl.getOptionValue("async"), "illegal argument of option -async", 0, 0xffff) : 1); dcmqr.setMaxOpsPerformed(cl.hasOption("cstoreasync") ? parseInt(cl.getOptionValue("cstoreasync"), "illegal argument of option -cstoreasync", 0, 0xffff) : 0); if (cl.hasOption("C")) dcmqr.setCancelAfter( parseInt(cl.getOptionValue("C"), "illegal argument of option -C", 1, Integer.MAX_VALUE)); if (cl.hasOption("lowprior")) dcmqr.setPriority(CommandUtils.LOW); if (cl.hasOption("highprior")) dcmqr.setPriority(CommandUtils.HIGH); if (cl.hasOption("cstore")) { String[] storeTCs = cl.getOptionValues("cstore"); for (String storeTC : storeTCs) { String cuid; String[] tsuids; int colon = storeTC.indexOf(':'); if (colon == -1) { cuid = storeTC; tsuids = DEF_TS; } else { cuid = storeTC.substring(0, colon); String ts = storeTC.substring(colon + 1); try { tsuids = TS.valueOf(ts).uids; } catch (IllegalArgumentException e) { tsuids = ts.split(","); } } try { cuid = CUID.valueOf(cuid).uid; } catch (IllegalArgumentException e) { // assume cuid already contains UID } dcmqr.addStoreTransferCapability(cuid, tsuids); } if (cl.hasOption("cstoredest")) dcmqr.setStoreDestination(cl.getOptionValue("cstoredest")); } dcmqr.setCGet(cl.hasOption("cget")); if (cl.hasOption("cmove")) dcmqr.setMoveDest(cl.getOptionValue("cmove")); if (cl.hasOption("evalRetrieveAET")) dcmqr.setEvalRetrieveAET(true); if (cl.hasOption("P")) dcmqr.setQueryLevel(QueryRetrieveLevel.PATIENT); else if (cl.hasOption("S")) dcmqr.setQueryLevel(QueryRetrieveLevel.SERIES); else if (cl.hasOption("I")) dcmqr.setQueryLevel(QueryRetrieveLevel.IMAGE); else dcmqr.setQueryLevel(QueryRetrieveLevel.STUDY); if (cl.hasOption("noextneg")) dcmqr.setNoExtNegotiation(true); if (cl.hasOption("rel")) dcmqr.setRelationQR(true); if (cl.hasOption("datetime")) dcmqr.setDateTimeMatching(true); if (cl.hasOption("fuzzy")) dcmqr.setFuzzySemanticPersonNameMatching(true); if (!cl.hasOption("P")) { if (cl.hasOption("retall")) dcmqr.addPrivate(UID.PrivateStudyRootQueryRetrieveInformationModelFIND); if (cl.hasOption("blocked")) dcmqr.addPrivate(UID.PrivateBlockedStudyRootQueryRetrieveInformationModelFIND); if (cl.hasOption("vmf")) dcmqr.addPrivate(UID.PrivateVirtualMultiframeStudyRootQueryRetrieveInformationModelFIND); } if (cl.hasOption("q")) { String[] matchingKeys = cl.getOptionValues("q"); for (int i = 1; i < matchingKeys.length; i++, i++) dcmqr.addMatchingKey(Tag.toTagPath(matchingKeys[i - 1]), matchingKeys[i]); } if (cl.hasOption("r")) { String[] returnKeys = cl.getOptionValues("r"); for (int i = 0; i < returnKeys.length; i++) dcmqr.addReturnKey(Tag.toTagPath(returnKeys[i])); } dcmqr.configureTransferCapability(cl.hasOption("ivrle")); int repeat = cl.hasOption("repeat") ? parseInt(cl.getOptionValue("repeat"), "illegal argument of option -repeat", 1, Integer.MAX_VALUE) : 0; int interval = cl.hasOption("repeatdelay") ? 
parseInt(cl.getOptionValue("repeatdelay"), "illegal argument of option -repeatdelay", 1, Integer.MAX_VALUE) : 0; boolean closeAssoc = cl.hasOption("closeassoc"); if (cl.hasOption("tls")) { String cipher = cl.getOptionValue("tls"); if ("NULL".equalsIgnoreCase(cipher)) { dcmqr.setTlsWithoutEncyrption(); } else if ("3DES".equalsIgnoreCase(cipher)) { dcmqr.setTls3DES_EDE_CBC(); } else if ("AES".equalsIgnoreCase(cipher)) { dcmqr.setTlsAES_128_CBC(); } else { exit("Invalid parameter for option -tls: " + cipher); } if (cl.hasOption("nossl2")) { dcmqr.disableSSLv2Hello(); } dcmqr.setTlsNeedClientAuth(!cl.hasOption("noclientauth")); if (cl.hasOption("keystore")) { dcmqr.setKeyStoreURL(cl.getOptionValue("keystore")); } if (cl.hasOption("keystorepw")) { dcmqr.setKeyStorePassword(cl.getOptionValue("keystorepw")); } if (cl.hasOption("keypw")) { dcmqr.setKeyPassword(cl.getOptionValue("keypw")); } if (cl.hasOption("truststore")) { dcmqr.setTrustStoreURL(cl.getOptionValue("truststore")); } if (cl.hasOption("truststorepw")) { dcmqr.setTrustStorePassword(cl.getOptionValue("truststorepw")); } long t1 = System.currentTimeMillis(); try { dcmqr.initTLS(); } catch (Exception e) { System.err.println("ERROR: Failed to initialize TLS context:" + e.getMessage()); System.exit(2); } long t2 = System.currentTimeMillis(); LOG.info("Initialize TLS context in {} s", Float.valueOf((t2 - t1) / 1000f)); } try { dcmqr.start(); } catch (Exception e) { System.err.println( "ERROR: Failed to start server for receiving " + "requested objects:" + e.getMessage()); System.exit(2); } try { long t1 = System.currentTimeMillis(); try { dcmqr.open(); } catch (Exception e) { LOG.error("Failed to establish association:", e); System.exit(2); } long t2 = System.currentTimeMillis(); LOG.info("Connected to {} in {} s", remoteAE, Float.valueOf((t2 - t1) / 1000f)); for (;;) { List<DicomObject> result = dcmqr.query(); long t3 = System.currentTimeMillis(); LOG.info("Received {} matching entries in {} s", Integer.valueOf(result.size()), Float.valueOf((t3 - t2) / 1000f)); if (dcmqr.isCMove() || dcmqr.isCGet()) { if (dcmqr.isCMove()) dcmqr.move(result); else dcmqr.get(result); long t4 = System.currentTimeMillis(); LOG.info("Retrieved {} objects (warning: {}, failed: {}) in {}s", new Object[] { Integer.valueOf(dcmqr.getTotalRetrieved()), Integer.valueOf(dcmqr.getWarning()), Integer.valueOf(dcmqr.getFailed()), Float.valueOf((t4 - t3) / 1000f) }); } if (repeat == 0 || closeAssoc) { try { dcmqr.close(); } catch (InterruptedException e) { LOG.error(e.getMessage(), e); } LOG.info("Released connection to {}", remoteAE); } if (repeat-- == 0) break; Thread.sleep(interval); long t4 = System.currentTimeMillis(); dcmqr.open(); t2 = System.currentTimeMillis(); LOG.info("Reconnect or reuse connection to {} in {} s", remoteAE, Float.valueOf((t2 - t4) / 1000f)); } } catch (IOException e) { LOG.error(e.getMessage(), e); } catch (InterruptedException e) { LOG.error(e.getMessage(), e); } catch (ConfigurationException e) { LOG.error(e.getMessage(), e); } finally { dcmqr.stop(); } }
From source file:edu.msu.cme.rdp.abundstats.cli.AbundMain.java
public static void main(String[] args) throws IOException {
    File inputFile;
    File resultDir = new File(".");
    RPlotter plotter = null;
    boolean isClusterFile = true;
    List<AbundStatsCalculator> statCalcs = new ArrayList();
    double clustCutoffFrom = Double.MIN_VALUE, clustCutoffTo = Double.MAX_VALUE;

    String usage = "Main [options] <cluster file>";
    try {
        CommandLine line = new PosixParser().parse(options, args);

        if (line.hasOption("result-dir")) {
            resultDir = new File(line.getOptionValue("result-dir"));
            if (!resultDir.exists() && !resultDir.mkdirs()) {
                throw new Exception(
                        "Result directory " + resultDir + " does not exist and could not be created");
            }
        }

        if (line.hasOption("R-location")) {
            plotter = new RPlotter();
            plotter.setCommandTemplate(rplotterTemplate);
            plotter.setRPath(line.getOptionValue("R-location"));
            plotter.setOutFileExt(".png");

            if (!new File(plotter.getRPath()).canExecute()) {
                throw new Exception(plotter.getRPath() + " does not exist or is not exectuable");
            }
        }

        if (line.hasOption("lower-cutoff")) {
            clustCutoffFrom = Double.valueOf(line.getOptionValue("lower-cutoff"));
        }
        if (line.hasOption("upper-cutoff")) {
            clustCutoffTo = Double.valueOf(line.getOptionValue("upper-cutoff"));
        }

        if (line.hasOption("jaccard")) {
            statCalcs.add(new Jaccard(true));
        }
        if (line.hasOption("sorensen")) {
            statCalcs.add(new Sorensen(true));
        }

        if (line.hasOption("otu-table")) {
            isClusterFile = false;
        }

        if (statCalcs.isEmpty()) {
            throw new Exception("Must specify at least one stat to compute (jaccard, sorensen)");
        }

        args = line.getArgs();
        if (args.length != 1) {
            throw new Exception("Unexpected number of command line arguments");
        }

        inputFile = new File(args[0]);
    } catch (Exception e) {
        new HelpFormatter().printHelp(usage, options);
        System.err.println("Error: " + e.getMessage());
        return;
    }

    if (isClusterFile) {
        RDPClustParser parser;
        parser = new RDPClustParser(inputFile);

        try {
            if (parser.getClusterSamples().size() == 1) {
                throw new IOException("Cluster file must have more than one sample");
            }

            List<Cutoff> cutoffs = parser.getCutoffs(clustCutoffFrom, clustCutoffTo);
            if (cutoffs.isEmpty()) {
                throw new IOException(
                        "No cutoffs in cluster file in range [" + clustCutoffFrom + "-" + clustCutoffTo + "]");
            }

            for (Cutoff cutoff : cutoffs) {
                List<Sample> samples = new ArrayList();

                for (ClusterSample clustSample : parser.getClusterSamples()) {
                    Sample s = new Sample(clustSample.getName());
                    for (Cluster clust : cutoff.getClusters().get(clustSample.getName())) {
                        s.addSpecies(clust.getNumberOfSeqs());
                    }
                    samples.add(s);
                }

                processSamples(samples, statCalcs, resultDir, cutoff.getCutoff() + "_", plotter);
            }
        } finally {
            parser.close();
        }
    } else {
        List<Sample> samples = new ArrayList();
        BufferedReader reader = new BufferedReader(new FileReader(inputFile));

        String line = reader.readLine();
        if (line == null || line.split("\\s+").length < 2) {
            throw new IOException("Must be 2 or more samples for abundance statistic calculations!");
        }
        int numSamples = line.split("\\s+").length;

        boolean header = true;
        try {
            Integer.valueOf(line.split("\\s+")[0]);
            header = false;
        } catch (Exception e) {
        }

        if (header) {
            for (String s : line.split("\\s+")) {
                samples.add(new Sample(s));
            }
        } else {
            int sample = 0;
            for (String s : line.split("\\s+")) {
                samples.add(new Sample("" + sample));
                samples.get(sample).addSpecies(Integer.valueOf(s));
                sample++;
            }
        }

        int lineno = 2;
        while ((line = reader.readLine()) != null) {
            if (line.trim().equals("")) {
                continue;
            }

            int sample = 0;
            if (line.split("\\s+").length != numSamples) {
                System.err.println(
                        "Line number " + lineno + " didn't have the expected number of samples (contained "
                                + line.split("\\s+").length + ", expected " + numSamples + ")");
            }

            for (String s : line.split("\\s+")) {
                samples.get(sample).addSpecies(Integer.valueOf(s));
                sample++;
            }

            lineno++;
        }

        processSamples(samples, statCalcs, resultDir, inputFile.getName(), plotter);
    }
}
From source file:csv.parser.CSVParser.java
@SuppressWarnings("resource")
public static void main(String[] args) throws Exception {
    String file_to_parse;
    String[] val_array;
    file_to_parse = "./input/E-library-data-3.csv";

    // Build reader instance and read the CSV file
    CSVReader reader = new CSVReader(new FileReader(file_to_parse), ';', '"', 1);

    // Read all rows at once
    List<String[]> allRows = reader.readAll();

    // Read CSV line by line and use the string array as you want
    for (String[] row : allRows) {
        for (int i = 0; i < row.length; i++) {
            // Removing all newlines, tabs and '&' characters (invalid XML character)
            row[i] = row[i].replaceAll("(\\r|\\n|\\r\\n)+", " ");
            row[i] = row[i].replaceAll(System.getProperty("line.separator"), "; ");
            row[i] = row[i].replaceAll("&", "and");
        }
        System.out.println(Arrays.toString(row));
    }

    // Get the input fields
    List<String[]> map = getMap();
    String[] field;

    // Numbering for folders, folderNum is incremented for each new file
    long folderNum;
    folderNum = 0;
    for (String[] row : allRows) {
        // Creating new folder
        File file1 = new File("./output//newdir//folder" + folderNum + "");
        file1.mkdirs();

        // Creating content file
        PrintWriter writer_content = new PrintWriter("./output//newdir//folder" + folderNum + "//contents",
                "UTF-16");
        // Creating metadata_lrmi.xml
        PrintWriter writer_lrmi = new PrintWriter(
                "./output//newdir//folder" + folderNum + "//metadata_lrmi.xml", "UTF-16");
        // Creating content.xml
        PrintWriter writer = new PrintWriter("./output//newdir//folder" + folderNum + "//content.xml",
                "UTF-16");

        writer.println("<?xml version=\"1.0\" encoding=\"utf-16\" standalone=\"no\"?>");
        writer.println("<dublin_core schema=\"dc\">");
        writer_lrmi.println("<?xml version=\"1.0\" encoding=\"utf-16\" standalone=\"no\"?>");
        writer_lrmi.println("<dublin_core shema=\"lrmi\">");

        for (int i = 0; i < row.length; i++) {
            // After snooping the data, we have to change these settings for each new csv file,
            // as the data fields are often mismatched.
            // These if-else statements take care of the mismatched steps.
            if (i == 43) {
                continue;
            } else if (i == 43) {
                field = map.get(42);
            } else if (i == 44) {
                field = map.get(43);
            } else if (i == 45 || i == 46) {
                continue;
            } else {
                field = map.get(i);
            }

            // Separate multiple values
            val_array = parseVal(row[i]);
            // if (val_array.length == 0) {
            //     continue;
            // }

            PrintWriter useWriter = writer;
            if (field[0].equals("lrmi")) {
                useWriter = writer_lrmi;
            }

            switch (field.length) {
            case 2:
                writeXML(useWriter, field[1], "", val_array);
                break;
            case 3:
                writeXML(useWriter, field[1], field[2], val_array);
                break;
            default:
            }
        }

        folderNum++;
        writer.println("</dublin_core>");
        writer_lrmi.println("</dublin_core>");
        writer.close();
        writer_lrmi.close();
        writer_content.close();
    }
}