List of usage examples for java.lang.Double.parseDouble, collected from open-source projects.
public static double parseDouble(String s) throws NumberFormatException
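Before the project examples below, a minimal self-contained sketch of the call itself (class name and values are invented for illustration). parseDouble is locale-independent: it accepts only the Java floating-point literal grammar, trims leading/trailing whitespace, and throws NumberFormatException for anything else, so locale-formatted input such as "3,14" must go through java.text.NumberFormat instead.

public class ParseDoubleBasics {
    public static void main(String[] args) {
        // Plain literal and scientific notation parse directly.
        double d = Double.parseDouble("132.0772");
        System.out.println(d); // 132.0772
        // Leading/trailing whitespace is trimmed before parsing.
        System.out.println(Double.parseDouble("  1e-5 ")); // 1.0E-5
        try {
            Double.parseDouble("3,14"); // comma is not a decimal separator here
        } catch (NumberFormatException e) {
            System.err.println("rejected: " + e.getMessage());
        }
    }
}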
From source file:imitationNLG.SFX.java
public static void main(String[] args) {
    boolean useDAggerArg = false;
    boolean useLolsWord = true;
    JDAggerForSFX.earlyStopMaxFurtherSteps = Integer.parseInt(args[0]);
    JDAggerForSFX.p = Double.parseDouble(args[1]);
    SFX sfx = new SFX();
    sfx.runTestWithJAROW(useDAggerArg, useLolsWord);
}
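The example above feeds args[0] and args[1] straight into the parsers, so a missing or malformed argument surfaces as ArrayIndexOutOfBoundsException or NumberFormatException. A hedged sketch of the usual guard (class name and usage text invented here):

public class ArgsGuard {
    public static void main(String[] args) {
        if (args.length < 2) {
            System.err.println("usage: ArgsGuard <maxFurtherSteps:int> <p:double>");
            System.exit(1);
        }
        try {
            int maxFurtherSteps = Integer.parseInt(args[0]);
            double p = Double.parseDouble(args[1]);
            System.out.printf("maxFurtherSteps=%d p=%.4f%n", maxFurtherSteps, p);
        } catch (NumberFormatException e) {
            System.err.println("not a number: " + e.getMessage());
            System.exit(1);
        }
    }
}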
From source file:benchmarkio.controlcenter.LaunchRocket.java
public static void main(final String[] args) throws Exception {
    // create the parser
    final CommandLineParser parser = new BasicParser();
    // parse the command line arguments
    final CommandLine cmd = parser.parse(options, args);
    if (cmd.hasOption("u")) {
        displayHelp();
    }
    final String host = cmd.getOptionValue("host");
    final int port = Integer.parseInt(cmd.getOptionValue("port"));
    final BrokerType brokerType = BrokerType.valueOf(cmd.getOptionValue("broker-type"));
    final int numConsumers = Integer.parseInt(cmd.getOptionValue("num-consumers"));
    final int numProducers = Integer.parseInt(cmd.getOptionValue("num-producers"));
    final int totalNumberOfMessages = Integer.parseInt(cmd.getOptionValue("total-number-of-messages"));
    final double msgSizeInKB = Double.parseDouble(cmd.getOptionValue("msg-size-in-kb"));
    // Optional options
    final Optional<String> optionalBenchmarkType = Optional.fromNullable(cmd.getOptionValue("benchmark-type"));
    final Optional<String> optionalDurable = Optional.fromNullable(cmd.getOptionValue("durable"));
    // Kafka Specific
    final Optional<String> optionalZookeeper = Optional.fromNullable(cmd.getOptionValue("zookeeper"));
    Optional<String> optionalKafkaProducerType = Optional.fromNullable(cmd.getOptionValue("kafka-producer-type"));
    BenchmarkType benchmarkType;
    if (optionalBenchmarkType.isPresent()) {
        benchmarkType = BenchmarkType.valueOf(optionalBenchmarkType.get());
    } else {
        log.info("Benchmark type was not specified, defaulting to: {}", BenchmarkType.PRODUCER_AND_CONSUMER);
        benchmarkType = BenchmarkType.PRODUCER_AND_CONSUMER;
    }
    boolean durable = false;
    if (optionalDurable.isPresent()) {
        durable = Boolean.valueOf(optionalDurable.get());
    } else {
        log.info("Durable parameter was not specified, defaulting to: FALSE");
    }
    if (brokerType == BrokerType.KAFKA) {
        if (!optionalZookeeper.isPresent()) {
            log.error("zookeeper is missing, it is a required property for KAFKA broker");
            System.exit(0);
        }
        if (!optionalKafkaProducerType.isPresent()) {
            log.info("kafka-producer-type is not specified, defaulting to sync");
            optionalKafkaProducerType = Optional.of("sync");
        } else if (!optionalKafkaProducerType.get().equals("sync")
                && !optionalKafkaProducerType.get().equals("async")) {
            log.warn("kafka-producer-type is not one of the accepted sync | async values, defaulting to sync");
            optionalKafkaProducerType = Optional.of("sync");
        }
    }
    log.info("destination (topic or queue): {}", Consts.DESTINATION_NAME);
    log.info("host: {}", host);
    log.info("port: {}", port);
    log.info("broker-type: {}", brokerType);
    log.info("benchmark-type: {}", benchmarkType);
    log.info("durable: {}", durable);
    log.info("num-consumers: {}", numConsumers);
    log.info("num-producers: {}", numProducers);
    log.info("total-number-of-messages: {}", totalNumberOfMessages);
    log.info("msg-size-in-kb: {}", msgSizeInKB);
    if (brokerType == BrokerType.KAFKA) {
        log.info("zookeeper: {}", optionalZookeeper.get());
        log.info("kafka-producer-type: {}", optionalKafkaProducerType.get());
    }
    LaunchRocket.start(brokerType, benchmarkType, durable, host, port, numConsumers, numProducers,
            totalNumberOfMessages, msgSizeInKB, optionalZookeeper, optionalKafkaProducerType);
    System.exit(0);
}
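A common variant of the pattern above is a numeric option that may be absent. Commons CLI's getOptionValue(opt, default) keeps the defaulting in one line before the parseDouble call; this sketch uses an invented option set, not code from LaunchRocket:

import org.apache.commons.cli.*;

public class OptionalDoubleOption {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("m", "msg-size-in-kb", true, "message size in KB (default 1.0)");
        CommandLine cmd = new BasicParser().parse(options, args);
        // The default string is handed to parseDouble when the option is absent;
        // a malformed user-supplied value still throws NumberFormatException.
        double msgSizeInKB = Double.parseDouble(cmd.getOptionValue("msg-size-in-kb", "1.0"));
        System.out.println("msg-size-in-kb = " + msgSizeInKB);
    }
}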
From source file:baldrickv.s3streamingtool.S3StreamingTool.java
public static void main(String args[]) throws Exception {
    BasicParser p = new BasicParser();
    Options o = getOptions();
    CommandLine cl = p.parse(o, args);
    if (cl.hasOption('h')) {
        HelpFormatter hf = new HelpFormatter();
        hf.setWidth(80);
        StringBuilder sb = new StringBuilder();
        sb.append("\n");
        sb.append("Upload:\n");
        sb.append(" -u -r creds -s 50M -b my_bucket -f hda1.dump -t 10\n");
        sb.append("Download:\n");
        sb.append(" -d -r creds -s 50M -b my_bucket -f hda1.dump -t 10\n");
        sb.append("Upload encrypted:\n");
        sb.append(" -u -r creds -z -k secret_key -s 50M -b my_bucket -f hda1.dump -t 10\n");
        sb.append("Download encrypted:\n");
        sb.append(" -d -r creds -z -k secret_key -s 50M -b my_bucket -f hda1.dump -t 10\n");
        sb.append("Cleanup in-progress multipart uploads\n");
        sb.append(" -c -r creds -b my_bucket\n");
        System.out.println(sb.toString());
        hf.printHelp("See above", o);
        return;
    }
    int n = 0;
    if (cl.hasOption('d')) n++;
    if (cl.hasOption('u')) n++;
    if (cl.hasOption('c')) n++;
    if (cl.hasOption('m')) n++;
    if (n != 1) {
        System.err.println("Must specify exactly one of -d, -u, -c or -m");
        System.exit(-1);
    }
    if (cl.hasOption('m')) {
        //InputStream in = new java.io.BufferedInputStream(System.in,1024*1024*2);
        InputStream in = System.in;
        System.out.println(TreeHashGenerator.calculateTreeHash(in));
        return;
    }
    require(cl, 'b');
    if (cl.hasOption('d') || cl.hasOption('u')) {
        require(cl, 'f');
    }
    if (cl.hasOption('z')) {
        require(cl, 'k');
    }
    AWSCredentials creds = null;
    if (cl.hasOption('r')) {
        creds = Utils.loadAWSCredentails(cl.getOptionValue('r'));
    } else {
        if (cl.hasOption('i') && cl.hasOption('e')) {
            creds = new BasicAWSCredentials(cl.getOptionValue('i'), cl.getOptionValue('e'));
        } else {
            System.out.println("Must specify either credential file (-r) or AWS key ID and secret (-i and -e)");
            System.exit(-1);
        }
    }
    S3StreamConfig config = new S3StreamConfig();
    config.setEncryption(false);
    if (cl.hasOption('z')) {
        config.setEncryption(true);
        config.setSecretKey(Utils.loadSecretKey(cl.getOptionValue("k")));
    }
    if (cl.hasOption("encryption-mode")) {
        config.setEncryptionMode(cl.getOptionValue("encryption-mode"));
    }
    config.setS3Bucket(cl.getOptionValue("bucket"));
    if (cl.hasOption("file")) {
        config.setS3File(cl.getOptionValue("file"));
    }
    if (cl.hasOption("threads")) {
        config.setIOThreads(Integer.parseInt(cl.getOptionValue("threads")));
    }
    if (cl.hasOption("blocksize")) {
        String s = cl.getOptionValue("blocksize");
        s = s.toUpperCase();
        int multi = 1;
        int end = 0;
        while ((end < s.length()) && (s.charAt(end) >= '0') && (s.charAt(end) <= '9')) {
            end++;
        }
        int size = Integer.parseInt(s.substring(0, end));
        if (end < s.length()) {
            String m = s.substring(end);
            if (m.equals("K")) multi = 1024;
            else if (m.equals("M")) multi = 1048576;
            else if (m.equals("G")) multi = 1024 * 1024 * 1024;
            else if (m.equals("KB")) multi = 1024;
            else if (m.equals("MB")) multi = 1048576;
            else if (m.equals("GB")) multi = 1024 * 1024 * 1024;
            else {
                System.out.println("Unknown suffix on block size. Only K,M and G understood.");
                System.exit(-1);
            }
        }
        size *= multi;
        config.setBlockSize(size);
    }
    Logger.getLogger("").setLevel(Level.FINE);
    S3StreamingDownload.log.setLevel(Level.FINE);
    S3StreamingUpload.log.setLevel(Level.FINE);
    config.setS3Client(new AmazonS3Client(creds));
    config.setGlacierClient(new AmazonGlacierClient(creds));
    config.getGlacierClient().setEndpoint("glacier.us-west-2.amazonaws.com");
    if (cl.hasOption("glacier")) {
        config.setGlacier(true);
        config.setStorageInterface(new StorageGlacier(config.getGlacierClient()));
    } else {
        config.setStorageInterface(new StorageS3(config.getS3Client()));
    }
    if (cl.hasOption("bwlimit")) {
        config.setMaxBytesPerSecond(Double.parseDouble(cl.getOptionValue("bwlimit")));
    }
    if (cl.hasOption('c')) {
        if (config.getGlacier()) {
            GlacierCleanupMultipart.cleanup(config);
        } else {
            S3CleanupMultipart.cleanup(config);
        }
        return;
    }
    if (cl.hasOption('d')) {
        config.setOutputStream(System.out);
        S3StreamingDownload.download(config);
        return;
    }
    if (cl.hasOption('u')) {
        config.setInputStream(System.in);
        S3StreamingUpload.upload(config);
        return;
    }
}
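The blocksize branch above accepts only integer sizes, while the bwlimit branch uses parseDouble. A standalone sketch combining the two ideas so fractional suffixed values such as "1.5M" also work; the helper is invented here and is not part of the tool above:

public class SizeParser {
    /** Parses "512", "64K", "1.5M" or "2G" into a byte count. */
    static long parseSize(String s) {
        s = s.trim().toUpperCase();
        int end = 0;
        while (end < s.length() && (Character.isDigit(s.charAt(end)) || s.charAt(end) == '.')) {
            end++;
        }
        double value = Double.parseDouble(s.substring(0, end)); // may throw NumberFormatException
        long multi;
        switch (s.substring(end)) {
        case "":
            multi = 1L;
            break;
        case "K":
        case "KB":
            multi = 1024L;
            break;
        case "M":
        case "MB":
            multi = 1024L * 1024;
            break;
        case "G":
        case "GB":
            multi = 1024L * 1024 * 1024;
            break;
        default:
            throw new IllegalArgumentException("Unknown suffix on block size: " + s.substring(end));
        }
        return (long) (value * multi);
    }

    public static void main(String[] args) {
        System.out.println(parseSize("1.5M")); // 1572864
    }
}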
From source file:com.basistech.ninja.Train.java
/**
 * Command line interface to train a model.
 *
 * <pre>
 * usage: Train [options]
 *  --batch-size <arg>      batch size (default = 10)
 *  --epochs <arg>          epochs (default = 5)
 *  --examples <arg>        input examples file (required)
 *  --layer-sizes <arg>     layer sizes, including input/output, e.g. 3 4 2 (required)
 *  --learning-rate <arg>   learning-rate (default = 0.7)
 *  --model <arg>           output model file (required)
 * </pre>
 *
 * @param args command line arguments
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    String defaultBatchSize = "10";
    String defaultEpochs = "5";
    String defaultLearningRate = "0.7";
    Options options = new Options();
    Option option;
    option = new Option(null, "examples", true, "input examples file (required)");
    option.setRequired(true);
    options.addOption(option);
    option = new Option(null, "model", true, "output model file (required)");
    option.setRequired(true);
    options.addOption(option);
    option = new Option(null, "layer-sizes", true, "layer sizes, including input/output, e.g. 3 4 2 (required)");
    option.setRequired(true);
    option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(option);
    option = new Option(null, "batch-size", true, String.format("batch size (default = %s)", defaultBatchSize));
    options.addOption(option);
    option = new Option(null, "epochs", true, String.format("epochs (default = %s)", defaultEpochs));
    options.addOption(option);
    option = new Option(null, "learning-rate", true, String.format("learning-rate (default = %s)", defaultLearningRate));
    options.addOption(option);
    CommandLineParser parser = new GnuParser();
    CommandLine cmdline = null;
    try {
        cmdline = parser.parse(options, args);
    } catch (org.apache.commons.cli.ParseException e) {
        System.err.println(e.getMessage());
        usage(options);
        System.exit(1);
    }
    String[] remaining = cmdline.getArgs();
    if (remaining == null) {
        usage(options);
        System.exit(1);
    }
    List<Integer> layerSizes = Lists.newArrayList();
    for (String s : cmdline.getOptionValues("layer-sizes")) {
        layerSizes.add(Integer.parseInt(s));
    }
    File examplesFile = new File(cmdline.getOptionValue("examples"));
    Train that = new Train(layerSizes, examplesFile);
    int batchSize = Integer.parseInt(cmdline.getOptionValue("batch-size", defaultBatchSize));
    int epochs = Integer.parseInt(cmdline.getOptionValue("epochs", defaultEpochs));
    double learningRate = Double.parseDouble(cmdline.getOptionValue("learning-rate", defaultLearningRate));
    File modelFile = new File(cmdline.getOptionValue("model"));
    that.train(batchSize, epochs, learningRate, modelFile);
}
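getOptionValue(name, default) above guarantees a parseable string only when the default is used; a user-supplied value still deserves a range check, since parseDouble happily returns NaN and Infinity for the strings "NaN" and "Infinity". A sketch with illustrative bounds, not taken from Train:

public class LearningRateCheck {
    static double parseLearningRate(String raw) {
        double lr = Double.parseDouble(raw); // NumberFormatException on malformed input
        // NaN fails every comparison, so it must be tested explicitly.
        if (Double.isNaN(lr) || lr <= 0.0 || lr > 1.0) {
            throw new IllegalArgumentException("learning-rate must be in (0, 1], got: " + raw);
        }
        return lr;
    }

    public static void main(String[] args) {
        System.out.println(parseLearningRate("0.7")); // 0.7
        System.out.println(parseLearningRate("NaN")); // throws IllegalArgumentException
    }
}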
From source file:de.codesourcery.geoip.Main.java
public static void main(String[] args) throws Exception {
    final IGeoLocator<StringSubject> locator = createGeoLocator();
    final JFrame frame = new JFrame("GeoIP");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.addWindowListener(new WindowAdapter() {
        public void windowClosing(java.awt.event.WindowEvent e) {
            try {
                locator.dispose();
            } catch (Exception e1) {
                e1.printStackTrace();
            }
        };
    });
    final MapImage image = MapImage.getRobinsonWorldMap(); // MapImage.getMillerWorldMap();
    final MapCanvas canvas = new MapCanvas(image);
    for (GeoLocation<StringSubject> loc : locator.locate(getSpammers())) {
        if (loc.hasValidCoordinates()) {
            canvas.addCoordinate(PointRenderer.createPoint(loc, Color.YELLOW));
        }
    }
    // canvas.addCoordinate( PointRenderer.createPoint( ZERO , Color.YELLOW ) );
    // canvas.addCoordinate( PointRenderer.createPoint( WELLINGTON , Color.RED ) );
    // canvas.addCoordinate( PointRenderer.createPoint( MELBOURNE , Color.RED ) );
    // canvas.addCoordinate( PointRenderer.createPoint( HAMBURG , Color.RED ) );
    final double heightToWidth = image.height() / (double) image.width(); // preserve aspect ratio of map
    canvas.setPreferredSize(new Dimension(640, (int) Math.round(640 * heightToWidth)));
    JPanel panel = new JPanel();
    panel.setLayout(new FlowLayout());
    panel.add(new JLabel("Scale-X"));
    final JTextField scaleX = new JTextField(Double.toString(image.getScaleX()));
    scaleX.setColumns(5);
    final JTextField scaleY = new JTextField(Double.toString(image.getScaleY()));
    scaleY.setColumns(5);
    final ActionListener listener = new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            double x = Double.parseDouble(scaleX.getText());
            double y = Double.parseDouble(scaleY.getText());
            image.setScale(x, y);
            canvas.repaint();
        }
    };
    scaleX.addActionListener(listener);
    scaleY.addActionListener(listener);
    panel.add(new JLabel("Scale-X"));
    panel.add(scaleX);
    panel.add(new JLabel("Scale-Y"));
    panel.add(scaleY);
    final JTextField ipAddress = new JTextField("www.kickstarter.com");
    ipAddress.setColumns(20);
    final ActionListener ipListener = new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            final String destinationIP = ipAddress.getText();
            if (StringUtils.isBlank(destinationIP)) {
                return;
            }
            /*
             * Perform traceroute.
             */
            final List<String> hops;
            try {
                if (TracePath.isPathTracingAvailable()) {
                    hops = TracePath.trace(destinationIP);
                } else {
                    System.err.println("tracepath not available.");
                    if (TracePath.isValidAddress(destinationIP)) {
                        hops = new ArrayList<>();
                        hops.add(destinationIP);
                    } else {
                        System.err.println(destinationIP + " is no valid IP");
                        return;
                    }
                }
            } catch (Exception ex) {
                System.err.println("Failed to trace " + destinationIP);
                ex.printStackTrace();
                return;
            }
            System.out.println("Trace contains " + hops.size() + " IPs");
            /*
             * Gather locations.
             */
            final List<StringSubject> subjects = new ArrayList<>();
            for (String ip : hops) {
                subjects.add(new StringSubject(ip));
            }
            final List<GeoLocation<StringSubject>> locations;
            try {
                long time = -System.currentTimeMillis();
                locations = locator.locate(subjects);
                time += System.currentTimeMillis();
                System.out.println("Locating hops for " + destinationIP + " returned " + locations.size()
                        + " valid locations ( time: " + time + " ms)");
                System.out.flush();
            } catch (Exception e2) {
                e2.printStackTrace();
                return;
            }
            /*
             * Weed-out invalid/unknown locations.
             */
            {
                GeoLocation<StringSubject> previous = null;
                for (Iterator<GeoLocation<StringSubject>> it = locations.iterator(); it.hasNext();) {
                    final GeoLocation<StringSubject> location = it.next();
                    if (!location.hasValidCoordinates()
                            || (previous != null && previous.coordinate().equals(location.coordinate()))) {
                        it.remove();
                        System.err.println("Ignoring invalid/duplicate location for " + location);
                    } else {
                        previous = location;
                    }
                }
            }
            /*
             * Populate chart.
             */
            System.out.println("Adding " + locations.size() + " hops to chart");
            System.out.flush();
            canvas.removeAllCoordinates();
            if (locations.size() == 1) {
                canvas.addCoordinate(PointRenderer.createPoint(locations.get(0), getLabel(locations.get(0)), Color.BLACK));
            } else if (locations.size() > 1) {
                GeoLocation<StringSubject> previous = locations.get(0);
                MapPoint previousPoint = PointRenderer.createPoint(previous, getLabel(previous), Color.BLACK);
                final int len = locations.size();
                for (int i = 1; i < len; i++) {
                    final GeoLocation<StringSubject> current = locations.get(i);
                    // final MapPoint currentPoint = PointRenderer.createPoint( current , getLabel( current ) , Color.BLACK );
                    final MapPoint currentPoint = PointRenderer.createPoint(current, Color.BLACK);
                    // canvas.addCoordinate( LineRenderer.createLine( previousPoint , currentPoint , Color.RED ) );
                    canvas.addCoordinate(CurvedLineRenderer.createLine(previousPoint, currentPoint, Color.RED));
                    previous = locations.get(i);
                    previousPoint = currentPoint;
                }
            }
            System.out.println("Finished adding");
            System.out.flush();
            canvas.repaint();
        }
    };
    ipAddress.addActionListener(ipListener);
    panel.add(new JLabel("IP"));
    panel.add(ipAddress);
    frame.getContentPane().setLayout(new BorderLayout());
    frame.getContentPane().add(panel, BorderLayout.NORTH);
    frame.getContentPane().add(canvas, BorderLayout.CENTER);
    frame.pack();
    frame.setVisible(true);
}
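In the Swing example above, Double.parseDouble runs inside an ActionListener, so junk in a text field throws NumberFormatException on the event dispatch thread. A hedged sketch of the usual guard (class and helper invented here):

import javax.swing.JTextField;

public class FieldParse {
    /** Returns the field's value, resetting the field to a fallback when it is not a valid double. */
    static double parseField(JTextField field, double fallback) {
        try {
            return Double.parseDouble(field.getText().trim());
        } catch (NumberFormatException e) {
            field.setText(Double.toString(fallback)); // restore a known-good value
            return fallback;
        }
    }

    public static void main(String[] args) {
        JTextField f = new JTextField("1,5"); // comma: parseDouble rejects this
        System.out.println(parseField(f, 1.0)); // prints 1.0, field now shows "1.0"
    }
}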
From source file:eqtlmappingpipeline.util.ModuleEqtWestraReplication.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, LdCalculatorException {
    System.out.println(HEADER);
    System.out.println();
    System.out.flush(); // flush to make sure header is before errors
    try {
        Thread.sleep(25); // Allows flush to complete
    } catch (InterruptedException ex) {
    }
    CommandLineParser parser = new PosixParser();
    final CommandLine commandLine;
    try {
        commandLine = parser.parse(OPTIONS, args, true);
    } catch (ParseException ex) {
        System.err.println("Invalid command line arguments: " + ex.getMessage());
        System.err.println();
        new HelpFormatter().printHelp(" ", OPTIONS);
        System.exit(1);
        return;
    }
    final String[] genotypesBasePaths = commandLine.getOptionValues("g");
    final RandomAccessGenotypeDataReaderFormats genotypeDataType;
    final String replicationQtlFilePath = commandLine.getOptionValue("e");
    final String interactionQtlFilePath = commandLine.getOptionValue("i");
    final String outputFilePath = commandLine.getOptionValue("o");
    final double ldCutoff = Double.parseDouble(commandLine.getOptionValue("ld"));
    final int window = Integer.parseInt(commandLine.getOptionValue("w"));
    System.out.println("Genotype: " + Arrays.toString(genotypesBasePaths));
    System.out.println("Interaction file: " + interactionQtlFilePath);
    System.out.println("Replication file: " + replicationQtlFilePath);
    System.out.println("Output: " + outputFilePath);
    System.out.println("LD: " + ldCutoff);
    System.out.println("Window: " + window);
    try {
        if (commandLine.hasOption("G")) {
            genotypeDataType = RandomAccessGenotypeDataReaderFormats.valueOf(commandLine.getOptionValue("G").toUpperCase());
        } else {
            if (genotypesBasePaths[0].endsWith(".vcf")) {
                System.err.println("Only vcf.gz is supported. Please see manual on how to do create a vcf.gz file.");
                System.exit(1);
                return;
            }
            try {
                genotypeDataType = RandomAccessGenotypeDataReaderFormats.matchFormatToPath(genotypesBasePaths[0]);
            } catch (GenotypeDataException e) {
                System.err.println("Unable to determine input 1 type based on specified path. Please specify -G");
                System.exit(1);
                return;
            }
        }
    } catch (IllegalArgumentException e) {
        System.err.println("Error parsing --genotypesFormat \"" + commandLine.getOptionValue("G")
                + "\" is not a valid input data format");
        System.exit(1);
        return;
    }
    final RandomAccessGenotypeData genotypeData;
    try {
        genotypeData = genotypeDataType.createFilteredGenotypeData(genotypesBasePaths, 100, null, null, null, 0.8);
    } catch (TabixFileNotFoundException e) {
        LOGGER.fatal("Tabix file not found for input data at: " + e.getPath() + "\n"
                + "Please see README on how to create a tabix file");
        System.exit(1);
        return;
    } catch (IOException e) {
        LOGGER.fatal("Error reading input data: " + e.getMessage(), e);
        System.exit(1);
        return;
    } catch (IncompatibleMultiPartGenotypeDataException e) {
        LOGGER.fatal("Error combining the impute genotype data files: " + e.getMessage(), e);
        System.exit(1);
        return;
    } catch (GenotypeDataException e) {
        LOGGER.fatal("Error reading input data: " + e.getMessage(), e);
        System.exit(1);
        return;
    }
    ChrPosTreeMap<ArrayList<ReplicationQtl>> replicationQtls = new ChrPosTreeMap<>();
    CSVReader replicationQtlReader = new CSVReader(new FileReader(replicationQtlFilePath), '\t');
    String[] replicationHeader = replicationQtlReader.readNext();
    String[] replicationLine;
    while ((replicationLine = replicationQtlReader.readNext()) != null) {
        try {
            GeneticVariant variant = genotypeData.getSnpVariantByPos(replicationLine[REPLICATION_SNP_CHR_COL],
                    Integer.parseInt(replicationLine[REPLICATION_SNP_POS_COL]));
            if (variant == null) {
                continue;
            }
            Alleles variantAlleles = variant.getVariantAlleles();
            String[] replicationAllelesString = StringUtils.split(replicationLine[REPLICATION_ALLELES_COL], '/');
            Alleles replicationAlleles = Alleles.createBasedOnString(replicationAllelesString[0],
                    replicationAllelesString[1]);
            Allele assessedAlleleReplication = Allele.create(replicationLine[REPLICATION_ALLELE_ASSESSED_COL]);
            boolean isAmbigous = replicationAlleles.isAtOrGcSnp();
            if (!variantAlleles.equals(replicationAlleles)) {
                if (variantAlleles.equals(replicationAlleles.getComplement())) {
                    assessedAlleleReplication = assessedAlleleReplication.getComplement();
                } else {
                    continue;
                }
            }
            ReplicationQtl replicationQtl = new ReplicationQtl(replicationLine[REPLICATION_SNP_CHR_COL],
                    Integer.parseInt(replicationLine[REPLICATION_SNP_POS_COL]),
                    replicationLine[REPLICATION_GENE_COL],
                    Double.parseDouble(replicationLine[REPLICATION_BETA_COL]),
                    assessedAlleleReplication.getAlleleAsString(), replicationLine, isAmbigous);
            ArrayList<ReplicationQtl> posReplicationQtls = replicationQtls.get(replicationQtl.getChr(),
                    replicationQtl.getPos());
            if (posReplicationQtls == null) {
                posReplicationQtls = new ArrayList<>();
                replicationQtls.put(replicationQtl.getChr(), replicationQtl.getPos(), posReplicationQtls);
            }
            posReplicationQtls.add(replicationQtl);
        } catch (Exception e) {
            System.out.println(Arrays.toString(replicationLine));
            throw e;
        }
    }
    int interactionSnpNotInGenotypeData = 0;
    int noReplicationQtlsInWindow = 0;
    int noReplicationQtlsInLd = 0;
    int multipleReplicationQtlsInLd = 0;
    int replicationTopSnpNotInGenotypeData = 0;
    final CSVWriter outputWriter = new CSVWriter(new FileWriter(new File(outputFilePath)), '\t', '\0');
    final String[] outputLine = new String[15 + EXTRA_COL_FROM_REPLICATION.length];
    int c = 0;
    outputLine[c++] = "Chr";
    outputLine[c++] = "Pos";
    outputLine[c++] = "SNP";
    outputLine[c++] = "Gene";
    outputLine[c++] = "Module";
    outputLine[c++] = "DiscoveryZ";
    outputLine[c++] = "ReplicationZ";
    outputLine[c++] = "DiscoveryZCorrected";
    outputLine[c++] = "ReplicationZCorrected";
    outputLine[c++] = "DiscoveryAlleleAssessed";
    outputLine[c++] = "ReplicationAlleleAssessed";
    outputLine[c++] = "bestLd";
    outputLine[c++] = "bestLd_dist";
    outputLine[c++] = "nextLd";
    outputLine[c++] = "replicationAmbigous";
    for (int i = 0; i < EXTRA_COL_FROM_REPLICATION.length; ++i) {
        outputLine[c++] = replicationHeader[EXTRA_COL_FROM_REPLICATION[i]];
    }
    outputWriter.writeNext(outputLine);
    HashSet<String> notFound = new HashSet<>();
    CSVReader interactionQtlReader = new CSVReader(new FileReader(interactionQtlFilePath), '\t');
    interactionQtlReader.readNext(); // skip header
    String[] interactionQtlLine;
    while ((interactionQtlLine = interactionQtlReader.readNext()) != null) {
        String snp = interactionQtlLine[1];
        String chr = interactionQtlLine[2];
        int pos = Integer.parseInt(interactionQtlLine[3]);
        String gene = interactionQtlLine[4];
        String alleleAssessed = interactionQtlLine[9];
        String module = interactionQtlLine[12];
        double discoveryZ = Double.parseDouble(interactionQtlLine[10]);
        GeneticVariant interactionQtlVariant = genotypeData.getSnpVariantByPos(chr, pos);
        if (interactionQtlVariant == null) {
            System.err.println("Interaction QTL SNP not found in genotype data: " + chr + ":" + pos);
            ++interactionSnpNotInGenotypeData;
            continue;
        }
        ReplicationQtl bestMatch = null;
        double bestMatchR2 = Double.NaN;
        Ld bestMatchLd = null;
        double nextBestR2 = Double.NaN;
        ArrayList<ReplicationQtl> sameSnpQtls = replicationQtls.get(chr, pos);
        if (sameSnpQtls != null) {
            for (ReplicationQtl sameSnpQtl : sameSnpQtls) {
                if (sameSnpQtl.getGene().equals(gene)) {
                    bestMatch = sameSnpQtl;
                    bestMatchR2 = 1;
                }
            }
        }
        NavigableMap<Integer, ArrayList<ReplicationQtl>> potentionalReplicationQtls =
                replicationQtls.getChrRange(chr, pos - window, true, pos + window, true);
        for (ArrayList<ReplicationQtl> potentialReplicationQtls : potentionalReplicationQtls.values()) {
            for (ReplicationQtl potentialReplicationQtl : potentialReplicationQtls) {
                if (!potentialReplicationQtl.getGene().equals(gene)) {
                    continue;
                }
                GeneticVariant potentialReplicationQtlVariant = genotypeData.getSnpVariantByPos(
                        potentialReplicationQtl.getChr(), potentialReplicationQtl.getPos());
                if (potentialReplicationQtlVariant == null) {
                    notFound.add(potentialReplicationQtl.getChr() + ":" + potentialReplicationQtl.getPos());
                    ++replicationTopSnpNotInGenotypeData;
                    continue;
                }
                Ld ld = interactionQtlVariant.calculateLd(potentialReplicationQtlVariant);
                double r2 = ld.getR2();
                if (r2 > 1) {
                    r2 = 1;
                }
                if (bestMatch == null) {
                    bestMatch = potentialReplicationQtl;
                    bestMatchR2 = r2;
                    bestMatchLd = ld;
                } else if (r2 > bestMatchR2) {
                    bestMatch = potentialReplicationQtl;
                    nextBestR2 = bestMatchR2;
                    bestMatchR2 = r2;
                    bestMatchLd = ld;
                }
            }
        }
        double replicationZ = Double.NaN;
        double replicationZCorrected = Double.NaN;
        double discoveryZCorrected = Double.NaN;
        String replicationAlleleAssessed = null;
        if (bestMatch != null) {
            replicationZ = bestMatch.getBeta();
            replicationAlleleAssessed = bestMatch.getAssessedAllele();
            if (pos != bestMatch.getPos()) {
                String commonHap = null;
                double commonHapFreq = -1;
                for (Map.Entry<String, Double> hapFreq : bestMatchLd.getHaplotypesFreq().entrySet()) {
                    double f = hapFreq.getValue();
                    if (f > commonHapFreq) {
                        commonHapFreq = f;
                        commonHap = hapFreq.getKey();
                    }
                }
                String[] commonHapAlleles = StringUtils.split(commonHap, '/');
                discoveryZCorrected = commonHapAlleles[0].equals(alleleAssessed) ? discoveryZ : discoveryZ * -1;
                replicationZCorrected = commonHapAlleles[1].equals(replicationAlleleAssessed) ? replicationZ
                        : replicationZ * -1;
            } else {
                discoveryZCorrected = discoveryZ;
                replicationZCorrected = alleleAssessed.equals(replicationAlleleAssessed) ? replicationZ
                        : replicationZ * -1;
                //replicationZCorrected = alleleAssessed.equals(replicationAlleleAssessed) || alleleAssessed.equals(String.valueOf(Utils.getComplementNucleotide(replicationAlleleAssessed.charAt(0)))) ? replicationZ : replicationZ * -1;
            }
        }
        c = 0;
        outputLine[c++] = chr;
        outputLine[c++] = String.valueOf(pos);
        outputLine[c++] = snp;
        outputLine[c++] = gene;
        outputLine[c++] = module;
        outputLine[c++] = String.valueOf(discoveryZ);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(replicationZ);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(discoveryZCorrected);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(replicationZCorrected);
        outputLine[c++] = alleleAssessed;
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(bestMatch.getAssessedAllele());
        outputLine[c++] = String.valueOf(bestMatchR2);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(Math.abs(pos - bestMatch.getPos()));
        outputLine[c++] = String.valueOf(nextBestR2);
        outputLine[c++] = bestMatch == null ? "NA" : String.valueOf(bestMatch.isIsAmbigous());
        if (bestMatch == null) {
            for (int i = 0; i < EXTRA_COL_FROM_REPLICATION.length; ++i) {
                outputLine[c++] = "NA";
            }
        } else {
            for (int i = 0; i < EXTRA_COL_FROM_REPLICATION.length; ++i) {
                outputLine[c++] = bestMatch.getLine()[EXTRA_COL_FROM_REPLICATION[i]];
            }
        }
        outputWriter.writeNext(outputLine);
    }
    outputWriter.close();
    for (String e : notFound) {
        System.err.println("Not found: " + e);
    }
    System.out.println("interactionSnpNotInGenotypeData: " + interactionSnpNotInGenotypeData);
    System.out.println("noReplicationQtlsInWindow: " + noReplicationQtlsInWindow);
    System.out.println("noReplicationQtlsInLd: " + noReplicationQtlsInLd);
    System.out.println("multipleReplicationQtlsInLd: " + multipleReplicationQtlsInLd);
    System.out.println("replicationTopSnpNotInGenotypeData: " + replicationTopSnpNotInGenotypeData);
}
From source file:CSV_ReportingConsolidator.java
public static void main(String[] args) throws IOException {
    // Construct an array containing the list of files in the input folder
    String inputPath = "input/"; // Set the directory containing the CSV files
    String outputPath = "output/"; // Set the output directory for the consolidated report
    String outputFile = "Consolidated_CSV_Report.csv";
    File folder = new File(inputPath); // Load the selected path
    File[] listOfFiles = folder.listFiles(); // Retrieve the list of files from the directory
    // Serialize the reference headers to write the output CSV header
    CSVReader referenceReader = new CSVReader(new FileReader("reference/example_fields.csv"));
    String[] referenceHeaders = referenceReader.readNext();
    CSVWriter writer = new CSVWriter(new FileWriter(outputPath + outputFile), ',', CSVWriter.NO_QUOTE_CHARACTER);
    System.out.println("-- CSV parser initiated, found " + listOfFiles.length + " input files.\n");
    for (int i = 0; i < listOfFiles.length; i++) {
        if (listOfFiles[i].isFile()) {
            String filename = listOfFiles[i].getName(); // Retrieve the file name
            if (!filename.endsWith("csv")) { // Check if the file has a CSV extension
                System.out.println("EE | Fatal error: The input path contains non-csv files: " + filename
                        + ".\n   Please remove them and try again.");
                writer.close();
                System.exit(1); // Exit if non-CSV files are found
            }
            String filePath = String.valueOf(inputPath + filename); // Combine the path with the filename
            File file = new File(filePath);
            CSVReader csvFile = new CSVReader(new FileReader(filePath));
            String[] nextLine; // CSV line data container
            int rowIterator = 0; // Used to loop between rows
            int colIterator = 0; // Used to loop between columns
            int rowCount = 0; // Used to count the total number of rows
            int pageCount = 0;
            int f = 0;
            String[] pageName = new String[100]; // Holder for Page names
            double[] individualPRT = new double[100]; // Holder for Page Response Times
            String PTrun = ""; // Name of Performance Test Run
            String startTime = ""; // Test start time
            double PRT = 0; // Average Page Response Time
            double PRd = 0; // Page Response Time Standard Deviation
            double ERT = 0; // Average Element Response Time
            double ERd = 0; // Element Response Time Standard Deviation
            double MRT = 0; // Maximum Page Response Time
            double mRT = 0; // Minimum Page Response Time
            int elapsedTime = 0; // Test Elapsed Time
            int completedUsers = 0; // Number of Completed Users
            int TPA = 0; // Total Page Attempts
            int TPH = 0; // Total Page Hits
            int TEA = 0; // Total Element Attempts
            int TEH = 0; // Total Element Hits
            // Fetch the total row count:
            FileReader fr = new FileReader(file);
            LineNumberReader ln = new LineNumberReader(fr);
            while (ln.readLine() != null) {
                rowCount++;
            }
            ln.close(); // Close the file reader
            // Fetch test identification data:
            nextLine = csvFile.readNext();
            PTrun = nextLine[1]; // Name of Performance Test Run
            nextLine = csvFile.readNext();
            startTime = nextLine[1]; // Performance Test Start Time
            // Skip 9 uninteresting rows:
            while (rowIterator < 9) {
                nextLine = csvFile.readNext();
                rowIterator++;
            }
            // Check if there are VP fails (adds another column)
            if (nextLine[9].equals("Total Page VPs Error For Run")) {
                f = 2;
            } else if (nextLine[8].equals("Total Page VPs Failed For Run")
                    || nextLine[8].equals("Total Page VPs Error For Run")) {
                f = 1;
            } else {
                f = 0;
            }
            // Read the page titles:
            while (colIterator != -1) {
                pageName[colIterator] = nextLine[colIterator + 18 + f];
                if ((pageName[colIterator].equals(pageName[0])) && colIterator > 0) {
                    pageCount = colIterator;
                    pageName[colIterator] = null;
                    colIterator = -1; // Detects when the page titles start to repeat
                } else {
                    colIterator++;
                }
            }
            // Retrieve non-continuous performance data, auto-detect gaps, auto-convert in seconds where needed
            nextLine = csvFile.readNext();
            nextLine = csvFile.readNext();
            while (rowIterator < rowCount - 3) {
                if (nextLine.length > 1) {
                    if (nextLine[0].length() != 0) {
                        elapsedTime = Integer.parseInt(nextLine[0]) / 1000;
                    }
                }
                if (nextLine.length > 5) {
                    if (nextLine[5].length() != 0) {
                        completedUsers = Integer.parseInt(nextLine[5]);
                    }
                }
                if (nextLine.length > 8 + f) {
                    if (nextLine[8 + f].length() != 0) {
                        TPA = (int) Double.parseDouble(nextLine[8 + f]);
                    }
                }
                if (nextLine.length > 9 + f) {
                    if (nextLine[9 + f].length() != 0) {
                        TPH = (int) Double.parseDouble(nextLine[9 + f]);
                    }
                }
                if (nextLine.length > 14 + f) {
                    if (nextLine[14 + f].length() != 0) {
                        TEA = (int) Double.parseDouble(nextLine[14 + f]);
                    }
                }
                if (nextLine.length > 15 + f) {
                    if (nextLine[15 + f].length() != 0) {
                        TEH = (int) Double.parseDouble(nextLine[15 + f]);
                    }
                }
                if (nextLine.length > 10 + f) {
                    if (nextLine[10 + f].length() != 0) {
                        PRT = Double.parseDouble(nextLine[10 + f]) / 1000;
                    }
                }
                if (nextLine.length > 11 + f) {
                    if (nextLine[11 + f].length() != 0) {
                        PRd = Double.parseDouble(nextLine[11 + f]) / 1000;
                    }
                }
                if (nextLine.length > 16 + f) {
                    if (nextLine[16 + f].length() != 0) {
                        ERT = Double.parseDouble(nextLine[16 + f]) / 1000;
                    }
                }
                if (nextLine.length > 17 + f) {
                    if (nextLine[17 + f].length() != 0) {
                        ERd = Double.parseDouble(nextLine[17 + f]) / 1000;
                    }
                }
                if (nextLine.length > 12 + f) {
                    if (nextLine[12 + f].length() != 0) {
                        MRT = Double.parseDouble(nextLine[12 + f]) / 1000;
                    }
                }
                if (nextLine.length > 13 + f) {
                    if (nextLine[13 + f].length() != 0) {
                        mRT = Double.parseDouble(nextLine[13 + f]) / 1000;
                    }
                }
                nextLine = csvFile.readNext();
                rowIterator++;
            }
            // Convert the elapsed time from seconds to HH:MM:SS format
            int hours = elapsedTime / 3600, remainder = elapsedTime % 3600, minutes = remainder / 60,
                    seconds = remainder % 60;
            String eTime = (hours < 10 ? "0" : "") + hours + ":" + (minutes < 10 ? "0" : "") + minutes + ":"
                    + (seconds < 10 ? "0" : "") + seconds;
            csvFile.close(); // File recycled to reset the line parser
            CSVReader csvFile2 = new CSVReader(new FileReader(filePath));
            // Reset iterators to allow re-usage:
            rowIterator = 0;
            colIterator = 0;
            // Skip first 13 rows:
            while (rowIterator < 13) {
                nextLine = csvFile2.readNext();
                rowIterator++;
            }
            // Dynamically retrieve individual page response times in seconds, correlate with page names:
            while (rowIterator < rowCount) {
                while (colIterator < pageCount) {
                    if (nextLine.length > 18 + f) {
                        if (nextLine[colIterator + 18 + f].length() != 0) {
                            individualPRT[colIterator] = Double.parseDouble(nextLine[colIterator + 18 + f]) / 1000;
                        }
                    }
                    colIterator++;
                }
                nextLine = csvFile2.readNext();
                rowIterator++;
                colIterator = 0;
            }
            csvFile2.close(); // Final file closing
            // Reset iterators to allow re-usage:
            rowIterator = 0;
            colIterator = 0;
            // Display statistics in console, enable only for debugging purposes:
            /*
            System.out.println(" Elapsed Time: " + elapsedTime + "\n Completed Users: " + completedUsers
                    + "\n Total Page Attempts: " + TPA + "\n Total Page Hits: " + TPH
                    + "\n Average Response Time For All Pages For Run: " + PRT
                    + "\n Response Time Standard Deviation For All Pages For Run: " + PRd
                    + "\n Maximum Response Time For All Pages For Run: " + MRT
                    + "\n Minimum Response Time For All Pages For Run: " + mRT
                    + "\n Total Page Element Attempts: " + TEA + "\n Total Page Element Hits: " + TEH
                    + "\n Average Response Time For All Page Elements For Run: " + ERT
                    + "\n Response Time Standard Deviation For All Page Elements For Run: " + ERd + "\n");
            // Display individual page response times in console:
            while (colIterator < 9) {
                System.out.println("Page " + Page[colIterator] + " - Response Time: " + PagePRT[colIterator]);
                colIterator++;
            }
            */
            // Serialize individual Page Response Times into CSV values
            StringBuffer individualPRTList = new StringBuffer();
            if (individualPRT.length > 0) {
                individualPRTList.append(String.valueOf(individualPRT[0]));
                for (int k = 1; k < pageCount; k++) {
                    individualPRTList.append(",");
                    individualPRTList.append(String.valueOf(individualPRT[k]));
                }
            }
            // Serialize all retrieved performance parameters:
            String[] entries = { PTrun, startTime, String.valueOf(completedUsers), eTime, String.valueOf(TPA),
                    String.valueOf(TPH), String.valueOf(PRT), String.valueOf(PRd), String.valueOf(MRT),
                    String.valueOf(mRT), String.valueOf(TEA), String.valueOf(TEH), String.valueOf(ERT),
                    String.valueOf(ERd), "", individualPRTList.toString(), };
            // Define header and write it to the first CSV row
            Object[] headerConcatenator = ArrayUtils.addAll(referenceHeaders, pageName);
            String[] header = new String[referenceHeaders.length + pageCount];
            header = Arrays.copyOf(headerConcatenator, header.length, String[].class);
            if (i == 0) {
                writer.writeNext(header); // Write CSV header
            }
            writer.writeNext(entries); // Write performance parameters in CSV format
            System.out.println("== Processed: " + filename + " ===========================");
        }
    }
    writer.close(); // Close the CSV writer
    System.out.println("\n-- Done processing " + listOfFiles.length + " files."
            + "\n-- The consolidated report has been saved to " + outputPath + outputFile);
}
From source file:CSV_ReportingConsolidator.java
public static void main(String[] args) throws IOException {
    // Construct an array containing the list of files in the input folder
    String inputPath = "input/"; // Set the directory containing the CSV files
    String outputPath = "output/"; // Set the output directory for the consolidated report
    String outputFile = "Consolidated_CSV_Report.csv";
    File folder = new File(inputPath); // Load the selected path
    File[] listOfFiles = folder.listFiles(); // Retrieve the list of files from the directory
    // Serialize the reference headers to write the output CSV header
    CSVReader referenceReader = new CSVReader(new FileReader("reference/example_fields.csv"));
    String[] referenceHeaders = referenceReader.readNext();
    CSVWriter writer = new CSVWriter(new FileWriter(outputPath + outputFile), ',', CSVWriter.NO_QUOTE_CHARACTER);
    System.out.println("-- CSV parser initiated, found " + listOfFiles.length + " input files.\n");
    for (int i = 0; i < listOfFiles.length; i++) {
        if (listOfFiles[i].isFile()) {
            String filename = listOfFiles[i].getName(); // Retrieve the file name
            if (!filename.endsWith("csv")) { // Check if the file has a CSV extension
                System.out.println("EE | Fatal error: The input path contains non-csv files: " + filename
                        + ".\n   Please remove them and try again.");
                writer.close();
                System.exit(1); // Exit if non-CSV files are found
            }
            String filePath = String.valueOf(inputPath + filename); // Combine the path with the filename
            File file = new File(filePath);
            CSVReader csvFile = new CSVReader(new FileReader(filePath));
            String[] nextLine; // CSV line data container
            int rowIterator = 0; // Used to loop between rows
            int colIterator = 0; // Used to loop between columns
            int rowCount = 0; // Used to count the total number of rows
            int pageCount = 0;
            int f = 0;
            String[] pageName = new String[100]; // Holder for Page names
            double[] individualPRT = new double[100]; // Holder for Page Response Times
            String PTrun = ""; // Name of Performance Test Run
            String startTime = ""; // Test start time
            double PRT = 0; // Average Page Response Time
            double PRd = 0; // Page Response Time Standard Deviation
            double ERT = 0; // Average Element Response Time
            double ERd = 0; // Element Response Time Standard Deviation
            double MRT = 0; // Maximum Page Response Time
            double mRT = 0; // Minimum Page Response Time
            int elapsedTime = 0; // Test Elapsed Time
            int completedUsers = 0; // Number of Completed Users
            int TPA = 0; // Total Page Attempts
            int TPH = 0; // Total Page Hits
            int TEA = 0; // Total Element Attempts
            int TEH = 0; // Total Element Hits
            // Fetch the total row count:
            FileReader fr = new FileReader(file);
            LineNumberReader ln = new LineNumberReader(fr);
            while (ln.readLine() != null) {
                rowCount++;
            }
            ln.close(); // Close the file reader
            // Fetch test identification data:
            nextLine = csvFile.readNext();
            PTrun = nextLine[1]; // Name of Performance Test Run
            nextLine = csvFile.readNext();
            startTime = nextLine[1]; // Performance Test Start Time
            // Skip 9 uninteresting rows:
            while (rowIterator < 9) {
                nextLine = csvFile.readNext();
                rowIterator++;
            }
            // Check if there are VP fails (adds another column)
            if (nextLine[9].equals("Total Page VPs Error For Run")) {
                f = 2;
            } else if (nextLine[8].equals("Total Page VPs Failed For Run")
                    || nextLine[8].equals("Total Page VPs Error For Run")) {
                f = 1;
            } else {
                f = 0;
            }
            // Read the page titles:
            while (colIterator != -1) {
                pageName[colIterator] = nextLine[colIterator + 16 + f];
                if ((pageName[colIterator].equals(pageName[0])) && colIterator > 0) {
                    pageCount = colIterator;
                    pageName[colIterator] = null;
                    colIterator = -1; // Detects when the page titles start to repeat
                } else {
                    colIterator++;
                }
            }
            // Retrieve non-continuous performance data, auto-detect gaps, auto-convert in seconds where needed
            nextLine = csvFile.readNext();
            nextLine = csvFile.readNext();
            while (rowIterator < rowCount - 3) {
                if (nextLine.length > 1) {
                    if (nextLine[0].length() != 0) {
                        elapsedTime = Integer.parseInt(nextLine[0]) / 1000;
                    }
                }
                if (nextLine.length > 4) {
                    if (nextLine[4].length() != 0) {
                        completedUsers = Integer.parseInt(nextLine[4]);
                    }
                }
                if (nextLine.length > 6 + f) {
                    if (nextLine[6 + f].length() != 0) {
                        TPA = (int) Double.parseDouble(nextLine[6 + f]);
                    }
                }
                if (nextLine.length > 7 + f) {
                    if (nextLine[7 + f].length() != 0) {
                        TPH = (int) Double.parseDouble(nextLine[7 + f]);
                    }
                }
                if (nextLine.length > 12 + f) {
                    if (nextLine[12 + f].length() != 0) {
                        TEA = (int) Double.parseDouble(nextLine[12 + f]);
                    }
                }
                if (nextLine.length > 13 + f) {
                    if (nextLine[13 + f].length() != 0) {
                        TEH = (int) Double.parseDouble(nextLine[13 + f]);
                    }
                }
                if (nextLine.length > 8 + f) {
                    if (nextLine[8 + f].length() != 0) {
                        PRT = Double.parseDouble(nextLine[8 + f]) / 1000;
                    }
                }
                if (nextLine.length > 9 + f) {
                    if (nextLine[9 + f].length() != 0) {
                        PRd = Double.parseDouble(nextLine[9 + f]) / 1000;
                    }
                }
                if (nextLine.length > 14 + f) {
                    if (nextLine[14 + f].length() != 0) {
                        ERT = Double.parseDouble(nextLine[14 + f]) / 1000;
                    }
                }
                if (nextLine.length > 15 + f) {
                    if (nextLine[15 + f].length() != 0) {
                        ERd = Double.parseDouble(nextLine[15 + f]) / 1000;
                    }
                }
                if (nextLine.length > 10 + f) {
                    if (nextLine[10 + f].length() != 0) {
                        MRT = Double.parseDouble(nextLine[10 + f]) / 1000;
                    }
                }
                if (nextLine.length > 11 + f) {
                    if (nextLine[11 + f].length() != 0) {
                        mRT = Double.parseDouble(nextLine[11 + f]) / 1000;
                    }
                }
                nextLine = csvFile.readNext();
                rowIterator++;
            }
            // Convert the elapsed time from seconds to HH:MM:SS format
            int hours = elapsedTime / 3600, remainder = elapsedTime % 3600, minutes = remainder / 60,
                    seconds = remainder % 60;
            String eTime = (hours < 10 ? "0" : "") + hours + ":" + (minutes < 10 ? "0" : "") + minutes + ":"
                    + (seconds < 10 ? "0" : "") + seconds;
            csvFile.close(); // File recycled to reset the line parser
            CSVReader csvFile2 = new CSVReader(new FileReader(filePath));
            // Reset iterators to allow re-usage:
            rowIterator = 0;
            colIterator = 0;
            // Skip first 13 rows:
            while (rowIterator < 13) {
                nextLine = csvFile2.readNext();
                rowIterator++;
            }
            // Dynamically retrieve individual page response times in seconds, correlate with page names:
            while (rowIterator < rowCount) {
                while (colIterator < pageCount) {
                    if (nextLine.length > 16 + f) {
                        if (nextLine[colIterator + 16 + f].length() != 0) {
                            individualPRT[colIterator] = Double.parseDouble(nextLine[colIterator + 16 + f]) / 1000;
                        }
                    }
                    colIterator++;
                }
                nextLine = csvFile2.readNext();
                rowIterator++;
                colIterator = 0;
            }
            csvFile2.close(); // Final file closing
            // Reset iterators to allow re-usage:
            rowIterator = 0;
            colIterator = 0;
            // Display statistics in console, enable only for debugging purposes:
            /*
            System.out.println(" Elapsed Time: " + elapsedTime + "\n Completed Users: " + completedUsers
                    + "\n Total Page Attempts: " + TPA + "\n Total Page Hits: " + TPH
                    + "\n Average Response Time For All Pages For Run: " + PRT
                    + "\n Response Time Standard Deviation For All Pages For Run: " + PRd
                    + "\n Maximum Response Time For All Pages For Run: " + MRT
                    + "\n Minimum Response Time For All Pages For Run: " + mRT
                    + "\n Total Page Element Attempts: " + TEA + "\n Total Page Element Hits: " + TEH
                    + "\n Average Response Time For All Page Elements For Run: " + ERT
                    + "\n Response Time Standard Deviation For All Page Elements For Run: " + ERd + "\n");
            // Display individual page response times in console:
            while (colIterator < 9) {
                System.out.println("Page " + Page[colIterator] + " - Response Time: " + PagePRT[colIterator]);
                colIterator++;
            }
            */
            // Serialize individual Page Response Times into CSV values
            StringBuffer individualPRTList = new StringBuffer();
            if (individualPRT.length > 0) {
                individualPRTList.append(String.valueOf(individualPRT[0]));
                for (int k = 1; k < pageCount; k++) {
                    individualPRTList.append(",");
                    individualPRTList.append(String.valueOf(individualPRT[k]));
                }
            }
            // Serialize all retrieved performance parameters:
            String[] entries = { PTrun, startTime, String.valueOf(completedUsers), eTime, String.valueOf(TPA),
                    String.valueOf(TPH), String.valueOf(PRT), String.valueOf(PRd), String.valueOf(MRT),
                    String.valueOf(mRT), String.valueOf(TEA), String.valueOf(TEH), String.valueOf(ERT),
                    String.valueOf(ERd), "", individualPRTList.toString(), };
            // Define header and write it to the first CSV row
            Object[] headerConcatenator = ArrayUtils.addAll(referenceHeaders, pageName);
            String[] header = new String[referenceHeaders.length + pageCount];
            header = Arrays.copyOf(headerConcatenator, header.length, String[].class);
            if (i == 0) {
                writer.writeNext(header); // Write CSV header
            }
            writer.writeNext(entries); // Write performance parameters in CSV format
            System.out.println("== Processed: " + filename + " ===========================");
        }
    }
    writer.close(); // Close the CSV writer
    System.out.println("\n-- Done processing " + listOfFiles.length + " files."
            + "\n-- The consolidated report has been saved to " + outputPath + outputFile);
}
From source file:mlbench.pagerank.PagerankNaive.java
@SuppressWarnings({ "rawtypes", "unchecked" })
public static void main(String[] args) throws IOException, InterruptedException {
    try {
        parseArgs(args);
        HashMap<String, String> conf = new HashMap<String, String>();
        initConf(conf);
        MPI_D.Init(args, MPI_D.Mode.Common, conf);
        JobConf jobConf = new JobConf(confPath);
        if (MPI_D.COMM_BIPARTITE_O != null) {
            // O communicator
            int rank = MPI_D.Comm_rank(MPI_D.COMM_BIPARTITE_O);
            int size = MPI_D.Comm_size(MPI_D.COMM_BIPARTITE_O);
            if (rank == 0) {
                LOG.info(PagerankNaive.class.getSimpleName() + " O start.");
            }
            FileSplit[] inputs1 = DataMPIUtil.HDFSDataLocalLocator.getTaskInputs(MPI_D.COMM_BIPARTITE_O,
                    jobConf, edgeDir, rank);
            FileSplit[] inputs2 = DataMPIUtil.HDFSDataLocalLocator.getTaskInputs(MPI_D.COMM_BIPARTITE_O,
                    jobConf, vecDir, rank);
            FileSplit[] inputs = (FileSplit[]) ArrayUtils.addAll(inputs2, inputs1);
            for (int i = 0; i < inputs.length; i++) {
                FileSplit fsplit = inputs[i];
                LineRecordReader kvrr = new LineRecordReader(jobConf, fsplit);
                LongWritable key = kvrr.createKey();
                Text value = kvrr.createValue();
                {
                    IntWritable k = new IntWritable();
                    Text v = new Text();
                    while (kvrr.next(key, value)) {
                        String line_text = value.toString();
                        // ignore comments in edge file
                        if (line_text.startsWith("#"))
                            continue;
                        final String[] line = line_text.split("\t");
                        if (line.length < 2)
                            continue;
                        // vector : ROWID VALUE('vNNNN')
                        if (line[1].charAt(0) == 'v') {
                            k.set(Integer.parseInt(line[0]));
                            v.set(line[1]);
                            MPI_D.Send(k, v);
                        } else {
                            /*
                             * In other matrix-vector multiplication, we output
                             * (dst, src) here. However, in PageRank, the
                             * matrix-vector computation formula is M^T * v.
                             * Therefore, we output (src, dst) here.
                             */
                            int src_id = Integer.parseInt(line[0]);
                            int dst_id = Integer.parseInt(line[1]);
                            k.set(src_id);
                            v.set(line[1]);
                            MPI_D.Send(k, v);
                            if (make_symmetric == 1) {
                                k.set(dst_id);
                                v.set(line[0]);
                                MPI_D.Send(k, v);
                            }
                        }
                    }
                }
            }
        } else if (MPI_D.COMM_BIPARTITE_A != null) {
            // A communicator
            int rank = MPI_D.Comm_rank(MPI_D.COMM_BIPARTITE_A);
            if (rank == 0) {
                LOG.info(PagerankNaive.class.getSimpleName() + " A start.");
            }
            HadoopWriter<IntWritable, Text> outrw = HadoopIOUtil.getNewWriter(jobConf, outDir,
                    IntWritable.class, Text.class, TextOutputFormat.class, null, rank, MPI_D.COMM_BIPARTITE_A);
            IntWritable oldKey = null;
            int i;
            double cur_rank = 0;
            ArrayList<Integer> dst_nodes_list = new ArrayList<Integer>();
            Object[] keyValue = MPI_D.Recv();
            while (keyValue != null) {
                IntWritable key = (IntWritable) keyValue[0];
                Text value = (Text) keyValue[1];
                if (oldKey == null) {
                    oldKey = key;
                }
                // A new key arrives
                if (!key.equals(oldKey)) {
                    outrw.write(oldKey, new Text("s" + cur_rank));
                    int outdeg = dst_nodes_list.size();
                    if (outdeg > 0) {
                        cur_rank = cur_rank / (double) outdeg;
                    }
                    for (i = 0; i < outdeg; i++) {
                        outrw.write(new IntWritable(dst_nodes_list.get(i)), new Text("v" + cur_rank));
                    }
                    oldKey = key;
                    cur_rank = 0;
                    dst_nodes_list = new ArrayList<Integer>();
                }
                // common record
                String line_text = value.toString();
                final String[] line = line_text.split("\t");
                if (line.length == 1) {
                    if (line_text.charAt(0) == 'v') {
                        // vector : VALUE
                        cur_rank = Double.parseDouble(line_text.substring(1));
                    } else {
                        // edge : ROWID
                        dst_nodes_list.add(Integer.parseInt(line[0]));
                    }
                }
                keyValue = MPI_D.Recv();
            }
            // write the left part
            if (cur_rank != 0) {
                outrw.write(oldKey, new Text("s" + cur_rank));
                int outdeg = dst_nodes_list.size();
                if (outdeg > 0) {
                    cur_rank = cur_rank / (double) outdeg;
                }
                for (i = 0; i < outdeg; i++) {
                    outrw.write(new IntWritable(dst_nodes_list.get(i)), new Text("v" + cur_rank));
                }
            }
            outrw.close();
        }
        MPI_D.Finalize();
    } catch (MPI_D_Exception e) {
        e.printStackTrace();
    }
}
From source file:com.act.lcms.AnimateNetCDFAroundMass.java
public static void main(String[] args) throws Exception {
    if (args.length < 7 || !areNCFiles(Arrays.copyOfRange(args, 5, args.length))) {
        throw new RuntimeException("Needs: \n" + "(1) mass value, e.g., 132.0772 \n"
                + "(2) time value, e.g., 39.2, (seconds), \n" + "(3) minimum Mz Precision, 0.04 \n"
                + "(4) max z axis, e.g., 20000 \n" + "(5) prefix for .data and rendered .pdf \n"
                + "(6..) 2 or more NetCDF .nc files");
    }
    Double mz = Double.parseDouble(args[0]);
    Double time = Double.parseDouble(args[1]);
    Double minMzPrecision = Double.parseDouble(args[2]);
    Double maxZAxis = Double.parseDouble(args[3]);
    String outPrefix = args[4];
    // the mz values go from 50-950, we start with a big window and exponentially narrow down
    double mzWin = 100;
    // time values go from 0-450, we start with a big window and exponentially narrow down
    double timeWin = 50;
    // the factor by which to zoom in every step (has to be >1, a value of 2 is good)
    double factor = 1.2;
    // the animation frame count
    int frame = 1;
    AnimateNetCDFAroundMass c = new AnimateNetCDFAroundMass();
    String[] netCDFFnames = Arrays.copyOfRange(args, 5, args.length);
    List<List<XYZ>> spectra = c.getSpectra(netCDFFnames, time, timeWin, mz, mzWin);
    for (List<XYZ> s : spectra) {
        System.out.format("%d xyz datapoints in (initial narrowed) spectra\n", s.size());
    }
    String[] labels = new String[netCDFFnames.length];
    for (int i = 0; i < labels.length; i++)
        labels[i] = "Dataset: " + i;
    // you could set labels to netCDFFnames to get precise labels on the graphs
    Gnuplotter plotter = new Gnuplotter();
    String fmt = "png";
    List<String> outImgFiles = new ArrayList<>(), outDataFiles = new ArrayList<>();
    while (mzWin > minMzPrecision) {
        // exponentially narrow windows down
        mzWin /= factor;
        timeWin /= factor;
        List<List<XYZ>> windowedSpectra = c.getSpectraInWindowAll(spectra, time, timeWin, mz, mzWin);
        String frameid = String.format("%03d", frame);
        String outPDF = outPrefix + frameid + "." + fmt;
        String outDATA = outPrefix + frameid + ".data";
        outImgFiles.add(outPDF);
        outDataFiles.add(outDATA);
        frame++;
        // Write data output to outfile
        PrintStream out = new PrintStream(new FileOutputStream(outDATA));
        // print out the spectra to outDATA
        for (List<XYZ> windowOfSpectra : windowedSpectra) {
            for (XYZ xyz : windowOfSpectra) {
                out.format("%.4f\t%.4f\t%.4f\n", xyz.time, xyz.mz, xyz.intensity);
                out.flush();
            }
            // delimit this dataset from the rest
            out.print("\n\n");
        }
        // close the .data
        out.close();
        // render outDATA to outPDF using gnuplot
        plotter.plotMulti3D(outDATA, outPDF, fmt, labels, maxZAxis);
    }
    String outImgs = outPrefix + "*." + fmt;
    plotter.makeAnimatedGIF(outImgs, outPrefix + ".gif");
    // all the frames are now in the animated gif, remove the intermediate files
    for (String f : outDataFiles)
        new File(f).delete();
    for (String f : outImgFiles)
        new File(f).delete();
}