List of usage examples for java.lang.Integer.parseInt
public static int parseInt(String s) throws NumberFormatException
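Before the full examples below, a minimal sketch of the call itself (the input value "8080" is hypothetical, not taken from any of the source files): parseInt accepts a signed decimal string and throws NumberFormatException for anything else, so callers typically catch the exception or validate the input first.

// Minimal usage sketch (hypothetical value); parseInt throws NumberFormatException on bad input.
String text = "8080";
try {
    int port = Integer.parseInt(text);          // decimal parse, radix 10
    System.out.println("Parsed port: " + port);
} catch (NumberFormatException e) {
    System.err.println("Not a valid integer: " + text);
}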
From source file:com.linkedin.pinotdruidbenchmark.PinotThroughput.java
@SuppressWarnings("InfiniteLoopStatement") public static void main(String[] args) throws Exception { if (args.length != 3 && args.length != 4) { System.err.println(//from w w w .j a va 2s . c om "3 or 4 arguments required: QUERY_DIR, RESOURCE_URL, NUM_CLIENTS, TEST_TIME (seconds)."); return; } File queryDir = new File(args[0]); String resourceUrl = args[1]; final int numClients = Integer.parseInt(args[2]); final long endTime; if (args.length == 3) { endTime = Long.MAX_VALUE; } else { endTime = System.currentTimeMillis() + Integer.parseInt(args[3]) * MILLIS_PER_SECOND; } File[] queryFiles = queryDir.listFiles(); assert queryFiles != null; Arrays.sort(queryFiles); final int numQueries = queryFiles.length; final HttpPost[] httpPosts = new HttpPost[numQueries]; for (int i = 0; i < numQueries; i++) { HttpPost httpPost = new HttpPost(resourceUrl); String query = new BufferedReader(new FileReader(queryFiles[i])).readLine(); httpPost.setEntity(new StringEntity("{\"pql\":\"" + query + "\"}")); httpPosts[i] = httpPost; } final AtomicInteger counter = new AtomicInteger(0); final AtomicLong totalResponseTime = new AtomicLong(0L); final ExecutorService executorService = Executors.newFixedThreadPool(numClients); for (int i = 0; i < numClients; i++) { executorService.submit(new Runnable() { @Override public void run() { try (CloseableHttpClient httpClient = HttpClients.createDefault()) { while (System.currentTimeMillis() < endTime) { long startTime = System.currentTimeMillis(); CloseableHttpResponse httpResponse = httpClient .execute(httpPosts[RANDOM.nextInt(numQueries)]); httpResponse.close(); long responseTime = System.currentTimeMillis() - startTime; counter.getAndIncrement(); totalResponseTime.getAndAdd(responseTime); } } catch (IOException e) { e.printStackTrace(); } } }); } executorService.shutdown(); long startTime = System.currentTimeMillis(); while (System.currentTimeMillis() < endTime) { Thread.sleep(REPORT_INTERVAL_MILLIS); double timePassedSeconds = ((double) (System.currentTimeMillis() - startTime)) / MILLIS_PER_SECOND; int count = counter.get(); double avgResponseTime = ((double) totalResponseTime.get()) / count; System.out.println("Time Passed: " + timePassedSeconds + "s, Query Executed: " + count + ", QPS: " + count / timePassedSeconds + ", Avg Response Time: " + avgResponseTime + "ms"); } }
From source file:com.damon.rocketmq.example.operation.Producer.java
public static void main(String[] args) throws MQClientException, InterruptedException {
    CommandLine commandLine = buildCommandline(args);
    if (commandLine != null) {
        String group = commandLine.getOptionValue('g');
        String topic = commandLine.getOptionValue('t');
        String tags = commandLine.getOptionValue('a');
        String keys = commandLine.getOptionValue('k');
        String msgCount = commandLine.getOptionValue('c');

        DefaultMQProducer producer = new DefaultMQProducer(group);
        producer.setInstanceName(Long.toString(System.currentTimeMillis()));
        producer.start();

        for (int i = 0; i < Integer.parseInt(msgCount); i++) {
            try {
                Message msg = new Message(topic, tags, keys,
                        ("Hello RocketMQ " + i).getBytes(RemotingHelper.DEFAULT_CHARSET));
                SendResult sendResult = producer.send(msg);
                System.out.printf("%-8d %s%n", i, sendResult);
            } catch (Exception e) {
                e.printStackTrace();
                Thread.sleep(1000);
            }
        }
        producer.shutdown();
    }
}
From source file:com.iveely.computing.Program.java
/**
 * @param args the command line arguments
 * @throws java.io.IOException
 */
public static void main(String[] args) throws IOException {
    if (args != null && args.length > 0) {
        logger.info("start computing with arguments:" + String.join(",", args));
        String type = args[0].toLowerCase(Locale.CHINESE);
        switch (type) {
        case "master":
            launchMaster();
            return;
        case "slave":
            if (args.length == 4) {
                ConfigWrapper.get().getSlave().setPort(Integer.parseInt(args[1]));
                ConfigWrapper.get().getSlave().setSlot(Integer.parseInt(args[2]));
                ConfigWrapper.get().getSlave().setSlotCount(Integer.parseInt(args[3]));
            }
            launchSlave();
            return;
        case "supervisor":
            launchSupervisor();
            return;
        case "console":
            launchConsole();
            return;
        }
    }
    logger.error("arguments error,example [master | supervisor | slave | console]");
    System.out.println("press any keys to exit...");
    new BufferedReader(new InputStreamReader(System.in)).readLine();
}
From source file:main.java.gov.wa.wsdot.candidate.evaluation.App.java
public static void main(String[] args) { System.out.println("Hello WSDOT ITS 3 Candidate!"); System.out.println("Hello WSDOT ITS 3 Interviewers!"); if (args.length != 3) { System.out.println("Parameters: latitude longitude radius\n"); } else {/*from ww w .j av a 2 s . c o m*/ App app = new App(Double.parseDouble(args[0]), Double.parseDouble(args[1]), Integer.parseInt(args[2])); try { app.run(); } catch (Exception e) { e.printStackTrace(); System.exit(0); } } }
From source file:SparkKMer.java
public static void main(String[] args) throws Exception {
    // Setup
    SparkConf sparkConf = new SparkConf().setAppName("SparkKMer");
    JavaSparkContext jsc = new JavaSparkContext(sparkConf);

    // Argument parsing
    if (args.length < 2) {
        System.err.println("Usage: SparkKMer <accession> <kmer-length>");
        System.exit(1);
    }
    final String acc = args[0];
    final int KMER_LENGTH = Integer.parseInt(args[1]);

    // Check accession and split
    ReadCollection run = gov.nih.nlm.ncbi.ngs.NGS.openReadCollection(acc);
    long numreads = run.getReadCount();

    // Slice the job
    int chunk = 20000; // amount of reads per 1 map operation
    int slices = (int) (numreads / chunk / 1);
    if (slices == 0)
        slices = 1;
    List<LongRange> sub = new ArrayList<LongRange>();
    for (long first = 1; first <= numreads;) {
        long last = first + chunk - 1;
        if (last > numreads)
            last = numreads;
        sub.add(new LongRange(first, last));
        first = last + 1;
    }
    System.err.println("Prepared ranges: \n" + sub);
    JavaRDD<LongRange> jobs = jsc.parallelize(sub, slices);

    // Map
    JavaRDD<String> kmers = jobs.flatMap(new FlatMapFunction<LongRange, String>() {
        ReadCollection run = null;

        @Override
        public Iterable<String> call(LongRange s) {
            // Executes on task nodes
            List<String> ret = new ArrayList<String>();
            try {
                long first = s.getMinimumLong();
                long last = s.getMaximumLong();
                if (run == null) {
                    run = gov.nih.nlm.ncbi.ngs.NGS.openReadCollection(acc);
                }
                ReadIterator it = run.getReadRange(first, last - first + 1, Read.all);
                while (it.nextRead()) {
                    // iterate through fragments
                    while (it.nextFragment()) {
                        String bases = it.getFragmentBases();
                        // iterate through kmers
                        for (int i = 0; i < bases.length() - KMER_LENGTH; i++) {
                            ret.add(bases.substring(i, i + KMER_LENGTH));
                        }
                    }
                }
            } catch (ErrorMsg x) {
                System.err.println(x.toString());
                x.printStackTrace();
            }
            return ret;
        }
    });

    // Initiate kmer counting
    JavaPairRDD<String, Integer> kmer_ones = kmers.mapToPair(new PairFunction<String, String, Integer>() {
        @Override
        public Tuple2<String, Integer> call(String s) {
            return new Tuple2<String, Integer>(s, 1);
        }
    });

    // Reduce counts
    JavaPairRDD<String, Integer> counts = kmer_ones.reduceByKey(new Function2<Integer, Integer, Integer>() {
        @Override
        public Integer call(Integer i1, Integer i2) {
            return i1 + i2;
        }
    });

    // Collect the output
    List<Tuple2<String, Integer>> output = counts.collect();
    for (Tuple2<String, Integer> tuple : output) {
        System.out.println(tuple._1() + ": " + tuple._2());
    }
    jsc.stop();
}
From source file:com.trovit.hdfstree.HdfsTree.java
public static void main(String... args) {
    Options options = new Options();
    options.addOption("l", false, "Use local filesystem.");
    options.addOption("p", true, "Path used as root for the tree.");
    options.addOption("s", false, "Display the size of the directory");
    options.addOption("d", true, "Maximum depth of the tree (when displaying)");

    CommandLineParser parser = new PosixParser();
    TreeBuilder treeBuilder;
    FSInspector fsInspector = null;
    String rootPath = null;
    Displayer displayer = new ConsoleDisplayer();

    try {
        CommandLine cmd = parser.parse(options, args);

        // local or hdfs.
        if (cmd.hasOption("l")) {
            fsInspector = new LocalFSInspector();
        } else {
            fsInspector = new HDFSInspector();
        }

        // check that it has the root path.
        if (cmd.hasOption("p")) {
            rootPath = cmd.getOptionValue("p");
        } else {
            throw new ParseException("Mandatory option (-p) is not specified.");
        }

        if (cmd.hasOption("d")) {
            displayer.setMaxDepth(Integer.parseInt(cmd.getOptionValue("d")));
        }

        if (cmd.hasOption("s")) {
            displayer.setDisplaySize();
        }
    } catch (ParseException e) {
        System.out.println(e.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("hdfstree", options);
        System.exit(1);
    }

    treeBuilder = new TreeBuilder(rootPath, fsInspector);
    TreeNode tree = treeBuilder.buildTree();
    displayer.display(tree);
}
From source file:com.kinesis.datavis.writer.BidRequestStreamWriter.java
/**
 * Start a number of threads and send randomly generated {@link }s to a Kinesis Stream until the
 * program is terminated.
 *
 * @param args Expecting 3 arguments: A numeric value indicating the number of threads to use to send
 *             data to Kinesis and the name of the stream to send records to, and the AWS region in which
 *             these resources exist or should be created.
 * @throws InterruptedException If this application is interrupted while sending records to Kinesis.
 */
public static void main(String[] args) throws InterruptedException {
    int numberOfThreads = Integer.parseInt(args[0]);

    AppProperties appProps = new AppProperties("bidrq", args[1]);
    String streamName = appProps.streamName();
    Region region = AppUtils.parseRegion(appProps.getRegion());

    AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();
    ClientConfiguration clientConfig = AppUtils.configureUserAgentForSample(new ClientConfiguration());
    AmazonKinesis kinesis = new AmazonKinesisClient(credentialsProvider, clientConfig);
    kinesis.setRegion(region);

    // The more resources we declare the higher write IOPS we need on our DynamoDB table.
    // We write a record for each resource every interval.
    // If interval = 500ms, resource count = 7 we need: (1000/500 * 7) = 14 write IOPS minimum.
    List<String> resources = new ArrayList<>();
    resources.add("300x200");
    resources.add("500x200");
    resources.add("400x600");
    resources.add("800x600");

    List<String> bidRequestIds = new ArrayList<>();
    // bidRequestIds.add(UUID.randomUUID().toString());
    // bidRequestIds.add(UUID.randomUUID().toString());
    // bidRequestIds.add(UUID.randomUUID().toString());
    // bidRequestIds.add("11111111111");
    // bidRequestIds.add("22222222222");
    // bannerIds.add("33333333333");
    // bidRequestIds.add("44444444444");
    bidRequestIds.add("92b9b9d9-2d6d-454a-b80a-d6a318aca9ec");

    BidRequestFactory bdFactory = new BidRequestFactory(bidRequestIds, resources);

    // Creates a stream to write to with 2 shards if it doesn't exist
    StreamUtils streamUtils = new StreamUtils(kinesis);
    streamUtils.createStreamIfNotExists(streamName, 2);
    LOG.info(String.format("%s stream is ready for use", streamName));

    final BidRequestPutter putter = new BidRequestPutter(bdFactory, kinesis, streamName);
    GeneralStreamWriter streamWriter = new GeneralStreamWriter(numberOfThreads, putter);
    streamWriter.doWrite();
}
From source file:edu.cmu.lti.oaqa.bio.index.medline.annotated.query.SimpleQueryApp.java
public static void main(String[] args) {
    Options options = new Options();
    options.addOption("u", null, true, "Solr URI");
    options.addOption("n", null, true, "Max # of results");

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();

    try {
        CommandLine cmd = parser.parse(options, args);

        String solrURI = null;
        solrURI = cmd.getOptionValue("u");
        if (solrURI == null) {
            Usage("Specify Solr URI");
        }
        SolrServerWrapper solr = new SolrServerWrapper(solrURI);

        int numRet = 10;
        if (cmd.hasOption("n")) {
            numRet = Integer.parseInt(cmd.getOptionValue("n"));
        }

        List<String> fieldList = new ArrayList<String>();
        fieldList.add(UtilConstMedline.ID_FIELD);
        fieldList.add(UtilConstMedline.SCORE_FIELD);
        fieldList.add(UtilConstMedline.ARTICLE_TITLE_FIELD);
        fieldList.add(UtilConstMedline.ENTITIES_DESC_FIELD);
        fieldList.add(UtilConstMedline.ABSTRACT_TEXT_FIELD);

        BufferedReader sysInReader = new BufferedReader(new InputStreamReader(System.in));
        Joiner commaJoiner = Joiner.on(',');

        while (true) {
            System.out.println("Input query: ");
            String query = sysInReader.readLine();
            if (null == query)
                break;

            QueryTransformer qt = new QueryTransformer(query);
            String tranQuery = qt.getQuery();
            System.out.println("Translated query:");
            System.out.println(tranQuery);
            System.out.println("=========================");

            SolrDocumentList res = solr.runQuery(tranQuery, fieldList, numRet);
            System.out.println("Found " + res.getNumFound() + " entries");

            for (SolrDocument doc : res) {
                String id = (String) doc.getFieldValue(UtilConstMedline.ID_FIELD);
                float score = (Float) doc.getFieldValue(UtilConstMedline.SCORE_FIELD);
                String title = (String) doc.getFieldValue(UtilConstMedline.ARTICLE_TITLE_FIELD);
                String titleAbstract = (String) doc.getFieldValue(UtilConstMedline.ABSTRACT_TEXT_FIELD);
                System.out.println(score + " PMID=" + id + " " + titleAbstract);

                String entityDesc = (String) doc.getFieldValue(UtilConstMedline.ENTITIES_DESC_FIELD);
                System.out.println("Entities:");
                for (EntityEntry e : EntityEntry.parseEntityDesc(entityDesc)) {
                    System.out.println(String.format("[%d %d] concept=%s concept_ids=%s", e.mStart, e.mEnd,
                            e.mConcept, commaJoiner.join(e.mConceptIds)));
                }
            }
        }

        solr.close();
    } catch (ParseException e) {
        Usage("Cannot parse arguments");
    } catch (Exception e) {
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }
}
From source file:Pong.java
public static void main(String... args) throws Exception {
    System.setProperty("os.max.pid.bits", "16");

    Options options = new Options();
    options.addOption("i", true, "Input chronicle path");
    options.addOption("n", true, "Number of entries to write");
    options.addOption("w", true, "Number of writer threads");
    options.addOption("r", true, "Number of reader threads");
    options.addOption("x", false, "Delete the output chronicle at startup");

    CommandLine cmd = new DefaultParser().parse(options, args);
    final Path output = Paths.get(cmd.getOptionValue("o", "/tmp/__test/chr"));
    final long maxCount = Long.parseLong(cmd.getOptionValue("n", "10000000"));
    final int writerThreadCount = Integer.parseInt(cmd.getOptionValue("w", "4"));
    final int readerThreadCount = Integer.parseInt(cmd.getOptionValue("r", "4"));
    final boolean deleteOnStartup = cmd.hasOption("x");

    if (deleteOnStartup) {
        FileUtil.removeRecursive(output);
    }

    final Chronicle chr = ChronicleQueueBuilder.vanilla(output.toFile()).build();
    final ExecutorService executor = Executors.newFixedThreadPool(4);
    final List<Future<?>> futures = new ArrayList<>();

    final long totalCount = writerThreadCount * maxCount;
    final long t0 = System.nanoTime();

    for (int i = 0; i != readerThreadCount; ++i) {
        final int tid = i;
        futures.add(executor.submit((Runnable) () -> {
            try {
                IntLongMap counts = HashIntLongMaps.newMutableMap();
                ExcerptTailer tailer = chr.createTailer();

                final StringBuilder sb1 = new StringBuilder();
                final StringBuilder sb2 = new StringBuilder();

                long count = 0;
                while (count != totalCount) {
                    if (!tailer.nextIndex())
                        continue;
                    final int id = tailer.readInt();
                    final long val = tailer.readStopBit();
                    final long longValue = tailer.readLong();

                    sb1.setLength(0);
                    sb2.setLength(0);
                    tailer.read8bitText(sb1);
                    tailer.read8bitText(sb2);

                    if (counts.addValue(id, 1) - 1 != val || longValue != 0x0badcafedeadbeefL
                            || !StringInterner.isEqual("FooBar", sb1)
                            || !StringInterner.isEqual("AnotherFooBar", sb2)) {
                        System.out.println("Unexpected value " + id + ", " + val + ", "
                                + Long.toHexString(longValue) + ", " + sb1.toString() + ", " + sb2.toString());
                        return;
                    }

                    ++count;
                    if (count % 1_000_000 == 0) {
                        long t1 = System.nanoTime();
                        System.out.println(tid + " " + (t1 - t0) / 1e6 + " ms");
                    }
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }));
    }

    for (Future f : futures) {
        f.get();
    }
    executor.shutdownNow();

    final long t1 = System.nanoTime();
    System.out.println("Done. Rough time=" + (t1 - t0) / 1e6 + " ms");
}
From source file:cu.uci.gws.sdlcrawler.PdfCrawlController.java
public static void main(String[] args) throws Exception {
    Properties cm = PdfCrawlerConfigManager.getInstance().loadConfigFile();
    long startTime = System.currentTimeMillis();
    DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
    Date date = new Date();
    System.out.println(dateFormat.format(date));

    int numberOfCrawlers = Integer.parseInt(cm.getProperty("sdlcrawler.NumberOfCrawlers"));
    String pdfFolder = cm.getProperty("sdlcrawler.CrawlPdfFolder");

    CrawlConfig config = new CrawlConfig();
    config.setCrawlStorageFolder(cm.getProperty("sdlcrawler.CrawlStorageFolder"));
    config.setProxyHost(cm.getProperty("sdlcrawler.ProxyHost"));
    if (!"".equals(cm.getProperty("sdlcrawler.ProxyPort"))) {
        config.setProxyPort(Integer.parseInt(cm.getProperty("sdlcrawler.ProxyPort")));
    }
    config.setProxyUsername(cm.getProperty("sdlcrawler.ProxyUser"));
    config.setProxyPassword(cm.getProperty("sdlcrawler.ProxyPass"));
    config.setMaxDownloadSize(Integer.parseInt(cm.getProperty("sdlcrawler.MaxDownloadSize")));
    config.setIncludeBinaryContentInCrawling(
            Boolean.parseBoolean(cm.getProperty("sdlcrawler.IncludeBinaryContent")));
    config.setFollowRedirects(Boolean.parseBoolean(cm.getProperty("sdlcrawler.Redirects")));
    config.setUserAgentString(cm.getProperty("sdlcrawler.UserAgent"));
    config.setMaxDepthOfCrawling(Integer.parseInt(cm.getProperty("sdlcrawler.MaxDepthCrawl")));
    config.setMaxConnectionsPerHost(Integer.parseInt(cm.getProperty("sdlcrawler.MaxConnectionsPerHost")));
    config.setSocketTimeout(Integer.parseInt(cm.getProperty("sdlcrawler.SocketTimeout")));
    config.setMaxOutgoingLinksToFollow(Integer.parseInt(cm.getProperty("sdlcrawler.MaxOutgoingLinks")));
    config.setResumableCrawling(Boolean.parseBoolean(cm.getProperty("sdlcrawler.ResumableCrawling")));
    config.setIncludeHttpsPages(Boolean.parseBoolean(cm.getProperty("sdlcrawler.IncludeHttpsPages")));
    config.setMaxTotalConnections(Integer.parseInt(cm.getProperty("sdlcrawler.MaxTotalConnections")));
    config.setMaxPagesToFetch(Integer.parseInt(cm.getProperty("sdlcrawler.MaxPagesToFetch")));
    config.setPolitenessDelay(Integer.parseInt(cm.getProperty("sdlcrawler.PolitenessDelay")));
    config.setConnectionTimeout(Integer.parseInt(cm.getProperty("sdlcrawler.ConnectionTimeout")));
    System.out.println(config.toString());

    Collection<BasicHeader> defaultHeaders = new HashSet<>();
    defaultHeaders
            .add(new BasicHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"));
    defaultHeaders.add(new BasicHeader("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.3"));
    defaultHeaders.add(new BasicHeader("Accept-Language", "en-US,en,es-ES,es;q=0.8"));
    defaultHeaders.add(new BasicHeader("Connection", "keep-alive"));
    config.setDefaultHeaders(defaultHeaders);

    List<String> list = Files.readAllLines(Paths.get("config/" + cm.getProperty("sdlcrawler.SeedFile")),
            StandardCharsets.UTF_8);
    String[] crawlDomains = list.toArray(new String[list.size()]);

    PageFetcher pageFetcher = new PageFetcher(config);
    RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
    RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);
    CrawlController controller = new CrawlController(config, pageFetcher, robotstxtServer);
    for (String domain : crawlDomains) {
        controller.addSeed(domain);
    }
    PdfCrawler.configure(crawlDomains, pdfFolder);
    controller.start(PdfCrawler.class, numberOfCrawlers);

    DateFormat dateFormat1 = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
    Date date1 = new Date();
    System.out.println(dateFormat1.format(date1));

    long endTime = System.currentTimeMillis();
    long totalTime = endTime - startTime;
    System.out.println("Total time:" + totalTime);
}