List of usage examples for java.lang.Integer.parseInt(String)
public static int parseInt(String s) throws NumberFormatException
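Before the full examples, a minimal self-contained sketch of the method's basic contract (the literal inputs "42", "-7" and "forty-two" are illustrative values, not taken from the source files below): parseInt returns the decimal value of the string and throws NumberFormatException for input that is not a parsable integer.

public class ParseIntBasics {
    public static void main(String[] args) {
        // Plain decimal parsing; an optional leading sign is accepted.
        int a = Integer.parseInt("42");   // 42
        int b = Integer.parseInt("-7");   // -7

        // Anything that is not a valid decimal int (including null, the empty
        // string, or out-of-range values) throws NumberFormatException.
        try {
            Integer.parseInt("forty-two");
        } catch (NumberFormatException e) {
            System.err.println("Not a valid int: " + e.getMessage());
        }

        System.out.println(a + b);        // 35
    }
}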
From source file: com.linkedin.bowser.tool.REPLCommandLine.java

public static void main(String... args) throws Exception {
    Logger root = Logger.getRootLogger();
    Options options = new Options();
    options.addOption("v", "verbose", false, "verbose output");
    options.addOption("t", "threads", true, "parallel threads");
    CommandLineParser parser = new PosixParser();
    CommandLine cmdline = parser.parse(options, args);
    if (cmdline.hasOption("v"))
        root.setLevel(Level.DEBUG);
    else
        root.setLevel(Level.INFO);
    REPLCommandLine cmd = new REPLCommandLine(cmdline.hasOption("v"),
            cmdline.hasOption("t") ? Integer.parseInt(cmdline.getOptionValue("t")) : null);
    cmd.doRun();
}
From source file: com.xandrev.altafitcalendargenerator.Main.java

public static void main(String[] args) {
    CalendarPrinter printer = new CalendarPrinter();
    XLSExtractor extractor = new XLSExtractor();
    if (args != null && args.length > 0) {
        try {
            Options opt = new Options();
            opt.addOption("f", true, "Filepath of the XLS file");
            opt.addOption("t", true, "Type name of activities");
            opt.addOption("m", true, "Month index");
            opt.addOption("o", true, "Output filename of the generated ICS");
            BasicParser parser = new BasicParser();
            CommandLine cliParser = parser.parse(opt, args);
            if (cliParser.hasOption("f")) {
                String fileName = cliParser.getOptionValue("f");
                LOG.debug("File name to be imported: " + fileName);
                String activityNames = cliParser.getOptionValue("t");
                LOG.debug("Activity type names: " + activityNames);
                ArrayList<String> nameList = new ArrayList<>();
                String[] actNames = activityNames.split(",");
                if (actNames != null) {
                    nameList.addAll(Arrays.asList(actNames));
                }
                LOG.debug("Successfully parsed activities: " + nameList.size());
                if (cliParser.hasOption("m")) {
                    String monthIdx = cliParser.getOptionValue("m");
                    LOG.debug("Month index: " + monthIdx);
                    // Convert the 1-based month argument to the 0-based index used internally.
                    int month = Integer.parseInt(monthIdx) - 1;
                    if (cliParser.hasOption("o")) {
                        String outputfilePath = cliParser.getOptionValue("o");
                        LOG.debug("Output file to be generated: " + outputfilePath);
                        LOG.debug("Starting to extract the spreadsheet");
                        HashMap<Integer, ArrayList<TimeTrack>> result = extractor.importExcelSheet(fileName);
                        LOG.debug("Finished extracting the spreadsheet");
                        LOG.debug("Starting the filter of the data");
                        HashMap<Date, String> cal = printer.getCalendaryByItem(result, nameList, month);
                        LOG.debug("Finished the filter of the data");
                        LOG.debug("Creating the ics Calendar");
                        net.fortuna.ical4j.model.Calendar calendar = printer.createICSCalendar(cal);
                        LOG.debug("Finished the ics Calendar");
                        LOG.debug("Printing the ICS file to: " + outputfilePath);
                        printer.saveCalendar(calendar, outputfilePath);
                        LOG.debug("Finished the ICS file to: " + outputfilePath);
                    }
                }
            }
        } catch (ParseException ex) {
            LOG.error("Error parsing the argument list: ", ex);
        }
    }
}
From source file: dashboard.ImportCDN.java

public static void main(String[] args) {
    int n = 0;
    String propertiesFileName = "";

    // First argument - number of events to import
    if (args.length > 0) {
        try {
            n = Integer.parseInt(args[0]);
        } catch (NumberFormatException e) {
            System.err.println("First argument must be an integer");
            System.exit(1);
        }
    } else {
        System.err.println("Please specify number of events to import.");
        System.exit(1);
    }

    // Second argument - properties file name
    if (args.length > 1)
        propertiesFileName = args[1];
    else
        propertiesFileName = "gigaDashboard.properties";

    // Read properties file
    Properties prop = new Properties();
    try {
        // Load a properties file
        prop.load(new FileInputStream(propertiesFileName));
        // Another option - load default properties from the Jar
        //prop.load(ImportCDN.class.getResourceAsStream("/gigaDashboard.properties"));

        // Get the property values
        TOKEN = prop.getProperty("MIXPANEL_GIGA_PROJECT_TOKEN");
        API_KEY = prop.getProperty("MIXPANEL_GIGA_API_KEY");
        bucketName = prop.getProperty("S3_BUCKET_NAME");
        AWS_USER = prop.getProperty("AWS_USER");
        AWS_PASS = prop.getProperty("AWS_PASS");
        DELETE_PROCESSED_LOGS = prop.getProperty("DELETE_PROCESSED_LOGS");

        //System.out.println("MIXPANEL PROJECT TOKEN = " + TOKEN);
        //System.out.println("MIXPANEL API KEY = " + API_KEY);
        System.out.println("DELETE_PROCESSED_LOGS = " + DELETE_PROCESSED_LOGS);
        System.out.println("S3_BUCKET_NAME = " + prop.getProperty("S3_BUCKET_NAME"));
        //System.out.println("AWS_USER = " + prop.getProperty("AWS_USER"));
        //System.out.println("AWS_PASS = " + prop.getProperty("AWS_PASS"));
        System.out.println("===================");
    } catch (IOException ex) {
        //ex.printStackTrace();
        System.err.println("Can't find properties file - " + propertiesFileName);
        System.err.println("Second argument must be properties file name");
        System.exit(1);
    }

    try {
        System.out.println("\n>>> Starting to import " + n + " events... \n");
        readAmazonLogs(n);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file: fr.tpt.atlanalyser.tests.TestOldAGTExpPost2Pre.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws IOException {
    // URL resource = Thread.currentThread().getContextClassLoader()
    //         .getResource("OldAGTExp");
    // System.out.println(resource.toString());
    // File f = new File(resource.getPath());
    // System.out.println(f.toString());
    // System.out.println(f.isDirectory());
    // System.exit(0);
    Options options = new Options();
    options.addOption(
            OptionBuilder.hasArg().withArgName("N").withDescription("Number of parallel jobs").create("j"));
    options.addOption(OptionBuilder.withDescription("Display help").create("h"));
    CommandLineParser parser = new BasicParser();
    int jobs = 1;
    try {
        CommandLine cmd = parser.parse(options, args);
        if (cmd.hasOption("h")) {
            new HelpFormatter().printHelp(TestOldAGTExpPost2Pre.class.getSimpleName(), options);
            System.exit(0);
        }
        if (cmd.hasOption("j")) {
            jobs = Integer.parseInt(cmd.getOptionValue("j"));
        }
    } catch (Exception e) {
        System.out.println("Incorrect command line arguments");
        new HelpFormatter().printHelp(TestOldAGTExpPost2Pre.class.getSimpleName(), options);
        System.exit(1);
    }
    new TestOldAGTExpPost2Pre(models().iterator().next()[0], jobs).testPost2Pre();
}
From source file: io.milton.grizzly.GrizzlyServer.java

public static void main(String[] args) throws IOException, InterruptedException {
    int port = 8080;
    if (args.length > 0) {
        port = Integer.parseInt(args[0]);
    }
    Integer sslPort = null;
    if (args.length > 1) {
        sslPort = Integer.parseInt(args[1]);
    }
    GrizzlyServer k = new GrizzlyServer();
    k.start();
    System.out.println("Press any key to stop the server...");
    System.in.read();
}
From source file: fr.tpt.atlanalyser.tests.TestForPaperPost2Pre.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws IOException {
    // URL resource = Thread.currentThread().getContextClassLoader()
    //         .getResource("OldAGTExp");
    // System.out.println(resource.toString());
    // File f = new File(resource.getPath());
    // System.out.println(f.toString());
    // System.out.println(f.isDirectory());
    // System.exit(0);
    Options options = new Options();
    options.addOption(
            OptionBuilder.hasArg().withArgName("N").withDescription("Number of parallel jobs").create("j"));
    options.addOption(OptionBuilder.withDescription("Display help").create("h"));
    CommandLineParser parser = new BasicParser();
    int jobs = 1;
    try {
        CommandLine cmd = parser.parse(options, args);
        if (cmd.hasOption("h")) {
            new HelpFormatter().printHelp(TestForPaperPost2Pre.class.getSimpleName(), options);
            System.exit(0);
        }
        if (cmd.hasOption("j")) {
            jobs = Integer.parseInt(cmd.getOptionValue("j"));
        }
    } catch (Exception e) {
        System.out.println("Incorrect command line arguments");
        new HelpFormatter().printHelp(TestForPaperPost2Pre.class.getSimpleName(), options);
        System.exit(1);
    }
    new TestForPaperPost2Pre(models().get(0)[0], jobs).testPost2Pre();
}
From source file: clustering.KMeans.java

public static void main(String[] args) throws UnknownHostException {
    if (args.length != 1) {
        System.out.println("Usage : KMeans <nrClusters>");
        System.exit(-1);
    }
    int kClusters = Integer.parseInt(args[0]);

    ArrayList<Artist> artists = new ArrayList<Artist>();
    DBHelper dbHelper = DBHelper.getInstance();
    DBCursor result = dbHelper.findArtistsWithFBandTW();
    while (result.hasNext()) {
        DBObject currentArtist = result.next();
        artists.add(Artist.fromDBObject(currentArtist));
    }
    //System.out.println(artists.size());

    KMeansPlusPlusClusterer<Artist> clusterer = new KMeansPlusPlusClusterer<Artist>(kClusters);
    List<CentroidCluster<Artist>> clusters = clusterer.cluster(artists);
    //System.out.println(clusters.size());

    dbHelper.emptyClusterCenters();
    for (CentroidCluster<Artist> cluster : clusters) {
        double[] center = cluster.getCenter().getPoint();
        ObjectId centerId = dbHelper.insertClusterCenter(center[0], center[1], center[2]);
        List<Artist> artC = cluster.getPoints();
        for (Artist artist : artC) {
            dbHelper.updateMatrixRowCluster(artist.getDBObject(), centerId);
            //System.out.print("(" + artist.fb_likes + "," + artist.twitter_followers + "," + artist.album_count + ") ");
        }
    }
}
From source file: BasicCrawlController.java

public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        System.out.println("Needed parameters: ");
        System.out.println("\t rootFolder (it will contain intermediate crawl data)");
        System.out.println("\t numberOfCrawlers (number of concurrent threads)");
        logger.info("\t numberOfCrawlers (number of concurrent threads)");
        return;
    }

    /*
     * crawlStorageFolder is a folder where intermediate crawl data is
     * stored.
     */
    String crawlStorageFolder = args[0];

    /*
     * numberOfCrawlers shows the number of concurrent threads that should
     * be initiated for crawling.
     */
    int numberOfCrawlers = Integer.parseInt(args[1]);

    CrawlConfig config = new CrawlConfig();
    config.setCrawlStorageFolder(crawlStorageFolder);

    /*
     * Be polite: make sure that we wait at least 500 milliseconds between
     * requests.
     */
    config.setPolitenessDelay(500);

    config.setUserAgentString("UCI WebCrawler 33762324-78325036-74647602");

    /*
     * You can set the maximum crawl depth here. The default value is -1
     * for unlimited depth.
     */
    config.setMaxDepthOfCrawling(-1);

    /*
     * You can set the maximum number of pages to crawl. The default value
     * is -1 for an unlimited number of pages.
     */
    // config.setMaxPagesToFetch(10);

    /*
     * Do you want crawler4j to also crawl binary data?
     * Example: the contents of PDFs, or the metadata of images, etc.
     */
    config.setIncludeBinaryContentInCrawling(false);

    /*
     * Do you need to set a proxy? If so, you can use:
     * config.setProxyHost("proxyserver.example.com");
     * config.setProxyPort(8080);
     *
     * If your proxy also needs authentication:
     * config.setProxyUsername(username);
     * config.setProxyPassword(password);
     */

    /*
     * This config parameter can be used to make your crawl resumable
     * (meaning that you can resume the crawl from a previously
     * interrupted/crashed crawl). Note: if you enable the resuming feature
     * and want to start a fresh crawl, you need to delete the contents of
     * rootFolder manually.
     */
    config.setResumableCrawling(true);

    /*
     * Instantiate the controller for this crawl.
     */
    PageFetcher pageFetcher = new PageFetcher(config);
    RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
    RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);
    CrawlController controller = new CrawlController(config, pageFetcher, robotstxtServer);

    /*
     * For each crawl, you need to add some seed URLs. These are the first
     * URLs that are fetched; the crawler then starts following links found
     * in these pages.
     */
    controller.addSeed("http://www.ics.uci.edu/");

    /*
     * Start the crawl. This is a blocking operation, meaning that your
     * code will reach the line after this only when crawling is finished.
     */
    controller.start(BasicCrawler.class, numberOfCrawlers);
}
From source file: com.linkedin.pinot.perf.MultiValueReaderWriterBenchmark.java

public static void main(String[] args) throws Exception {
    List<String> lines = IOUtils.readLines(new FileReader(new File(args[0])));
    int totalDocs = lines.size();
    int max = Integer.MIN_VALUE;
    int maxNumberOfMultiValues = Integer.MIN_VALUE;
    int totalNumValues = 0;
    int[][] data = new int[totalDocs][];
    for (int i = 0; i < lines.size(); i++) {
        String line = lines.get(i);
        String[] split = line.split(",");
        totalNumValues = totalNumValues + split.length;
        if (split.length > maxNumberOfMultiValues) {
            maxNumberOfMultiValues = split.length;
        }
        data[i] = new int[split.length];
        for (int j = 0; j < split.length; j++) {
            String token = split[j];
            int val = Integer.parseInt(token);
            data[i][j] = val;
            if (val > max) {
                max = val;
            }
        }
    }
    int maxBitsNeeded = (int) Math.ceil(Math.log(max) / Math.log(2));
    int size = 2048;
    int[] offsets = new int[size];
    int bitMapSize = 0;
    File outputFile = new File("output.mv.fwd");
    FixedBitSkipListSCMVWriter fixedBitSkipListSCMVWriter = new FixedBitSkipListSCMVWriter(outputFile,
            totalDocs, totalNumValues, maxBitsNeeded);
    for (int i = 0; i < totalDocs; i++) {
        fixedBitSkipListSCMVWriter.setIntArray(i, data[i]);
        if (i % size == size - 1) {
            MutableRoaringBitmap rr1 = MutableRoaringBitmap.bitmapOf(offsets);
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            DataOutputStream dos = new DataOutputStream(bos);
            rr1.serialize(dos);
            dos.close();
            //System.out.println("Chunk " + i / size + " bitmap size:" + bos.size());
            bitMapSize += bos.size();
        } else if (i == totalDocs - 1) {
            MutableRoaringBitmap rr1 = MutableRoaringBitmap.bitmapOf(Arrays.copyOf(offsets, i % size));
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            DataOutputStream dos = new DataOutputStream(bos);
            rr1.serialize(dos);
            dos.close();
            //System.out.println("Chunk " + i / size + " bitmap size:" + bos.size());
            bitMapSize += bos.size();
        }
    }
    fixedBitSkipListSCMVWriter.close();
    System.out.println("Output file size:" + outputFile.length());
    System.out.println("totalNumberOfDoc\t\t\t:" + totalDocs);
    System.out.println("totalNumberOfValues\t\t\t:" + totalNumValues);
    System.out.println("chunk size\t\t\t\t:" + size);
    System.out.println("Num chunks\t\t\t\t:" + totalDocs / size);
    int numChunks = totalDocs / size + 1;
    int totalBits = (totalNumValues * maxBitsNeeded);
    int dataSizeinBytes = (totalBits + 7) / 8;
    System.out.println("Raw data size with fixed bit encoding\t:" + dataSizeinBytes);
    System.out.println("\nPer encoding size");
    System.out.println();
    System.out.println("size (offset + length)\t\t\t:" + ((totalDocs * (4 + 4)) + dataSizeinBytes));
    System.out.println();
    System.out.println("size (offset only)\t\t\t:" + ((totalDocs * (4)) + dataSizeinBytes));
    System.out.println();
    System.out.println("bitMapSize\t\t\t\t:" + bitMapSize);
    System.out.println("size (with bitmap)\t\t\t:" + (bitMapSize + (numChunks * 4) + dataSizeinBytes));
    System.out.println();
    System.out.println("Custom Bitset\t\t\t\t:" + (totalNumValues + 7) / 8);
    System.out.println("size (with custom bitset)\t\t\t:"
            + (((totalNumValues + 7) / 8) + (numChunks * 4) + dataSizeinBytes));
}
From source file: fr.tpt.atlanalyser.tests.TestPointsToLinesPost2Pre.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws IOException {
    // URL resource = Thread.currentThread().getContextClassLoader()
    //         .getResource("OldAGTExp");
    // System.out.println(resource.toString());
    // File f = new File(resource.getPath());
    // System.out.println(f.toString());
    // System.out.println(f.isDirectory());
    // System.exit(0);
    Options options = new Options();
    options.addOption(
            OptionBuilder.hasArg().withArgName("N").withDescription("Number of parallel jobs").create("j"));
    options.addOption(OptionBuilder.withDescription("Display help").create("h"));
    CommandLineParser parser = new BasicParser();
    int jobs = 1;
    try {
        CommandLine cmd = parser.parse(options, args);
        if (cmd.hasOption("h")) {
            new HelpFormatter().printHelp(TestPointsToLinesPost2Pre.class.getSimpleName(), options);
            System.exit(0);
        }
        if (cmd.hasOption("j")) {
            jobs = Integer.parseInt(cmd.getOptionValue("j"));
        }
    } catch (Exception e) {
        System.out.println("Incorrect command line arguments");
        new HelpFormatter().printHelp(TestPointsToLinesPost2Pre.class.getSimpleName(), options);
        System.exit(1);
    }
    new TestPointsToLinesPost2Pre(models().get(1)[0], jobs).testPost2Pre();
}