Example usage for java.util.concurrent Executors newFixedThreadPool

Introduction

This page shows example usages of java.util.concurrent.Executors.newFixedThreadPool.

Prototype

public static ExecutorService newFixedThreadPool(int nThreads) 

Document

Creates a thread pool that reuses a fixed number of threads operating off a shared unbounded queue.
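
Before the collected examples, here is a minimal, self-contained sketch (not taken from any of the sources below) of the usual lifecycle: create the pool, submit tasks, shut it down, and wait for termination. The pool size of 4 and the 30-second timeout are arbitrary values chosen for illustration.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class FixedThreadPoolSketch {
    public static void main(String[] args) throws InterruptedException {
        // Four worker threads; additional tasks wait in the shared unbounded queue.
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 10; i++) {
            final int taskId = i;
            pool.execute(() -> System.out.println(
                    "Task " + taskId + " ran on " + Thread.currentThread().getName()));
        }
        // Stop accepting new tasks, then wait for the submitted ones to finish.
        pool.shutdown();
        if (!pool.awaitTermination(30, TimeUnit.SECONDS)) {
            pool.shutdownNow(); // interrupt anything still running after the timeout
        }
    }
}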

Usage

From source file:avantssar.aslanpp.testing.Tester.java

public static void main(String[] args) {

    Debug.initLog(LogLevel.INFO);

    TesterCommandLineOptions options = new TesterCommandLineOptions();
    try {
        options.getParser().parseArgument(args);
        options.ckeckAtEnd();
    } catch (CmdLineException ex) {
        reportException("Inconsistent options.", ex, System.err);
        options.showShortHelp(System.err);
        return;
    }

    if (options.isShowHelp()) {
        options.showLongHelp(System.out);
        return;
    }

    ASLanPPConnectorImpl translator = new ASLanPPConnectorImpl();

    if (options.isShowVersion()) {
        System.out.println(translator.getFullTitleLine());
        return;
    }

    ISpecificationBundleProvider sbp;
    File realInDir;
    if (options.isLibrary()) {
        if (options.getHornClausesLevel() != HornClausesLevel.ALL) {
            System.out.println("When checking the internal library we output all Horn clauses.");
            options.setHornClausesLevel(HornClausesLevel.ALL);
        }
        if (!options.isStripOutput()) {
            System.out.println(
                    "When checking the internal library, the ouput is stripped of comments and line information.");
            options.setStripOutput(true);
        }
        File modelsDir = new File(FilenameUtils.concat(options.getOut().getAbsolutePath(), "_models"));
        try {
            FileUtils.forceMkdir(modelsDir);
        } catch (IOException e1) {
            System.out.println("Failed to create models folder: " + e1.getMessage());
            Debug.logger.error("Failed to create models folder.", e1);
        }
        realInDir = modelsDir;
        sbp = new LibrarySpecificationsProvider(modelsDir.getAbsolutePath());
    } else {
        realInDir = options.getIn();
        sbp = new DiskSpecificationsProvider(options.getIn().getAbsolutePath());
    }

    System.setProperty(EntityManager.ASLAN_ENVVAR, sbp.getASLanPath());
    // try {
    // EntityManager.loadASLanPath();
    // }
    // catch (IOException e) {
    // System.out.println("Exception while reloading ASLANPATH: " +
    // e.getMessage());
    // Debug.logger.error("Exception while loading ASLANPATH.", e);
    // }

    try {
        bm = BackendsManager.instance();
        if (bm != null) {
            for (IBackendRunner br : bm.getBackendRunners()) {
                System.out.println(br.getFullDescription());
                if (br.getTimeout() > finalTimeout) {
                    finalTimeout = br.getTimeout();
                }
            }
        }
    } catch (IOException e) {
        System.out.println("Failed to load backends: " + e);
    }

    int threadsCount = 50;
    if (options.getThreads() > 0) {
        threadsCount = options.getThreads();
    }
    System.out.println("Will launch " + threadsCount
            + " threads in parallel (+ will show that a thread starts, - that a thread ends).");
    TranslationReport rep = new TranslationReport(
            bm != null ? bm.getBackendRunners() : new ArrayList<IBackendRunner>(), options.getOut());
    long startTime = System.currentTimeMillis();
    int specsCount = 0;
    pool = Executors.newFixedThreadPool(threadsCount);
    for (ITestTask task : sbp) {
        doTest(rep, task, realInDir, options.getOut(), translator, options, System.err);
        specsCount++;
    }
    pool.shutdown();
    String reportFile = FilenameUtils.concat(options.getOut().getAbsolutePath(), "index.html");
    try {
        while (!pool.awaitTermination(finalTimeout, TimeUnit.SECONDS)) {
        }
    } catch (InterruptedException e) {
        Debug.logger.error("Interrupted while waiting for pool termination.", e);
        System.out.println("Interrupted while waiting for pool termination: " + e.getMessage());
        System.out.println("The report may be incomplete.");
    }
    long endTime = System.currentTimeMillis();
    long duration = (endTime - startTime) / 1000;
    System.out.println();
    System.out.println(specsCount + " specifications checked in " + duration + " seconds.");
    rep.report(reportFile);
    System.out.println("You can find an overview report at '" + reportFile + "'.");
}

From source file:se.symsoft.codecamp.SmsTerminatorService.java

public static void main(String[] args) throws IOException, InterruptedException {

    ExecutorService executor = Executors.newFixedThreadPool(1);
    SmsTerminatorService service = new SmsTerminatorService();
    executor.execute(() -> {
        try {
            service.start();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }

    });

}

From source file:DIA_Umpire_To_Skyline.DIA_Umpire_To_Skyline.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    System.out.println(
            "=================================================================================================");
    System.out.println("DIA-Umpire_To_Skyline (version: " + UmpireInfo.GetInstance().Version + ")");
    if (args.length < 1) {
        System.out.println(
                "command format error, it should be like: java -jar -Xmx20G DIA_Umpire_To_Skyline.jar Path NoThreads");
        System.out.println("command : java -jar -Xmx20G DIA_Umpire_To_Skyline.jar Path [Option]\n");
        System.out.println("\nOptions");
        System.out.println("\t-t\tNo. of threads, Ex: -t4 (using four threads, default value)");
        System.out.println(
                "\t-cP\tPath of msconvert.exe for mzXML conversion, Ex: -cP (using four threads, default value)");
        return;
    }
    try {
        ConsoleLogger.SetConsoleLogger(Level.DEBUG);
        ConsoleLogger.SetFileLogger(Level.DEBUG,
                FilenameUtils.getFullPath(args[0]) + "diaumpire_to_skyline.log");
    } catch (Exception e) {
        System.out.println("Logger initialization failed");
    }

    Logger.getRootLogger().info("Path:" + args[0]);
    String msconvertpath = "C:/inetpub/tpp-bin/msconvert";

    String WorkFolder = args[0];
    int NoCPUs = 4;

    for (int i = 1; i < args.length; i++) {
        if (args[i].startsWith("-")) {
            if (args[i].startsWith("-cP")) {
                msconvertpath = args[i].substring(3);
                Logger.getRootLogger().info("MSConvert path: " + msconvertpath);
            }
            if (args[i].startsWith("-t")) {
                NoCPUs = Integer.parseInt(args[i].substring(2));
                Logger.getRootLogger().info("No. of threads: " + NoCPUs);
            }
        }
    }

    HashMap<String, File> AssignFiles = new HashMap<>();

    try {
        File folder = new File(WorkFolder);
        if (!folder.exists()) {
            Logger.getRootLogger().info("Path: " + folder.getAbsolutePath() + " cannot be found.");
        }
        for (final File fileEntry : folder.listFiles()) {
            if (fileEntry.isFile() && fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                AssignFiles.put(fileEntry.getAbsolutePath(), fileEntry);
            }
            if (fileEntry.isDirectory()) {
                for (final File fileEntry2 : fileEntry.listFiles()) {
                    if (fileEntry2.isFile() && fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                        AssignFiles.put(fileEntry2.getAbsolutePath(), fileEntry2);
                    }
                }
            }
        }

        Logger.getRootLogger().info("No. of files assigned :" + AssignFiles.size());
        for (File fileEntry : AssignFiles.values()) {
            Logger.getRootLogger().info(fileEntry.getAbsolutePath());
        }

        ExecutorService executorPool = Executors.newFixedThreadPool(3);

        for (File fileEntry : AssignFiles.values()) {
            String mzXMLFile = fileEntry.getAbsolutePath();
            FileThread thread = new FileThread(mzXMLFile, NoCPUs, msconvertpath);
            executorPool.execute(thread);
        }
        executorPool.shutdown();
        try {
            executorPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
        } catch (InterruptedException e) {
            Logger.getRootLogger().info("interrupted..");
        }
    } catch (Exception e) {
        Logger.getRootLogger().error(e.getMessage());
        throw e;
    }
    Logger.getRootLogger().info("Job done");
    Logger.getRootLogger().info(
            "=================================================================================================");

}

From source file:fr.tpt.s3.mcdag.bench.MainBench.java

public static void main(String[] args) throws IOException, InterruptedException {

    // Command line options
    Options options = new Options();

    Option input = new Option("i", "input", true, "MC-DAG XML models");
    input.setRequired(true);
    input.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(input);

    Option output = new Option("o", "output", true, "Folder where results have to be written.");
    output.setRequired(true);
    options.addOption(output);

    Option uUti = new Option("u", "utilization", true, "Utilization.");
    uUti.setRequired(true);
    options.addOption(uUti);

    Option output2 = new Option("ot", "output-total", true, "File where total results are being written");
    output2.setRequired(true);
    options.addOption(output2);

    Option oCores = new Option("c", "cores", true, "Cores given to the test");
    oCores.setRequired(true);
    options.addOption(oCores);

    Option oLvls = new Option("l", "levels", true, "Levels tested for the system");
    oLvls.setRequired(true);
    options.addOption(oLvls);

    Option jobs = new Option("j", "jobs", true, "Number of threads to be launched.");
    jobs.setRequired(false);
    options.addOption(jobs);

    Option debug = new Option("d", "debug", false, "Debug logs.");
    debug.setRequired(false);
    options.addOption(debug);

    /*
     * Parsing of the command line
     */
    CommandLineParser parser = new DefaultParser();
    HelpFormatter formatter = new HelpFormatter();
    CommandLine cmd;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        formatter.printHelp("Benchmarks MultiDAG", options);
        System.exit(1);
        return;
    }

    String inputFilePath[] = cmd.getOptionValues("input");
    String outputFilePath = cmd.getOptionValue("output");
    String outputFilePathTotal = cmd.getOptionValue("output-total");
    double utilization = Double.parseDouble(cmd.getOptionValue("utilization"));
    boolean boolDebug = cmd.hasOption("debug");
    int nbLvls = Integer.parseInt(cmd.getOptionValue("levels"));
    int nbJobs = 1;
    int nbFiles = inputFilePath.length;

    if (cmd.hasOption("jobs"))
        nbJobs = Integer.parseInt(cmd.getOptionValue("jobs"));

    int nbCores = Integer.parseInt(cmd.getOptionValue("cores"));

    /*
     *  While files need to be allocated
     *  run the tests in the pool of threads
     */

    // For dual-criticality systems we call a specific thread
    if (nbLvls == 2) {

        System.out.println(">>>>>>>>>>>>>>>>>>>>> NB levels " + nbLvls);

        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; FSched (%); FPreempts; FAct; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();

        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadDualCriticality bt2 = new BenchThreadDualCriticality(inputFilePath[i_files2], outFile,
                    nbCores, boolDebug);

            executor2.execute(bt2);
            i_files2++;
        }

        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        int fedTotal = 0;
        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int fedPreempts = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int fedActiv = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;
        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        @SuppressWarnings("resource")
        Scanner line = new Scanner(f);
        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;

                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            fedTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            fedPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            fedActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 11) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 12) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 13) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }

        // Write percentage
        double fedPerc = (double) fedTotal / nbFiles;
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;

        double fedPercPreempts = (double) fedPreempts / fedActiv;
        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;

        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + fedPerc + "; "
                + fedPreempts + "; " + fedActiv + "; " + fedPercPreempts + "; " + laxPerc + "; " + laxPreempts
                + "; " + laxActiv + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts + "; "
                + edfActiv + "; " + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; "
                + hybridActiv + "; " + hybridPercPreempts + "\n");
        wOutput.close();

    } else if (nbLvls > 2) {
        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();

        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadNLevels bt2 = new BenchThreadNLevels(inputFilePath[i_files2], outFile, nbCores,
                    boolDebug);

            executor2.execute(bt2);
            i_files2++;
        }

        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;
        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        @SuppressWarnings("resource")
        Scanner line = new Scanner(f);
        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;

                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }

        // Write percentage
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;

        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;

        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + laxPerc + "; "
                + laxPreempts + "; " + laxActiv + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts
                + "; " + edfActiv + "; " + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; "
                + hybridActiv + "; " + hybridPercPreempts + "\n");
        wOutput.close();

    } else {
        System.err.println("Wrong number of levels");
        System.exit(-1);
    }

    System.out.println("[BENCH Main] Done benchmarking U = " + utilization + " Levels " + nbLvls);
}

From source file:gobblin.restli.throttling.LocalStressTest.java

public static void main(String[] args) throws Exception {

    CommandLine cli = StressTestUtils.parseCommandLine(OPTIONS, args);

    int stressorThreads = Integer.parseInt(
            cli.getOptionValue(STRESSOR_THREADS.getOpt(), Integer.toString(DEFAULT_STRESSOR_THREADS)));
    int processorThreads = Integer.parseInt(
            cli.getOptionValue(PROCESSOR_THREADS.getOpt(), Integer.toString(DEFAULT_PROCESSOR_THREADS)));
    int artificialLatency = Integer.parseInt(
            cli.getOptionValue(ARTIFICIAL_LATENCY.getOpt(), Integer.toString(DEFAULT_ARTIFICIAL_LATENCY)));
    long targetQps = Integer.parseInt(cli.getOptionValue(QPS.getOpt(), Integer.toString(DEFAULT_TARGET_QPS)));

    Configuration configuration = new Configuration();
    StressTestUtils.populateConfigFromCli(configuration, cli);

    String resourceLimited = LocalStressTest.class.getSimpleName();

    Map<String, String> configMap = Maps.newHashMap();

    ThrottlingPolicyFactory factory = new ThrottlingPolicyFactory();
    SharedLimiterKey res1key = new SharedLimiterKey(resourceLimited);
    configMap.put(BrokerConfigurationKeyGenerator.generateKey(factory, res1key, null,
            ThrottlingPolicyFactory.POLICY_KEY), QPSPolicy.FACTORY_ALIAS);
    configMap.put(BrokerConfigurationKeyGenerator.generateKey(factory, res1key, null, QPSPolicy.QPS),
            Long.toString(targetQps));

    ThrottlingGuiceServletConfig guiceServletConfig = new ThrottlingGuiceServletConfig();
    guiceServletConfig.initialize(ConfigFactory.parseMap(configMap));
    LimiterServerResource limiterServer = guiceServletConfig.getInjector()
            .getInstance(LimiterServerResource.class);

    RateComputingLimiterContainer limiterContainer = new RateComputingLimiterContainer();

    Class<? extends Stressor> stressorClass = configuration.getClass(StressTestUtils.STRESSOR_CLASS,
            StressTestUtils.DEFAULT_STRESSOR_CLASS, Stressor.class);

    ExecutorService executorService = Executors.newFixedThreadPool(stressorThreads);

    SharedResourcesBroker broker = guiceServletConfig.getInjector().getInstance(
            Key.get(SharedResourcesBroker.class, Names.named(LimiterServerResource.BROKER_INJECT_NAME)));
    ThrottlingPolicy policy = (ThrottlingPolicy) broker.getSharedResource(new ThrottlingPolicyFactory(),
            new SharedLimiterKey(resourceLimited));
    ScheduledExecutorService reportingThread = Executors.newSingleThreadScheduledExecutor();
    reportingThread.scheduleAtFixedRate(new Reporter(limiterContainer, policy), 0, 15, TimeUnit.SECONDS);

    Queue<Future<?>> futures = new LinkedList<>();
    MockRequester requester = new MockRequester(limiterServer, artificialLatency, processorThreads);

    requester.start();
    for (int i = 0; i < stressorThreads; i++) {
        RestliServiceBasedLimiter restliLimiter = RestliServiceBasedLimiter.builder()
                .resourceLimited(resourceLimited).requestSender(requester).serviceIdentifier("stressor" + i)
                .build();

        Stressor stressor = stressorClass.newInstance();
        stressor.configure(configuration);
        futures.add(executorService
                .submit(new StressorRunner(limiterContainer.decorateLimiter(restliLimiter), stressor)));
    }
    int stressorFailures = 0;
    for (Future<?> future : futures) {
        try {
            future.get();
        } catch (ExecutionException ee) {
            stressorFailures++;
        }
    }
    requester.stop();

    executorService.shutdownNow();

    if (stressorFailures > 0) {
        log.error("There were " + stressorFailures + " failed stressor threads.");
    }
    System.exit(stressorFailures);
}

From source file:io.bfscan.clueweb12.BuildWarcTrecIdMapping.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("bz2 Wikipedia XML dump file")
            .create(INPUT_OPTION));
    options.addOption(
            OptionBuilder.withArgName("dir").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg()
            .withDescription("maximum number of documents to index").create(MAX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of indexing threads")
            .create(THREADS_OPTION));

    options.addOption(new Option(OPTIMIZE_OPTION, "merge indexes into a single segment"));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!cmdline.hasOption(INPUT_OPTION) || !cmdline.hasOption(INDEX_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(BuildWarcTrecIdMapping.class.getCanonicalName(), options);
        System.exit(-1);
    }

    String indexPath = cmdline.getOptionValue(INDEX_OPTION);
    int maxdocs = cmdline.hasOption(MAX_OPTION) ? Integer.parseInt(cmdline.getOptionValue(MAX_OPTION))
            : Integer.MAX_VALUE;
    int threads = cmdline.hasOption(THREADS_OPTION) ? Integer.parseInt(cmdline.getOptionValue(THREADS_OPTION))
            : DEFAULT_NUM_THREADS;

    long startTime = System.currentTimeMillis();

    String path = cmdline.getOptionValue(INPUT_OPTION);
    PrintStream out = new PrintStream(System.out, true, "UTF-8");

    Directory dir = FSDirectory.open(new File(indexPath));
    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_43, ANALYZER);
    config.setOpenMode(OpenMode.CREATE);

    IndexWriter writer = new IndexWriter(dir, config);
    LOG.info("Creating index at " + indexPath);
    LOG.info("Indexing with " + threads + " threads");

    FileInputStream fis = null;
    BufferedReader br = null;

    try {
        fis = new FileInputStream(new File(path));
        byte[] ignoreBytes = new byte[2];
        fis.read(ignoreBytes); // "B", "Z" bytes from commandline tools
        br = new BufferedReader(new InputStreamReader(new CBZip2InputStream(fis), "UTF8"));

        ExecutorService executor = Executors.newFixedThreadPool(threads);
        int cnt = 0;
        String s;
        while ((s = br.readLine()) != null) {
            Runnable worker = new AddDocumentRunnable(writer, s);
            executor.execute(worker);

            cnt++;
            if (cnt % 1000000 == 0) {
                LOG.info(cnt + " articles added");
            }
            if (cnt >= maxdocs) {
                break;
            }
        }

        executor.shutdown();
        // Wait until all threads have finished
        while (!executor.isTerminated()) {
        }

        LOG.info("Total of " + cnt + " articles indexed.");

        if (cmdline.hasOption(OPTIMIZE_OPTION)) {
            LOG.info("Merging segments...");
            writer.forceMerge(1);
            LOG.info("Done!");
        }

        LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        writer.close();
        dir.close();
        out.close();
        br.close();
        fis.close();
    }
}

From source file:cc.wikitools.lucene.IndexWikipediaDump.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("bz2 Wikipedia XML dump file")
            .create(INPUT_OPTION));
    options.addOption(
            OptionBuilder.withArgName("dir").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg()
            .withDescription("maximum number of documents to index").create(MAX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of indexing threads")
            .create(THREADS_OPTION));

    options.addOption(new Option(OPTIMIZE_OPTION, "merge indexes into a single segment"));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!cmdline.hasOption(INPUT_OPTION) || !cmdline.hasOption(INDEX_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(IndexWikipediaDump.class.getCanonicalName(), options);
        System.exit(-1);
    }

    String indexPath = cmdline.getOptionValue(INDEX_OPTION);
    int maxdocs = cmdline.hasOption(MAX_OPTION) ? Integer.parseInt(cmdline.getOptionValue(MAX_OPTION))
            : Integer.MAX_VALUE;
    int threads = cmdline.hasOption(THREADS_OPTION) ? Integer.parseInt(cmdline.getOptionValue(THREADS_OPTION))
            : DEFAULT_NUM_THREADS;

    long startTime = System.currentTimeMillis();

    String path = cmdline.getOptionValue(INPUT_OPTION);
    PrintStream out = new PrintStream(System.out, true, "UTF-8");
    WikiClean cleaner = new WikiCleanBuilder().withTitle(true).build();

    Directory dir = FSDirectory.open(new File(indexPath));
    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_43, ANALYZER);
    config.setOpenMode(OpenMode.CREATE);

    IndexWriter writer = new IndexWriter(dir, config);
    LOG.info("Creating index at " + indexPath);
    LOG.info("Indexing with " + threads + " threads");

    try {
        WikipediaBz2DumpInputStream stream = new WikipediaBz2DumpInputStream(path);

        ExecutorService executor = Executors.newFixedThreadPool(threads);
        int cnt = 0;
        String page;
        while ((page = stream.readNext()) != null) {
            String title = cleaner.getTitle(page);

            // These are heuristic specifically for filtering out non-articles in enwiki-20120104.
            if (title.startsWith("Wikipedia:") || title.startsWith("Portal:") || title.startsWith("File:")) {
                continue;
            }

            if (page.contains("#REDIRECT") || page.contains("#redirect") || page.contains("#Redirect")) {
                continue;
            }

            Runnable worker = new AddDocumentRunnable(writer, cleaner, page);
            executor.execute(worker);

            cnt++;
            if (cnt % 10000 == 0) {
                LOG.info(cnt + " articles added");
            }
            if (cnt >= maxdocs) {
                break;
            }
        }

        executor.shutdown();
        // Wait until all threads have finished
        while (!executor.isTerminated()) {
        }

        LOG.info("Total of " + cnt + " articles indexed.");

        if (cmdline.hasOption(OPTIMIZE_OPTION)) {
            LOG.info("Merging segments...");
            writer.forceMerge(1);
            LOG.info("Done!");
        }

        LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        writer.close();
        dir.close();
        out.close();
    }
}

From source file:com.wouterbreukink.onedrive.Main.java

public static void main(String[] args) throws Exception {

    // Parse command line args
    try {
        CommandLineOpts.initialise(args);
    } catch (ParseException ex) {
        log.error("Unable to parse command line arguments - " + ex.getMessage());
        CommandLineOpts.printHelp();
        return;
    }

    if (getCommandLineOpts().help()) {
        CommandLineOpts.printHelp();
        return;
    }

    if (getCommandLineOpts().version()) {
        String version = getCommandLineOpts().getClass().getPackage().getImplementationVersion();
        log.info("onedrive-java-client version " + (version != null ? version : "DEVELOPMENT"));
        return;
    }

    // Initialise a log file (if set)
    if (getCommandLineOpts().getLogFile() != null) {
        String logFileName = LogUtils.addFileLogger(getCommandLineOpts().getLogFile());
        log.info(String.format("Writing log output to %s", logFileName));
    }

    if (getCommandLineOpts().isAuthorise()) {
        AuthorisationProvider.FACTORY.printAuthInstructions();
        return;
    }

    if (getCommandLineOpts().getLocalPath() == null || getCommandLineOpts().getRemotePath() == null
            || getCommandLineOpts().getDirection() == null) {
        log.error("Must specify --local, --remote and --direction");
        CommandLineOpts.printHelp();
        return;
    }

    // Initialise the OneDrive authorisation
    AuthorisationProvider authoriser;
    try {
        authoriser = AuthorisationProvider.FACTORY.create(getCommandLineOpts().getKeyFile());
        authoriser.getAccessToken();
    } catch (OneDriveAPIException ex) {
        log.error("Unable to authorise client: " + ex.getMessage());
        log.error("Re-run the application with --authorise");
        return;
    }

    // Initialise the providers
    OneDriveProvider api;
    FileSystemProvider fileSystem;
    if (getCommandLineOpts().isDryRun()) {
        log.warn("This is a dry run - no changes will be made");
        api = OneDriveProvider.FACTORY.readOnlyApi(authoriser);
        fileSystem = FileSystemProvider.FACTORY.readOnlyProvider();
    } else {
        api = OneDriveProvider.FACTORY.readWriteApi(authoriser);
        fileSystem = FileSystemProvider.FACTORY.readWriteProvider();
    }

    // Report on progress
    TaskReporter reporter = new TaskReporter();

    // Get the primary drive
    Drive primary = api.getDefaultDrive();

    // Report quotas
    log.info(String.format("Using drive with id '%s' (%s). Usage %s of %s (%.2f%%)", primary.getId(),
            primary.getDriveType(), readableFileSize(primary.getQuota().getUsed()),
            readableFileSize(primary.getQuota().getTotal()),
            ((double) primary.getQuota().getUsed() / primary.getQuota().getTotal()) * 100));

    // Check the given root folder
    OneDriveItem rootFolder = api.getPath(getCommandLineOpts().getRemotePath());

    if (!rootFolder.isDirectory()) {
        log.error(String.format("Specified root '%s' is not a folder", rootFolder.getFullName()));
        return;
    }

    File localRoot = new File(getCommandLineOpts().getLocalPath());

    log.info(String.format("Local folder '%s'", localRoot.getAbsolutePath()));
    log.info(String.format("Remote folder '<onedrive>%s'", rootFolder.getFullName()));

    // Start synchronisation operation at the root
    final TaskQueue queue = new TaskQueue();
    queue.add(new CheckTask(new Task.TaskOptions(queue, api, fileSystem, reporter), rootFolder, localRoot));

    // Get a bunch of threads going
    ExecutorService executorService = Executors.newFixedThreadPool(getCommandLineOpts().getThreads());

    for (int i = 0; i < getCommandLineOpts().getThreads(); i++) {
        executorService.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    //noinspection InfiniteLoopStatement
                    while (true) {
                        Task taskToRun = null;
                        try {
                            taskToRun = queue.take();
                            taskToRun.run();
                        } finally {
                            if (taskToRun != null) {
                                queue.done(taskToRun);
                            }
                        }
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        });
    }

    queue.waitForCompletion();
    log.info("Synchronisation complete");
    reporter.report();
}

From source file:acmi.l2.clientmod.l2_version_switcher.Main.java

public static void main(String[] args) {
    if (args.length != 3 && args.length != 4) {
        System.out.println("USAGE: l2_version_switcher.jar host game version <--splash> <filter>");
        System.out.println("EXAMPLE: l2_version_switcher.jar " + L2.NCWEST_HOST + " " + L2.NCWEST_GAME
                + " 1 \"system\\*\"");
        System.out.println(
                "         l2_version_switcher.jar " + L2.PLAYNC_TEST_HOST + " " + L2.PLAYNC_TEST_GAME + " 48");
        System.exit(0);
    }

    List<String> argsList = new ArrayList<>(Arrays.asList(args));
    String host = argsList.get(0);
    String game = argsList.get(1);
    int version = Integer.parseInt(argsList.get(2));
    Helper helper = new Helper(host, game, version);
    boolean available = false;

    try {
        available = helper.isAvailable();
    } catch (IOException e) {
        System.err.print(e.getClass().getSimpleName());
        if (e.getMessage() != null) {
            System.err.print(": " + e.getMessage());
        }

        System.err.println();
    }

    System.out.println(String.format("Version %d available: %b", version, available));
    if (!available) {
        System.exit(0);
    }

    List<FileInfo> fileInfoList = null;
    try {
        fileInfoList = helper.getFileInfoList();
    } catch (IOException e) {
        System.err.println("Couldn\'t get file info map");
        System.exit(1);
    }

    boolean splash = argsList.remove("--splash");
    if (splash) {
        Optional<FileInfo> splashObj = fileInfoList.stream()
                .filter(fi -> fi.getPath().contains("sp_32b_01.bmp")).findAny();
        if (splashObj.isPresent()) {
            try (InputStream is = new FilterInputStream(
                    Util.getUnzipStream(helper.getDownloadStream(splashObj.get().getPath()))) {
                @Override
                public int read() throws IOException {
                    int b = super.read();
                    if (b >= 0)
                        b ^= 0x36;
                    return b;
                }

                @Override
                public int read(byte[] b, int off, int len) throws IOException {
                    int r = super.read(b, off, len);
                    if (r >= 0) {
                        for (int i = 0; i < r; i++)
                            b[off + i] ^= 0x36;
                    }
                    return r;
                }
            }) {
                new DataInputStream(is).readFully(new byte[28]);
                BufferedImage bi = ImageIO.read(is);

                JFrame frame = new JFrame("Lineage 2 [" + version + "] " + splashObj.get().getPath());
                frame.setContentPane(new JComponent() {
                    {
                        setPreferredSize(new Dimension(bi.getWidth(), bi.getHeight()));
                    }

                    @Override
                    protected void paintComponent(Graphics g) {
                        g.drawImage(bi, 0, 0, null);
                    }
                });
                frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
                frame.pack();
                frame.setLocationRelativeTo(null);
                frame.setVisible(true);
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else {
            System.out.println("Splash not found");
        }
        return;
    }

    String filter = argsList.size() > 3 ? separatorsToSystem(argsList.get(3)) : null;

    File l2Folder = new File(System.getProperty("user.dir"));
    List<FileInfo> toUpdate = fileInfoList.parallelStream().filter(fi -> {
        String filePath = separatorsToSystem(fi.getPath());

        if (filter != null && !wildcardMatch(filePath, filter, IOCase.INSENSITIVE))
            return false;
        File file = new File(l2Folder, filePath);

        try {
            if (file.exists() && file.length() == fi.getSize() && Util.hashEquals(file, fi.getHash())) {
                System.out.println(filePath + ": OK");
                return false;
            }
        } catch (IOException e) {
            System.out.println(filePath + ": couldn't check hash: " + e);
            return true;
        }

        System.out.println(filePath + ": need update");
        return true;
    }).collect(Collectors.toList());

    List<String> errors = Collections.synchronizedList(new ArrayList<>());
    ExecutorService executor = Executors.newFixedThreadPool(16);
    CompletableFuture[] tasks = toUpdate.stream().map(fi -> CompletableFuture.runAsync(() -> {
        String filePath = separatorsToSystem(fi.getPath());
        File file = new File(l2Folder, filePath);

        File folder = file.getParentFile();
        if (!folder.exists()) {
            if (!folder.mkdirs()) {
                errors.add(filePath + ": couldn't create parent dir");
                return;
            }
        }

        try (InputStream input = Util
                .getUnzipStream(new BufferedInputStream(helper.getDownloadStream(fi.getPath())));
                OutputStream output = new BufferedOutputStream(new FileOutputStream(file))) {
            byte[] buffer = new byte[Math.min(fi.getSize(), 1 << 24)];
            int pos = 0;
            int r;
            while ((r = input.read(buffer, pos, buffer.length - pos)) >= 0) {
                pos += r;
                if (pos == buffer.length) {
                    output.write(buffer, 0, pos);
                    pos = 0;
                }
            }
            if (pos != 0) {
                output.write(buffer, 0, pos);
            }
            System.out.println(filePath + ": OK");
        } catch (IOException e) {
            String msg = filePath + ": FAIL: " + e.getClass().getSimpleName();
            if (e.getMessage() != null) {
                msg += ": " + e.getMessage();
            }
            errors.add(msg);
        }
    }, executor)).toArray(CompletableFuture[]::new);
    CompletableFuture.allOf(tasks).thenRun(() -> {
        for (String err : errors)
            System.err.println(err);
        executor.shutdown();
    });
}

From source file:general.Main.java

/**
 * Selects the files to be processed and specifies the files to write to.
 *
 * @param args Arguments to specify runtime behavior.
 */
public static void main(String[] args) throws InvocationTargetException, NoSuchMethodException,
        InstantiationException, IllegalAccessException {
    Options options = new Options();
    options.addOption("l", "logging", false, "enables file logging");
    options.addOption("j", "jena", false, "uses the Jena SPARQL Parser");
    options.addOption("o", "openrdf", false, "uses the OpenRDF SPARQL Parser");
    options.addOption("f", "file", true, "defines the input file prefix");
    options.addOption("h", "help", false, "displays this help");
    options.addOption("t", "tsv", false, "reads from .tsv-files");
    // options.addOption("p", "parquet", false, "read from .parquet-files");
    options.addOption("n", "numberOfThreads", true, "number of used threads, default 1");
    options.addOption("b", "withBots", false, "enables metric calculation for bot queries+");
    options.addOption("p", "readPreprocessed", false, "enables reading of preprocessed files");

    //some parameters which can be changed through parameters
    //QueryHandler queryHandler = new OpenRDFQueryHandler();
    String inputFilePrefix;
    String inputFileSuffix = ".tsv";
    String queryParserName = "OpenRDF";
    Class inputHandlerClass = null;
    Class queryHandlerClass = null;
    int numberOfThreads = 1;

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
        if (cmd.hasOption("help")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("help", options);
            return;
        }
        if (cmd.hasOption("openrdf")) {
            queryHandlerClass = OpenRDFQueryHandler.class;
        }
        if (cmd.hasOption("tsv")) {
            inputFileSuffix = ".tsv";
            inputHandlerClass = InputHandlerTSV.class;
        }
        if (cmd.hasOption("parquet")) {
            inputFileSuffix = ".parquet";
            Logger.getLogger("org").setLevel(Level.WARN);
            Logger.getLogger("akka").setLevel(Level.WARN);
            SparkConf conf = new SparkConf().setAppName("SPARQLQueryAnalyzer").setMaster("local");
            JavaSparkContext sc = new JavaSparkContext(conf);
            inputHandlerClass = InputHandlerParquet.class;
        }
        if (inputHandlerClass == null) {
            System.out.println("Please specify which parser to use, either -t for TSV or -p for parquet.");
        }
        if (cmd.hasOption("file")) {
            inputFilePrefix = cmd.getOptionValue("file").trim();
        } else {
            System.out.println(
                    "Please specify at least the file which we should work on using the option '--file PREFIX' or 'f PREFIX'");
            return;
        }
        if (cmd.hasOption("logging")) {
            LoggingHandler.initFileLog(queryParserName, inputFilePrefix);
        }
        if (cmd.hasOption("numberOfThreads")) {
            numberOfThreads = Integer.parseInt(cmd.getOptionValue("numberOfThreads"));
        }
        if (cmd.hasOption("withBots")) {
            withBots = true;
        }
        if (cmd.hasOption("readPreprocessed")) {
            readPreprocessed = true;
        }
    } catch (UnrecognizedOptionException e) {
        System.out.println("Unrecognized commandline option: " + e.getOption());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("help", options);
        return;
    } catch (ParseException e) {
        System.out.println(
                "There was an error while parsing your command line input. Did you rechecked your syntax before running?");
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("help", options);
        return;
    }

    LoggingHandler.initConsoleLog();

    loadPreBuildQueryTypes();

    long startTime = System.nanoTime();

    ExecutorService executor = Executors.newFixedThreadPool(numberOfThreads);

    for (int day = 1; day <= 31; day++) {
        String inputFile = inputFilePrefix + String.format("%02d", day) + inputFileSuffix;
        Runnable parseOneMonthWorker = new ParseOneMonthWorker(inputFile, inputFilePrefix, inputHandlerClass,
                queryParserName, queryHandlerClass, day);
        executor.execute(parseOneMonthWorker);
    }
    executor.shutdown();

    while (!executor.isTerminated()) {
        //wait until all workers are finished
    }

    writeQueryTypes(inputFilePrefix);

    long stopTime = System.nanoTime();
    long millis = TimeUnit.MILLISECONDS.convert(stopTime - startTime, TimeUnit.NANOSECONDS);
    Date date = new Date(millis);
    System.out.println("Finished executing with all threads: "
            + new SimpleDateFormat("mm-dd HH:mm:ss:SSSSSSS").format(date));
}