Example usage for java.util.concurrent ExecutorService shutdown

Introduction

On this page you can find example usages of java.util.concurrent ExecutorService shutdown.

Prototype

void shutdown();

Document

Initiates an orderly shutdown in which previously submitted tasks are executed, but no new tasks will be accepted.
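
Note that shutdown() itself does not wait for tasks to finish: it only stops new submissions. In practice it is usually paired with awaitTermination, optionally falling back to shutdownNow for tasks that do not finish in time. Below is a minimal, self-contained sketch of that standard pattern; the class name, pool size, task, and timeouts are illustrative assumptions, not values taken from the examples on this page.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ShutdownExample {
    public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(4); // pool size is an illustrative assumption

        pool.execute(() -> System.out.println("task on " + Thread.currentThread().getName()));

        pool.shutdown(); // no new tasks accepted; previously submitted tasks still run
        try {
            // Wait a bounded time for outstanding tasks to complete
            if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
                pool.shutdownNow(); // interrupt tasks that are still running
                if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
                    System.err.println("Pool did not terminate");
                }
            }
        } catch (InterruptedException ie) {
            pool.shutdownNow();
            Thread.currentThread().interrupt(); // preserve the interrupt status
        }
    }
}

The examples that follow show how real projects apply variations of this pattern.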

Usage

From source file:io.bfscan.clueweb12.BuildWarcTrecIdMapping.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("bz2 Wikipedia XML dump file")
            .create(INPUT_OPTION));
    options.addOption(
            OptionBuilder.withArgName("dir").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg()
            .withDescription("maximum number of documents to index").create(MAX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of indexing threads")
            .create(THREADS_OPTION));

    options.addOption(new Option(OPTIMIZE_OPTION, "merge indexes into a single segment"));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!cmdline.hasOption(INPUT_OPTION) || !cmdline.hasOption(INDEX_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(BuildWarcTrecIdMapping.class.getCanonicalName(), options);
        System.exit(-1);
    }

    String indexPath = cmdline.getOptionValue(INDEX_OPTION);
    int maxdocs = cmdline.hasOption(MAX_OPTION) ? Integer.parseInt(cmdline.getOptionValue(MAX_OPTION))
            : Integer.MAX_VALUE;
    int threads = cmdline.hasOption(THREADS_OPTION) ? Integer.parseInt(cmdline.getOptionValue(THREADS_OPTION))
            : DEFAULT_NUM_THREADS;

    long startTime = System.currentTimeMillis();

    String path = cmdline.getOptionValue(INPUT_OPTION);
    PrintStream out = new PrintStream(System.out, true, "UTF-8");

    Directory dir = FSDirectory.open(new File(indexPath));
    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_43, ANALYZER);
    config.setOpenMode(OpenMode.CREATE);

    IndexWriter writer = new IndexWriter(dir, config);
    LOG.info("Creating index at " + indexPath);
    LOG.info("Indexing with " + threads + " threads");

    FileInputStream fis = null;
    BufferedReader br = null;

    try {
        fis = new FileInputStream(new File(path));
        byte[] ignoreBytes = new byte[2];
        fis.read(ignoreBytes); // "B", "Z" bytes from commandline tools
        br = new BufferedReader(new InputStreamReader(new CBZip2InputStream(fis), "UTF8"));

        ExecutorService executor = Executors.newFixedThreadPool(threads);
        int cnt = 0;
        String s;
        while ((s = br.readLine()) != null) {
            Runnable worker = new AddDocumentRunnable(writer, s);
            executor.execute(worker);

            cnt++;
            if (cnt % 1000000 == 0) {
                LOG.info(cnt + " articles added");
            }
            if (cnt >= maxdocs) {
                break;
            }
        }

        executor.shutdown();
        // Block until all submitted tasks have completed
        executor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        LOG.info("Total of " + cnt + " articles indexed.");

        if (cmdline.hasOption(OPTIMIZE_OPTION)) {
            LOG.info("Merging segments...");
            writer.forceMerge(1);
            LOG.info("Done!");
        }

        LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        writer.close();
        dir.close();
        out.close();
        // br and fis may be null if opening the input failed before they were assigned
        if (br != null) {
            br.close();
        }
        if (fis != null) {
            fis.close();
        }
    }
}

From source file:general.Main.java

/**
 * Selects the files to be processed and specifies the files to write to.
 *
 * @param args Arguments to specify runtime behavior.
 */
public static void main(String[] args) throws InvocationTargetException, NoSuchMethodException,
        InstantiationException, IllegalAccessException, InterruptedException {
    Options options = new Options();
    options.addOption("l", "logging", false, "enables file logging");
    options.addOption("j", "jena", false, "uses the Jena SPARQL Parser");
    options.addOption("o", "openrdf", false, "uses the OpenRDF SPARQL Parser");
    options.addOption("f", "file", true, "defines the input file prefix");
    options.addOption("h", "help", false, "displays this help");
    options.addOption("t", "tsv", false, "reads from .tsv-files");
    // options.addOption("p", "parquet", false, "read from .parquet-files");
    options.addOption("n", "numberOfThreads", true, "number of used threads, default 1");
    options.addOption("b", "withBots", false, "enables metric calculation for bot queries+");
    options.addOption("p", "readPreprocessed", false, "enables reading of preprocessed files");

    //some parameters which can be changed through parameters
    //QueryHandler queryHandler = new OpenRDFQueryHandler();
    String inputFilePrefix;
    String inputFileSuffix = ".tsv";
    String queryParserName = "OpenRDF";
    Class inputHandlerClass = null;
    Class queryHandlerClass = null;
    int numberOfThreads = 1;

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
        if (cmd.hasOption("help")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("help", options);
            return;
        }
        if (cmd.hasOption("openrdf")) {
            queryHandlerClass = OpenRDFQueryHandler.class;
        }
        if (cmd.hasOption("tsv")) {
            inputFileSuffix = ".tsv";
            inputHandlerClass = InputHandlerTSV.class;
        }
        if (cmd.hasOption("parquet")) {
            inputFileSuffix = ".parquet";
            Logger.getLogger("org").setLevel(Level.WARN);
            Logger.getLogger("akka").setLevel(Level.WARN);
            SparkConf conf = new SparkConf().setAppName("SPARQLQueryAnalyzer").setMaster("local");
            JavaSparkContext sc = new JavaSparkContext(conf);
            inputHandlerClass = InputHandlerParquet.class;
        }
        if (inputHandlerClass == null) {
            System.out.println("Please specify which parser to use, either -t for TSV or -p for parquet.");
            return;
        }
        if (cmd.hasOption("file")) {
            inputFilePrefix = cmd.getOptionValue("file").trim();
        } else {
            System.out.println(
                    "Please specify at least the file which we should work on using the option '--file PREFIX' or 'f PREFIX'");
            return;
        }
        if (cmd.hasOption("logging")) {
            LoggingHandler.initFileLog(queryParserName, inputFilePrefix);
        }
        if (cmd.hasOption("numberOfThreads")) {
            numberOfThreads = Integer.parseInt(cmd.getOptionValue("numberOfThreads"));
        }
        if (cmd.hasOption("withBots")) {
            withBots = true;
        }
        if (cmd.hasOption("readPreprocessed")) {
            readPreprocessed = true;
        }
    } catch (UnrecognizedOptionException e) {
        System.out.println("Unrecognized commandline option: " + e.getOption());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("help", options);
        return;
    } catch (ParseException e) {
        System.out.println(
                "There was an error while parsing your command line input. Did you check your syntax before running?");
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("help", options);
        return;
    }

    LoggingHandler.initConsoleLog();

    loadPreBuildQueryTypes();

    long startTime = System.nanoTime();

    ExecutorService executor = Executors.newFixedThreadPool(numberOfThreads);

    for (int day = 1; day <= 31; day++) {
        String inputFile = inputFilePrefix + String.format("%02d", day) + inputFileSuffix;
        Runnable parseOneMonthWorker = new ParseOneMonthWorker(inputFile, inputFilePrefix, inputHandlerClass,
                queryParserName, queryHandlerClass, day);
        executor.execute(parseOneMonthWorker);
    }
    executor.shutdown();

    // Block until all workers have finished
    executor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

    writeQueryTypes(inputFilePrefix);

    long stopTime = System.nanoTime();
    long elapsedMillis = TimeUnit.NANOSECONDS.toMillis(stopTime - startTime);
    System.out.println("Finished executing with all threads in " + elapsedMillis + " ms");
}

From source file:fr.tpt.s3.mcdag.bench.MainBench.java

public static void main(String[] args) throws IOException, InterruptedException {

    // Command line options
    Options options = new Options();

    Option input = new Option("i", "input", true, "MC-DAG XML models");
    input.setRequired(true);
    input.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(input);

    Option output = new Option("o", "output", true, "Folder where results have to be written.");
    output.setRequired(true);
    options.addOption(output);

    Option uUti = new Option("u", "utilization", true, "Utilization.");
    uUti.setRequired(true);
    options.addOption(uUti);

    Option output2 = new Option("ot", "output-total", true, "File where total results are being written");
    output2.setRequired(true);
    options.addOption(output2);

    Option oCores = new Option("c", "cores", true, "Cores given to the test");
    oCores.setRequired(true);
    options.addOption(oCores);

    Option oLvls = new Option("l", "levels", true, "Levels tested for the system");
    oLvls.setRequired(true);
    options.addOption(oLvls);

    Option jobs = new Option("j", "jobs", true, "Number of threads to be launched.");
    jobs.setRequired(false);
    options.addOption(jobs);

    Option debug = new Option("d", "debug", false, "Debug logs.");
    debug.setRequired(false);
    options.addOption(debug);

    /*
     * Parsing of the command line
     */
    CommandLineParser parser = new DefaultParser();
    HelpFormatter formatter = new HelpFormatter();
    CommandLine cmd;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        formatter.printHelp("Benchmarks MultiDAG", options);
        System.exit(1);
        return;
    }

    String[] inputFilePath = cmd.getOptionValues("input");
    String outputFilePath = cmd.getOptionValue("output");
    String outputFilePathTotal = cmd.getOptionValue("output-total");
    double utilization = Double.parseDouble(cmd.getOptionValue("utilization"));
    boolean boolDebug = cmd.hasOption("debug");
    int nbLvls = Integer.parseInt(cmd.getOptionValue("levels"));
    int nbJobs = 1;
    int nbFiles = inputFilePath.length;

    if (cmd.hasOption("jobs"))
        nbJobs = Integer.parseInt(cmd.getOptionValue("jobs"));

    int nbCores = Integer.parseInt(cmd.getOptionValue("cores"));

    /*
     *  While files need to be allocated
     *  run the tests in the pool of threads
     */

    // For dual-criticality systems we call a specific thread
    if (nbLvls == 2) {

        System.out.println(">>>>>>>>>>>>>>>>>>>>> NB levels " + nbLvls);

        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; FSched (%); FPreempts; FAct; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();

        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadDualCriticality bt2 = new BenchThreadDualCriticality(inputFilePath[i_files2], outFile,
                    nbCores, boolDebug);

            executor2.execute(bt2);
            i_files2++;
        }

        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        int fedTotal = 0;
        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int fedPreempts = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int fedActiv = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;
        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        Scanner line = new Scanner(f);
        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;

                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            fedTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            fedPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            fedActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 11) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 12) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 13) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }
        line.close();

        // Write percentage
        double fedPerc = (double) fedTotal / nbFiles;
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;

        double fedPercPreempts = (double) fedPreempts / fedActiv;
        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;

        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + fedPerc + "; "
                + fedPreempts + "; " + fedActiv + "; " + fedPercPreempts + "; " + laxPerc + "; " + laxPreempts
                + "; " + laxActiv + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts + "; "
                + edfActiv + "; " + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; "
                + hybridActiv + "; " + hybridPercPreempts + "\n");
        wOutput.close();

    } else if (nbLvls > 2) {
        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();

        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadNLevels bt2 = new BenchThreadNLevels(inputFilePath[i_files2], outFile, nbCores,
                    boolDebug);

            executor2.execute(bt2);
            i_files2++;
        }

        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;
        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        Scanner line = new Scanner(f);
        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;

                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }
        line.close();

        // Write percentage
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;

        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;

        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + laxPerc + "; "
                + laxPreempts + "; " + laxActiv + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts
                + "; " + edfActiv + "; " + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; "
                + hybridActiv + "; " + hybridPercPreempts + "\n");
        wOutput.close();

    } else {
        System.err.println("Wrong number of levels");
        System.exit(-1);
    }

    System.out.println("[BENCH Main] Done benchmarking U = " + utilization + " Levels " + nbLvls);
}

From source file:kymr.github.io.training.scheduler.SchedulerEx1.java

public static void main(String[] args) {
    Publisher<Integer> pub = sub -> {
        sub.onSubscribe(new Subscription() {
            @Override
            public void request(long n) {
                log.debug("request()");
                sub.onNext(1);
                sub.onNext(2);
                sub.onNext(3);
                sub.onNext(4);
                sub.onNext(5);
                sub.onComplete();
            }

            @Override
            public void cancel() {

            }
        });
    };

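    // subOnPub emulates subscribeOn: the upstream subscription is driven from a dedicated
    // single-thread executor ("subOn-"), which is shut down once the stream terminates.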
    Publisher<Integer> subOnPub = sub -> {
        ExecutorService es = Executors.newSingleThreadExecutor(new CustomizableThreadFactory() {
            @Override
            public String getThreadNamePrefix() {
                return "subOn-";
            }
        });

        pub.subscribe(new Subscriber<Integer>() {
            @Override
            public void onSubscribe(Subscription s) {
                es.execute(() -> sub.onSubscribe(s));
            }

            @Override
            public void onNext(Integer integer) {
                sub.onNext(integer);
            }

            @Override
            public void onError(Throwable t) {
                sub.onError(t);
                es.shutdown();
            }

            @Override
            public void onComplete() {
                sub.onComplete();
                es.shutdown();
            }
        });
    };

    /*
    Publisher<Integer> subOnPub = sub -> {
        ExecutorService es = Executors.newSingleThreadExecutor(new CustomizableThreadFactory() {
            @Override
            public String getThreadNamePrefix() {
                return "subOn-";
            }
        });

        es.execute(() -> pub.subscribe(sub));
    };
    */
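    // pubOnPub emulates publishOn: downstream onNext/onError/onComplete signals are delivered
    // on a dedicated single-thread executor ("pubOn-"), shut down when the stream terminates.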
    Publisher<Integer> pubOnPub = sub -> {
        subOnPub.subscribe(new Subscriber<Integer>() {
            ExecutorService es = Executors.newSingleThreadExecutor(new CustomizableThreadFactory() {
                @Override
                public String getThreadNamePrefix() {
                    return "pubOn-";
                }
            });

            @Override
            public void onSubscribe(Subscription s) {
                sub.onSubscribe(s);
            }

            @Override
            public void onNext(Integer integer) {
                es.execute(() -> sub.onNext(integer));
            }

            @Override
            public void onError(Throwable t) {
                es.execute(() -> sub.onError(t));
                es.shutdown();
            }

            @Override
            public void onComplete() {
                es.execute(() -> sub.onComplete());
                es.shutdown();
            }
        });
    };

    pubOnPub.subscribe(new Subscriber<Integer>() {
        @Override
        public void onSubscribe(Subscription s) {
            log.debug("onSubscribe");
            s.request(Long.MAX_VALUE);
        }

        @Override
        public void onNext(Integer integer) {
            log.debug("onNext : {}", integer);
        }

        @Override
        public void onError(Throwable t) {
            log.debug("onError : {}", t);
        }

        @Override
        public void onComplete() {
            log.debug("onComplete");
        }
    });

    log.debug("exit");
}

From source file:com.amazonaws.services.kinesis.samples.datavis.HttpReferrerStreamWriter.java

/**
 * Start a number of threads and send randomly generated {@link HttpReferrerPair}s to a Kinesis Stream until the
 * program is terminated.
 *
 * @param args Expecting 3 arguments: the number of threads to use to send data to Kinesis, the name of the
 *        stream to send records to, and the AWS region in which these resources exist or should be created.
 * @throws InterruptedException If this application is interrupted while sending records to Kinesis.
 */
public static void main(String[] args) throws InterruptedException {
    if (args.length != 3) {
        System.err.println("Usage: " + HttpReferrerStreamWriter.class.getSimpleName()
                + " <number of threads> <stream name> <region>");
        System.exit(1);
    }

    int numberOfThreads = Integer.parseInt(args[0]);
    String streamName = args[1];
    Region region = SampleUtils.parseRegion(args[2]);

    AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();
    ClientConfiguration clientConfig = SampleUtils.configureUserAgentForSample(new ClientConfiguration());
    AmazonKinesis kinesis = new AmazonKinesisClient(credentialsProvider, clientConfig);
    kinesis.setRegion(region);

    // The more resources we declare the higher write IOPS we need on our DynamoDB table.
    // We write a record for each resource every interval.
    // If interval = 500ms, resource count = 7 we need: (1000/500 * 7) = 14 write IOPS minimum.
    List<String> resources = new ArrayList<>();
    resources.add("/index.html");

    // These are the possible referrers to use when generating pairs
    List<String> referrers = new ArrayList<>();
    referrers.add("http://www.amazon.com");
    referrers.add("http://www.google.com");
    referrers.add("http://www.yahoo.com");
    referrers.add("http://www.bing.com");
    referrers.add("http://www.stackoverflow.com");
    referrers.add("http://www.reddit.com");

    HttpReferrerPairFactory pairFactory = new HttpReferrerPairFactory(resources, referrers);

    // Creates a stream to write to with 2 shards if it doesn't exist
    StreamUtils streamUtils = new StreamUtils(kinesis);
    streamUtils.createStreamIfNotExists(streamName, 2);
    LOG.info(String.format("%s stream is ready for use", streamName));

    final HttpReferrerKinesisPutter putter = new HttpReferrerKinesisPutter(pairFactory, kinesis, streamName);

    ExecutorService es = Executors.newCachedThreadPool();

    Runnable pairSender = new Runnable() {
        @Override
        public void run() {
            try {
                putter.sendPairsIndefinitely(DELAY_BETWEEN_RECORDS_IN_MILLIS, TimeUnit.MILLISECONDS);
            } catch (Exception ex) {
                LOG.warn(
                        "Thread encountered an error while sending records. Records will no longer be put by this thread.",
                        ex);
            }
        }
    };

    for (int i = 0; i < numberOfThreads; i++) {
        es.submit(pairSender);
    }

    LOG.info(String.format("Sending pairs with a %dms delay between records with %d thread(s).",
            DELAY_BETWEEN_RECORDS_IN_MILLIS, numberOfThreads));

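    // shutdown() only stops new submissions; the pairSender tasks loop indefinitely,
    // so awaitTermination below blocks until the process is interrupted or killed.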
    es.shutdown();
    es.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
}

From source file:com.alertlogic.aws.kinesis.test1.StreamWriter.java

/**
 * Start a number of threads and send randomly generated {@link HttpReferrerPair}s to a Kinesis Stream until the
 * program is terminated.
 *
 * @param args Expecting 3 arguments: the number of threads to use to send data to Kinesis, the name of the
 *        stream to send records to, and the AWS region in which these resources exist or should be created.
 * @throws InterruptedException If this application is interrupted while sending records to Kinesis.
 */
public static void main(String[] args) throws InterruptedException {
    if (args.length != 3) {
        System.err.println(
                "Usage: " + StreamWriter.class.getSimpleName() + " <number of threads> <stream name> <region>");
        System.exit(1);
    }

    int numberOfThreads = Integer.parseInt(args[0]);
    String streamName = args[1];
    Region region = SampleUtils.parseRegion(args[2]);

    AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();
    ClientConfiguration clientConfig = SampleUtils.configureUserAgentForSample(new ClientConfiguration());
    AmazonKinesis kinesis = new AmazonKinesisClient(credentialsProvider, clientConfig);
    kinesis.setRegion(region);

    // The more resources we declare the higher write IOPS we need on our DynamoDB table.
    // We write a record for each resource every interval.
    // If interval = 500ms, resource count = 7 we need: (1000/500 * 7) = 14 write IOPS minimum.
    List<String> resources = new ArrayList<>();
    resources.add("/index.html");

    // These are the possible referrers to use when generating pairs
    List<String> referrers = new ArrayList<>();
    referrers.add("http://www.amazon.com");
    referrers.add("http://www.google.com");
    referrers.add("http://www.yahoo.com");
    referrers.add("http://www.bing.com");
    referrers.add("http://www.stackoverflow.com");
    referrers.add("http://www.reddit.com");

    HttpReferrerPairFactory pairFactory = new HttpReferrerPairFactory(resources, referrers);

    // Creates a stream to write to with 2 shards if it doesn't exist
    StreamUtils streamUtils = new StreamUtils(kinesis);
    streamUtils.createStreamIfNotExists(streamName, 2);
    LOG.info(String.format("%s stream is ready for use", streamName));

    final HttpReferrerKinesisPutter putter = new HttpReferrerKinesisPutter(pairFactory, kinesis, streamName);

    ExecutorService es = Executors.newCachedThreadPool();

    Runnable pairSender = new Runnable() {
        @Override
        public void run() {
            try {
                putter.sendPairsIndefinitely(DELAY_BETWEEN_RECORDS_IN_MILLIS, TimeUnit.MILLISECONDS);
            } catch (Exception ex) {
                LOG.warn(
                        "Thread encountered an error while sending records. Records will no longer be put by this thread.",
                        ex);
            }
        }
    };

    for (int i = 0; i < numberOfThreads; i++) {
        es.submit(pairSender);
    }

    LOG.info(String.format("Sending pairs with a %dms delay between records with %d thread(s).",
            DELAY_BETWEEN_RECORDS_IN_MILLIS, numberOfThreads));

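    // shutdown() only stops new submissions; the pairSender tasks loop indefinitely,
    // so awaitTermination below blocks until the process is interrupted or killed.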
    es.shutdown();
    es.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
}

From source file:com.linkedin.pinotdruidbenchmark.PinotThroughput.java

@SuppressWarnings("InfiniteLoopStatement")
public static void main(String[] args) throws Exception {
    if (args.length != 3 && args.length != 4) {
        System.err.println(
                "3 or 4 arguments required: QUERY_DIR, RESOURCE_URL, NUM_CLIENTS, TEST_TIME (seconds).");
        return;
    }

    File queryDir = new File(args[0]);
    String resourceUrl = args[1];
    final int numClients = Integer.parseInt(args[2]);
    final long endTime;
    if (args.length == 3) {
        endTime = Long.MAX_VALUE;
    } else {
        endTime = System.currentTimeMillis() + Integer.parseInt(args[3]) * MILLIS_PER_SECOND;
    }

    File[] queryFiles = queryDir.listFiles();
    assert queryFiles != null;
    Arrays.sort(queryFiles);

    final int numQueries = queryFiles.length;
    final HttpPost[] httpPosts = new HttpPost[numQueries];
    for (int i = 0; i < numQueries; i++) {
        HttpPost httpPost = new HttpPost(resourceUrl);
        String query;
        try (BufferedReader reader = new BufferedReader(new FileReader(queryFiles[i]))) {
            query = reader.readLine();
        }
        httpPost.setEntity(new StringEntity("{\"pql\":\"" + query + "\"}"));
        httpPosts[i] = httpPost;
    }

    final AtomicInteger counter = new AtomicInteger(0);
    final AtomicLong totalResponseTime = new AtomicLong(0L);
    final ExecutorService executorService = Executors.newFixedThreadPool(numClients);

    for (int i = 0; i < numClients; i++) {
        executorService.submit(new Runnable() {
            @Override
            public void run() {
                try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
                    while (System.currentTimeMillis() < endTime) {
                        long startTime = System.currentTimeMillis();
                        CloseableHttpResponse httpResponse = httpClient
                                .execute(httpPosts[RANDOM.nextInt(numQueries)]);
                        httpResponse.close();
                        long responseTime = System.currentTimeMillis() - startTime;
                        counter.getAndIncrement();
                        totalResponseTime.getAndAdd(responseTime);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }
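    // shutdown() only stops new submissions; each client keeps issuing queries until endTime,
    // while the main thread below reports throughput at a fixed interval.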
    executorService.shutdown();

    long startTime = System.currentTimeMillis();
    while (System.currentTimeMillis() < endTime) {
        Thread.sleep(REPORT_INTERVAL_MILLIS);
        double timePassedSeconds = ((double) (System.currentTimeMillis() - startTime)) / MILLIS_PER_SECOND;
        int count = counter.get();
        double avgResponseTime = ((double) totalResponseTime.get()) / count;
        System.out.println("Time Passed: " + timePassedSeconds + "s, Query Executed: " + count + ", QPS: "
                + count / timePassedSeconds + ", Avg Response Time: " + avgResponseTime + "ms");
    }
}

From source file:com.linkedin.pinotdruidbenchmark.DruidThroughput.java

@SuppressWarnings("InfiniteLoopStatement")
public static void main(String[] args) throws Exception {
    if (args.length != 3 && args.length != 4) {
        System.err.println(
                "3 or 4 arguments required: QUERY_DIR, RESOURCE_URL, NUM_CLIENTS, TEST_TIME (seconds).");
        return;
    }

    File queryDir = new File(args[0]);
    String resourceUrl = args[1];
    final int numClients = Integer.parseInt(args[2]);
    final long endTime;
    if (args.length == 3) {
        endTime = Long.MAX_VALUE;
    } else {
        endTime = System.currentTimeMillis() + Integer.parseInt(args[3]) * MILLIS_PER_SECOND;
    }

    File[] queryFiles = queryDir.listFiles();
    assert queryFiles != null;
    Arrays.sort(queryFiles);

    final int numQueries = queryFiles.length;
    final HttpPost[] httpPosts = new HttpPost[numQueries];
    for (int i = 0; i < numQueries; i++) {
        HttpPost httpPost = new HttpPost(resourceUrl);
        httpPost.addHeader("content-type", "application/json");
        StringBuilder stringBuilder = new StringBuilder();
        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(queryFiles[i]))) {
            int length;
            while ((length = bufferedReader.read(CHAR_BUFFER)) > 0) {
                stringBuilder.append(new String(CHAR_BUFFER, 0, length));
            }
        }
        String query = stringBuilder.toString();
        httpPost.setEntity(new StringEntity(query));
        httpPosts[i] = httpPost;
    }

    final AtomicInteger counter = new AtomicInteger(0);
    final AtomicLong totalResponseTime = new AtomicLong(0L);
    final ExecutorService executorService = Executors.newFixedThreadPool(numClients);

    for (int i = 0; i < numClients; i++) {
        executorService.submit(new Runnable() {
            @Override
            public void run() {
                try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
                    while (System.currentTimeMillis() < endTime) {
                        long startTime = System.currentTimeMillis();
                        CloseableHttpResponse httpResponse = httpClient
                                .execute(httpPosts[RANDOM.nextInt(numQueries)]);
                        httpResponse.close();
                        long responseTime = System.currentTimeMillis() - startTime;
                        counter.getAndIncrement();
                        totalResponseTime.getAndAdd(responseTime);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }
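    // shutdown() only stops new submissions; each client keeps issuing queries until endTime,
    // while the main thread below reports throughput at a fixed interval.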
    executorService.shutdown();

    long startTime = System.currentTimeMillis();
    while (System.currentTimeMillis() < endTime) {
        Thread.sleep(REPORT_INTERVAL_MILLIS);
        double timePassedSeconds = ((double) (System.currentTimeMillis() - startTime)) / MILLIS_PER_SECOND;
        int count = counter.get();
        double avgResponseTime = ((double) totalResponseTime.get()) / count;
        System.out.println("Time Passed: " + timePassedSeconds + "s, Query Executed: " + count + ", QPS: "
                + count / timePassedSeconds + ", Avg Response Time: " + avgResponseTime + "ms");
    }
}

From source file:cc.wikitools.lucene.IndexWikipediaDump.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("bz2 Wikipedia XML dump file")
            .create(INPUT_OPTION));
    options.addOption(
            OptionBuilder.withArgName("dir").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg()
            .withDescription("maximum number of documents to index").create(MAX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of indexing threads")
            .create(THREADS_OPTION));

    options.addOption(new Option(OPTIMIZE_OPTION, "merge indexes into a single segment"));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!cmdline.hasOption(INPUT_OPTION) || !cmdline.hasOption(INDEX_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(IndexWikipediaDump.class.getCanonicalName(), options);
        System.exit(-1);
    }

    String indexPath = cmdline.getOptionValue(INDEX_OPTION);
    int maxdocs = cmdline.hasOption(MAX_OPTION) ? Integer.parseInt(cmdline.getOptionValue(MAX_OPTION))
            : Integer.MAX_VALUE;
    int threads = cmdline.hasOption(THREADS_OPTION) ? Integer.parseInt(cmdline.getOptionValue(THREADS_OPTION))
            : DEFAULT_NUM_THREADS;

    long startTime = System.currentTimeMillis();

    String path = cmdline.getOptionValue(INPUT_OPTION);
    PrintStream out = new PrintStream(System.out, true, "UTF-8");
    WikiClean cleaner = new WikiCleanBuilder().withTitle(true).build();

    Directory dir = FSDirectory.open(new File(indexPath));
    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_43, ANALYZER);
    config.setOpenMode(OpenMode.CREATE);

    IndexWriter writer = new IndexWriter(dir, config);
    LOG.info("Creating index at " + indexPath);
    LOG.info("Indexing with " + threads + " threads");

    try {
        WikipediaBz2DumpInputStream stream = new WikipediaBz2DumpInputStream(path);

        ExecutorService executor = Executors.newFixedThreadPool(threads);
        int cnt = 0;
        String page;
        while ((page = stream.readNext()) != null) {
            String title = cleaner.getTitle(page);

            // These are heuristics specifically for filtering out non-articles in enwiki-20120104.
            if (title.startsWith("Wikipedia:") || title.startsWith("Portal:") || title.startsWith("File:")) {
                continue;
            }

            if (page.contains("#REDIRECT") || page.contains("#redirect") || page.contains("#Redirect")) {
                continue;
            }

            Runnable worker = new AddDocumentRunnable(writer, cleaner, page);
            executor.execute(worker);

            cnt++;
            if (cnt % 10000 == 0) {
                LOG.info(cnt + " articles added");
            }
            if (cnt >= maxdocs) {
                break;
            }
        }

        executor.shutdown();
        // Block until all submitted tasks have completed
        executor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        LOG.info("Total of " + cnt + " articles indexed.");

        if (cmdline.hasOption(OPTIMIZE_OPTION)) {
            LOG.info("Merging segments...");
            writer.forceMerge(1);
            LOG.info("Done!");
        }

        LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        writer.close();
        dir.close();
        out.close();
    }
}

From source file:gov.nasa.ensemble.common.functional.ParTileExample.java

/**
 * @param args
 */
public static void main(String[] args) {

    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final ExecutorService pool = Executors.newFixedThreadPool(NUM_THREADS);
    final ParModule pm = ParModule.parModule(Strategy.<Unit>executorStrategy(pool));

    final Actor<Tree<Tile>> tileWriter = pm.effect(new Effect<Tree<Tile>>() {
        @Override
        public void e(Tree<Tile> tree) {
            final List<Tile> nodes = tree.flatten().toList();
            final Actor<String> callback = pm.actor(new Effect<String>() {
                final int totalTiles = nodes.length();
                int counter = 0;

                @Override
                public void e(final String response) {
                    //                  System.err.println(response);
                    if (++counter >= totalTiles) {
                        final String msg = MessageFormat.format(
                                "All done! Made {0} tiles for a {1} pixel image in {2} seconds", counter,
                                IMAGE_WIDTH * IMAGE_HEIGHT, stopWatch.getTime() / 1000.0);
                        System.err.println(msg);
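                        // All tiles are saved; shutting the pool down lets its threads exit so the JVM can terminate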
                        pool.shutdown();
                    }
                }
            }).asActor();

            nodes.foreach(Actors.act(pm.effect(new Effect<Tile>() {
                @Override
                public void e(final Tile tile) {
                    ThreadUtils.sleep(SAVE_TIME);
                    callback.act("done saving " + tile);
                }
            })));
        }
    });

    final Image inputImage = new Image(V.v(IMAGE_WIDTH, IMAGE_HEIGHT));

    process(inputImage, V.v(0.0, 0.0), 0, pm).to(tileWriter);
}