Example usage for java.util.concurrent ExecutorService isTerminated

List of usage examples for java.util.concurrent ExecutorService isTerminated

Introduction

On this page you can find usage examples for java.util.concurrent ExecutorService isTerminated.

Prototype

boolean isTerminated();

Document

Returns true if all tasks have completed following shut down. Note that isTerminated is never true unless either shutdown or shutdownNow was called first.
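
Below is a minimal, self-contained sketch of the common pattern (the class name, pool size and tasks are illustrative, not taken from the examples that follow): submit work, call shutdown(), then wait until isTerminated() reports completion. Combining the check with awaitTermination() avoids the empty busy-wait loops seen in several of the examples below.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class IsTerminatedExample {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 10; i++) {
            final int taskId = i;
            executor.execute(() -> System.out.println("Running task " + taskId));
        }
        // Initiate an orderly shutdown; isTerminated() can only become true after this call
        executor.shutdown();
        // Block in one-second slices instead of spinning in an empty loop
        while (!executor.isTerminated()) {
            executor.awaitTermination(1, TimeUnit.SECONDS);
        }
        System.out.println("All tasks completed: " + executor.isTerminated());
    }
}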

Usage

From source file:watch.oms.omswatch.parser.OMSConfigDBParser.java

/**
 * Parses the service response and stores it into the respective DB table.
 * @param pStringReader
 */
private void readJsonStream(Reader pStringReader) {
    double latestModifiedTimeStamp = 0.0;
    JsonReader reader = null;
    List<ContentValues> rows = null;
    String tableName = null;
    ExecutorService executor = Executors.newFixedThreadPool(10);
    final String VISITED_DATE = "visiteddate";
    OMSServerMapperHelper servermapperhelper = new OMSServerMapperHelper();
    final String DB_PROCESS_DURATION = "dbprocessduration";
    final String SERVER_PROCESS_DURATION = "serverprocessduration";

    try {
        Log.d(TAG, "@@@@@@@@@@ Config DB Tables Start @@@@@@@@@@");
        reader = new JsonReader(pStringReader);
        reader.setLenient(true);
        reader.beginObject();
        // Iterate through each table's data
        while (reader.hasNext()) {
            tableName = reader.nextName();
            if (tableName.equals(VISITED_DATE)) {

                latestModifiedTimeStamp = reader.nextDouble();

                /*servermapperhelper.updateModifiedTimeStampForAppsTable(
                       latestModifiedTimeStamp);*/
                /*if (Integer.parseInt(OMSApplication
                      .getInstance().getAppId()) == 10) {
                   servermapperhelper
                .updateModifiedTimeStampForVisitedDateMapper(
                      OMSApplication
                            .getInstance()
                            .getEditTextHiddenVal(),
                      latestModifiedTimeStamp);
                }*/
                continue;
            }
            if (tableName.equals(OMSConstants.NULL_STRING)) {
                continue;
            }
            //Fetch dbprocess duration and serverprocess duration
            else if (DB_PROCESS_DURATION.equalsIgnoreCase(tableName)) {
                String dbDuration = reader.nextString();
                OMSApplication.getInstance().setDatabaseProcessDuration(dbDuration);
                Log.i(TAG, "DB Process Duration" + dbDuration);
                continue;
            }
            if (SERVER_PROCESS_DURATION.equalsIgnoreCase(tableName)) {
                String serverProcessDuration = reader.nextString();
                OMSApplication.getInstance().setServerProcessDuration(serverProcessDuration);
                Log.i(TAG, "server process duration " + serverProcessDuration);
                continue;
            }
            rows = readAllRowDataForTable(reader, tableName);

            Runnable worker = new DbWorkerThread(tableName, rows);
            executor.execute(worker);
        }
        reader.endObject();
        executor.shutdown();
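        // Busy-wait until all DbWorkerThread tasks have finished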
        while (!executor.isTerminated()) {
        }
        Log.d(TAG, "@@@@@@@@@@ Config DB Tables End @@@@@@@@@@");
        // Update Apps Table
        Log.d(TAG, "@@@@@@@@@@ Updating AppsTable with ConfigLastModifieddate:" + latestModifiedTimeStamp);
        servermapperhelper.updateModifiedTimeStampForAppsTable(latestModifiedTimeStamp);

    } catch (IOException e) {
        e.printStackTrace();
    } finally {

        executor.shutdown();
        while (!executor.isTerminated()) {
        }
        try {
            if (reader != null) {
                reader.close();
            }
        } catch (IOException e) {
            Log.e(TAG, "IOException occurred while loading file from Assets folder." + e.getMessage());
            e.printStackTrace();
        }

    }

}

From source file:com.dumontierlab.pdb2rdf.Pdb2Rdf.java

private static void printRdf(final CommandLine cmd, final Map<String, Double> stats) {
    final File outDir = getOutputDirectory(cmd);
    final RDFWriter writer = getWriter(cmd);
    final ProgressMonitor monitor = getProgressMonitor();
    Pdb2RdfInputIterator i = processInput(cmd);
    final int inputSize = i.size();
    final AtomicInteger progressCount = new AtomicInteger();
    ExecutorService pool = null;
    if (outDir != null) {
        pool = getThreadPool(cmd);
    } else {
        // if the output is going to STDOUT then we need to process in
        // sequential mode.
        pool = Executors.newSingleThreadExecutor();
    }

    final Object lock = new Object();

    while (i.hasNext()) {
        final InputSource input = i.next();
        pool.execute(new Runnable() {
            @Override
            public void run() {
                OutputStream out = System.out;
                PdbXmlParser parser = new PdbXmlParser();
                PdbRdfModel model = null;
                try {
                    if (cmd.hasOption("detailLevel")) {
                        try {
                            DetailLevel detailLevel = Enum.valueOf(DetailLevel.class,
                                    cmd.getOptionValue("detailLevel"));
                            model = parser.parse(input, new PdbRdfModel(), detailLevel);
                        } catch (IllegalArgumentException e) {
                            LOG.fatal("Invalid argument value for detailLevel option", e);
                            System.exit(1);
                        }
                    } else {
                        model = parser.parse(input, new PdbRdfModel());
                    }
                    // add the input file information
                    model.addInputFileInformation();
                    // add the output file information
                    model.addRDFFileInformation();
                    if (outDir != null) {
                        File directory = new File(outDir, model.getPdbId().substring(1, 3));
                        synchronized (lock) {
                            if (!directory.exists()) {
                                directory.mkdir();
                            }
                        }
                        File file = new File(directory, model.getPdbId() + ".rdf.gz");
                        out = new GZIPOutputStream(new FileOutputStream(file));
                    }
                    if (cmd.hasOption("format")) {
                        if (cmd.getOptionValue("format").equalsIgnoreCase("NQUADs")) {
                            Dataset ds = TDBFactory.createDataset();
                            ds.addNamedModel(model.getDatasetResource().toString(), model);
                            StringWriter sw = new StringWriter();
                            RDFDataMgr.write(sw, ds, Lang.NQUADS);

                            out.write(sw.toString().getBytes(Charset.forName("UTF-8")));
                            ds.close();

                        }
                    }

                    writer.write(model, out, null);

                    if (stats != null) {
                        updateStats(stats, model);
                    }
                    if (monitor != null) {
                        monitor.setProgress(progressCount.incrementAndGet(), inputSize);
                    }

                } catch (Exception e) {
                    String id = null;
                    if (model != null) {
                        id = model.getPdbId();
                    }
                    LOG.error("Unable to parse input for PDB: " + id, e);
                } finally {
                    try {
                        out.close();
                    } catch (IOException e) {
                        LOG.error("Unable to close output stream", e);
                    }
                }
            }
        });
    }
    pool.shutdown();
    while (!pool.isTerminated()) {
        try {
            pool.awaitTermination(1, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            break;
        }
    }
}

From source file:org.apache.accumulo.test.merkle.cli.GenerateHashes.java

public void run(final Connector conn, final String inputTableName, final String outputTableName,
        final String digestName, int numThreads, final boolean iteratorPushdown, final Collection<Range> ranges)
        throws TableNotFoundException, AccumuloSecurityException, AccumuloException, NoSuchAlgorithmException {
    if (!conn.tableOperations().exists(outputTableName)) {
        throw new IllegalArgumentException(outputTableName + " does not exist, please create it");
    }

    ExecutorService svc = Executors.newFixedThreadPool(numThreads);
    final BatchWriter bw = conn.createBatchWriter(outputTableName, new BatchWriterConfig());

    try {
        for (final Range range : ranges) {
            final MessageDigest digest = getDigestAlgorithm(digestName);

            svc.execute(new Runnable() {

                @Override
                public void run() {
                    Scanner s;
                    try {
                        s = conn.createScanner(inputTableName, Authorizations.EMPTY);
                    } catch (Exception e) {
                        log.error("Could not get scanner for " + inputTableName, e);
                        throw new RuntimeException(e);
                    }

                    s.setRange(range);

                    Value v = null;
                    Mutation m = null;
                    if (iteratorPushdown) {
                        IteratorSetting cfg = new IteratorSetting(50, DigestIterator.class);
                        cfg.addOption(DigestIterator.HASH_NAME_KEY, digestName);
                        s.addScanIterator(cfg);

                        // The scanner should only ever return us one Key-Value, otherwise this approach won't work
                        Entry<Key, Value> entry = Iterables.getOnlyElement(s);

                        v = entry.getValue();
                        m = RangeSerialization.toMutation(range, v);
                    } else {
                        ByteArrayOutputStream baos = new ByteArrayOutputStream();
                        for (Entry<Key, Value> entry : s) {
                            DataOutputStream out = new DataOutputStream(baos);
                            try {
                                entry.getKey().write(out);
                                entry.getValue().write(out);
                            } catch (Exception e) {
                                log.error("Error writing {}", entry, e);
                                throw new RuntimeException(e);
                            }

                            digest.update(baos.toByteArray());
                            baos.reset();
                        }

                        v = new Value(digest.digest());
                        m = RangeSerialization.toMutation(range, v);
                    }

                    // Log some progress
                    log.info("{} computed digest for {} of {}", Thread.currentThread().getName(), range,
                            Hex.encodeHexString(v.get()));

                    try {
                        bw.addMutation(m);
                    } catch (MutationsRejectedException e) {
                        log.error("Could not write mutation", e);
                        throw new RuntimeException(e);
                    }
                }
            });
        }

        svc.shutdown();

        // Wait indefinitely for the scans to complete
        while (!svc.isTerminated()) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                log.error("Interrupted while waiting for executor service to gracefully complete. Exiting now");
                svc.shutdownNow();
                return;
            }
        }
    } finally {
        // We can only safely close this when we're exiting or we've completed all tasks
        bw.close();
    }
}

From source file:org.apache.accumulo.test.replication.merkle.cli.GenerateHashes.java

public void run(final Connector conn, final String inputTableName, final String outputTableName,
        final String digestName, int numThreads, final boolean iteratorPushdown, final Collection<Range> ranges)
        throws TableNotFoundException, AccumuloSecurityException, AccumuloException, NoSuchAlgorithmException {
    if (!conn.tableOperations().exists(outputTableName)) {
        throw new IllegalArgumentException(outputTableName + " does not exist, please create it");
    }

    // Get some parallelism
    ExecutorService svc = Executors.newFixedThreadPool(numThreads);
    final BatchWriter bw = conn.createBatchWriter(outputTableName, new BatchWriterConfig());

    try {
        for (final Range range : ranges) {
            final MessageDigest digest = getDigestAlgorithm(digestName);

            svc.execute(new Runnable() {

                @Override
                public void run() {
                    Scanner s;
                    try {
                        s = conn.createScanner(inputTableName, Authorizations.EMPTY);
                    } catch (Exception e) {
                        log.error("Could not get scanner for " + inputTableName, e);
                        throw new RuntimeException(e);
                    }

                    s.setRange(range);

                    Value v = null;
                    Mutation m = null;
                    if (iteratorPushdown) {
                        IteratorSetting cfg = new IteratorSetting(50, DigestIterator.class);
                        cfg.addOption(DigestIterator.HASH_NAME_KEY, digestName);
                        s.addScanIterator(cfg);

                        // The scanner should only ever return us one Key-Value, otherwise this approach won't work
                        Entry<Key, Value> entry = Iterables.getOnlyElement(s);

                        v = entry.getValue();
                        m = RangeSerialization.toMutation(range, v);
                    } else {
                        ByteArrayOutputStream baos = new ByteArrayOutputStream();
                        for (Entry<Key, Value> entry : s) {
                            DataOutputStream out = new DataOutputStream(baos);
                            try {
                                entry.getKey().write(out);
                                entry.getValue().write(out);
                            } catch (Exception e) {
                                log.error("Error writing {}", entry, e);
                                throw new RuntimeException(e);
                            }

                            digest.update(baos.toByteArray());
                            baos.reset();
                        }

                        v = new Value(digest.digest());
                        m = RangeSerialization.toMutation(range, v);
                    }

                    // Log some progress
                    log.info("{} computed digest for {} of {}", Thread.currentThread().getName(), range,
                            Hex.encodeHexString(v.get()));

                    try {
                        bw.addMutation(m);
                    } catch (MutationsRejectedException e) {
                        log.error("Could not write mutation", e);
                        throw new RuntimeException(e);
                    }
                }
            });
        }

        svc.shutdown();

        // Wait indefinitely for the scans to complete
        while (!svc.isTerminated()) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                log.error("Interrupted while waiting for executor service to gracefully complete. Exiting now");
                svc.shutdownNow();
                return;
            }
        }
    } finally {
        // We can only safely close this when we're exiting or we've completed all tasks
        bw.close();
    }
}

From source file:br.prof.salesfilho.oci.service.BodyWomanNudeClassifier.java

public void tClassify() {

    long startTime;
    long endTime;
    double totalTime = 0;

    EuclidianClassifier chestWorker;
    EuclidianClassifier buttockWorker;
    EuclidianClassifier genitalWorker;

    ClassificationResult classificationResult;
    ExecutorService executor;

    bodyWomanDescriptorService.openDatabase(new File(this.databaseName));

    BodyPartDescriptor nudeChestDescriptor = bodyWomanDescriptorService
            .findNudeBodyPartDescriptorByName("Chest");
    BodyPartDescriptor notNudeChestDescriptor = bodyWomanDescriptorService
            .findNotNudeBodyPartDescriptorByName("Chest");

    BodyPartDescriptor nudeButtockDescriptor = bodyWomanDescriptorService
            .findNudeBodyPartDescriptorByName("Buttock");
    BodyPartDescriptor notNudeButtockDescriptor = bodyWomanDescriptorService
            .findNotNudeBodyPartDescriptorByName("Buttock");

    BodyPartDescriptor nudeGenitalDescriptor = bodyWomanDescriptorService
            .findNudeBodyPartDescriptorByName("Genital");
    BodyPartDescriptor notNudeGenitalDescriptor = bodyWomanDescriptorService
            .findNotNudeBodyPartDescriptorByName("Genital");

    fileList = OCIUtils.getImageFiles(this.inputDir);

    List<EuclidianClassifier> classifiers = new ArrayList<>();

    for (String imagePath : fileList) {
        startTime = System.currentTimeMillis();

        try {
            System.out.println(
                    "-------------------------------------------------------------------------------------");
            System.out.println("Classifying image...: ".concat(imagePath));
            System.out.println(
                    "-------------------------------------------------------------------------------------");

            BufferedImage img = ImageIO.read(new File(imagePath));
            ImageProcessor imageProcessor = new ImageProcessor(img);
            imageProcessorService = new ImageProcessorService(imageProcessor);
            List<BufferedImage> partImageList = imageProcessorService.getSubImages(128);

            //Create a new thread pool for each image file
            executor = Executors.newFixedThreadPool(10);
            for (BufferedImage subImg : partImageList) {

                chestWorker = new EuclidianClassifier(nudeChestDescriptor, notNudeChestDescriptor, subImg,
                        this.kernelSize);
                chestWorker.setClassificationLevel(this.classificationLevel);
                executor.execute(chestWorker);
                classifiers.add(chestWorker);

                buttockWorker = new EuclidianClassifier(nudeButtockDescriptor, notNudeButtockDescriptor, subImg,
                        this.kernelSize);
                buttockWorker.setClassificationLevel(this.classificationLevel);
                executor.execute(buttockWorker);
                classifiers.add(buttockWorker);

                genitalWorker = new EuclidianClassifier(nudeGenitalDescriptor, notNudeGenitalDescriptor, subImg,
                        this.kernelSize);
                genitalWorker.setClassificationLevel(this.classificationLevel);
                executor.execute(genitalWorker);
                classifiers.add(genitalWorker);
            }
            //Wait for all classifiers to finish
            executor.shutdown();
            while (!executor.isTerminated()) {
            }

            classificationResult = printResult(classifiers);
            classificationResult.setFileName(imagePath);

            endTime = System.currentTimeMillis();

            classificationResult.setExecutionTime(endTime - startTime);
            totalTime += (endTime - startTime);
            classificationResults.add(classificationResult);

            classifiers.clear();

        } catch (IOException ex) {
            Logger.getLogger(BodyWomanNudeClassifier.class.getName()).log(Level.SEVERE, null, ex);
        }

    }
    System.out.println("--------------------------------------------------------");
    System.out.println("Total time.: " + (totalTime) + " ms.");
    System.out.println("--------------------------------------------------------");
}

From source file:org.dllearner.scripts.evaluation.EnrichmentEvaluationMultithreaded.java

private void evaluateDataProperties(final SparqlEndpointKS ks)
        throws IllegalArgumentException, SecurityException, InstantiationException, IllegalAccessException,
        InvocationTargetException, NoSuchMethodException, ComponentInitException, InterruptedException {
    Set<DatatypeProperty> properties = new SPARQLTasks(ks.getEndpoint()).getAllDataProperties();
    logger.info("Evaluating " + properties.size() + " data properties...");
    for (final Class<? extends AxiomLearningAlgorithm> algorithmClass : dataPropertyAlgorithms) {
        Thread.sleep(5000);
        int propCnt = 0;

        Set<OWLAxiom> axioms = new HashSet<OWLAxiom>();
        algorithm2Ontology.put(algorithmClass, axioms);

        ExecutorService threadPool = Executors.newFixedThreadPool(maxNrOfThreads);
        for (final DatatypeProperty property : properties) {

            threadPool.execute(new Runnable() {

                @Override
                public void run() {
                    String algName = "";
                    try {
                        AxiomLearningAlgorithm learner = algorithmClass.getConstructor(SparqlEndpointKS.class)
                                .newInstance(ks);
                        ((AbstractAxiomLearningAlgorithm) learner).setReasoner(sparqlReasoner);
                        ((AbstractAxiomLearningAlgorithm) learner).addFilterNamespace(NAMESPACE);
                        ConfigHelper.configure(learner, "propertyToDescribe", property.toString());
                        ConfigHelper.configure(learner, "maxExecutionTimeInSeconds", maxExecutionTimeInSeconds);
                        learner.init();
                        algName = AnnComponentManager.getName(learner);

                        boolean emptyEntity = sparqlReasoner.getPopularity(property) == 0;
                        if (emptyEntity) {
                            logger.warn("Empty entity: " + property);
                        }

                        if (emptyEntity) {
                            writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName,
                                    "EMPTY_ENTITY", 0, 0, false);
                        } else {
                            applyLearningAlgorithm(learner, property);

                        }

                    } catch (Exception e) {
                        logger.error("Error occured for data property " + property.getName()
                                + " with algorithm " + algName, e);
                    }
                }
            });

            propCnt++;
            if (maxDataProperties != 0 && propCnt == maxDataProperties) {
                break;
            }

        }
        threadPool.shutdown();
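        // Busy-wait until all data property learning tasks have finished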
        while (!threadPool.isTerminated()) {
        }
    }
}

From source file:org.dllearner.scripts.evaluation.EnrichmentEvaluationMultithreaded.java

private void evaluateObjectProperties(final SparqlEndpointKS ks)
        throws IllegalArgumentException, SecurityException, InstantiationException, IllegalAccessException,
        InvocationTargetException, NoSuchMethodException, ComponentInitException, InterruptedException {
    Set<ObjectProperty> properties = new SPARQLTasks(ks.getEndpoint()).getAllObjectProperties();
    logger.info("Evaluating " + properties.size() + " object properties...");

    for (final Class<? extends AxiomLearningAlgorithm> algorithmClass : objectPropertyAlgorithms) {
        Thread.sleep(5000);

        Set<OWLAxiom> axioms = new HashSet<OWLAxiom>();
        algorithm2Ontology.put(algorithmClass, axioms);
        int propCnt = 0;
        ExecutorService threadPool = Executors.newFixedThreadPool(maxNrOfThreads);
        for (final ObjectProperty property : properties) {

            threadPool.execute(new Runnable() {

                @Override
                public void run() {
                    String algName = "";
                    try {
                        AxiomLearningAlgorithm learner = algorithmClass.getConstructor(SparqlEndpointKS.class)
                                .newInstance(ks);
                        ((AbstractAxiomLearningAlgorithm) learner).setReasoner(sparqlReasoner);
                        ((AbstractAxiomLearningAlgorithm) learner).addFilterNamespace(NAMESPACE);
                        ConfigHelper.configure(learner, "propertyToDescribe", property.toString());
                        ConfigHelper.configure(learner, "maxExecutionTimeInSeconds", maxExecutionTimeInSeconds);
                        learner.init();
                        algName = AnnComponentManager.getName(learner);

                        boolean emptyEntity = sparqlReasoner.getPopularity(property) == 0;
                        if (emptyEntity) {
                            logger.warn("Empty entity: " + property);
                        }

                        if (emptyEntity) {
                            writeToDB(property.toManchesterSyntaxString(baseURI, prefixes), algName,
                                    "EMPTY_ENTITY", 0, 0, false);
                        } else {
                            applyLearningAlgorithm(learner, property);

                        }

                    } catch (Exception e) {
                        logger.error("Error occured for object property " + property.getName()
                                + " with algorithm " + algName, e);
                    }
                }
            });

            propCnt++;
            if (maxObjectProperties != 0 && propCnt == maxObjectProperties) {
                break;
            }

        }
        threadPool.shutdown();
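        // Busy-wait until all object property learning tasks have finished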
        while (!threadPool.isTerminated()) {
        }
    }
}

From source file:org.apache.mahout.freqtermsets.FPGrowthDriver.java

/**
 * Run TopK FPGrowth given the input file,
 */
@Override
public int run(String[] args) throws Exception {
    addInputOption();
    addOutputOption();

    addOption("minSupport", "s",
            "(Optional) The minimum number of times a co-occurrence must be present." + " Default Value: 3",
            "3");
    addOption("maxHeapSize", "k",
            "(Optional) Maximum Heap Size k, to denote the requirement to mine top K items."
                    + " Default value: 50",
            "50");
    addOption(PFPGrowth.NUM_GROUPS, "g",
            "(Optional) Number of groups the features should be divided in the map-reduce version."
                    + " Doesn't work in sequential version Default Value:" + PFPGrowth.NUM_GROUPS_DEFAULT,
            Integer.toString(PFPGrowth.NUM_GROUPS_DEFAULT));
    // addOption("splitterPattern", "regex",
    // "Regular Expression pattern used to split given string transaction into"
    // + " itemsets. Default value splits comma separated itemsets.  Default Value:"
    // + " \"[ ,\\t]*[,|\\t][ ,\\t]*\" ", "[ ,\t]*[,|\t][ ,\t]*");
    addOption("numTreeCacheEntries", "tc", "(Optional) Number of entries in the tree cache to prevent duplicate"
            + " tree building. (Warning) a first level conditional FP-Tree might consume a lot of memory, "
            + "so keep this value small, but big enough to prevent duplicate tree building. "
            + "Default Value:5 Recommended Values: [5-10]", "5");
    // addOption("method", "method", "Method of processing: sequential|mapreduce", "mapreduce");
    // //"sequential");
    addOption("encoding", "e", "(Optional) The file encoding.  Default value: UTF-8", "UTF-8");
    // addFlag("useFPG2", "2", "Use an alternate FPG implementation");
    addOption(PFPGrowth.COUNT_IN, "cnt",
            "(Optional) In case of mapreduce, if this is set parallel counting will be skipped and counts will be read from the path specified");
    // addFlag(PFPGrowth.PSEUDO, "ps",
    // "Running on a Pseudo-Cluster (one machine). Uses hardcoded configurations for each job.");
    addOption(PFPGrowth.GROUP_FIS_IN, "gfis",
            "(Optional) In case of mapreduce, if this is set execution will start from the aggregation phase, and group dependent frequent itemsets will be read from the path specified");
    addFlag(AggregatorReducer.MUTUAL_INFO_FLAG, "mi",
            "Set to selec the top K patterns based on the Normalized Mutual Information rather than frequency of pattern");
    addOption(ParallelFPGrowthReducer.MIN_WORDS_FOR_LANG_ID, "lid",
            "The mimun length of a pattern that would be used for language identification");
    addOption(PFPGrowth.MIN_FREQ, "mf",
            "The minimum frequency of a token. Any token with less frequency will be pruned from the begining.");
    addOption(PFPGrowth.PRUNE_PCTILE, "pct",
            "The percentile of frequencies that will be considered; any token with a higher frequency will be pruned");
    //    addFlag("shift", "shift", "If set (and window must be set) it shifts the window by half");
    addFlag(TokenIterator.PARAM_REPEAT_HASHTAG, "rht",
            "If set, each hashtag is repeated, removing the # sign from the second token returned for the same hashtag");
    addOption(PFPGrowth.PARAM_INTERVAL_START, "st",
            "The start time of interval to be mined.. defaults to first known tweet time");
    addOption(PFPGrowth.PARAM_INTERVAL_END, "et",
            "The end time of interval to be mined.. defaults to long.maxvalue");
    addOption(PFPGrowth.PARAM_WINDOW_SIZE, "ws",
            "The duration of windows that will be mined.. defaults to end - start");
    addOption(PFPGrowth.PARAM_STEP_SIZE, "ss",
            "The step by which the window will be advanced.. defaults to windowSize");

    addOption(PARAM_NUM_THREADS, "j",
            "The number of PFP jobs, because in case of intervals resources are under utilized");

    // addOption(PFPGrowth.INDEX_OUT,
    // "ix",
    // "The local folder to which the frequent itemset index will be written");

    if (parseArguments(args) == null) {
        return -1;
    }

    Parameters params = new Parameters();

    if (hasOption("minSupport")) {
        String minSupportString = getOption("minSupport");
        params.set("minSupport", minSupportString);
    }
    if (hasOption("maxHeapSize")) {
        String maxHeapSizeString = getOption("maxHeapSize");
        params.set("maxHeapSize", maxHeapSizeString);
    }
    if (hasOption(PFPGrowth.NUM_GROUPS)) {
        String numGroupsString = getOption(PFPGrowth.NUM_GROUPS);
        params.set(PFPGrowth.NUM_GROUPS, numGroupsString);
    }

    if (hasOption("numTreeCacheEntries")) {
        String numTreeCacheString = getOption("numTreeCacheEntries");
        params.set("treeCacheSize", numTreeCacheString);
    }

    // if (hasOption("splitterPattern")) {
    // String patternString = getOption("splitterPattern");
    // params.set("splitPattern", patternString);
    // }

    String encoding = "UTF-8";
    if (hasOption("encoding")) {
        encoding = getOption("encoding");
    }
    params.set("encoding", encoding);

    // if (hasOption("useFPG2")) {
    // params.set(PFPGrowth.USE_FPG2, "true");
    // }

    // if (hasOption(PFPGrowth.COUNT_IN)) {
    // params.set(PFPGrowth.COUNT_IN, getOption(PFPGrowth.COUNT_IN));
    // }

    // if(hasOption(PFPGrowth.PSEUDO)){
    // params.set(PFPGrowth.PSEUDO, "true");
    // }

    // if (hasOption(PFPGrowth.GROUP_FIS_IN)) {
    // params.set(PFPGrowth.GROUP_FIS_IN, getOption(PFPGrowth.GROUP_FIS_IN));
    // }

    if (hasOption(AggregatorReducer.MUTUAL_INFO_FLAG)) {
        params.set(AggregatorReducer.MUTUAL_INFO_FLAG, "true");
    } else {
        params.set(AggregatorReducer.MUTUAL_INFO_FLAG, "false");
    }

    if (hasOption(ParallelFPGrowthReducer.MIN_WORDS_FOR_LANG_ID)) {
        params.set(ParallelFPGrowthReducer.MIN_WORDS_FOR_LANG_ID,
                getOption(ParallelFPGrowthReducer.MIN_WORDS_FOR_LANG_ID));
    }

    if (hasOption(PFPGrowth.MIN_FREQ)) {
        params.set(PFPGrowth.MIN_FREQ, getOption(PFPGrowth.MIN_FREQ));
    }

    if (hasOption(PFPGrowth.PRUNE_PCTILE)) {
        params.set(PFPGrowth.PRUNE_PCTILE, getOption(PFPGrowth.PRUNE_PCTILE));
    }

    // if (hasOption(PFPGrowth.PARAM_INTERVAL_END)) {
    params.set(PFPGrowth.PARAM_INTERVAL_END,
            getOption(PFPGrowth.PARAM_INTERVAL_END, Long.toString(Long.MAX_VALUE)));
    // }

    if (hasOption(PFPGrowth.PARAM_WINDOW_SIZE)) {
        params.set(PFPGrowth.PARAM_WINDOW_SIZE, getOption(PFPGrowth.PARAM_WINDOW_SIZE));
    }

    if (hasOption(PFPGrowth.PARAM_STEP_SIZE)) {
        params.set(PFPGrowth.PARAM_STEP_SIZE, getOption(PFPGrowth.PARAM_STEP_SIZE));
    }

    // if (hasOption(PFPGrowth.PARAM_INTERVAL_START)) {
    // params.set(PFPGrowth.PARAM_INTERVAL_START, getOption(PFPGrowth.PARAM_INTERVAL_START));
    // }

    // if (hasOption(PFPGrowth.INDEX_OUT)) {
    // params.set(PFPGrowth.INDEX_OUT, getOption(PFPGrowth.INDEX_OUT));
    // }

    if (hasOption(TokenIterator.PARAM_REPEAT_HASHTAG)) {
        params.set(TokenIterator.PARAM_REPEAT_HASHTAG, "true");
    }

    //    boolean shiftedWindow = hasOption("shift");

    Path inputDir = getInputPath();
    Path outputDir = getOutputPath();

    params.set(PFPGrowth.INPUT, inputDir.toString());
    params.set(PFPGrowth.OUTROOT, outputDir.toString());

    Configuration conf = new Configuration();
    //    HadoopUtil.delete(conf, outputDir);
    FileSystem fs = FileSystem.get(conf);
    if (fs.exists(outputDir)) {
        throw new IllegalArgumentException(
                "Output path already exists.. please delete it yourself: " + outputDir);
    }

    int nThreads = Integer.parseInt(getOption(PARAM_NUM_THREADS, DEFAULT_NUM_THREADS));
    if (!PFPGrowth.runMode.equals(RunningMode.Batch) && nThreads != 1) {
        throw new UnsupportedOperationException("We use mining results from earlier windows. j must be 1");
    }
    ExecutorService exec = Executors.newFixedThreadPool(nThreads);
    Future<Void> lastFuture = null;

    String startTimeStr = getOption(PFPGrowth.PARAM_INTERVAL_START);
    // params.get(PFPGrowth.PARAM_INTERVAL_START);
    if (startTimeStr == null) {
        // FIXME: Will fail if not running locally.. like many things now
        // FileSystem fs = FileSystem.getLocal(conf);
        // startTimeStr = fs.listStatus(inputDir)[0].getPath().getName();
        File[] startFolders = FileUtils.toFile(inputDir.toUri().toURL()).listFiles();
        Arrays.sort(startFolders);
        startTimeStr = startFolders[0].getName();
    }
    long startTime = Long.parseLong(startTimeStr);
    // Long.toString(PFPGrowth.TREC2011_MIN_TIMESTAMP)));// GMT23JAN2011)));
    long endTime = Long.parseLong(params.get(PFPGrowth.PARAM_INTERVAL_END));
    // Long.toString(Long.MAX_VALUE)));
    long windowSize = Long
            .parseLong(params.get(PFPGrowth.PARAM_WINDOW_SIZE, Long.toString(endTime - startTime)));
    long stepSize = Long.parseLong(params.get(PFPGrowth.PARAM_STEP_SIZE, Long.toString(windowSize)));

    // int numJobs = 0;
    while (startTime < endTime) {
        // if(++numJobs % 100 == 0){
        // Thread.sleep(60000);
        // }
        long shift = 0;
        //      if(shiftedWindow){
        //        shift = (long)Math.floor(windowSize / 2.0f);
        //      }
        params.set(PFPGrowth.PARAM_INTERVAL_START, Long.toString(startTime + shift));

        if (hasOption(PFPGrowth.GROUP_FIS_IN)) {
            String gfisIn = getOption(PFPGrowth.GROUP_FIS_IN);
            gfisIn = FilenameUtils.concat(gfisIn, Long.toString(startTime + shift));
            gfisIn = FilenameUtils.concat(gfisIn,
                    Long.toString(Math.min(endTime, startTime + windowSize) + shift));
            params.set(PFPGrowth.GROUP_FIS_IN, gfisIn);
        }

        if (hasOption(PFPGrowth.COUNT_IN)) {
            String countIn = getOption(PFPGrowth.COUNT_IN);
            //        countIn = FilenameUtils.concat(countIn, Long.toString(startTime + shift));
            //        countIn = FilenameUtils.concat(countIn,
            //            Long.toString(Math.min(endTime, startTime + windowSize) + shift));
            params.set(PFPGrowth.COUNT_IN, countIn);
        }

        String outPathStr = FilenameUtils.concat(outputDir.toString(), Long.toString(startTime + shift));
        outPathStr = FilenameUtils.concat(outPathStr,
                Long.toString(Math.min(endTime, startTime + windowSize) + shift));
        params.set(PFPGrowth.OUTPUT, outPathStr);

        // PFPGrowth.runPFPGrowth(params);
        lastFuture = exec.submit(new PFPGrowth(params));

        //      startTime += windowSize;
        startTime += stepSize;

        //      Thread.sleep(10000);
    }

    lastFuture.get();
    exec.shutdown();

    while (!exec.isTerminated()) {
        Thread.sleep(1000);
    }

    return 0;
}

From source file:com.atlauncher.data.Settings.java

public void downloadUpdatedFiles() {
    ArrayList<Downloadable> downloads = getLauncherFiles();
    if (downloads != null) {
        ExecutorService executor = Executors.newFixedThreadPool(this.concurrentConnections);
        for (final Downloadable download : downloads) {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    if (download.needToDownload()) {
                        LogManager.info("Downloading Launcher File " + download.getFile().getName());
                        download.download(false);
                    }
                }
            });
        }
        executor.shutdown();
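        // Busy-wait until all launcher file downloads have finished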
        while (!executor.isTerminated()) {
        }
    }

    LogManager.info("Finished downloading updated files!");

    if (Language.INSTANCE.getCurrent() != null) {
        try {
            Language.INSTANCE.reload(Language.INSTANCE.getCurrent());
        } catch (IOException e) {
            logStackTrace("Couldn't reload langauge " + Language.INSTANCE.getCurrent(), e);
        }
    }
}

From source file:com.atlauncher.data.Settings.java

/**
 * Downloads and loads all external libraries used by the launcher as specified in the Configs/JSON/libraries.json
 * file.
 */
private void downloadExternalLibraries() {
    LogManager.debug("Downloading external libraries");

    FileReader fr = null;

    try {
        fr = new FileReader(new File(this.jsonDir, "libraries.json"));

        java.lang.reflect.Type type = new TypeToken<List<LauncherLibrary>>() {
        }.getType();

        this.launcherLibraries = Gsons.DEFAULT.fromJson(fr, type);
    } catch (Exception e) {
        logStackTrace(e);
    } finally {
        if (fr != null) {
            try {
                fr.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    ExecutorService executor = Executors.newFixedThreadPool(getConcurrentConnections());

    for (final LauncherLibrary library : this.launcherLibraries) {
        executor.execute(new Runnable() {

            @Override
            public void run() {
                Downloadable download = library.getDownloadable();

                if (download.needToDownload()) {
                    LogManager.info("Downloading library " + library.getFilename() + "!");
                    download.download(false);
                }
            }
        });
    }
    executor.shutdown();
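    // Busy-wait until all library downloads have finished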
    while (!executor.isTerminated()) {
    }

    for (LauncherLibrary library : this.launcherLibraries) {
        File file = library.getFile();

        if (library.shouldAutoLoad() && !Utils.addToClasspath(file)) {
            LogManager.error("Couldn't add " + file + " to the classpath!");
            if (library.shouldExitOnFail()) {
                LogManager.error("Library is necessary so launcher will exit!");
                System.exit(1);
            }
        }
    }

    LogManager.debug("Finished downloading external libraries");
}