Example usage for java.util.concurrent CompletionService take

Introduction

This page lists usage examples for java.util.concurrent CompletionService.take(), collected from open-source projects.

Prototype

Future<V> take() throws InterruptedException;

Document

Retrieves and removes the Future representing the next completed task, waiting if none are yet present.
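
The method supports the common "submit N, take N" pattern: submit a batch of tasks, then call take() once per task, handling each result as soon as it finishes rather than in submission order. Below is a minimal, self-contained sketch of that pattern; the class name and task bodies are illustrative only, not drawn from the examples that follow.

import java.util.concurrent.*;

public class CompletionServiceTakeExample {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        CompletionService<Integer> ecs = new ExecutorCompletionService<>(executor);

        // Submit a batch of tasks; they may finish in any order.
        final int tasks = 5;
        for (int i = 0; i < tasks; i++) {
            final int n = i;
            ecs.submit(() -> {
                Thread.sleep((tasks - n) * 50L); // later submissions finish sooner
                return n * n;
            });
        }

        // take() blocks until some task completes and returns its Future,
        // so results are handled in completion order, not submission order.
        for (int i = 0; i < tasks; i++) {
            Future<Integer> done = ecs.take();
            System.out.println("completed: " + done.get());
        }
        executor.shutdown();
    }
}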

Usage

From source file:net.arp7.HdfsPerfTest.WriteFile.java

private static void writeFiles(final Configuration conf, final FileIoStats stats)
        throws InterruptedException, IOException {
    final FileSystem fs = FileSystem.get(conf);
    final AtomicLong filesLeft = new AtomicLong(params.getNumFiles());
    final long runId = abs(rand.nextLong());
    final byte[] data = new byte[params.getIoSize()];
    Arrays.fill(data, (byte) 65);

    // Start the writers.
    final ExecutorService executor = Executors.newFixedThreadPool((int) params.getNumThreads());
    final CompletionService<Object> ecs = new ExecutorCompletionService<>(executor);
    LOG.info("NumFiles=" + params.getNumFiles() + ", FileSize="
            + FileUtils.byteCountToDisplaySize(params.getFileSize()) + ", IoSize="
            + FileUtils.byteCountToDisplaySize(params.getIoSize()) + ", BlockSize="
            + FileUtils.byteCountToDisplaySize(params.getBlockSize()) + ", ReplicationFactor="
            + params.getReplication() + ", isThrottled=" + (params.maxWriteBps() > 0));
    LOG.info("Starting " + params.getNumThreads() + " writer thread" + (params.getNumThreads() > 1 ? "s" : "")
            + ".");
    final long startTime = System.nanoTime();
    for (long t = 0; t < params.getNumThreads(); ++t) {
        final long threadIndex = t;
        Callable<Object> c = new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                long fileIndex = 0;
                while (filesLeft.addAndGet(-1) >= 0) {
                    final String fileName = "WriteFile-" + runId + "-" + (threadIndex + 1) + "-"
                            + (++fileIndex);
                    writeOneFile(new Path(params.getOutputDir(), fileName), fs, data, stats);
                }
                return null;
            }
        };
        ecs.submit(c);
    }

    // And wait for all writers to complete.
    for (long t = 0; t < params.getNumThreads(); ++t) {
        ecs.take();
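        // The returned Future is discarded, so any exception thrown by a writer is not rethrown here.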
    }
    final long endTime = System.nanoTime();
    stats.setElapsedTime(endTime - startTime);
    executor.shutdown();
}

From source file:org.pentaho.platform.plugin.services.metadata.PentahoMetadataDomainRepositoryConcurrencyTest.java

private void runTest(final List<? extends Callable<String>> actors) throws Exception {
    List<String> errors = new ArrayList<String>();
    ExecutorService executorService = Executors.newFixedThreadPool(actors.size());
    try {
        CompletionService<String> completionService = new ExecutorCompletionService<String>(executorService);
        for (Callable<String> reader : actors) {
            completionService.submit(reader);
        }

        for (int i = 0; i < actors.size(); i++) {
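            // take() returns results in completion order; a non-null result is an error message.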
            Future<String> take = completionService.take();
            String result;
            try {
                result = take.get();
            } catch (ExecutionException e) {
                result = "Execution exception: " + e.getMessage();
            }
            if (result != null) {
                errors.add(result);
            }
        }
    } finally {
        executorService.shutdown();
    }

    if (!errors.isEmpty()) {
        StringBuilder builder = new StringBuilder();
        builder.append("The following errors occurred: \n");
        for (String error : errors) {
            builder.append(error).append('\n');
        }
        fail(builder.toString());
    }
}

From source file:pl.edu.icm.cermine.libsvm.SVMParameterFinder.java

public void run(String inputFile, String ext, int threads, int kernel, int degree)
        throws AnalysisException, IOException, TransformationException, CloneNotSupportedException,
        InterruptedException, ExecutionException {
    List<TrainingSample<BxZoneLabel>> samples = getSamples(inputFile, ext);

    ExecutorService executor = Executors.newFixedThreadPool(threads); // size the pool from the threads parameter
    CompletionService<EvaluationParams> completionService = new ExecutorCompletionService<EvaluationParams>(
            executor);

    double bestRate = 0;
    int bestclog = 0;
    int bestglog = 0;

    int submitted = 0;

    for (int clog = -5; clog <= 15; clog++) {
        for (int glog = 3; glog >= -15; glog--) {
            completionService.submit(new Evaluator(samples, new EvaluationParams(clog, glog), kernel, degree));
            submitted++;
        }
    }

    while (submitted > 0) {
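        // Handle each evaluation result as it completes, tracking the best parameters so far.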
        Future<EvaluationParams> f1 = completionService.take();
        EvaluationParams p = f1.get();
        if (p.rate > bestRate) {
            bestRate = p.rate;
            bestclog = p.clog;
            bestglog = p.glog;
        }
        System.out.println("Gamma: " + p.glog + ", C: " + p.clog + ", rate: " + p.rate + " (Best: " + bestglog
                + " " + bestclog + " " + bestRate + ")");
        submitted--;
    }

    executor.shutdown();
}

From source file:pl.edu.icm.cermine.libsvm.parameters.SVMParameterFinder.java

public void run(String inputFile, String ext, int threads, int kernel, int degree, int minc, int maxc, int ming,
        int maxg) throws AnalysisException, InterruptedException, ExecutionException {
    List<TrainingSample<BxZoneLabel>> samples = getSamples(inputFile, ext);

    ExecutorService executor = Executors.newFixedThreadPool(threads); // size the pool from the threads parameter
    CompletionService<EvaluationParams> completionService = new ExecutorCompletionService<EvaluationParams>(
            executor);

    double bestRate = 0;
    int bestclog = 0;
    int bestglog = 0;

    int submitted = 0;

    for (int clog = minc; clog <= maxc; clog++) {
        for (int glog = maxg; glog >= ming; glog--) {
            completionService.submit(new Evaluator(samples, new EvaluationParams(clog, glog), kernel, degree));
            submitted++;
        }
    }

    while (submitted > 0) {
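        // Handle each evaluation result as it completes, tracking the best parameters so far.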
        Future<EvaluationParams> f1 = completionService.take();
        EvaluationParams p = f1.get();
        if (p.rate > bestRate) {
            bestRate = p.rate;
            bestclog = p.clog;
            bestglog = p.glog;
        }
        System.out.println("Gamma: " + p.glog + ", C: " + p.clog + ", rate: " + p.rate + " (Best: " + bestglog
                + " " + bestclog + " " + bestRate + ")");
        submitted--;
    }

    executor.shutdown();
}

From source file:com.laudandjolynn.mytv.crawler.CrawlerGroup.java

@Override
public List<TvStation> crawlAllTvStation() {
    List<TvStation> resultList = new ArrayList<TvStation>();
    int size = crawlers.size();
    int maxThreadNum = Constant.CPU_PROCESSOR_NUM;
    ThreadFactory threadFactory = new BasicThreadFactory.Builder().namingPattern("Mytv_CrawlerGroup_%d")
            .build();
    ExecutorService executorService = Executors.newFixedThreadPool(size > maxThreadNum ? maxThreadNum : size,
            threadFactory);
    CompletionService<List<TvStation>> completionService = new ExecutorCompletionService<List<TvStation>>(
            executorService);
    for (final Crawler crawler : crawlers) {
        Callable<List<TvStation>> task = new Callable<List<TvStation>>() {
            @Override
            public List<TvStation> call() throws Exception {
                return crawler.crawlAllTvStation();
            }
        };
        completionService.submit(task);
    }
    executorService.shutdown();
    int count = 0;
    while (count < size) {
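        // Collect each crawler's station list as its task completes.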
        try {
            List<TvStation> stationList = completionService.take().get();
            if (stationList != null) {
                resultList.addAll(stationList);
            }
        } catch (InterruptedException e) {
            logger.error("crawl task of all tv station interrupted.", e);
        } catch (ExecutionException e) {
            logger.error("crawl task of all tv station executed fail.", e);
        }
        count++;
    }

    for (CrawlEventListener listener : listeners) {
        listener.crawlEnd(new AllTvStationCrawlEndEvent(this, resultList));
    }
    return resultList;
}

From source file:com.opengamma.integration.viewer.status.impl.ViewStatusCalculationWorker.java

public ViewStatusResultAggregator run() {
    ViewStatusResultAggregator aggregator = new ViewStatusResultAggregatorImpl();
    CompletionService<PerViewStatusResult> completionService = new ExecutorCompletionService<PerViewStatusResult>(
            _executor);
    // submit tasks to the executor, partitioned by security type
    for (String securityType : _valueRequirementBySecType.keySet()) {
        Collection<String> valueRequirements = _valueRequirementBySecType.get(securityType);
        completionService.submit(new ViewStatusCalculationTask(_toolContext, _portfolioId, _user, securityType,
                valueRequirements, _marketDataSpecification));
    }
    try {
        // process each task as it completes
        for (int i = 0; i < _valueRequirementBySecType.size(); i++) {
            Future<PerViewStatusResult> futureTask = completionService.take();
            PerViewStatusResult perViewStatusResult = futureTask.get();
            for (ViewStatusKey viewStatusKey : perViewStatusResult.keySet()) {
                aggregator.putStatus(viewStatusKey, perViewStatusResult.get(viewStatusKey));
            }

        }
    } catch (InterruptedException ex) {
        Thread.currentThread().interrupt();
    } catch (ExecutionException ex) {
        throw new OpenGammaRuntimeException("Error running View status report", ex.getCause());
    }
    return aggregator;
}

From source file:com.appdynamics.monitors.hadoop.communicator.AmbariCommunicator.java

/**
 * Populates the <code>metrics</code> map with all numeric Ambari cluster metrics.
 * @see #getClusterMetrics(java.io.Reader)
 *
 * @param metrics
 */
public void populate(Map<String, Object> metrics) {
    this.metrics = metrics;
    try {
        Reader response = (new Response("http://" + host + ":" + port + "/api/v1/clusters")).call();

        Map<String, Object> json = (Map<String, Object>) parser.parse(response, simpleContainer);
        try {
            List<Map> clusters = (ArrayList<Map>) json.get("items");

            CompletionService<Reader> threadPool = new ExecutorCompletionService<Reader>(executor);
            int count = 0;
            for (Map cluster : clusters) {
                if (xmlParser.isIncludeCluster((String) ((Map) cluster.get("Clusters")).get("cluster_name"))) {
                    threadPool.submit(new Response(cluster.get("href") + CLUSTER_FIELDS));
                    count++;
                }
            }
            for (; count > 0; count--) {
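                // Block for the next completed response and parse its cluster metrics.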
                getClusterMetrics(threadPool.take().get());
            }
        } catch (Exception e) {
            logger.error("Failed to parse cluster names: " + stackTraceToString(e));
        }
    } catch (Exception e) {
        logger.error("Failed to get response for cluster names: " + stackTraceToString(e));
    }
    executor.shutdown();
}

From source file:com.appdynamics.monitors.hadoop.communicator.AmbariCommunicator.java

/**
 * Parses a JSON Reader object as cluster metrics and collects service and host metrics.
 * @see #getServiceMetrics(java.io.Reader, String)
 * @see #getHostMetrics(java.io.Reader, String)
 *
 * @param response
 */
private void getClusterMetrics(Reader response) {
    try {
        Map<String, Object> json = (Map<String, Object>) parser.parse(response, simpleContainer);
        try {
            String clusterName = (String) ((Map) json.get("Clusters")).get("cluster_name");
            List<Map> services = (ArrayList<Map>) json.get("services");
            List<Map> hosts = (ArrayList<Map>) json.get("hosts");

            CompletionService<Reader> threadPool = new ExecutorCompletionService<Reader>(executor);
            int count = 0;
            for (Map service : services) {
                if (xmlParser
                        .isIncludeService((String) ((Map) service.get("ServiceInfo")).get("service_name"))) {
                    threadPool.submit(new Response(service.get("href") + SERVICE_FIELDS));
                    count++;
                }
            }
            for (; count > 0; count--) {
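                // Consume service responses in completion order.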
                getServiceMetrics(threadPool.take().get(), clusterName + "|services");
            }

            for (Map host : hosts) {
                if (xmlParser.isIncludeHost((String) ((Map) host.get("Hosts")).get("host_name"))) {
                    threadPool.submit(new Response(host.get("href") + HOST_FIELDS));
                    count++;
                }
            }
            for (; count > 0; count--) {
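                // Consume host responses in completion order.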
                getHostMetrics(threadPool.take().get(), clusterName + "|hosts");
            }
        } catch (Exception e) {
            logger.error("Failed to parse cluster metrics: " + stackTraceToString(e));
        }
    } catch (Exception e) {
        logger.error("Failed to get response for cluster metrics: " + stackTraceToString(e));
    }
}

From source file:io.scigraph.owlapi.loader.BatchOwlLoader.java

public void loadOntology() throws InterruptedException, ExecutionException {
    CompletionService<Long> completionService = new ExecutorCompletionService<Long>(exec);
    Set<Future<?>> futures = new HashSet<>();
    if (!ontologies.isEmpty()) {
        for (int i = 0; i < numConsumers; i++) {
            futures.add(completionService.submit(consumerProvider.get()));
        }
        for (int i = 0; i < numProducers; i++) {
            futures.add(completionService.submit(producerProvider.get()));
        }
        for (OntologySetup ontology : ontologies) {
            urlQueue.offer(ontology);
        }
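        // One poison pill per producer signals end-of-input.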
        for (int i = 0; i < numProducers; i++) {
            urlQueue.offer(POISON_STR);
        }
    }

    while (futures.size() > 0) {
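        // Reap tasks as they finish, failing fast on the first error.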
        Future<?> completedFuture = completionService.take();
        futures.remove(completedFuture);
        try {
            completedFuture.get();
        } catch (ExecutionException e) {
            logger.log(Level.SEVERE, "Stopping batchLoading due to: " + e.getMessage(), e);
            e.printStackTrace();
            exec.shutdownNow();
            throw new InterruptedException(e.getCause().getMessage());
        }
    }

    exec.shutdown();
    exec.awaitTermination(10, TimeUnit.DAYS);
    graph.shutdown();
    logger.info("Postprocessing...");
    postprocessorProvider.get().postprocess();

    if (cliqueConfiguration.isPresent()) {
        postprocessorProvider.runCliquePostprocessor(cliqueConfiguration.get());
    }

    postprocessorProvider.shutdown();

}

From source file:com.laudandjolynn.mytv.proxy.MyTvProxyManager.java

public void prepareProxies(ProxyProvider... providers) {
    int length = providers == null ? 0 : providers.length;
    if (length > 0) {
        int maxThreadNum = Constant.CPU_PROCESSOR_NUM;
        ThreadFactory threadFactory = new BasicThreadFactory.Builder().namingPattern("MyTv_Find_Proxies_%d")
                .build();
        ExecutorService executorService = Executors
                .newFixedThreadPool(length > maxThreadNum ? maxThreadNum : length, threadFactory);
        CompletionService<List<Proxy>> completionService = new ExecutorCompletionService<List<Proxy>>(
                executorService);
        providerList.clear();
        for (int i = 0; i < length; i++) {
            final ProxyProvider provider = providers[i];
            providerList.add(provider);
            completionService.submit(new Callable<List<Proxy>>() {

                @Override
                public List<Proxy> call() throws Exception {
                    return provider.getProxies();
                }
            });
        }
        executorService.shutdown();

        int count = 0;
        List<Proxy> resultList = new ArrayList<Proxy>();
        while (count < length) {
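            // Gather each provider's proxy list as its task finishes.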
            try {
                Future<List<Proxy>> future = completionService.take();
                List<Proxy> proxies = future.get();
                if (proxies != null) {
                    resultList.addAll(proxies);
                }
            } catch (InterruptedException e) {
                logger.error("get proxies thread has interrupted.", e);
            } catch (ExecutionException e) {
                logger.error("get proxies thread has execution fail.", e);
            }
            count++;
        }
        resultList.add(LOCALHOST_PROXY);
        PROXY_QUEUE.clear();
        PROXY_QUEUE.addAll(resultList);
    }
}