Example usage for java.util.concurrent Executor execute

Introduction

On this page you can find example usages of java.util.concurrent.Executor#execute.

Prototype

void execute(Runnable command);

Document

Executes the given command at some time in the future. The command may execute in a new thread, in a pooled thread, or in the calling thread, at the discretion of the Executor implementation.

Usage
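
Before the project examples below, here is a minimal, self-contained sketch of the call; the pool size and the task are arbitrary:

import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class ExecuteExample {
    public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        Executor executor = pool;

        // execute() accepts a Runnable and returns nothing; whether the task runs
        // on a pooled thread, a new thread, or the calling thread is up to the
        // Executor implementation.
        executor.execute(() -> System.out.println("ran on " + Thread.currentThread().getName()));

        pool.shutdown(); // previously submitted tasks still complete
    }
}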

From source file:io.atomix.cluster.messaging.impl.NettyMessagingService.java

private <T> void executeOnPooledConnection(Address address, String type,
        Function<ClientConnection, CompletableFuture<T>> callback, Executor executor,
        CompletableFuture<T> future) {
    if (address.equals(localAddress)) {
        callback.apply(localClientConnection).whenComplete((result, error) -> {
            if (error == null) {
                executor.execute(() -> future.complete(result));
            } else {
                executor.execute(() -> future.completeExceptionally(error));
            }
        });
        return;
    }

    getChannel(address, type).whenComplete((channel, channelError) -> {
        if (channelError == null) {
            final ClientConnection connection = getOrCreateRemoteClientConnection(channel);
            callback.apply(connection).whenComplete((result, sendError) -> {
                if (sendError == null) {
                    executor.execute(() -> future.complete(result));
                } else {
                    final Throwable cause = Throwables.getRootCause(sendError);
                    if (!(cause instanceof TimeoutException) && !(cause instanceof MessagingException)) {
                        channel.close().addListener(f -> {
                            connection.close();
                            clientConnections.remove(channel);
                        });
                    }
                    executor.execute(() -> future.completeExceptionally(sendError));
                }
            });
        } else {
            executor.execute(() -> future.completeExceptionally(channelError));
        }
    });
}
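
The design choice worth noting above is that the future is never completed on the Netty I/O thread: every completion is bounced through the caller-supplied executor, so downstream whenComplete callbacks stay off the event loop. A minimal sketch of that pattern (the helper name is illustrative, not part of Atomix):

static <T> void completeOn(Executor executor, CompletableFuture<T> future, T result, Throwable error) {
    if (error == null) {
        executor.execute(() -> future.complete(result));
    } else {
        executor.execute(() -> future.completeExceptionally(error));
    }
}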

From source file:byps.test.TestRemoteServerR.java

public void testSimultan() throws InterruptedException {
    Executor tpool = Executors.newCachedThreadPool();
    int nbOfThreads = 5;
    final CountDownLatch cdl = new CountDownLatch(nbOfThreads);
    for (int t = 0; t < nbOfThreads; t++) {
        Runnable run = new Runnable() {
            public void run() {
                TestRemoteServerR testObj = new TestRemoteServerR();
                try {
                    testObj.testLoop10();
                } catch (Throwable e) {
                    log.error("exception", e);
                } finally {
                    cdl.countDown();
                }
            }
        };
        tpool.execute(run);
    }
    cdl.await();
}
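
Note that the pool is declared as Executor, so shutdown() (defined on ExecutorService) is not reachable through tpool; the test simply lets the cached pool's idle threads time out on their own. Declaring the variable as ExecutorService and calling shutdown() after cdl.await() would release the threads deterministically.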

From source file:org.apache.axis2.jaxws.server.dispatcher.ProviderDispatcher.java

public void invokeOneWay(MessageContext request) {
    if (log.isDebugEnabled()) {
        log.debug("Preparing to invoke javax.xml.ws.Provider based endpoint");
        log.debug("Invocation pattern: one way");
    }

    initialize(request);

    providerInstance = getProviderInstance();

    Object param = createRequestParameters(request);

    if (log.isDebugEnabled()) {
        Class providerType = getProviderType();
        final Object input = providerType.cast(param);
        log.debug("Invoking Provider<" + providerType.getName() + ">");
        if (input != null) {
            log.debug("Parameter type: " + input.getClass().getName());
        } else {
            log.debug("Parameter is NULL");
        }
    }

    ExecutorFactory ef = (ExecutorFactory) FactoryRegistry.getFactory(ExecutorFactory.class);
    Executor executor = ef.getExecutorInstance(ExecutorFactory.SERVER_EXECUTOR);

    // If the property has been set to disable thread switching, then we can 
    // do so by using a SingleThreadedExecutor instance to continue processing
    // work on the existing thread.
    Boolean disable = (Boolean) request.getProperty(ServerConstants.SERVER_DISABLE_THREAD_SWITCH);
    if (disable != null && disable.booleanValue()) {
        if (log.isDebugEnabled()) {
            log.debug("Server side thread switch disabled.  Setting Executor to the SingleThreadedExecutor.");
        }
        executor = new SingleThreadedExecutor();
    }

    Method m = getJavaMethod();
    Object[] params = new Object[] { param };

    EndpointInvocationContext eic = (EndpointInvocationContext) request.getInvocationContext();
    ClassLoader cl = Thread.currentThread().getContextClassLoader();

    AsyncInvocationWorker worker = new AsyncInvocationWorker(m, params, cl, eic);
    FutureTask task = new FutureTask<AsyncInvocationWorker>(worker);
    executor.execute(task);

    return;
}
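
Per the comment above, SingleThreadedExecutor exists to keep the work on the calling thread. Assuming it behaves like a direct executor (a sketch, not the actual Axis2 source), the smallest equivalent is:

public class SameThreadExecutor implements Executor {
    @Override
    public void execute(Runnable command) {
        command.run(); // run synchronously on the caller's thread
    }
}

With Java 8 this collapses to Executor sameThread = Runnable::run;. The same executor selection is repeated in invokeAsync below, so the note applies there as well.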

From source file:org.apache.axis2.jaxws.server.dispatcher.ProviderDispatcher.java

public void invokeAsync(MessageContext request, EndpointCallback callback) {
    if (log.isDebugEnabled()) {
        log.debug("Preparing to invoke javax.xml.ws.Provider based endpoint");
        log.debug("Invocation pattern: two way, async");
    }

    initialize(request);

    providerInstance = getProviderInstance();

    Object param = createRequestParameters(request);

    if (log.isDebugEnabled()) {
        Class providerType = getProviderType();
        final Object input = providerType.cast(param);
        log.debug("Invoking Provider<" + providerType.getName() + ">");
        if (input != null) {
            log.debug("Parameter type: " + input.getClass().getName());
        } else {
            log.debug("Parameter is NULL");
        }
    }

    ExecutorFactory ef = (ExecutorFactory) FactoryRegistry.getFactory(ExecutorFactory.class);
    Executor executor = ef.getExecutorInstance(ExecutorFactory.SERVER_EXECUTOR);

    // If the property has been set to disable thread switching, then we can 
    // do so by using a SingleThreadedExecutor instance to continue processing
    // work on the existing thread.
    Boolean disable = (Boolean) request.getProperty(ServerConstants.SERVER_DISABLE_THREAD_SWITCH);
    if (disable != null && disable.booleanValue()) {
        if (log.isDebugEnabled()) {
            log.debug("Server side thread switch disabled.  Setting Executor to the SingleThreadedExecutor.");
        }
        executor = new SingleThreadedExecutor();
    }

    Method m = getJavaMethod();
    Object[] params = new Object[] { param };

    EndpointInvocationContext eic = (EndpointInvocationContext) request.getInvocationContext();
    ClassLoader cl = Thread.currentThread().getContextClassLoader();

    AsyncInvocationWorker worker = new AsyncInvocationWorker(m, params, cl, eic);
    FutureTask task = new FutureTask<AsyncInvocationWorker>(worker);
    executor.execute(task);

    return;
}

From source file:org.apache.bookkeeper.mledger.impl.ManagedLedgerTest.java

/**
 * Reproduce a race condition between opening cursors and concurrent mark delete operations
 */
@Test(timeOut = 20000)
public void testOpenRaceCondition() throws Exception {
    ManagedLedgerConfig config = new ManagedLedgerConfig();
    config.setEnsembleSize(2).setAckQuorumSize(2).setMetadataEnsembleSize(2);
    final ManagedLedger ledger = factory.open("my-ledger", config);
    final ManagedCursor c1 = ledger.openCursor("c1");

    final int N = 1000;
    final Position position = ledger.addEntry("entry-0".getBytes());
    Executor executor = Executors.newCachedThreadPool();
    final CountDownLatch counter = new CountDownLatch(2);
    executor.execute(new Runnable() {
        @Override
        public void run() {
            try {
                for (int i = 0; i < N; i++) {
                    c1.markDelete(position);
                }
                counter.countDown();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });

    executor.execute(new Runnable() {
        @Override
        public void run() {
            try {
                for (int i = 0; i < N; i++) {
                    ledger.openCursor("cursor-" + i);
                }
                counter.countDown();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });

    // If the race condition exists, this method will not complete, triggering the test timeout
    counter.await();
}

From source file:com.jaspersoft.jasperserver.api.engine.jasperreports.service.impl.EngineServiceImpl.java

protected ReportUnitResult fillReport(ExecutionContext context, ReportUnitRequestBase request,
        ReportUnit reportUnit, boolean inMemoryUnit) {
    ReportExecutionListener executionListener = createReportExecutionListener(request);

    boolean asynchronous = request.isAsynchronous();
    ReportFiller filler = createReportFiller(asynchronous);
    filler.setJasperReportsContext(request.getJasperReportsContext());

    Executor executor = getReportExecutor(asynchronous);
    if (log.isDebugEnabled()) {
        log.debug("Running report " + reportUnit.getURIString() + " on " + executor);
    }

    executionListener.init();
    ReportFill reportFill = new ReportFill(context, request, reportUnit, inMemoryUnit, filler,
            executionListener);
    executor.execute(reportFill);

    // blocking for the result
    JasperPrintAccessor reportAccessor;
    try {
        reportAccessor = filler.getResult();
    } catch (RuntimeException e) {
        throw e;
    } catch (Throwable e) {
        throw new JSException(e);
    }

    if (log.isDebugEnabled()) {
        log.debug("Returning fill result " + reportAccessor);
    }

    Map params = request.getReportParameters();
    JRVirtualizer virtualizer = params == null ? null
            : ((JRVirtualizer) params.get(JRParameter.REPORT_VIRTUALIZER));

    ReportUnitResult result = new ReportUnitResult(reportUnit.getURIString(), reportAccessor, virtualizer);
    result.setRequestId(request.getId());
    result.setDataTimestamp(filler.getDataTimestamp());
    result.setReportContext(request.getReportContext());
    result.setPaginated(filler.isPaginated());
    return result;
}
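
The handoff here is execute-then-block: the ReportFill is handed to whichever executor getReportExecutor(asynchronous) returns, and the calling thread then waits in filler.getResult(). Presumably the synchronous variant is a direct executor, in which case execute() runs the fill inline and getResult() merely retrieves the finished output; in the asynchronous case getResult() blocks until the filler running on the other thread produces a result.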

From source file:org.apache.solr.request.SimpleFacets.java

/**
 * Returns a list of value constraints and the associated facet counts 
 * for each facet field specified in the params.
 *
 * @see FacetParams#FACET_FIELD
 * @see #getFieldMissingCount
 * @see #getFacetTermEnumCounts
 */
@SuppressWarnings("unchecked")
public NamedList<Object> getFacetFieldCounts() throws IOException, SyntaxError {

    NamedList<Object> res = new SimpleOrderedMap<>();
    String[] facetFs = global.getParams(FacetParams.FACET_FIELD);
    if (null == facetFs) {
        return res;
    }

    // Passing a negative number for FACET_THREADS implies an unlimited number of threads is acceptable.
    // Also, a subtlety of directExecutor is that no matter how many times you "submit" a job, it's really
    // just a method call in that it's run by the calling thread.
    int maxThreads = req.getParams().getInt(FacetParams.FACET_THREADS, 0);
    Executor executor = maxThreads == 0 ? directExecutor : facetExecutor;
    final Semaphore semaphore = new Semaphore((maxThreads <= 0) ? Integer.MAX_VALUE : maxThreads);
    List<Future<NamedList>> futures = new ArrayList<>(facetFs.length);

    if (fdebugParent != null) {
        fdebugParent.putInfoItem("maxThreads", maxThreads);
    }

    try {
        //Loop over fields; submit to executor, keeping the future
        for (String f : facetFs) {
            if (fdebugParent != null) {
                fdebug = new FacetDebugInfo();
                fdebugParent.addChild(fdebug);
            }
            final ParsedParams parsed = parseParams(FacetParams.FACET_FIELD, f);
            final SolrParams localParams = parsed.localParams;
            final String termList = localParams == null ? null : localParams.get(CommonParams.TERMS);
            final String key = parsed.key;
            final String facetValue = parsed.facetValue;
            Callable<NamedList> callable = () -> {
                try {
                    NamedList<Object> result = new SimpleOrderedMap<>();
                    if (termList != null) {
                        List<String> terms = StrUtils.splitSmart(termList, ",", true);
                        result.add(key, getListedTermCounts(facetValue, parsed, terms));
                    } else {
                        result.add(key, getTermCounts(facetValue, parsed));
                    }
                    return result;
                } catch (SolrException se) {
                    throw se;
                } catch (Exception e) {
                    throw new SolrException(ErrorCode.SERVER_ERROR,
                            "Exception during facet.field: " + facetValue, e);
                } finally {
                    semaphore.release();
                }
            };

            RunnableFuture<NamedList> runnableFuture = new FutureTask<>(callable);
            semaphore.acquire();//may block and/or interrupt
            executor.execute(runnableFuture);//releases semaphore when done
            futures.add(runnableFuture);
        } //facetFs loop

        //Loop over futures to get the values. The order is the same as facetFs but shouldn't matter.
        for (Future<NamedList> future : futures) {
            res.addAll(future.get());
        }
        assert semaphore.availablePermits() >= maxThreads;
    } catch (InterruptedException e) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                "Error while processing facet fields: InterruptedException", e);
    } catch (ExecutionException ee) {
        Throwable e = ee.getCause();//unwrap
        if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
        }
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                "Error while processing facet fields: " + e.toString(), e);
    }

    return res;
}
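
Two details above deserve a closer look: directExecutor runs each "submitted" task inline on the calling thread, and the semaphore caps in-flight tasks without needing a bounded pool. A sketch of both pieces (the variable names are assumptions, not Solr's actual fields):

// A direct executor: execute() is just a method call on the caller's thread.
Executor directExecutor = Runnable::run;

// Throttling: acquire a permit before handing the task to the executor; the
// callable releases it in its finally block, so at most maxThreads tasks are
// ever in flight.
Semaphore semaphore = new Semaphore(maxThreads);
semaphore.acquire();          // blocks once maxThreads tasks are running
executor.execute(task);       // the task calls semaphore.release() when done

With the direct executor the semaphore never actually blocks, since each task completes (and releases its permit) before the loop can acquire again.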

From source file:com.nttec.everychan.cache.BitmapCache.java

/**
 * Asynchronously loads a bitmap into an {@link ImageView}.<br>
 * The tag of the ImageView ends up as one of:<ul>
 * <li>the hash string while the request is still in progress</li>
 * <li>{@link Boolean#TRUE} if the bitmap was set successfully</li>
 * <li>{@link Boolean#FALSE} if the bitmap could not be obtained (e.g. when downloadFromInternet == false)</li></ul>
 * @param hash unique hash of the image (used as the cache key)
 * @param url URL to download the image from
 * @param maxSize maximum size the bitmap is scaled down to, or 0 to keep the original size
 * @param chan chan module used to download the image
 * @param zipFile readable container to look the image up in first (may be null)
 * @param task cancellable task; if it is cancelled, the result is discarded
 * @param imageView the {@link ImageView} the bitmap is set on
 * @param executor executor the download runs on
 * @param handler Handler of the UI thread
 * @param downloadFromInternet whether the image may be downloaded from the network
 * @param defaultResId resource ID of the placeholder image set on failure (e.g. when
 *  downloadFromInternet == false), or 0 to set no placeholder
 */
public void asyncGet(String hash, String url, int maxSize, ChanModule chan, ReadableContainer zipFile,
        CancellableTask task, ImageView imageView, Executor executor, Handler handler,
        boolean downloadFromInternet, int defaultResId) {
    if (hash == null) {
        Logger.e(TAG, "received null hash for url: " + url);
        imageView.setTag(Boolean.FALSE);
        imageView.setImageResource(defaultResId);
        return;
    }
    Bitmap fromLru = getFromMemory(hash);
    if (fromLru != null) {
        imageView.setTag(Boolean.TRUE);
        imageView.setImageBitmap(fromLru);
        return;
    } else {
        imageView.setImageBitmap(EMPTY_BMP);
    }
    class ImageDownloader implements Runnable {
        private final String hash;
        private final String url;
        private final int maxSize;
        private final ChanModule chan;
        private final ReadableContainer zipFile;
        private final CancellableTask task;
        private final ImageView imageView;
        private final Handler handler;
        private final boolean downloadFromInternet;
        private final int defaultResId;

        public ImageDownloader(String hash, String url, int maxSize, ChanModule chan, ReadableContainer zipFile,
                CancellableTask task, ImageView imageView, Handler handler, boolean downloadFromInternet,
                int defaultResId) {
            this.hash = hash;
            this.url = url;
            this.maxSize = maxSize;
            this.chan = chan;
            this.zipFile = zipFile;
            this.task = task;
            this.imageView = imageView;
            this.handler = handler;
            this.downloadFromInternet = downloadFromInternet;
            this.defaultResId = defaultResId;
        }

        private Bitmap bmp;

        @Override
        public void run() {
            bmp = getFromCache(hash);
            if (bmp == null && zipFile != null)
                bmp = getFromContainer(hash, zipFile);
            if (bmp == null && downloadFromInternet) {
                bmp = download(hash, url, maxSize, chan, task);
            }
            if (task != null && task.isCancelled())
                return;
            if (imageView.getTag() == null || !imageView.getTag().equals(hash))
                return;
            if (bmp == null) {
                if (defaultResId == 0) {
                    imageView.setTag(Boolean.FALSE);
                    return;
                }
            }
            handler.post(new Runnable() {
                @Override
                public void run() {
                    try {
                        if (imageView.getTag() == null || !imageView.getTag().equals(hash))
                            return;
                        if (bmp != null) {
                            imageView.setTag(Boolean.TRUE);
                            imageView.setImageBitmap(bmp);
                        } else {
                            imageView.setTag(Boolean.FALSE);
                            imageView.setImageResource(defaultResId);
                        }
                    } catch (OutOfMemoryError oom) {
                        MainApplication.freeMemory();
                        Logger.e(TAG, oom);
                    }
                }
            });
        }
    }
    if (task != null && task.isCancelled())
        return;
    imageView.setTag(hash);
    executor.execute(new ImageDownloader(hash, url, maxSize, chan, zipFile, task, imageView, handler,
            downloadFromInternet, defaultResId));
}
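
The tag checks are the standard guard against recycled views: the hash is written to the ImageView's tag before the task is queued, and both the worker and the posted UI runnable re-check that the tag still equals that hash before touching the view. If the view has meanwhile been reused for a different image, the stale result is silently dropped.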

From source file:com.linkedin.pinot.integration.tests.BaseClusterIntegrationTest.java

public static Future<Map<File, File>> buildSegmentsFromAvro(final List<File> avroFiles, Executor executor,
        int baseSegmentIndex, final File baseDirectory, final File segmentTarDir, final String tableName,
        final boolean createStarTreeIndex, final com.linkedin.pinot.common.data.Schema inputPinotSchema) {
    int segmentCount = avroFiles.size();
    LOGGER.info("Building " + segmentCount + " segments in parallel");
    List<ListenableFutureTask<Pair<File, File>>> futureTasks = new ArrayList<ListenableFutureTask<Pair<File, File>>>();

    for (int i = 1; i <= segmentCount; ++i) {
        final int segmentIndex = i - 1;
        final int segmentNumber = i + baseSegmentIndex;

        final ListenableFutureTask<Pair<File, File>> buildSegmentFutureTask = ListenableFutureTask
                .<Pair<File, File>>create(new Callable<Pair<File, File>>() {
                    @Override
                    public Pair<File, File> call() throws Exception {
                        try {
                            // Build segment
                            LOGGER.info("Starting to build segment " + segmentNumber);
                            File outputDir = new File(baseDirectory, "segment-" + segmentNumber);
                            final File inputAvroFile = avroFiles.get(segmentIndex);
                            final SegmentGeneratorConfig genConfig = SegmentTestUtils
                                    .getSegmentGenSpecWithSchemAndProjectedColumns(inputAvroFile, outputDir,
                                            TimeUnit.DAYS, tableName, inputPinotSchema);

                            if (inputPinotSchema != null) {
                                genConfig.setSchema(inputPinotSchema);
                            }

                            // jfim: We add a space and a special character as a regression test for
                            // PINOT-3296 (segments with spaces in their filenames didn't work properly)
                            genConfig.setSegmentNamePostfix(Integer.toString(segmentNumber) + " %");
                            genConfig.setEnableStarTreeIndex(createStarTreeIndex);

                            // Enable off heap star tree format in the integration test.
                            StarTreeIndexSpec starTreeIndexSpec = null;
                            if (createStarTreeIndex) {
                                starTreeIndexSpec = new StarTreeIndexSpec();
                                starTreeIndexSpec.setEnableOffHeapFormat(true);
                            }
                            genConfig.setStarTreeIndexSpec(starTreeIndexSpec);

                            final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
                            driver.init(genConfig);
                            driver.build();

                            // Tar segment
                            String segmentName = outputDir.list()[0];
                            final String tarGzPath = TarGzCompressionUtils.createTarGzOfDirectory(
                                    outputDir.getAbsolutePath() + "/" + segmentName,
                                    new File(segmentTarDir, segmentName).getAbsolutePath());
                            LOGGER.info("Completed segment " + segmentNumber + " : " + segmentName
                                    + " from file " + inputAvroFile.getName());
                            return new ImmutablePair<File, File>(inputAvroFile, new File(tarGzPath));
                        } catch (Exception e) {
                            LOGGER.error("Exception while building segment input: {} output {} ",
                                    avroFiles.get(segmentIndex), "segment-" + segmentNumber);
                            throw new RuntimeException(e);
                        }
                    }
                });

        futureTasks.add(buildSegmentFutureTask);
        executor.execute(buildSegmentFutureTask);
    }

    ListenableFuture<List<Pair<File, File>>> pairListFuture = Futures.allAsList(futureTasks);
    return Futures.transform(pairListFuture, new AsyncFunction<List<Pair<File, File>>, Map<File, File>>() {
        @Override
        public ListenableFuture<Map<File, File>> apply(List<Pair<File, File>> input) throws Exception {
            Map<File, File> avroToSegmentMap = new HashMap<File, File>();
            for (Pair<File, File> avroToSegmentPair : input) {
                avroToSegmentMap.put(avroToSegmentPair.getLeft(), avroToSegmentPair.getRight());
            }
            return Futures.immediateFuture(avroToSegmentMap);
        }
    });
}
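
Guava's ListenableFutureTask fits this pattern because it extends FutureTask: it is a plain Runnable that any Executor can execute(), while also being a ListenableFuture that Futures.allAsList can aggregate. That is what lets the code fan segment builds out over a caller-supplied Executor and still hand back a single Future for the whole batch.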