Example usage for java.util.concurrent Future isDone

List of usage examples for java.util.concurrent Future isDone

Introduction

On this page you can find example usage for java.util.concurrent Future isDone.

Prototype

boolean isDone();

Document

Returns true if this task completed. Completion may be due to normal termination, an exception, or cancellation; in all of these cases, this method will return true.
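
As a quick orientation before the project examples below, here is a minimal, self-contained sketch (the class name and task are illustrative, not taken from any of the projects listed under Usage) that submits a task and polls isDone() before retrieving the result:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class IsDoneExample {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        // isDone() returns true once the task has completed, whether it
        // finished normally, threw an exception, or was cancelled.
        Future<String> future = pool.submit(() -> {
            Thread.sleep(200);
            return "finished";
        });

        while (!future.isDone()) {
            Thread.sleep(10); // polling; a timed future.get(timeout, unit) is usually preferable
        }
        System.out.println("Result: " + future.get());
        pool.shutdown();
    }
}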

Usage

From source file:com.opentable.etcd.SmokeTest.java

@Test
public void testWatchPrefix() throws Exception {
    String key = prefix + "/watch";

    EtcdResult result = this.client.set(key + "/f2", "f2");
    Assert.assertTrue(!result.isError());
    Assert.assertNotNull(result.node);
    Assert.assertEquals("f2", result.node.value);

    final long watchIndex = result.node.modifiedIndex + 1;
    Future<EtcdResult> watchFuture = backgroundThread.submit(() -> {
        return this.client.watch(key, watchIndex, true);
    });
    try {
        EtcdResult watchResult = watchFuture.get(100, TimeUnit.MILLISECONDS);
        Assert.fail("Subtree watch fired unexpectedly: " + watchResult);
    } catch (TimeoutException e) {
        // Expected
    }

    Assert.assertFalse(watchFuture.isDone());

    result = this.client.set(key + "/f1", "f1");
    Assert.assertTrue(!result.isError());
    Assert.assertNotNull(result.node);
    Assert.assertEquals("f1", result.node.value);

    EtcdResult watchResult = watchFuture.get(100, TimeUnit.MILLISECONDS);

    Assert.assertNotNull(watchResult);
    Assert.assertTrue(!watchResult.isError());
    Assert.assertNotNull(watchResult.node);

    {
        Assert.assertEquals(key + "/f1", watchResult.node.key);
        Assert.assertEquals("f1", watchResult.node.value);
        Assert.assertEquals("set", watchResult.action);
        Assert.assertNull(result.prevNode);
        Assert.assertEquals(result.node.modifiedIndex, watchResult.node.modifiedIndex);
    }
}

From source file:org.apache.hadoop.yarn.util.TestFSDownload.java

@Test(timeout = 10000)
public void testDownload() throws IOException, URISyntaxException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
    FileContext files = FileContext.getLocalFSFileContext(conf);
    final Path basedir = files.makeQualified(new Path("target", TestFSDownload.class.getSimpleName()));
    files.mkdir(basedir, null, true);
    conf.setStrings(TestFSDownload.class.getName(), basedir.toString());

    Map<LocalResource, LocalResourceVisibility> rsrcVis = new HashMap<LocalResource, LocalResourceVisibility>();

    Random rand = new Random();
    long sharedSeed = rand.nextLong();
    rand.setSeed(sharedSeed);
    System.out.println("SEED: " + sharedSeed);

    Map<LocalResource, Future<Path>> pending = new HashMap<LocalResource, Future<Path>>();
    ExecutorService exec = Executors.newSingleThreadExecutor();
    LocalDirAllocator dirs = new LocalDirAllocator(TestFSDownload.class.getName());
    int[] sizes = new int[10];
    for (int i = 0; i < 10; ++i) {
        sizes[i] = rand.nextInt(512) + 512;
        LocalResourceVisibility vis = LocalResourceVisibility.PRIVATE;
        if (i % 2 == 1) {
            vis = LocalResourceVisibility.APPLICATION;
        }
        Path p = new Path(basedir, "" + i);
        LocalResource rsrc = createFile(files, p, sizes[i], rand, vis);
        rsrcVis.put(rsrc, vis);
        Path destPath = dirs.getLocalPathForWrite(basedir.toString(), sizes[i], conf);
        destPath = new Path(destPath, Long.toString(uniqueNumberGenerator.incrementAndGet()));
        FSDownload fsd = new FSDownload(files, UserGroupInformation.getCurrentUser(), conf, destPath, rsrc);
        pending.put(rsrc, exec.submit(fsd));
    }

    exec.shutdown();
    while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS))
        ;
    for (Future<Path> path : pending.values()) {
        Assert.assertTrue(path.isDone());
    }

    try {
        for (Map.Entry<LocalResource, Future<Path>> p : pending.entrySet()) {
            Path localized = p.getValue().get();
            assertEquals(sizes[Integer.parseInt(localized.getName())], p.getKey().getSize());

            FileStatus status = files.getFileStatus(localized.getParent());
            FsPermission perm = status.getPermission();
            assertEquals("Cache directory permissions are incorrect", new FsPermission((short) 0755), perm);

            status = files.getFileStatus(localized);
            perm = status.getPermission();
            System.out
                    .println("File permission " + perm + " for rsrc vis " + p.getKey().getVisibility().name());
            assert (rsrcVis.containsKey(p.getKey()));
            Assert.assertTrue("Private file should be 500",
                    perm.toShort() == FSDownload.PRIVATE_FILE_PERMS.toShort());
        }
    } catch (ExecutionException e) {
        throw new IOException("Failed exec", e);
    }
}

From source file:nl.uva.sne.disambiguators.Wikidata.java

private Map<String, List<String>> getCategories(Set<Term> terms)
        throws MalformedURLException, InterruptedException, ExecutionException {
    Map<String, List<String>> cats = new HashMap<>();

    if (terms.size() > 0) {
        int maxT = 2;
        BlockingQueue<Runnable> workQueue = new ArrayBlockingQueue(maxT);
        ExecutorService pool = new ThreadPoolExecutor(maxT, maxT, 500L, TimeUnit.MICROSECONDS, workQueue);

        //            ExecutorService pool = new ThreadPoolExecutor(maxT, maxT,
        //                    5000L, TimeUnit.MILLISECONDS,
        //                    new ArrayBlockingQueue<>(maxT, true), new ThreadPoolExecutor.CallerRunsPolicy());
        Set<Future<Map<String, List<String>>>> set1 = new HashSet<>();
        String prop = "P910";
        for (Term t : terms) {
            URL url = new URL(
                    page + "?action=wbgetclaims&format=json&props=&property=" + prop + "&entity=" + t.getUID());
            System.err.println(url);
            WikiRequestor req = new WikiRequestor(url, t.getUID(), 1);
            Future<Map<String, List<String>>> future = pool.submit(req);
            set1.add(future);
        }
        pool.shutdown();

        Map<String, List<String>> map = new HashMap<>();
        for (Future<Map<String, List<String>>> future : set1) {
            while (!future.isDone()) {
                //                Logger.getLogger(Wikipedia.class.getName()).log(Level.INFO, "Task is not completed yet....");
                Thread.sleep(10);
            }
            Map<String, List<String>> c = future.get();
            if (c != null) {
                map.putAll(c);
            }
        }
        workQueue = new ArrayBlockingQueue(maxT);
        pool = new ThreadPoolExecutor(maxT, maxT, 500L, TimeUnit.MICROSECONDS, workQueue);

        //            pool = new ThreadPoolExecutor(maxT, maxT,
        //                    5000L, TimeUnit.MILLISECONDS,
        //                    new ArrayBlockingQueue<>(maxT, true), new ThreadPoolExecutor.CallerRunsPolicy());
        Set<Future<Map<String, List<String>>>> set2 = new HashSet<>();
        for (Term t : terms) {
            List<String> catIDs = map.get(t.getUID());
            for (String catID : catIDs) {
                URL url = new URL(
                        page + "?action=wbgetentities&format=json&props=labels&languages=en&ids=" + catID);
                System.err.println(url);
                WikiRequestor req = new WikiRequestor(url, t.getUID(), 2);
                Future<Map<String, List<String>>> future = pool.submit(req);
                set2.add(future);
            }
        }
        pool.shutdown();

        for (Future<Map<String, List<String>>> future : set2) {
            while (!future.isDone()) {
                //                Logger.getLogger(Wikipedia.class.getName()).log(Level.INFO, "Task is not completed yet....");
                Thread.sleep(10);
            }
            Map<String, List<String>> c = future.get();
            if (c != null) {
                cats.putAll(c);
            }
        }
    }

    return cats;
}

From source file:org.trend.hgraph.util.test.HGraphClientPerformanceTest.java

@Override
public int run(String[] args) throws Exception {
    if (null == args || args.length < 4) {
        System.err.println("At least 4 arguments are required");
        printUsage();
        return -1;
    }

    System.out.println("args=" + Arrays.toString(args));
    String cmd = null;
    int mustStartIdx = -1;
    int level = 2;
    int threads = 100;
    long interval = 1000; // ms
    boolean isMs = false;
    for (int a = 0; a < args.length; a++) {
        cmd = args[a];
        if (cmd.startsWith("-")) {
            if (mustStartIdx != -1) {
                System.err.println("must start option order is incorrect");
                printUsage();
                return -1;
            }

            if ("-l".equals(cmd)) {
                a++;
                cmd = args[a];
                try {
                    level = Integer.parseInt(cmd);
                } catch (NumberFormatException e) {
                    System.err.println("parse number for -l:" + cmd + " failed");
                    printUsage();
                    return -1;
                }
            } else if ("-t".equals(cmd)) {
                a++;
                cmd = args[a];
                try {
                    threads = Integer.parseInt(cmd);
                } catch (NumberFormatException e) {
                    System.err.println("parse number for -t:" + cmd + " failed");
                    printUsage();
                    return -1;
                }
            } else if ("-m".equals(cmd)) {
                isMs = true;
            } else if ("-i".equals(cmd)) {
                a++;
                cmd = args[a];
                try {
                    interval = Long.parseLong(cmd);
                } catch (NumberFormatException e) {
                    System.err.println("parse number for -i:" + cmd + " failed");
                    printUsage();
                    return -1;
                }

            } else {
                System.err.println("undefined option:" + cmd);
                printUsage();
                return -1;
            }

        } else {
            if (mustStartIdx == -1) {
                mustStartIdx = a;
                break;
            } else {
                System.err.println("must start option order is incorrect");
                printUsage();
                return -1;
            }
        }
    }

    if (mustStartIdx + 4 != args.length) {
        System.err.println("The must option still not satisfied !!");
        printUsage();
        return -1;
    }

    String vt = args[mustStartIdx];
    String et = args[mustStartIdx + 1];
    File ipf = new File(args[mustStartIdx + 2]);
    File opp = new File(args[mustStartIdx + 3]);
    Configuration conf = this.getConf();

    conf.set(HBaseGraphConstants.HBASE_GRAPH_TABLE_VERTEX_NAME_KEY, vt);
    conf.set(HBaseGraphConstants.HBASE_GRAPH_TABLE_EDGE_NAME_KEY, et);

    // run test threads
    ThreadFactory tf = new DaemonThreadFactory(Executors.defaultThreadFactory());
    ExecutorService pool = Executors.newFixedThreadPool(threads, tf);
    @SuppressWarnings("rawtypes")
    List<Future> fs = new ArrayList<Future>();
    @SuppressWarnings("rawtypes")
    Future f = null;

    for (int a = 0; a < threads; a++) {
        fs.add(pool.submit(new Task(ipf, opp, conf, level, isMs)));
        synchronized (this) {
            wait(interval);
        }
    }

    while (fs.size() > 0) {
        f = fs.get(0);
        f.get();
        if (f.isDone()) {
            if (f.isCancelled()) {
                LOGGER.warn("a future:" + f + " was cancelled !!");
            }
            fs.remove(0);
        }
    }

    return 0;
}

From source file:net.ychron.unirestinst.http.HttpClientHelper.java

public <T> Future<HttpResponse<T>> requestAsync(HttpRequest request, final Class<T> responseClass,
        Callback<T> callback) {
    HttpUriRequest requestObj = prepareRequest(request, true);

    CloseableHttpAsyncClient asyncHttpClient = options.getAsyncHttpClient();
    if (!asyncHttpClient.isRunning()) {
        asyncHttpClient.start();
        AsyncIdleConnectionMonitorThread asyncIdleConnectionMonitorThread = (AsyncIdleConnectionMonitorThread) options
                .getOption(Option.ASYNC_MONITOR);
        asyncIdleConnectionMonitorThread.start();
    }

    final Future<org.apache.http.HttpResponse> future = asyncHttpClient.execute(requestObj,
            prepareCallback(responseClass, callback));

    return new Future<HttpResponse<T>>() {

        public boolean cancel(boolean mayInterruptIfRunning) {
            return future.cancel(mayInterruptIfRunning);
        }

        public boolean isCancelled() {
            return future.isCancelled();
        }

        public boolean isDone() {
            return future.isDone();
        }

        public HttpResponse<T> get() throws InterruptedException, ExecutionException {
            org.apache.http.HttpResponse httpResponse = future.get();
            return new HttpResponse<T>(options, httpResponse, responseClass);
        }

        public HttpResponse<T> get(long timeout, TimeUnit unit)
                throws InterruptedException, ExecutionException, TimeoutException {
            org.apache.http.HttpResponse httpResponse = future.get(timeout, unit);
            return new HttpResponse<T>(options, httpResponse, responseClass);
        }
    };
}

From source file:eu.edisonproject.training.wsd.Wikidata.java

private Map<CharSequence, List<CharSequence>> getCategories(Set<Term> terms)
        throws MalformedURLException, InterruptedException, ExecutionException {
    Map<CharSequence, List<CharSequence>> cats = new HashMap<>();

    if (terms.size() > 0) {
        int maxT = 2;
        BlockingQueue<Runnable> workQueue = new ArrayBlockingQueue(maxT);
        ExecutorService pool = new ThreadPoolExecutor(maxT, maxT, 500L, TimeUnit.MICROSECONDS, workQueue);

        //            ExecutorService pool = new ThreadPoolExecutor(maxT, maxT,
        //                    5000L, TimeUnit.MILLISECONDS,
        //                    new ArrayBlockingQueue<>(maxT, true), new ThreadPoolExecutor.CallerRunsPolicy());
        Set<Future<Map<CharSequence, List<CharSequence>>>> set1 = new HashSet<>();
        String prop = "P910";
        for (Term t : terms) {
            URL url = new URL(
                    PAGE + "?action=wbgetclaims&format=json&props=&property=" + prop + "&entity=" + t.getUid());
            Logger.getLogger(Wikidata.class.getName()).log(Level.FINE, url.toString());
            WikiRequestor req = new WikiRequestor(url, t.getUid().toString(), 1);
            Future<Map<CharSequence, List<CharSequence>>> future = pool.submit(req);
            set1.add(future);
        }
        pool.shutdown();

        Map<CharSequence, List<CharSequence>> map = new HashMap<>();
        for (Future<Map<CharSequence, List<CharSequence>>> future : set1) {
            while (!future.isDone()) {
                //                Logger.getLogger(Wikipedia.class.getName()).log(Level.INFO, "Task is not completed yet....");
                Thread.sleep(10);
            }
            Map<CharSequence, List<CharSequence>> c = future.get();
            if (c != null) {
                map.putAll(c);
            }
        }
        workQueue = new ArrayBlockingQueue(maxT);
        pool = new ThreadPoolExecutor(maxT, maxT, 500L, TimeUnit.MICROSECONDS, workQueue);

        //            pool = new ThreadPoolExecutor(maxT, maxT,
        //                    5000L, TimeUnit.MILLISECONDS,
        //                    new ArrayBlockingQueue<>(maxT, true), new ThreadPoolExecutor.CallerRunsPolicy());
        Set<Future<Map<CharSequence, List<CharSequence>>>> set2 = new HashSet<>();
        for (Term t : terms) {
            List<CharSequence> catIDs = map.get(t.getUid());
            for (CharSequence catID : catIDs) {
                URL url = new URL(
                        PAGE + "?action=wbgetentities&format=json&props=labels&languages=en&ids=" + catID);
                Logger.getLogger(Wikidata.class.getName()).log(Level.FINE, url.toString());
                WikiRequestor req = new WikiRequestor(url, t.getUid().toString(), 2);
                Future<Map<CharSequence, List<CharSequence>>> future = pool.submit(req);
                set2.add(future);
            }
        }
        pool.shutdown();

        for (Future<Map<CharSequence, List<CharSequence>>> future : set2) {
            while (!future.isDone()) {
                //                Logger.getLogger(Wikipedia.class.getName()).log(Level.INFO, "Task is not completed yet....");
                Thread.sleep(10);
            }
            Map<CharSequence, List<CharSequence>> c = future.get();
            if (c != null) {
                cats.putAll(c);
            }
        }
    }

    return cats;
}

From source file:com.ctrip.infosec.rule.executor.RulesExecutorService.java

/**
 * Executes all matched rules in parallel and merges their results back into the fact.
 */
void executeParallel(RiskFact fact) {

    // matchRules        
    List<Rule> matchedRules = Configs.matchRules(fact, false);

    TraceLogger.traceLog("Matched " + matchedRules.size() + " rules, starting parallel execution ...");
    List<Callable<RuleExecuteResultWithEvent>> runs = Lists.newArrayList();
    for (Rule rule : matchedRules) {
        final RiskFact factCopy = BeanMapper.copy(fact, RiskFact.class);

        // set default result
        if (!Constants.eventPointsWithScene.contains(factCopy.eventPoint)) {
            Map<String, Object> defaultResult = Maps.newHashMap();
            defaultResult.put(Constants.riskLevel, 0);
            defaultResult.put(Constants.riskMessage, "PASS");
            factCopy.results.put(rule.getRuleNo(), defaultResult);
        }

        final StatelessRuleEngine statelessRuleEngine = SpringContextHolder.getBean(StatelessRuleEngine.class);
        final String packageName = rule.getRuleNo();
        final String _logPrefix = Contexts.getLogPrefix();
        final String _traceLoggerParentTransId = TraceLogger.getTransId();

        try {
            // add current execute ruleNo before execution
            factCopy.ext.put(Constants.key_ruleNo, rule.getRuleNo());
            factCopy.ext.put(Constants.key_isAsync, false);

            runs.add(new Callable<RuleExecuteResultWithEvent>() {

                @Override
                public RuleExecuteResultWithEvent call() throws Exception {
                    RuleMonitorHelper.newTrans(factCopy, RuleMonitorType.RULE, packageName);
                    TraceLogger.beginTrans(factCopy.eventId);
                    TraceLogger.setParentTransId(_traceLoggerParentTransId);
                    TraceLogger.setLogPrefix("[" + packageName + "]");
                    Contexts.setPolicyOrRuleNo(packageName);
                    try {
                        long start = System.currentTimeMillis();
                        // remove current execute ruleNo when finished execution.
                        statelessRuleEngine.execute(packageName, factCopy);

                        long handlingTime = System.currentTimeMillis() - start;

                        if (!Constants.eventPointsWithScene.contains(factCopy.eventPoint)) {

                            Map<String, Object> resultWithScene = factCopy.resultsGroupByScene.get(packageName);
                            if (resultWithScene != null) {
                                resultWithScene.put(Constants.async, false);
                                resultWithScene.put(Constants.timeUsage, handlingTime);

                                TraceLogger.traceLog(">>>> [" + packageName
                                        + "] : [scene-grouped result] riskLevel = "
                                        + resultWithScene.get(Constants.riskLevel) + ", riskMessage = "
                                        + resultWithScene.get(Constants.riskMessage) + ", riskScene = "
                                        + resultWithScene.get(Constants.riskScene) + ", usage = "
                                        + resultWithScene.get(Constants.timeUsage) + "ms");
                            }

                            Map<String, Object> result = factCopy.results.get(packageName);
                            if (result != null) {
                                result.put(Constants.async, false);
                                result.put(Constants.timeUsage, handlingTime);

                                TraceLogger.traceLog(">>>> [" + packageName + "] : riskLevel = "
                                        + result.get(Constants.riskLevel) + ", riskMessage = "
                                        + result.get(Constants.riskMessage) + ", usage = "
                                        + result.get(Constants.timeUsage) + "ms");
                            }

                        } else {

                            Map<String, Object> result = factCopy.results.get(packageName);
                            if (result != null) {
                                result.put(Constants.async, false);
                                result.put(Constants.timeUsage, handlingTime);
                                int riskLevel = MapUtils.getIntValue(result, Constants.riskLevel, 0);
                                if (riskLevel > 0) {
                                    TraceLogger.traceLog(">>>> [" + packageName
                                            + "] [with scene]: [risk hit] riskLevel = "
                                            + result.get(Constants.riskLevel) + ", riskMessage = "
                                            + result.get(Constants.riskMessage) + ", usage = "
                                            + result.get(Constants.timeUsage) + "ms");
                                }
                            }

                            Map<String, Object> resultWithScene = factCopy.resultsGroupByScene.get(packageName);
                            if (resultWithScene != null) {
                                resultWithScene.put(Constants.async, false);
                                resultWithScene.put(Constants.timeUsage, handlingTime);

                                TraceLogger.traceLog(
                                        ">>>> [" + packageName + "] [with scene]: riskLevel = "
                                                + resultWithScene.get(Constants.riskLevel) + ", riskMessage = "
                                                + resultWithScene.get(Constants.riskMessage) + ", riskScene = "
                                                + resultWithScene.get(Constants.riskScene) + ", usage = "
                                                + resultWithScene.get(Constants.timeUsage) + "ms");
                            } else {
                                TraceLogger.traceLog(">>>> [" + packageName
                                        + "] [with scene]: no scene result");
                            }
                        }
                        return new RuleExecuteResultWithEvent(packageName, factCopy.results,
                                factCopy.resultsGroupByScene, factCopy.eventBody, factCopy.ext);
                    } catch (Exception e) {
                        logger.warn(_logPrefix + ". packageName: " + packageName, e);
                    } finally {
                        TraceLogger.commitTrans();
                        RuleMonitorHelper.commitTrans2Trunk(factCopy);
                        Contexts.clearLogPrefix();
                    }
                    return null;
                }

            });

        } catch (Throwable ex) {
            logger.warn(_logPrefix + ". packageName: " + packageName, ex);
        }

    }
    List<RuleExecuteResultWithEvent> rawResult = new ArrayList<RuleExecuteResultWithEvent>();
    try {
        List<Future<RuleExecuteResultWithEvent>> results = ParallelExecutorHolder.excutor.invokeAll(runs,
                timeout, TimeUnit.MILLISECONDS);
        for (Future f : results) {
            try {
                if (f.isDone()) {
                    RuleExecuteResultWithEvent r = (RuleExecuteResultWithEvent) f.get();
                    rawResult.add(r);
                } else {
                    f.cancel(true);
                }
            } catch (Exception e) {
                // ignored
            }
        }
    } catch (Exception e) {
        // ignored
    }
    if (rawResult.size() > 0) {
        for (RuleExecuteResultWithEvent item : rawResult) {
            // merge eventBody
            if (item.getEventBody() != null) {
                for (String key : item.getEventBody().keySet()) {
                    Object value = item.getEventBody().get(key);
                    if (!fact.eventBody.containsKey(key) && value != null) {
                        fact.eventBody.put(key, value);
                    }
                }
            }
            // merge ext
            if (item.getExt() != null) {
                for (String key : item.getExt().keySet()) {
                    Object value = item.getExt().get(key);
                    if (!fact.ext.containsKey(key) && value != null) {
                        fact.ext.put(key, value);
                    }
                }
            }
            // merge results
            if (item.getResults() != null) {
                fact.results.putAll(item.getResults());
            }
            // merge resultsGroupByScene
            if (item.getResultsGroupByScene() != null) {
                fact.resultsGroupByScene.putAll(item.getResultsGroupByScene());
            }
        }
    }
}

From source file:com.num.mobiperf.Checkin.java

private synchronized boolean checkGetCookie() {
    if (false) { // NOT TESTING SERVER
        authCookie = getFakeAuthCookie();
        return true;
    }
    Future<Cookie> getCookieFuture = accountSelector.getCheckinFuture();
    if (getCookieFuture == null) {
        // Logger.i("checkGetCookie called too early");
        return false;
    }
    while (!getCookieFuture.isDone())
        ;
    if (getCookieFuture.isDone()) {
        try {
            authCookie = getCookieFuture.get();
            // Logger.i("Got authCookie: " + authCookie);
            return true;
        } catch (InterruptedException e) {
            // Logger.e("Unable to get auth cookie", e);
            return false;
        } catch (ExecutionException e) {
            // Logger.e("Unable to get auth cookie", e);
            return false;
        }
    } else {
        // Logger.i("getCookieFuture is not yet finished");
        return false;
    }
}

From source file:hudson.slaves.SlaveComputer.java

@Override
public String getIcon() {
    Future<?> l = lastConnectActivity;
    if (l != null && !l.isDone())
        return "computer-flash.gif";
    return super.getIcon();
}

From source file:org.apache.hadoop.hbase.backup.regionserver.LogRollBackupSubprocedurePool.java

/**
 * Wait for all of the currently outstanding tasks submitted via {@link #submitTask(Callable)}
 * @return <tt>true</tt> on success, <tt>false</tt> otherwise
 * @throws ForeignException exception
 */
public boolean waitForOutstandingTasks() throws ForeignException {
    LOG.debug("Waiting for backup procedure to finish.");

    try {
        for (Future<Void> f : futures) {
            f.get();
        }
        return true;
    } catch (InterruptedException e) {
        if (aborted) {
            throw new ForeignException("Interrupted and found to be aborted while waiting for tasks!", e);
        }
        Thread.currentThread().interrupt();
    } catch (ExecutionException e) {
        if (e.getCause() instanceof ForeignException) {
            throw (ForeignException) e.getCause();
        }
        throw new ForeignException(name, e.getCause());
    } finally {
        // close off remaining tasks
        for (Future<Void> f : futures) {
            if (!f.isDone()) {
                f.cancel(true);
            }
        }
    }
    return false;
}