Example usage for java.lang InterruptedException getMessage

List of usage examples for java.lang InterruptedException getMessage

Introduction

On this page you can find usage examples for java.lang.InterruptedException.getMessage().

Prototype

public String getMessage() 

Document

Returns the detail message string of this throwable.
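
Before the project examples below, here is a minimal, self-contained sketch (a hypothetical class, not taken from any of the projects listed on this page) of the common pattern: log the detail message returned by getMessage() and restore the thread's interrupt status. Note that getMessage() may return null if the exception was constructed without a message.

import java.util.concurrent.BlockingQueue;

public class QueueWorker {

    private final BlockingQueue<String> queue;

    public QueueWorker(BlockingQueue<String> queue) {
        this.queue = queue;
    }

    public void pollOnce() {
        try {
            // take() blocks and throws InterruptedException if the thread is interrupted
            String item = queue.take();
            System.out.println("Processing: " + item);
        } catch (InterruptedException e) {
            // getMessage() returns the detail message string of the throwable (may be null)
            System.err.println("Interrupted while waiting: " + e.getMessage());
            // restore the interrupt status so callers can observe it
            Thread.currentThread().interrupt();
        }
    }
}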

Usage

From source file:hudson.plugins.blazemeter.PerformanceBuilder.java

@Override
public void perform(@Nonnull Run<?, ?> run, @Nonnull FilePath workspace, @Nonnull Launcher launcher,
        @Nonnull TaskListener listener) throws InterruptedException, IOException {

    if (!validateTestId(listener)) {
        run.setResult(Result.FAILURE);
        return;
    }

    BlazemeterCredentialsBAImpl credentials = Utils.findCredentials(credentialsId, CredentialsScope.GLOBAL);
    boolean isValidCredentials = !StringUtils.isBlank(credentialsId) && validateCredentials(credentials);
    if (!isValidCredentials) {
        listener.error(BzmJobNotifier.formatMessage("Can not start build: Invalid credentials=" + credentialsId
                + "... is deprecated or absent in credentials store."));
        run.setResult(Result.NOT_BUILT);
        return;
    }

    String serverUrlConfig = BlazeMeterPerformanceBuilderDescriptor.getDescriptor().getBlazeMeterURL();
    String jobName = run.getFullDisplayName();
    VirtualChannel channel = launcher.getChannel();

    final long reportLinkId = System.currentTimeMillis();
    EnvVars envVars = run.getEnvironment(listener);

    BzmBuild bzmBuild = new BzmBuild(this, credentials.getUsername(), credentials.getPassword().getPlainText(),
            jobName, run.getId(),
            StringUtils.isBlank(serverUrlConfig) ? Constants.A_BLAZEMETER_COM : serverUrlConfig, envVars,
            workspace, listener, ProxyConfiguration.load(), !(channel instanceof LocalChannel),
            envVars.expand(reportLinkName), reportLinkId);

    ReportUrlTask reportUrlTask = new ReportUrlTask(run, jobName, channel, reportLinkId);

    Timer timer = new Timer(true);
    timer.scheduleAtFixedRate(reportUrlTask, 20 * 1000, 10 * 1000);
    try {
        Result result = channel.call(bzmBuild);
        run.setResult(result);
    } catch (InterruptedException e) {
        LOGGER.warning("Build has been aborted");
        // start a new task to wait for the slave
        InterruptListenerTask interrupt = new InterruptListenerTask(run, jobName, channel);
        interrupt.start();
        interrupt.join();
        run.setResult(Result.ABORTED);
    } catch (Exception e) {
        listener.getLogger().println(BzmJobNotifier.formatMessage("Failure with exception: " + e.getMessage()));
        e.printStackTrace(listener.getLogger());
        run.setResult(Result.FAILURE);
    } finally {
        reportUrlTask.cancel();
        timer.cancel();
        timer.purge();
    }
}

From source file:de.unibi.cebitec.bibiserv.web.beans.runinthecloud.BashExecutor.java

/**
 * checkInstanceAccess tests whether the started EC2 instance is already up
 * and which stage of startup it is in. Furthermore it controls the
 * progressBar on confirmation.xhtml. Each exception increments the value
 * 'progressBarValue'. Because I tested the instance access so often, I
 * noticed that there are ca. 15 ClientHandlerExceptions and ca. 4
 * UniformInterfaceExceptions before the 'alive' command comes from the
 * server's REST service. So each CHE increments progressBarValue by 2
 * points and each UIE increments it by 17. The value will therefore stop
 * at roughly 97%, and the successful 'alive' finally increments it to 99%
 * and the redirection starts.
 */
public void checkInstanceAccess() {
    String url = redirectAddress.substring(0, redirectAddress.lastIndexOf("/")) + "/rest";
    Client client = Client.create(new DefaultClientConfig());
    WebResource res = client.resource(url);
    String answer;
    while (true) {
        try {
            answer = res.path("manager").type(MediaType.TEXT_PLAIN).get(String.class);
            if (answer.equals("alive")) {
                progressBarValue = 99;
                accessReady = true;
                break;
            }
        } catch (ClientHandlerException e) {
            /**
             * A log.info without a break is sufficient here, because the
             * only possible exception is the expected one:
             * java.net.ConnectException "connection refused" = accepted;
             * java.net.ConnectException "connection timed out" = wrong DNS.
             */
            // roughly 15 ClientHandlerExceptions occur ...
            log.error(e.getMessage(), e);
            if (progressBarValue + 2 >= 98) {

            } else {
                progressBarValue += 2;
            }
            try {
                Thread.sleep(10000);
            } catch (InterruptedException ie) {
                log.info(ie.getMessage(), ie);
                break;
            }
        } catch (UniformInterfaceException e) {
            // ... and roughly 4 UniformInterfaceExceptions
            log.error(e.getMessage(), e);
            if (progressBarValue + 17 >= 98) {

            } else {
                progressBarValue += 17;
            }
            try {
                Thread.sleep(10000);
            } catch (InterruptedException ie) {
                log.info(ie);
                break;
            }
        }
    }
    connectToInstance();
}

From source file:org.openpipeline.pipeline.connector.webcrawler.Fetcher.java

/**
 * Fetches the data associated with the next URL.
 *
 * @param nextUrlItem
 *            containing the next URL to crawl
 * 
 * @return action based on the status code of the HttpClient, robots safety
 *         check, max number of redirects, max number of fetch attempts
 */
public int fetch(LinkDBRecord nextUrlItem) {

    if (nextUrlItem == null) {
        return HttpResultMapper.ACTION_DELETE;
    }

    if (client == null) {
        throw new RuntimeException("Fetcher not initialized.");
    }

    String nextUrl = nextUrlItem.getNextUrl();
    redirectUrl = nextUrl;
    HttpResponse httpResponse = null;
    HttpGet get = null;
    lastModified = 0;

    try {
        /*
         * Check the compliance with allow/disallow directives.
         */
        UrlItem item = new UrlItem(nextUrlItem.getNextUrl());

        boolean robotsSafe = robotsDirectives.allowed(item);
        if (!robotsSafe) {
            if (debug) {
                logger.debug("Robots denied, next URL: " + nextUrl);
            }
            return HttpResultMapper.ACTION_DELETE;
        }
        /*
         * Check the compliance with visit-time directives.
         */
        boolean visitTimeSafe = robotsDirectives.visitTimeOK();
        if (!visitTimeSafe) {
            if (debug) {
                logger.debug("Robots visit time denied, next URL: " + nextUrl);
            }
            return HttpResultMapper.ACTION_SKIP;
        }

        int status = -1;
        int numRedirects = 0;

        while (true) {

            get = new HttpGet();

            /* Set uri for the next execution of get */
            URI uri = new URI(nextUrl);
            get.setURI(uri);

            /*
             * Check crawl delay. If the fetcher follows the redirect URL it
             * will also observe the crawl delay
             */
            long waitTime = robotsDirectives.crawlDelayTime(lastFetchTimeThisDomain);

            if (waitTime > 0) {
                try {
                    Thread.sleep(waitTime);
                } catch (InterruptedException e) {
                    logger.error("Exception in fetcher in thread.sleep, next URL: " + nextUrl + ". Message: "
                            + e.getMessage());
                }
            }

            /* Execute get method */
            DefaultRedirectHandler redirectHandler = new DefaultRedirectHandler();
            client.setRedirectHandler(redirectHandler);

            HttpContext localContext = new BasicHttpContext();

            httpResponse = client.execute(get, localContext);
            if (httpResponse == null) {
                break;
            }

            Header lastModHeader = httpResponse.getFirstHeader("last-modified");
            if (lastModHeader != null) {
                String lastModifiedDate = lastModHeader.getValue();
                Date date = format.parse(lastModifiedDate);
                lastModified = date.getTime();
            }

            StatusLine statusLine = httpResponse.getStatusLine();
            if (statusLine == null) {
                /* Should not happen after execute */
                status = -1;
            } else {
                status = httpResponse.getStatusLine().getStatusCode();
            }

            lastFetchTimeThisDomain = System.currentTimeMillis();

            HttpEntity entity = httpResponse.getEntity();

            if (HttpResultMapper.permanentRedirect(status) || HttpResultMapper.temporaryRedirect(status)) {
                /*
                 * The fetcher follows a redirect until the maximum number
                 * of redirects is reached.
                 */
                if (numRedirects == maxNumberOfRedirects) {
                    break;
                }

                /* Update the URL to be fetched */
                URI redirectURI = redirectHandler.getLocationURI(httpResponse, localContext);

                String newUrl = redirectURI.toString();
                numRedirects++;

                /*
                 * In case of a permanent redirect, the fetcher asks the URL
                 * filter whether to follow it or not. The fetcher follows
                 * all temporary redirects.
                 */
                if (HttpResultMapper.permanentRedirect(status)) {

                    boolean redirectUrlOK = urlFilter.checkCanonicalForm(newUrl, nextUrl);

                    /*
                     * Only follows the permanent redirects which are
                     * different because of the formatting to the canonical
                     * form such as removing the trailing slash
                     */
                    if (!redirectUrlOK) {
                        /* Permanent redirect, keep the redirect URL */
                        redirectUrl = newUrl;
                        break;
                    }
                }
                /*
                 * If the permanent redirect URL differs just in formatting,
                 * or if temporary redirect follow it.
                 * 
                 * The redirect URL becomes nextURL for the next iteration
                 * of the while loop.
                 */
                nextUrl = newUrl;

                if (debug) {
                    logger.debug("Fetcher: had a redirect, redirect URL: " + nextUrl + ", status: " + status);
                }
            } else {
                /*
                 * The response body contains data on success and is null
                 * otherwise.
                 */
                // TODO retry if
                // exception?

                if (entity != null) {

                    long inputSize = entity.getContentLength();

                    if (inputSize > 0 && inputSize >= maxFileSize) {
                        throw new RuntimeException("Fetcher exception: data exceeds the max file size.");
                    }
                    /* Often the data length is not known */
                    inputStream = getData(entity);
                }
                break;
            }
            /*
             * Need to release the current connection, otherwise client does
             * not work
             */
            entity.consumeContent();
        }

        /*
         * Decide on action after the while loop is done, possibly done with
         * redirects
         */
        int action = HttpResultMapper.getHttpCodeResult(status);

        int fetchAttempts = nextUrlItem.getFetchAttempts();
        if (action != HttpResultMapper.ACTION_FINALIZE && fetchAttempts == maxNumberOfFetchAttempts) {
            /*
             * Remove items which have too many fetch attempts: redirects,
             * skip etc
             */
            action = HttpResultMapper.ACTION_DELETE;
        } else if (numRedirects == maxNumberOfRedirects) {
            /*
             * Avoid following too many redirects
             */
            action = HttpResultMapper.ACTION_DELETE;
        }

        return action;

    } catch (Throwable e) {
        /*
         * Currently, no re-tries are implemented. The HttpClient
         * automatically tries to recover from safe exceptions.
         */

        if (e instanceof org.apache.http.conn.ConnectTimeoutException) {
            return HttpResultMapper.ACTION_SKIP;
        }
        return HttpResultMapper.ACTION_DELETE;
    } finally {
        if (get != null) {
            get.abort();
        }
    }
}

From source file:com.baifendian.swordfish.execserver.job.ProcessJob.java

/**
 * Runs the given command through a generated shell script.
 *
 * @param command the command to run
 * @return the exit code: 0 means success, non-zero means failure
 */
public int runCommand(String command) {
    // calculate the remaining time before the node timeout
    long remainTime = calcNodeTimeout();
    int exitCode;

    try {
        // create the process builder
        processBuilder = new ProcessBuilder();

        // if the command is empty, treat the job as successful
        if (StringUtils.isEmpty(command)) {
            exitCode = 0;
            return exitCode;
        }

        // path of the generated command file
        String commandFile = String.format("%s/%s.command", workDir, jobAppId);

        logger.info("proxy user:{}, work dir:{}", proxyUser, workDir);

        // generate the command file if it does not exist yet
        if (!Files.exists(Paths.get(commandFile))) {
            logger.info("generate command file:{}", commandFile);

            StringBuilder stringBuilder = new StringBuilder();
            stringBuilder.append("#!/bin/sh\n");
            stringBuilder.append("BASEDIR=$(cd `dirname $0`; pwd)\n");
            stringBuilder.append("cd $BASEDIR\n");

            if (envFile != null) {
                stringBuilder.append("source " + envFile + "\n");
            }

            stringBuilder.append("\n\n");
            stringBuilder.append(command);

            // write the command file
            FileUtils.writeStringToFile(new File(commandFile), stringBuilder.toString(),
                    Charset.forName("UTF-8"));
        }

        // run the command file as the proxy user

        processBuilder.command("sudo", "-u", proxyUser, "sh", commandFile);

        // set the working directory
        processBuilder.directory(new File(workDir));

        // merge the error stream into the standard output stream
        processBuilder.redirectErrorStream(true);
        process = processBuilder.start();

        // log the command being executed
        printCommand(processBuilder);

        // read the process output
        readProcessOutput();

        int pid = getProcessId(process);

        logger.info("Process start, process id is: {}", pid);

        // long-running (streaming) job
        if (isLongJob) {
            // poll until the job is marked complete or the process exits
            while (!isCompleted.getAsBoolean() && process.isAlive()) {
                Thread.sleep(3000);
            }

            logger.info("streaming job has exit, work dir:{}, pid:{}", workDir, pid);

            // for streaming jobs (e.g. storm), the exit code depends on whether the job was marked complete
            exitCode = (isCompleted.getAsBoolean()) ? 0 : -1;
        } else { // ordinary (non-streaming) job
            boolean status = process.waitFor(remainTime, TimeUnit.SECONDS);

            if (status) {
                exitCode = process.exitValue();
                logger.info("job has exit, work dir:{}, pid:{}", workDir, pid);
            } else {
                cancel();
                exitCode = -1;
                logger.info("job has timeout, work dir:{}, pid:{}", workDir, pid);
            }
        }
    } catch (InterruptedException e) {
        logger.error("interrupt exception, maybe task has been cancel or killed.", e);
        exitCode = -1;
        throw new ExecException("Process has been interrupted. Exit code is " + exitCode);
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        exitCode = -1;
        throw new ExecException("Process error. Exit code is " + exitCode);
    }

    return exitCode;
}

From source file:com.qpark.maven.plugin.springconfig.WebServiceDispatcherXmlGenerator.java

private void createWebServiceDynamicWsdlConfig(final String sid) {
    String sids = Util.capitalizePackageName(sid);
    if (sids.length() > 32) {
        sids = new StringBuffer(48).append(sids.substring(0, 32)).append(System.currentTimeMillis()).toString();
        try {
            Thread.sleep(1);
        } catch (final InterruptedException e) {
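            // ignored: the 1 ms sleep (presumably) just keeps the timestamp-based names unique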
        }
    }
    final String fileName = new StringBuffer(128).append(sids).append("DynamicWsdlConfig.xml").toString();
    final File f = Util.getFile(this.outputDirectory, "dispatcher", fileName);
    this.log.info(new StringBuffer().append("Write ").append(f.getAbsolutePath()));
    try {
        Util.writeToFile(f, this.getWebServiceDynamicWsdlConfig(sid));
    } catch (final Exception e) {
        this.log.error(e.getMessage());
        e.printStackTrace();
    }
}

From source file:at.ac.tuwien.dsg.rSybl.cloudInteractionUnit.enforcementPlugins.dryRun.DryRunEnforcementAPI.java

private void scaleInComponent(Node o, String IP) {

    DependencyGraph graph = new DependencyGraph();
    graph.setCloudService(controlledService);
    Node toBeScaled = graph.getNodeWithID(o.getId());
    Node toBeRemoved = graph.getNodeWithID(IP);
    RuntimeLogger.logger.info("Trying to remove  " + toBeRemoved.getId() + " From " + toBeScaled.getId());
    String cmd = "";
    String ip = IP;
    String uuid = (String) toBeRemoved.getStaticInformation().get("UUID");
    RuntimeLogger.logger.info("Removing server with UUID" + uuid);

    //flexiantActions.removeServer(uuid);
    try {
        Thread.sleep(30000);
    } catch (InterruptedException e) {
        // TODO Auto-generated catch block
        RuntimeLogger.logger.info(e.getMessage());
    }

    toBeScaled.removeNode(toBeRemoved);

    monitoring.refreshServiceStructure(controlledService);
}

From source file:com.aol.advertising.qiao.injector.file.AbstractFileTailer.java

/**
 * Follows changes in the file, calling the callback's handle method for
 * each new line or new block.
 *
 * <pre>
 * - Open the file
 * - Compute the checksum of the file
 * - Set read position
 * - Lock the file using the checksum
 * - Process the file until file is done and a new file is created
 * - Notify listeners on complete
 * - Unlock the file
 * </pre>
 */
@Override
public void run() {
    long ts_start = System.currentTimeMillis();

    boolean file_rotated_or_truncated = false;
    RandomAccessFile reader = null;
    String filename = tailedFile.getAbsolutePath();
    try {
        reader = prepareFileToRead(tailedFile);

        // get last read position via checksum.
        FileReadingPositionCache.FileReadState fstate = _position.getReadState();
        logger.info(
                String.format("LAST PROCESSING STATUS: file=%s, timestamp=%s, read_position=%d, checksum=%d",
                        filename, fstate.getFriendlyTimestamp(), fstate.position, fstate.checksum));

        logger.info(">continue processing from last read: " + _position.toString());

        numFiles.set(1);
        while (running) {
            if (file_rotated_or_truncated)
                ts_start = System.currentTimeMillis();

            file_rotated_or_truncated = process(reader, delayMillis, file_rotated_or_truncated);
            if (file_rotated_or_truncated) {
                IOUtils.closeQuietly(reader);

                fstate = _position.getReadState();
                long mod_time = tailedFile.lastModified();
                notifyListenerOnComplete(new QiaoFileEntry(tailedFile.getAbsolutePath(), mod_time,
                        fstate.checksum, fstate.timestamp, fstate.position, true));

                long dur = System.currentTimeMillis() - ts_start;
                fpStats.update(dur);

                _position.remove();
                if (logger.isDebugEnabled())
                    logger.debug("removed " + currentReadFileChecksum + " from positionCache");

                fileLockManager.removeFileLock(currentReadFileChecksum); // delete lock
                logger.info("File " + tailedFile.getAbsolutePath() + " rotated");

                currentReadFileChecksum = 0;

                // --- another file ---
                reader = prepareFileToRead(tailedFile);

                numFiles.incrementAndGet(); // incr file count
                if (dataHandler != null)
                    dataHandler.fileRotated();

            } else {
                CommonUtils.sleepQuietly(delayMillis);
            }
        }
    } catch (InterruptedException e) {
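        // interrupted: exit quietly; cleanup happens in the finally block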
    } catch (QiaoOperationException e) {
        logger.error(e.getMessage(), e);
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
    } finally {
        if (dataHandler != null)
            dataHandler.close();

        IOUtils.closeQuietly(reader);
        if (currentReadFileChecksum != 0)
            fileLockManager.removeFileLock(currentReadFileChecksum);
    }

    logger.info(this.getClass().getSimpleName() + " terminated");

}

From source file:squash.booking.lambdas.core.PageManager.java

private void copyJsonDataToS3(String keyName, String jsonToCopy) throws Exception {

    logger.log("About to copy cached json data to S3");

    try {
        logger.log("Uploading json data to S3 bucket: " + websiteBucketName + " and key: " + keyName + ".json");
        byte[] jsonAsBytes = jsonToCopy.getBytes(StandardCharsets.UTF_8);
        ByteArrayInputStream jsonAsStream = new ByteArrayInputStream(jsonAsBytes);
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(jsonAsBytes.length);
        metadata.setContentType("application/json");
        // Direct caches not to satisfy future requests with this data without
        // revalidation.
        if (keyName.contains("famousplayers")) {
            // Famousplayers list is good for a year
            metadata.setCacheControl("max-age=31536000");
        } else {
            metadata.setCacheControl("no-cache, must-revalidate");
        }
        PutObjectRequest putObjectRequest = new PutObjectRequest(websiteBucketName, keyName + ".json",
                jsonAsStream, metadata);
        // Data must be public so it can be served from the website
        putObjectRequest.setCannedAcl(CannedAccessControlList.PublicRead);
        IS3TransferManager transferManager = getS3TransferManager();
        TransferUtils.waitForS3Transfer(transferManager.upload(putObjectRequest), logger);
        logger.log("Uploaded cached json data to S3 bucket");
    } catch (AmazonServiceException ase) {
        ExceptionUtils.logAmazonServiceException(ase, logger);
        throw new Exception("Exception caught while copying json data to S3");
    } catch (AmazonClientException ace) {
        ExceptionUtils.logAmazonClientException(ace, logger);
        throw new Exception("Exception caught while copying json data to S3");
    } catch (InterruptedException e) {
        logger.log("Caught interrupted exception: ");
        logger.log("Error Message: " + e.getMessage());
        throw new Exception("Exception caught while copying json data to S3");
    }
}

From source file:com.petpet.c3po.controller.Controller.java

/**
 * Starts all the workers. Including the adaptors, consolidators and gatherer.
 *
 * @param collection
 *          the name of the collection that is processed.
 * @param adaptThreads
 *          the number of adaptor threads in the pool.
 * @param consThreads
 *          the number of consolidator threads in the pool.
 * @param type
 *          the type of the adaptors.
 * @param adaptorcnf
 *          the adaptor configuration.
 */
private void startWorkers(String collection, int adaptThreads, int consThreads, String type,
        Map<String, String> adaptorcnf) {

    this.adaptorPool = Executors.newFixedThreadPool(adaptThreads);
    this.consolidatorPool = Executors.newFixedThreadPool(consThreads);

    List<Consolidator> consolidators = new ArrayList<Consolidator>();

    LOG.debug("Initializing consolidators...");
    for (int i = 0; i < consThreads; i++) {
        Consolidator c = new Consolidator(this.persistence, this.processingQueue);
        consolidators.add(c);
        this.consolidatorPool.submit(c);
    }

    // no more consolidators can be added.
    this.consolidatorPool.shutdown();

    List<ProcessingRule> rules = this.getRules(collection);

    Collections.sort(rules, new Comparator<ProcessingRule>() {

        // sorts in descending order of priority
        @Override
        public int compare(ProcessingRule r1, ProcessingRule r2) {
            int first = this.fixPriority(r2.getPriority());
            int second = this.fixPriority(r1.getPriority());
            return new Integer(first).compareTo(new Integer(second));
        }

        private int fixPriority(int prio) {
            if (prio < 0)
                return 0;

            if (prio > 1000)
                return 1000;

            return prio;
        }

    });

    LOG.debug("Initializing adaptors...");
    for (int i = 0; i < adaptThreads; i++) {
        AbstractAdaptor a = this.getAdaptor(type);

        a.setCache(this.persistence.getCache());
        a.setQueue(this.processingQueue);
        a.setGatherer(this.gatherer);
        a.setConfig(adaptorcnf);
        a.setRules(rules);
        a.configure();

        this.adaptorPool.submit(a);
    }

    // no more adaptors can be added.
    this.adaptorPool.shutdown();

    Thread gathererThread = new Thread(this.gatherer, "MetadataGatherer");
    gathererThread.setPriority(Thread.NORM_PRIORITY + 1);
    gathererThread.start();

    try {

        // kills the pool and all adaptor workers after a month;
        boolean adaptorsTerminated = this.adaptorPool.awaitTermination(2678400, TimeUnit.SECONDS);

        if (adaptorsTerminated) {
            this.stopConsoldators(consolidators);
            this.consolidatorPool.awaitTermination(2678400, TimeUnit.SECONDS);

        } else {
            System.out.println("Oh my, It seems something went wrong. This process took too long");
            LOG.error("Time out occurred, process was terminated");
        }

    } catch (InterruptedException e) {
        LOG.error("An error occurred: {}", e.getMessage());
    } finally {
        String path = FileUtils.getTempDirectory().getPath() + File.separator + "c3poarchives";
        FileUtils.deleteQuietly(new File(path));
    }

    // allow every rule to execute its tasks after job handling is done, like
    // printing statistics or cleaning up
    for (ProcessingRule processingRule : rules) {
        processingRule.onCommandFinished();
    }

    ActionLog log = new ActionLog(collection, ActionLog.UPDATED_ACTION);
    new ActionLogHelper(this.persistence).recordAction(log);
}

From source file:edu.cmu.graphchi.toolkits.collaborative_filtering.yarn.ApplicationMaster.java

private void finish() {
    // Join all launched threads
    // needed for when we time out
    // and we need to release containers
    for (Thread launchThread : launchThreads) {
        try {
            launchThread.join(10000);
        } catch (InterruptedException e) {
            LOG.info("Exception thrown in thread join: " + e.getMessage());
            e.printStackTrace();
        }
    }

    // When the application completes, it should stop all running containers
    LOG.info("Application completed. Stopping running containers");
    nmClientAsync.stop();

    // When the application completes, it should send a finish application
    // signal to the RM
    LOG.info("Application completed. Signalling finish to RM");

    FinalApplicationStatus appStatus;
    String appMessage = null;
    success = true;
    if (numFailedContainers.get() == 0 && numCompletedContainers.get() == numTotalContainers) {
        appStatus = FinalApplicationStatus.SUCCEEDED;
    } else {
        appStatus = FinalApplicationStatus.FAILED;
        appMessage = "Diagnostics." + ", total=" + numTotalContainers + ", completed="
                + numCompletedContainers.get() + ", failed=" + numFailedContainers.get();
        success = false;
    }
    try {
        amRMClient.unregisterApplicationMaster(appStatus, appMessage, null);
    } catch (YarnException ex) {
        LOG.error("Failed to unregister application", ex);
    } catch (IOException e) {
        LOG.error("Failed to unregister application", e);
    }

    amRMClient.stop();
}