Example usage for java.lang.Thread.isAlive()

List of usage examples for java.lang.Thread.isAlive()

Introduction

On this page you can find example usage for java.lang.Thread.isAlive().

Prototype

public final native boolean isAlive();

Document

Tests if this thread is alive. A thread is alive if it has been started and has not yet died.

Usage
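
Before the project examples below, here is a minimal, self-contained sketch (written for this page and not taken from any of the projects listed) of the basic isAlive() behavior: a thread is not alive before start(), is alive while its run() method executes, and is no longer alive once it terminates.

public class IsAliveDemo {
    public static void main(String[] args) throws InterruptedException {
        Thread worker = new Thread(() -> {
            try {
                Thread.sleep(500); // simulate some work
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }, "worker");

        System.out.println("Before start: " + worker.isAlive()); // false

        worker.start();
        System.out.println("After start:  " + worker.isAlive()); // normally true

        // Poll isAlive() until the worker finishes; join() is usually the
        // simpler choice, but polling mirrors several of the examples below.
        while (worker.isAlive()) {
            Thread.sleep(50);
        }
        System.out.println("After finish: " + worker.isAlive()); // false
    }
}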

From source file:ffx.algorithms.MolecularDynamics.java

public void redynamic(final int nSteps, final double temperature) {
    skipIntro = true;

    this.nSteps = nSteps;
    totalSimTime = 0.0;
    //        this.dt = timeStep * 1.0e-3;
    //        printFrequency = (int) (printInterval / this.dt);
    //        saveSnapshotFrequency = (int) (saveInterval / this.dt);
    //        saveSnapshotAsPDB = true;
    //        if (fileType.equals("XYZ")) {
    //            saveSnapshotAsPDB = false;
    //        }
    //        saveRestartFileFrequency = (int) (restartFrequency / this.dt);
    //        if (pdbFilter == null) {
    //            logger.warning("pdbf");
    //        }
    this.targetTemperature = temperature;
    thermostat.setTargetTemperature(temperature);
    this.initVelocities = false;

    done = false;
    terminate = false;
    initialized = true;

    Thread dynamicThread = new Thread(this);
    dynamicThread.start();
    synchronized (this) {
        try {
            while (dynamicThread.isAlive()) {
                wait(100);
            }
        } catch (InterruptedException e) {
            String message = " Molecular dynamics interrupted.";
            logger.log(Level.WARNING, message, e);
        }
    }
}
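
As a side note, the wait(100) loop above is one way to block until the worker thread dies; Thread.join() expresses the same intent more directly. A minimal, hypothetical equivalent of just the waiting part (not the project's actual code) would be:

    try {
        dynamicThread.join(); // returns once dynamicThread.isAlive() is false
    } catch (InterruptedException e) {
        logger.log(Level.WARNING, " Molecular dynamics interrupted.", e);
    }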

From source file:ffx.algorithms.MolecularDynamics.java

/**
 * Blocking molecular dynamics. When this method returns, the MD run is
 * done.
 *
 * @param nSteps an int.
 * @param timeStep a double.
 * @param printInterval a double.
 * @param saveInterval a double.
 * @param temperature a double.
 * @param initVelocities a boolean.
 * @param dyn a {@link java.io.File} object.
 */
public void dynamic(final int nSteps, final double timeStep, final double printInterval,
        final double saveInterval, final double temperature, final boolean initVelocities, final File dyn) {
    /*
     * Return if already running; this could happen if two threads call
     * dynamic() on the same MolecularDynamics instance.
     */
    if (!done) {
        logger.warning(" Programming error - a thread invoked dynamic when it was already running.");
        return;
    }

    if (integrator instanceof Stochastic) {
        logger.info(format("\n Stochastic dynamics in the NVT ensemble\n"));
    } else if (!(thermostat instanceof Adiabatic)) {
        logger.info(format("\n Molecular dynamics in the NVT ensemble\n"));
    } else {
        logger.info(format("\n Molecular dynamics in the NVE ensemble\n"));
    }

    init(nSteps, timeStep, printInterval, saveInterval, fileType, restartFrequency, temperature, initVelocities,
            dyn);

    done = false;

    if (dyn != null) {
        logger.info(format(" Continuing from " + dyn.getAbsolutePath()));
    }
    logger.info(String.format(" Number of steps:     %8d", nSteps));
    logger.info(String.format(" Time step:           %8.3f (fsec)", timeStep));
    logger.info(String.format(" Print interval:      %8.3f (psec)", printInterval));
    logger.info(String.format(" Save interval:       %8.3f (psec)", saveInterval));
    //logger.info(String.format(" Archive file: %s", archiveFile.getName()));
    for (int i = 0; i < assemblies.size(); i++) {
        AssemblyInfo ai = assemblies.get(i);
        logger.info(String.format(" Archive file %3d: %s", i, ai.archiveFile.getName()));
    }
    logger.info(String.format(" Restart file:     %s", restartFile.getName()));

    Thread dynamicThread = new Thread(this);
    dynamicThread.start();
    synchronized (this) {
        try {
            while (dynamicThread.isAlive()) {
                wait(100);
            }
        } catch (InterruptedException e) {
            String message = " Molecular dynamics interrupted.";
            logger.log(Level.WARNING, message, e);
        }
    }
    logger.info("Done with an MD round.");
}

From source file:org.deegree.test.services.wpvs.WPVSStressTest.java

public void test() throws IOException {
    resultData = new ArrayList<TestResultData>();
    imgLinks = new ArrayList<String>();

    List<Thread> threadList = new LinkedList<Thread>();
    for (int i = 1; i <= threads; i++) {
        Thread t = new Thread(new WPVSSender(i));
        t.start();
        threadList.add(t);
    }

    // prepare Log file
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyMMddHHmmss");
    Date date = new Date();
    String currentDate = dateFormat.format(date);
    logWriter = new PrintWriter(new BufferedWriter(new FileWriter("/tmp/" + currentDate + "-WPVS-stress.log")));
    logWriter.println("START_DATE      START_TIME      ELAPSED       SUCCESS       " + "IMAGEFILE       URL");

    try {
        for (Thread t : threadList)
            while (t.isAlive())
                t.join();
    } catch (InterruptedException e) {
        LOG.error(e.getMessage(), e);
    }

    logWriter.close();
}

From source file:edu.uci.ics.crawler4j.crawler.CrawlController.java

/**
 * Start.
 * 
 * @param <T>
 *            the generic type
 * @param _c
 *            the crawler class to instantiate
 * @param numberOfCrawlers
 *            the number of crawlers
 */
public <T extends WebCrawler> void start(Class<T> _c, int numberOfCrawlers) {
    try {

        crawlersLocalData.clear();
        threads = new ArrayList<Thread>();
        List<T> crawlers = new ArrayList<T>();
        int numberofCrawlers = numberOfCrawlers;
        for (int i = 1; i <= numberofCrawlers; i++) {
            T crawler = _c.newInstance();
            Thread thread = new Thread(crawler, "Crawler " + i);
            logger.info("Thread state1 = " + thread.getState().toString());

            crawler.setThread(thread);
            crawler.setMyId(i);
            crawler.setMyController(this);
            thread.start();
            logger.info("Thread state2 = " + thread.getState().toString());
            crawlers.add(crawler);
            threads.add(thread);
            logger.info("Crawler " + i + " started.");
        }

        while (true) {
            sleep(DELAY);
            boolean someoneIsWorking = false;

            for (int i = 0; i < threads.size(); i++) {
                Thread thread = threads.get(i);
                if (!thread.isAlive()) {
                    recreateThread(_c, crawlers, i);
                } else if (thread.getState() == State.RUNNABLE) {
                    someoneIsWorking = true;
                    logger.info("Thread " + i + " was RUNNABLE.");
                } else if (thread.getState() == State.WAITING) {
                    logger.info("Thread " + i + " was WAITING.");
                    // thread.interrupt();
                    // thread.join();
                } else {
                    logger.info("Thread " + i + thread.getState().toString());
                    // recreateThread(_c, crawlers, i);
                }
            }

            if (!someoneIsWorking) {
                // Make sure again that none of the threads are alive.
                logger.info("It looks like no thread is working, waiting for 20 second to make sure...");
                sleep(DELAY);

                if (!isAnyThreadWorking()) {
                    long queueLength = Frontier.getQueueLength();
                    if (queueLength > 0) {
                        continue;
                    }
                    logger.info(
                            "No thread is working and no more URLs are in queue waiting for another 20 second to make sure...");
                    sleep(DELAY);
                    queueLength = Frontier.getQueueLength();
                    if (queueLength > 0) {
                        continue;
                    }
                    logger.info("All of the crawlers are stopped. Finishing the process...");
                    for (T crawler : crawlers) {
                        crawler.onBeforeExit();
                        crawlersLocalData.add(crawler.getMyLocalData());
                    }

                    // At this step, frontier notifies the threads that were waiting for new URLs and they should
                    // stop
                    // We will wait a few seconds for them and then return.
                    Frontier.finish();
                    logger.info("Waiting for 1 second before final clean up...");
                    sleep(DELAY);

                    try {
                        Frontier.close();
                        env.close();

                    } catch (Exception e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }

                    /*
                     * for (int i = 0; i < threads.size(); i++) { Thread thread = threads.get(i);
                     * logger.info("Thread state = " + thread.getState().toString()); if (thread.isAlive()) {
                     * logger.info("Wait for live thread to die"); thread.join(); }
                     * 
                     * }
                     */
                    // PageFetcher.stopConnectionMonitorThread();
                    return;
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:org.eclipse.wb.tests.designer.rcp.model.rcp.PdeUtilsTest.java

/**
 * Test for {@link PdeUtils#createExtensionElement(String, String, Map)}.
 * <p>
 * There was problem with adding extension and waiting for it. Solution - run "ModelModification"
 * in UI thread. However this test in reality does not reproduce it.
 */
public void test_createExtensionElement_notInUI() throws Exception {
    createPluginXML(new String[] { "<plugin>", "</plugin>" });
    // do create
    IPluginElement element;
    {
        final String pointId = "org.eclipse.ui.views";
        Thread thread = new Thread() {
            @Override
            public void run() {
                try {
                    m_utils.createExtensionElement(pointId, "view",
                            ImmutableMap.of("id", "id_2", "name", "name 2", "class", "C_2"));
                } catch (Throwable e) {
                    DesignerPlugin.log(e);
                }
            }
        };
        thread.start();
        while (thread.isAlive()) {
            waitEventLoop(0);
        }
        //
        element = m_utils.waitExtensionElementById(pointId, "view", "id_2");
        assertNotNull(element);
    }
    assertEquals("id_2", PdeUtils.getAttribute(element, "id"));
    assertEquals("name 2", PdeUtils.getAttribute(element, "name"));
    assertEquals("C_2", PdeUtils.getAttribute(element, "class"));
    // plugin.xml updated
    m_getSource_ignoreSpaces = true;
    assertPluginXML(new String[] { "<plugin>", "   <extension", "         point='org.eclipse.ui.views'>",
            "      <view", "            class='C_2'", "            id='id_2'", "            name='name 2'>",
            "      </view>", "   </extension>", "</plugin>" });
}

From source file:com.github.hdl.tensorflow.yarn.app.Client.java

/**
 * Monitor the submitted application for completion.
 * @param appId Application Id of application to be monitored
 * @return true if application completed successfully
 * @throws YarnException
 * @throws IOException
 */
private boolean monitorApplication(ApplicationId appId) throws YarnException, IOException {

    while (true) {

        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            LOG.debug("Thread sleep in monitoring loop interrupted");
        }

        ApplicationReport report = yarnClient.getApplicationReport(appId);

        //      LOG.info("Got application report from ASM for"
        //          + ", appId=" + appId.getId()
        //          + ", clientToAMToken=" + report.getClientToAMToken()
        //          + ", appDiagnostics=" + report.getDiagnostics()
        //          + ", appMasterHost=" + report.getHost()
        //          + ", appQueue=" + report.getQueue()
        //          + ", appMasterRpcPort=" + report.getRpcPort()
        //          + ", appStartTime=" + report.getStartTime()
        //          + ", yarnAppState=" + report.getYarnApplicationState().toString()
        //          + ", tfAppFinalState=" + report.getFinalApplicationStatus().toString()
        //          + ", appTrackingUrl=" + report.getTrackingUrl()
        //          + ", appUser=" + report.getUser());

        YarnApplicationState state = report.getYarnApplicationState();
        FinalApplicationStatus tfStatus = report.getFinalApplicationStatus();

        if (YarnApplicationState.RUNNING == state) {
            if (appRpc == null) {
                String hostname = report.getHost();
                int port = report.getRpcPort();
                LOG.info("application master rpc host: " + hostname + "; port: " + port);
                appRpc = new TFApplicationRpcClient(hostname, port).getRpc();
            }

            if (appRpc != null && isEmptyString(clusterSpecJsonString)) {
                clusterSpecJsonString = appRpc.getClusterSpec();
                LOG.info("cluster spec is " + clusterSpecJsonString);
                if (!isEmptyString(clusterSpecJsonString)) {
                    TFClient tfClient = new TFClient(tfClientPy);
                    if (isEnableTensorBoard) {
                        Thread tensorBoardThread = new Thread() {
                            @Override
                            public void run() {
                                tfClient.startTensorBoardClient(tensorboardEventDir);
                            }
                        };
                        tensorBoardThread.start();
                        LOG.info("Launching tensorboard ...");
                        try {
                            Thread.sleep(3000);
                        } catch (InterruptedException e) {
                            LOG.debug("Thread sleep in monitoring loop interrupted");
                        }
                        if (tensorBoardThread.isAlive()) {
                            LOG.info("the tensorboard launched successfully on the localhost:6006");
                        } else {
                            LOG.info("the tensorboard launched failed");
                        }
                    }
                    tfClient.startTensorflowClient(clusterSpecJsonString);
                }
            }
        }

        if (YarnApplicationState.FINISHED == state) {
            if (FinalApplicationStatus.SUCCEEDED == tfStatus) {
                LOG.info("Application has completed successfully. Breaking monitoring loop");
                return true;
            } else {
                LOG.info("Application did finished unsuccessfully." + " YarnState=" + state.toString()
                        + ", tfAppFinalState=" + tfStatus.toString() + ". Breaking monitoring loop");
                return false;
            }
        } else if (YarnApplicationState.KILLED == state || YarnApplicationState.FAILED == state) {
            LOG.info("Application did not finish." + " YarnState=" + state.toString() + ", tfAppFinalState="
                    + tfStatus.toString() + ". Breaking monitoring loop");
            return false;
        }

    }

}

From source file:org.apache.flume.channel.recoverable.memory.TestRecoverableMemoryChannel.java

@Test
public void testRollbackWithSink() throws Exception {
    final NullSink nullSink = new NullSink();
    Context ctx = new Context();
    ctx.put("batchSize", "1");
    nullSink.configure(ctx);
    nullSink.setChannel(channel);
    final int numItems = 99;
    Thread t = new Thread() {
        @Override
        public void run() {
            int count = 0;
            while (count++ < numItems) {
                try {
                    nullSink.process();
                    Thread.sleep(1);
                } catch (EventDeliveryException e) {
                    break;
                } catch (Exception e) {
                    Throwables.propagate(e);
                }
            }
        }
    };
    t.setDaemon(true);
    t.setName("NullSink");
    t.start();

    putEvents(channel, "rollback", 10, 100);

    Transaction transaction;
    // put an item we will rollback
    transaction = channel.getTransaction();
    transaction.begin();
    channel.put(EventBuilder.withBody("this is going to be rolledback".getBytes(Charsets.UTF_8)));
    transaction.rollback();
    transaction.close();

    while (t.isAlive()) {
        Thread.sleep(1);
    }

    // simulate crash
    channel.stop();
    channel = createFileChannel();

    // get the item which was not rolled back
    transaction = channel.getTransaction();
    transaction.begin();
    Event event = channel.take();
    transaction.commit();
    transaction.close();
    Assert.assertNotNull(event);
    Assert.assertEquals("rollback-90-9", new String(event.getBody(), Charsets.UTF_8));
}

From source file:com.aurel.track.admin.customize.category.report.execute.ReportBeansToLaTeXConverter.java

/**
 *
 * @param workDir
 * @param latexFile
 */
protected int runPdflatex(File workDir, File latexFile, int nrOfRuns) {

    if (latexCmd == null) {
        return -99;
    }

    int exitValue = 0;

    try {

        String[] cmd = new String[] { latexCmd, "--halt-on-error", "-output-directory=" + workDir,
                latexFile.getAbsolutePath() };

        String texpath = new File((new File(latexCmd)).getParent()).getAbsolutePath();

        ProcessBuilder latexProcessBuilder = new ProcessBuilder(cmd);
        latexProcessBuilder.directory(workDir);
        Map<String, String> env = latexProcessBuilder.environment();
        String path = env.get("PATH");
        if (path != null) {
            path = texpath + ":" + path;
            env.put("PATH", path);
        }

        File stdoutlog = new File(workDir + File.separator + "stdout.log");
        latexProcessBuilder.redirectOutput(Redirect.appendTo(stdoutlog));

        File stderrlog = new File(workDir + File.separator + "stderr.log");
        latexProcessBuilder.redirectError(Redirect.appendTo(stderrlog));

        ProcessExecutor latexProcessExecutor = new ProcessExecutor(latexProcessBuilder);

        Thread executionThread = new Thread(latexProcessExecutor);

        long timeout = 20000;

        LOGGER.debug("Run xelatex thread started!");

        long startTime = System.currentTimeMillis();

        executionThread.start();

        int imod = 0;
        while (executionThread.isAlive()) {
            ++imod;
            if (imod % 5 == 0) {
                LOGGER.debug("Run xelatex thread is alive");
            }

            if (((System.currentTimeMillis() - startTime) > timeout) && executionThread.isAlive()) {
                executionThread.interrupt();

                LOGGER.debug("Run xelatex thread interrupted!");

                latexProcessExecutor.killProcess();
            }
            Thread.sleep(100);
        }

        LOGGER.debug("Run xelatex done!");

        exitValue = latexProcessExecutor.getExitValue();

        try {
            Thread.sleep(1000);
        } catch (Exception ex) {
            LOGGER.error(ExceptionUtils.getStackTrace(ex), ex);
        }
    } catch (Exception ex) {
        LOGGER.error(ExceptionUtils.getStackTrace(ex), ex);
    }

    return exitValue;
}
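
The example above polls executionThread.isAlive() and interrupts the worker once a deadline passes. A generic, hypothetical helper (not part of the project above) capturing the same timeout-and-interrupt pattern might look like this:

static boolean waitForOrInterrupt(Thread worker, long timeoutMillis) throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMillis;
    while (worker.isAlive()) {
        if (System.currentTimeMillis() > deadline) {
            worker.interrupt(); // ask the worker to stop
            worker.join();      // wait for it to actually terminate
            return false;       // signal that the deadline was hit
        }
        Thread.sleep(100);      // poll roughly every 100 ms
    }
    return true;                // worker finished within the deadline
}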

From source file:org.pentaho.big.data.kettle.plugins.pig.JobEntryPigScriptExecutor.java

public Result execute(final Result result, int arg1) throws KettleException {
    result.setNrErrors(0);
    if (Utils.isEmpty(m_scriptFile)) {
        throw new KettleException(
                BaseMessages.getString(PKG, JOB_ENTRY_PIG_SCRIPT_EXECUTOR_ERROR_NO_PIG_SCRIPT_SPECIFIED));
    }
    try {
        String scriptFileS = m_scriptFile;
        scriptFileS = environmentSubstitute(scriptFileS);

        final PigService pigService = namedClusterServiceLocator.getService(namedCluster, PigService.class);
        // Make sure we can execute locally if desired
        if (m_localExecution && !pigService.isLocalExecutionSupported()) {
            throw new KettleException(
                    BaseMessages.getString(PKG, JOB_ENTRY_PIG_SCRIPT_EXECUTOR_WARNING_LOCAL_EXECUTION));
        }
        // transform the map into a list that can be accepted by ParameterSubstitutionPreprocessor
        final List<String> paramList = new ArrayList<String>();
        if (m_params != null) {
            for (Map.Entry<String, String> entry : m_params.entrySet()) {
                String name = entry.getKey();
                name = environmentSubstitute(name); // do environment variable substitution
                String value = entry.getValue();
                value = environmentSubstitute(value); // do environment variable substitution
                paramList.add(name + "=" + value);
            }
        }

        final PigService.ExecutionMode execMode = (m_localExecution ? PigService.ExecutionMode.LOCAL
                : PigService.ExecutionMode.MAPREDUCE);

        if (m_enableBlocking) {
            PigResult pigResult = pigService.executeScript(scriptFileS, execMode, paramList, getName(),
                    getLogChannel(), this, parentJob.getLogLevel());
            processScriptExecutionResult(pigResult, result);
        } else {
            final String finalScriptFileS = scriptFileS;
            final Thread runThread = new Thread() {
                public void run() {
                    PigResult pigResult = pigService.executeScript(finalScriptFileS, execMode, paramList,
                            getName(), getLogChannel(), JobEntryPigScriptExecutor.this,
                            parentJob.getLogLevel());
                    processScriptExecutionResult(pigResult, result);
                }
            };

            runThread.start();
            parentJob.addJobListener(new JobListener() {

                @Override
                public void jobStarted(Job job) throws KettleException {
                }

                @Override
                public void jobFinished(Job job) throws KettleException {
                    if (runThread.isAlive()) {
                        logMinimal(BaseMessages.getString(PKG,
                                "JobEntryPigScriptExecutor.Warning.AsynctaskStillRunning", getName(),
                                job.getJobname()));
                    }
                }
            });
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        result.setStopped(true);
        result.setNrErrors(1);
        result.setResult(false);
        logError(ex.getMessage(), ex);
    }

    return result;
}

From source file:com.arpnetworking.metrics.impl.ApacheHttpSinkTest.java

@Test
public void testSafeSleep() throws InterruptedException {
    final AtomicBoolean value = new AtomicBoolean(false);
    final Thread thread = new Thread(() -> {
        // Value will not be set if safe sleep throws or is not interrupted.
        ApacheHttpSink.safeSleep(500);
        value.set(true);
    });

    thread.start();
    Thread.sleep(100);
    thread.interrupt();
    thread.join(600);

    Assert.assertFalse(thread.isAlive());
    Assert.assertTrue(value.get());
}