List of usage examples for java.lang.Thread.isAlive()
public final native boolean isAlive();
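Before the project excerpts, a minimal self-contained sketch of the pattern most of them share: start a worker, wait with a timed join(), then use isAlive() to decide whether the worker still needs attention. This is illustrative only; the class and thread names are made up and do not come from any of the projects listed below.

public class IsAliveSketch {

    public static void main(String[] args) throws InterruptedException {
        // hypothetical worker that simulates five seconds of work
        Thread worker = new Thread(() -> {
            try {
                Thread.sleep(5_000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore the interrupt flag and exit
            }
        }, "worker");

        worker.start();
        worker.join(1_000); // wait at most one second

        if (worker.isAlive()) {
            // still running after the timeout: ask it to stop, then give it a little more time
            worker.interrupt();
            worker.join(1_000);
        }
        System.out.println("worker alive after shutdown attempt: " + worker.isAlive());
    }
}

Note that isAlive() only reports the thread's state; stopping a thread still requires cooperation from the thread itself, which is why the excerpts below pair it with interrupt(), timed join(), or a shared flag.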
From source file:mServer.search.MserverSearch.java
@SuppressWarnings("deprecation")
public boolean filmeSuchen(MserverSearchTask aktSearchTask) {
    boolean ret = true;
    try {
        // ===========================================
        // start the next search run
        MserverLog.systemMeldung("");
        MserverLog.systemMeldung("-----------------------------------");
        MserverLog.systemMeldung("Filmsuche starten");
        crawler = new Crawler();
        // what to load and how
        CrawlerConfig.senderLoadHow = aktSearchTask.loadHow();
        CrawlerConfig.updateFilmliste = aktSearchTask.updateFilmliste();
        CrawlerConfig.nurSenderLaden = arrLesen(aktSearchTask.arr[MserverSearchTask.SUCHEN_SENDER_NR].trim());
        CrawlerConfig.orgFilmlisteErstellen = aktSearchTask.orgListeAnlegen();
        CrawlerConfig.orgFilmliste = MserverDaten.system[MserverKonstanten.SYSTEM_FILMLISTE_ORG_NR];
        // live streams
        CrawlerConfig.importLive = MserverDaten.system[MserverKonstanten.SYSTEM_IMPORT_LIVE_NR];
        // and possibly a few imports of film lists from other servers
        CrawlerConfig.importUrl_1__anhaengen = MserverDaten.system[MserverKonstanten.SYSTEM_IMPORT_URL_1_NR];
        CrawlerConfig.importUrl_2__anhaengen = MserverDaten.system[MserverKonstanten.SYSTEM_IMPORT_URL_2_NR];
        // for the old film list
        CrawlerConfig.importOld = MserverDaten.system[MserverKonstanten.SYSTEM_IMPORT_OLD_NR];
        CrawlerConfig.importAkt = MserverDatumZeit
                .getNameAkt(MserverDaten.system[MserverKonstanten.SYSTEM_IMPORT_AKT_NR]);
        // remaining configuration
        Config.setUserAgent(MserverDaten.getUserAgent());
        CrawlerConfig.proxyUrl = MserverDaten.system[MserverKonstanten.SYSTEM_PROXY_URL_NR];
        CrawlerConfig.proxyPort = MserverDaten.getProxyPort();
        Config.debug = MserverDaten.debug;
        Log.setLogfile(MserverDaten.getLogDatei(MserverKonstanten.LOG_FILE_NAME_MSEARCH));

        Thread t = new Thread(crawler);
        t.setName("Crawler");
        t.start();
        MserverLog.systemMeldung("Filme suchen gestartet");

        // ===========================================
        // wait for the crawler to finish
        //int warten = aktSearchTask.allesLaden() == true ? MvSKonstanten.WARTEZEIT_ALLES_LADEN : MvSKonstanten.WARTEZEIT_UPDATE_LADEN;
        int warten = aktSearchTask.getWaitTime(); // minutes
        MserverLog.systemMeldung("Max Laufzeit[Min]: " + warten);
        MserverLog.systemMeldung("-----------------------------------");
        TimeUnit.MINUTES.timedJoin(t, warten);

        // ===========================================
        // first check whether anything is still running
        if (t != null) {
            if (t.isAlive()) {
                MserverLog.fehlerMeldung(915147623, MserverSearch.class.getName(),
                        "Der letzte Suchlauf läuft noch");
                if (crawler != null) {
                    MserverLog.systemMeldung("");
                    MserverLog.systemMeldung("");
                    MserverLog.systemMeldung("================================");
                    MserverLog.systemMeldung("================================");
                    MserverLog.systemMeldung("und wird jetzt gestoppt");
                    MserverLog.systemMeldung(
                            "Zeit: " + FastDateFormat.getInstance("dd.MM.yyyy HH:mm:ss").format(new Date()));
                    MserverLog.systemMeldung("================================");
                    MserverLog.systemMeldung("================================");
                    MserverLog.systemMeldung("");
                    // and now STOP it!
                    crawler.stop();
                }
                int w;
                if (loadLongMax())
                    w = 30; // 30 minutes for long runs
                else
                    w = 20; // wait 20 minutes, building/compressing the list takes time
                TimeUnit.MINUTES.timedJoin(t, w);
                if (t.isAlive()) {
                    MserverLog.systemMeldung("");
                    MserverLog.systemMeldung("");
                    MserverLog.systemMeldung("================================");
                    MserverLog.systemMeldung("================================");
                    MserverLog.systemMeldung("und noch gekillt");
                    MserverLog.systemMeldung(
                            "Zeit: " + FastDateFormat.getInstance("dd.MM.yyyy HH:mm:ss").format(new Date()));
                    MserverLog.systemMeldung("================================");
                    MserverLog.systemMeldung("================================");
                    MserverLog.systemMeldung("");
                    ret = false;
                }
                // that is the end of it: kill the thread
                t.stop();
            }
        }
    } catch (Exception ex) {
        MserverLog.fehlerMeldung(636987308, MserverSearch.class.getName(), "filmeSuchen", ex);
    }

    int l = crawler.getListeFilme().size();
    MserverLog.systemMeldung("");
    MserverLog.systemMeldung("");
    MserverLog.systemMeldung("================================");
    MserverLog.systemMeldung("Filmliste Anzahl Filme: " + l);
    if (l < 10_000) {
        // then something went wrong
        MserverLog.systemMeldung(" Fehler!!");
        MserverLog.systemMeldung("================================");
        ret = false;
    } else {
        MserverLog.systemMeldung(" dann ist alles OK");
        MserverLog.systemMeldung("================================");
    }
    MserverLog.systemMeldung("filmeSuchen beendet");
    crawler = null;
    return ret;
}
From source file:org.lilyproject.hadooptestfw.HBaseProxy.java
public void stop() throws Exception {
    if (mode == Mode.EMBED) {
        // Since HBase mini cluster shutdown has a tendency of sometimes failing (hanging waiting on master
        // to end), add a protection for this so that we do not run indefinitely. Especially important not to
        // annoy the other projects on our Hudson server.
        Thread stopHBaseThread = new Thread() {
            @Override
            public void run() {
                try {
                    hbaseTestUtil.shutdownMiniCluster();
                    hbaseTestUtil = null;
                } catch (Exception e) {
                    System.out.println("Error shutting down mini cluster.");
                    e.printStackTrace();
                }
            }
        };
        stopHBaseThread.start();
        stopHBaseThread.join(60000);
        if (stopHBaseThread.isAlive()) {
            System.err.println("Unable to stop embedded mini cluster within predetermined timeout.");
            System.err.println("Dumping stack for future investigation.");
            ReflectionUtils.printThreadInfo(new PrintWriter(System.out), "Thread dump");
            System.out.println(
                    "Will now try to interrupt the mini-cluster-stop-thread and give it some more time to end.");
            stopHBaseThread.interrupt();
            stopHBaseThread.join(20000);
            throw new Exception("Failed to stop the mini cluster within the predetermined timeout.");
        }
    }

    // Close connections with HBase and HBase's ZooKeeper handles
    //HConnectionManager.deleteConnectionInfo(CONF, true);
    HConnectionManager.deleteAllConnections(true);

    // Close all HDFS connections
    FileSystem.closeAll();

    conf = null;

    if (clearData && testHome != null) {
        TestHomeUtil.cleanupTestHome(testHome);
    }

    ManagementFactory.getPlatformMBeanServer()
            .unregisterMBean(new ObjectName("LilyHBaseProxy:name=ReplicationPeer"));
}
From source file:org.echocat.jomon.net.dns.DnsServer.java
@Override
public void close() throws Exception {
    synchronized (this) {
        _closed = true;
        synchronized (_closeables) {
            for (final Closeable closeable : _closeables) {
                closeQuietly(closeable);
            }
        }
        for (final Thread thread : _threads) {
            do {
                thread.interrupt();
                try {
                    thread.join(10);
                } catch (final InterruptedException ignored) {
                    LOG.info("Got interrupted and could not wait for end of '" + thread + "'.");
                    currentThread().interrupt();
                }
            } while (!currentThread().isInterrupted() && thread.isAlive());
        }
    }
}
From source file:com.nuvolect.deepdive.probe.DecompileApk.java
/**
 * Return the status of a compile process. The status can be one of three states:
 *   running: compile process is running
 *   stopped: compile process has stopped, folder exists
 *   empty:   compile process is not running, folder does not exist
 *
 * @param folderExists
 * @param aThread
 * @return the status string
 *
 * The member variable {@link #m_active_threads} is bumped for each active thread
 */
private String getThreadStatus(boolean folderExists, Thread aThread) {
    if (aThread != null && aThread.isAlive()) {
        ++m_active_threads;
        return "running";
    }
    if (folderExists)
        return "stopped";
    return "empty";
}
From source file:com.spotify.helios.system.SystemTestBase.java
private void listThreads() {
    final Set<Thread> threads = Thread.getAllStackTraces().keySet();
    final Map<String, Thread> sorted = Maps.newTreeMap();
    for (final Thread t : threads) {
        final ThreadGroup tg = t.getThreadGroup();
        if (t.isAlive() && (tg == null || !tg.getName().equals("system"))) {
            sorted.put(t.getName(), t);
        }
    }
    log.info("= THREADS " + Strings.repeat("=", 70));
    for (final Thread t : sorted.values()) {
        final ThreadGroup tg = t.getThreadGroup();
        log.info("{}: \"{}\" ({}{})", t.getId(), t.getName(), (tg == null ? "" : tg.getName() + " "),
                (t.isDaemon() ? "daemon" : ""));
    }
    log.info(Strings.repeat("=", 80));
}
From source file:org.apache.tinkerpop.gremlin.groovy.engine.GremlinExecutorTest.java
@Test
public void shouldInterruptWhile() throws Exception {
    final Map<String, List<Object>> compilerCustomizerConfig = new HashMap<>();
    compilerCustomizerConfig.put(ThreadInterruptCustomizerProvider.class.getName(), new ArrayList<>());

    final Map<String, Object> config = new HashMap<>();
    config.put("compilerCustomizerProviders", compilerCustomizerConfig);

    final GremlinExecutor gremlinExecutor = GremlinExecutor.build()
            .addEngineSettings("gremlin-groovy", Collections.emptyList(), Collections.emptyList(),
                    Arrays.asList(PATHS.get("GremlinExecutorInit.groovy")), config)
            .create();

    final AtomicBoolean asserted = new AtomicBoolean(false);
    final Thread t = new Thread(() -> {
        try {
            gremlinExecutor
                    .eval("s = System.currentTimeMillis();\nwhile((System.currentTimeMillis() - s) < 10000) {}")
                    .get();
        } catch (Exception se) {
            asserted.set(se instanceof InterruptedException);
        }
    });

    t.start();
    Thread.sleep(100);
    t.interrupt();
    while (t.isAlive()) {
    }

    assertTrue(asserted.get());
}
From source file:eionet.cr.web.action.factsheet.FolderActionBean.java
private void saveAndHarvest() throws IOException, DAOException {
    // start the thread that saves the file's content and attempts to harvest it
    Thread thread = new Thread(this);
    thread.start();

    // check the thread after every second, exit loop if it hasn't finished in 15 seconds
    for (int loopCount = 0; thread.isAlive() && loopCount < 15; loopCount++) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            throw new CRRuntimeException(e.toString(), e);
        }
    }

    // if the thread reported an exception, throw it
    if (saveAndHarvestException != null) {
        if (saveAndHarvestException instanceof DAOException) {
            throw (DAOException) saveAndHarvestException;
        } else if (saveAndHarvestException instanceof IOException) {
            throw (IOException) saveAndHarvestException;
        } else if (saveAndHarvestException instanceof RuntimeException) {
            throw (RuntimeException) saveAndHarvestException;
        } else {
            throw new CRRuntimeException(saveAndHarvestException.getMessage(), saveAndHarvestException);
        }
    }

    // add feedback message to the bean's context
    if (!thread.isAlive()) {
        addSystemMessage("File saved and harvested!");
    } else {
        if (!contentSaved) {
            addSystemMessage("Saving and harvesting the file continues in the background!");
        } else {
            addSystemMessage("File content saved, but harvest continues in the background!");
        }
    }
}
From source file:org.jumpmind.symmetric.AbstractSymmetricEngine.java
public synchronized void stop() {
    log.info("Stopping SymmetricDS externalId={} version={} database={}",
            new Object[] { parameterService == null ? "?" : parameterService.getExternalId(), Version.version(),
                    symmetricDialect == null ? "?" : symmetricDialect.getName() });
    if (jobManager != null) {
        jobManager.stopJobs();
    }
    if (routerService != null) {
        routerService.stop();
    }
    if (nodeCommunicationService != null) {
        nodeCommunicationService.stop();
    }
    if (pushService != null) {
        pushService.stop();
    }
    if (dataLoaderService != null) {
        dataLoaderService.stop();
    }
    if (statisticManager != null) {
        List<ProcessInfo> infos = statisticManager.getProcessInfos();
        for (ProcessInfo processInfo : infos) {
            Thread thread = processInfo.getThread();
            if (processInfo.getStatus() != Status.OK && thread.isAlive()) {
                log.info("Trying to interrupt thread '{}' ", thread.getName());
                try {
                    thread.interrupt();
                } catch (Exception e) {
                    log.info("Caught exception while attempting to interrupt thread", e);
                }
            }
        }
        Thread.interrupted();
    }
    started = false;
    starting = false;
}
From source file:org.apache.hadoop.mapred.gridmix.TestGridMixClasses.java
@Test(timeout = 120000)
public void testSerialReaderThread() throws Exception {
    Configuration conf = new Configuration();
    File fin = new File("src" + File.separator + "test" + File.separator + "resources" + File.separator
            + "data" + File.separator + "wordcount2.json");
    // read a couple of jobs from wordcount2.json
    JobStoryProducer jobProducer = new ZombieJobProducer(new Path(fin.getAbsolutePath()), null, conf);
    CountDownLatch startFlag = new CountDownLatch(1);
    UserResolver resolver = new SubmitterUserResolver();
    FakeJobSubmitter submitter = new FakeJobSubmitter();
    File ws = new File("target" + File.separator + this.getClass().getName());
    if (!ws.exists()) {
        Assert.assertTrue(ws.mkdirs());
    }

    SerialJobFactory jobFactory = new SerialJobFactory(submitter, jobProducer, new Path(ws.getAbsolutePath()),
            conf, startFlag, resolver);

    Path ioPath = new Path(ws.getAbsolutePath());
    jobFactory.setDistCacheEmulator(new DistributedCacheEmulator(conf, ioPath));

    Thread test = jobFactory.createReaderThread();
    test.start();
    Thread.sleep(1000);
    // SerialReaderThread waits on startFlag
    assertEquals(0, submitter.getJobs().size());
    // start!
    startFlag.countDown();
    while (test.isAlive()) {
        Thread.sleep(1000);
        jobFactory.update(null);
    }
    // submitter was called twice
    assertEquals(2, submitter.getJobs().size());
}
From source file:org.lsc.AbstractSynchronize.java
public final synchronized void shutdownAsynchronousSynchronize2Ldap(final String syncName, boolean forceStop) {
    Thread asyncThread = asynchronousThreads.get(syncName);
    long startTime = System.currentTimeMillis();

    if (asyncThread == null) {
        LOGGER.info("Trying to stop a non running asynchronous task: " + syncName);
        return;
    }

    while (asyncThread.isAlive()) {
        try {
            asyncThread.join(1000);
            if ((System.currentTimeMillis() - startTime) > 5000) {
                if (forceStop) {
                    // after 5 seconds, stop waiting and interrupt the thread
                    asyncThread.interrupt();
                    asyncThread.join(1000);
                } else {
                    break;
                }
            }
        } catch (InterruptedException ie) {
            // thread has been interrupted, do nothing
        }
    }

    if (!asyncThread.isAlive()) {
        asynchronousThreads.remove(syncName);
        mapSTasks.remove(syncName);
    }
}