List of usage examples for java.lang.Thread.interrupt()
public void interrupt()
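Before the project examples below, here is a minimal, self-contained sketch of the basic pattern (class and thread names are illustrative, not taken from any of the sources on this page): interrupt() sets the target thread's interrupt status; blocking calls such as Thread.sleep throw InterruptedException, and a cooperative worker loop checks isInterrupted() or catches the exception and exits.

// Minimal sketch (illustrative names): a worker that honors interrupt().
public class InterruptExample {
  public static void main(String[] args) throws InterruptedException {
    Thread worker = new Thread(() -> {
      // Poll the interrupt status so the loop can be cancelled cooperatively.
      while (!Thread.currentThread().isInterrupted()) {
        try {
          Thread.sleep(200); // blocking call; throws InterruptedException if interrupted
        } catch (InterruptedException e) {
          // sleep() clears the interrupt status; restore it and exit the loop.
          Thread.currentThread().interrupt();
          break;
        }
      }
      System.out.println("worker exiting");
    }, "worker");

    worker.start();
    Thread.sleep(500);
    worker.interrupt(); // request cancellation
    worker.join();
  }
}

The examples that follow show the same idea in real projects: interrupting progress reporters, shutdown hooks, stream-draining helper threads, and test watchdogs.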
From source file: bear.fx.DownloadFxApp.java

public void createScene(Stage stage) {
  try {
    stage.setTitle("Downloading JDK " + version + "...");
    instance.set(this);
    appStartedLatch.countDown();

    final SimpleBrowser browser = SimpleBrowser.newBuilder()
        .useFirebug(false)
        .useJQuery(true)
        .createWebView(!miniMode)
        .build();

    final ProgressBar progressBar = new ProgressBar(0);
    final Label progressLabel = new Label("Retrieving a link...");

    VBox vBox = VBoxBuilder.create()
        .children(progressLabel, progressBar, browser)
        .fillWidth(true)
        .build();

    Scene scene = new Scene(vBox);
    stage.setScene(scene);

    if (miniMode) {
      stage.setWidth(300);
    } else {
      stage.setWidth(1024);
      stage.setHeight(768);
    }

    stage.show();

    VBox.setVgrow(browser, Priority.ALWAYS);

    /**
     * location changed to: http://www.oracle.com/technetwork/java/javase/downloads/java-archive-downloads-javase7-521261.html?
     * location changed to: http://download.oracle.com/otn/java/jdk/7u45-b18/jdk-7u45-linux-x64.tar.gz
     * location changed to: https://edelivery.oracle.com/akam/otn/java/jdk/7u45-b18/jdk-7u45-linux-x64.tar.gz
     * location changed to: https://login.oracle.com/pls/orasso/orasso.wwsso_app_admin.ls_login?Site2pstoreToken=v1.2~CA55CD32~750C6EFBC9B3CB198B2ADFE87BDD4DEB60E0218858C8BFE85DCCC65865D0E810E845839B422974847E1D489D3AF25FDC9574400197F9190C389876C1EC683A6006A06F7F05D41C94455B8354559F5699F5D0EF102F26FE905E77D40487455F7829501E3A783E1354EB0B8F05B828D0FC3BA22C62D3576883850E0B99849309B0C26F286E5650F63E9C6A7C376165C9A3EED86BF2FA0FAEE3D1F7F2957F5FBD5035AF0A3522E534141FE38DFDD55C4F7F517F9E81336C993BB76512C0D30A5B5C5FD82ED1C10E9D27284B6B1633E4B7B9FA5C2E38D9C5E3845C18C009E294E881FD8B654B67050958E57F0DC20885D6FA87A59FAA7564F94F
     * location changed to: https://login.oracle.com/mysso/signon.jsp
     * location changed to: https://login.oracle.com/oam/server/sso/auth_cred_submit
     * location changed to: https://edelivery.oracle.com/osso_login_success?urlc=v1.2%7E30E69346FE17F27D4F83121B0B8EC362E0B315901364AAA7D6F0B7A05CD8AA31802F5A69D70C708F34C64B65D233922B57D3C31839E82CE78E5C8DA55D729DD339893285D21A8E8B1AE8557C9240D6E33C9965956E136F4CB093779F97AF67C3DB8FF19FF2A638296BD0AA81A7801904AC5607F0568B6CEAF7ED9FCE4B7BEA80071617E4B2779F60F0C76A89F7D195965D2F003F9EDD2A1ADFD264C1C4C7F921010B08D3846CEC9524237A9337B6B0BC433BB17993A670B6C913EB4CFDC217A753F9E2943DE0CBDC41D4705AC67C2B96A4892C65F5450B146939B0EBFDF098680BBBE1F13356460C95A23D8D198D1C6762E45E62F120E32C2549E6263071DA84F8321370D2410CCA93E9A071A02ED6EB40BF40EDFC6F65AC7BA73CDB06DF4265455419D9185A6256FFE41A7FF54042374D09F5C720F3104B2EAC924778482D4BE855A45B2636CE91C7D947FF1F764674CE0E42FFCCFE411AABFE07EA0E96838AFEA263D2D5A405BD
     * location changed to: https://edelivery.oracle.com/akam/otn/java/jdk/7u45-b18/jdk-7u45-linux-x64.tar.gz
     * location changed to: http://download.oracle.com/otn/java/jdk/7u45-b18/jdk-7u45-linux-x64.tar.gz?AuthParam=1390405890_f9186a44471784229268632878dd89e4
     */
    browser.getEngine().locationProperty().addListener(new ChangeListener<String>() {
      @Override
      public void changed(ObservableValue<? extends String> observableValue, String oldLoc, final String uri) {
        logger.info("change: {}", uri);

        if (uri.contains("signon.jsp")) {
          browser.getEngine().executeScript("" +
              "alert(document);\n" +
              "alert(document.getElementById('sso_username'));\n");

          new Thread("signon.jsp waiter") {
            @Override
            public void run() {
              setStatus("waiting for the login form...");

              try {
                Thread.sleep(1000);
              } catch (InterruptedException e) {
                throw Exceptions.runtime(e);
              }

              browser.waitFor("$('#sso_username').length > 0", 10000);

              System.out.println("I see it all, I see it now!");

              Platform.runLater(new Runnable() {
                @Override
                public void run() {
                  browser.getEngine().executeScript("" +
                      "alert(document.getElementById('sso_username'));\n" +
                      "alert($('#sso_username').val('" + oracleUser + "'));\n" +
                      "alert($('#ssopassword').val('" + oraclePassword + "'));\n" +
                      downloadJDKJs() + "\n" +
                      "clickIt($('.sf-btnarea a'))");
                }
              });
            }
          }.start();
        }

        if (uri.contains("download.oracle") && uri.contains("?")) {
          // will be here after
          // clicking accept license and link -> * not logged in * -> here -> download -> redirect to login
          // download -> fill form -> * logged in * -> here -> download
          Thread thread = new Thread(new Runnable() {
            @Override
            public void run() {
              try {
                DefaultHttpClient httpClient = new DefaultHttpClient();
                HttpGet httppost = new HttpGet(uri);
                HttpResponse response = httpClient.execute(httppost);

                int code = response.getStatusLine().getStatusCode();
                if (code != 200) {
                  System.out.println(IOUtils.toString(response.getEntity().getContent()));
                  throw new RuntimeException("failed to download: " + uri);
                }

                final File file = new File(tempDestDir,
                    StringUtils.substringBefore(FilenameUtils.getName(uri), "?"));

                HttpEntity entity = response.getEntity();

                final long length = entity.getContentLength();

                final CountingOutputStream os = new CountingOutputStream(new FileOutputStream(file));

                System.out.printf("Downloading %s to %s...%n", uri, file);

                Thread progressThread = new Thread(new Runnable() {
                  double lastProgress;

                  @Override
                  public void run() {
                    while (!Thread.currentThread().isInterrupted()) {
                      long copied = os.getCount();

                      double progress = copied * 100D / length;

                      if (progress != lastProgress) {
                        final String s = String.format("%s: %s/%s %s%%", file.getName(),
                            FileUtils.humanReadableByteCount(copied, false, false),
                            FileUtils.humanReadableByteCount(length, false, true),
                            LangUtils.toConciseString(progress, 1));
                        setStatus(s);
                        System.out.print("\r" + s);
                      }

                      lastProgress = progress;
                      progressBar.setProgress(copied * 1D / length);

                      try {
                        Thread.sleep(500);
                      } catch (InterruptedException e) {
                        break;
                      }
                    }
                  }
                }, "progressThread");

                progressThread.start();

                ByteStreams.copy(entity.getContent(), os);

                progressThread.interrupt();

                System.out.println("Download complete.");

                downloadResult.set(new DownloadResult(file, "", true));
                downloadLatch.countDown();
              } catch (Exception e) {
                LoggerFactory.getLogger("log").warn("", e);
                downloadResult.set(new DownloadResult(null, e.getMessage(), false));
                throw Exceptions.runtime(e);
              }
            }
          }, "fx-downloader");

          thread.start();
        }
      }

      public void setStatus(final String s) {
        Platform.runLater(new Runnable() {
          @Override
          public void run() {
            progressLabel.setText(s);
          }
        });
      }
    });

    // links from http://www.oracle.com/technetwork/java/archive-139210.html
    Map<Integer, String> archiveLinksMap = new HashMap<Integer, String>();
    archiveLinksMap.put(5,
        "http://www.oracle.com/technetwork/java/javasebusiness/downloads/java-archive-downloads-javase5-419410.html");
    archiveLinksMap.put(6,
        "http://www.oracle.com/technetwork/java/javase/downloads/java-archive-downloads-javase6-419409.html");
    archiveLinksMap.put(7,
        "http://www.oracle.com/technetwork/java/javase/downloads/java-archive-downloads-javase7-521261.html");

    Map<Integer, String> latestLinksMap = new HashMap<Integer, String>();
    latestLinksMap.put(7, "http://www.oracle.com/technetwork/java/javase/downloads/jdk7-downloads-1880260.html");

    String archiveUrl = null;
    String latestUrl = null;

    char ch = version.charAt(0);
    switch (ch) {
    case '7':
    case '6':
    case '5':
      latestUrl = latestLinksMap.get(ch - '0');
      archiveUrl = archiveLinksMap.get(ch - '0');
      break;
    default:
      archiveUrl = null;
    }

    if (latestUrl != null) {
      final String finalArchiveUrl = archiveUrl;
      tryFind(browser, latestUrl, new WhenDone() {
        @Override
        public void whenDone(boolean found) {
          tryArchiveLink(found, finalArchiveUrl, browser);
        }
      });
    } else {
      tryArchiveLink(false, archiveUrl, browser);
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
}
From source file: org.cloudata.core.master.CloudataMaster.java

public void shutdown() {
  if (!conf.getBoolean("testmode", false)) {
    System.exit(0);
  }

  if (!stopRequested) {
    stopRequested = true;
    server.stop();
    if (infoServer != null) {
      try {
        infoServer.stop();
      } catch (InterruptedException e1) {
      }
    }

    ThreadGroup threadGroup = Thread.currentThread().getThreadGroup();
    Thread[] threads = new Thread[threadGroup.activeCount()];
    threadGroup.enumerate(threads, true);

    for (Thread thread : threads) {
      try {
        // LOG.debug("Master shutdown:child thread:" + thread.getId());
        thread.interrupt();
      } catch (Exception e) {
      }
    }
  }
  LOG.debug("shutdown masterserver:" + hostName);
}
From source file: org.apache.bookkeeper.util.Shell.java

/** Run a command */
private void runCommand() throws IOException {
  ProcessBuilder builder = new ProcessBuilder(getExecString());
  Timer timeOutTimer = null;
  ShellTimeoutTimerTask timeoutTimerTask = null;
  timedOut = new AtomicBoolean(false);
  completed = new AtomicBoolean(false);

  if (environment != null) {
    builder.environment().putAll(this.environment);
  }
  if (dir != null) {
    builder.directory(this.dir);
  }

  if (Shell.WINDOWS) {
    synchronized (WindowsProcessLaunchLock) {
      // To workaround the race condition issue with child processes
      // inheriting unintended handles during process launch that can
      // lead to hangs on reading output and error streams, we
      // serialize process creation. More info available at:
      // http://support.microsoft.com/kb/315939
      process = builder.start();
    }
  } else {
    process = builder.start();
  }

  if (timeOutInterval > 0) {
    timeOutTimer = new Timer("Shell command timeout");
    timeoutTimerTask = new ShellTimeoutTimerTask(this);
    // One time scheduling.
    timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
  }

  final BufferedReader errReader = new BufferedReader(
      new InputStreamReader(process.getErrorStream(), Charsets.UTF_8));
  BufferedReader inReader = new BufferedReader(
      new InputStreamReader(process.getInputStream(), Charsets.UTF_8));
  final StringBuffer errMsg = new StringBuffer();

  // read error and input streams as this would free up the buffers
  // free the error stream buffer
  Thread errThread = new Thread() {
    @Override
    public void run() {
      try {
        String line = errReader.readLine();
        while ((line != null) && !isInterrupted()) {
          errMsg.append(line);
          errMsg.append(System.getProperty("line.separator"));
          line = errReader.readLine();
        }
      } catch (IOException ioe) {
        LOG.warn("Error reading the error stream", ioe);
      }
    }
  };
  try {
    errThread.start();
  } catch (IllegalStateException ise) {
  }
  try {
    parseExecResult(inReader); // parse the output
    // clear the input stream buffer
    String line = inReader.readLine();
    while (line != null) {
      line = inReader.readLine();
    }
    // wait for the process to finish and check the exit code
    exitCode = process.waitFor();
    try {
      // make sure that the error thread exits
      errThread.join();
    } catch (InterruptedException ie) {
      LOG.warn("Interrupted while reading the error stream", ie);
    }
    completed.set(true);
    // the timeout thread handling
    // taken care in finally block
    if (exitCode != 0) {
      throw new ExitCodeException(exitCode, errMsg.toString());
    }
  } catch (InterruptedException ie) {
    throw new IOException(ie.toString());
  } finally {
    if (timeOutTimer != null) {
      timeOutTimer.cancel();
    }
    // close the input stream
    try {
      inReader.close();
    } catch (IOException ioe) {
      LOG.warn("Error while closing the input stream", ioe);
    }
    if (!completed.get()) {
      errThread.interrupt();
    }
    try {
      errReader.close();
    } catch (IOException ioe) {
      LOG.warn("Error while closing the error stream", ioe);
    }
    process.destroy();
    lastTime = MathUtils.now();
  }
}
From source file: org.jvnet.hudson.test.JenkinsRule.java

protected void setUpTimeout() {
  if (timeout <= 0) {
    System.out.println("Test timeout disabled.");
    return;
  }
  final Thread testThread = Thread.currentThread();
  timeoutTimer = new Timer();
  timeoutTimer.schedule(new TimerTask() {
    @Override
    public void run() {
      if (timeoutTimer != null) {
        LOGGER.warning(String.format("Test timed out (after %d seconds).", timeout));
        dumpThreads();
        testThread.interrupt();
      }
    }
  }, TimeUnit.SECONDS.toMillis(timeout));
}
From source file: org.springframework.yarn.test.Shell.java

/** Run a command */
private void runCommand() throws IOException {
  ProcessBuilder builder = new ProcessBuilder(getExecString());
  Timer timeOutTimer = null;
  ShellTimeoutTimerTask timeoutTimerTask = null;
  timedOut = new AtomicBoolean(false);
  completed = new AtomicBoolean(false);

  if (environment != null) {
    builder.environment().putAll(this.environment);
  }
  if (dir != null) {
    builder.directory(this.dir);
  }

  if (Shell.WINDOWS) {
    synchronized (WindowsProcessLaunchLock) {
      // To workaround the race condition issue with child processes
      // inheriting unintended handles during process launch that can
      // lead to hangs on reading output and error streams, we
      // serialize process creation. More info available at:
      // http://support.microsoft.com/kb/315939
      process = builder.start();
    }
  } else {
    process = builder.start();
  }

  if (timeOutInterval > 0) {
    timeOutTimer = new Timer("Shell command timeout");
    timeoutTimerTask = new ShellTimeoutTimerTask(this);
    // One time scheduling.
    timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
  }

  final BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
  BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
  final StringBuffer errMsg = new StringBuffer();

  // read error and input streams as this would free up the buffers
  // free the error stream buffer
  Thread errThread = new Thread() {
    @Override
    public void run() {
      try {
        String line = errReader.readLine();
        while ((line != null) && !isInterrupted()) {
          errMsg.append(line);
          errMsg.append(System.getProperty("line.separator"));
          line = errReader.readLine();
        }
      } catch (IOException ioe) {
        LOG.warn("Error reading the error stream", ioe);
      }
    }
  };
  try {
    errThread.start();
  } catch (IllegalStateException ise) {
  }
  try {
    parseExecResult(inReader); // parse the output
    // clear the input stream buffer
    String line = inReader.readLine();
    while (line != null) {
      line = inReader.readLine();
    }
    // wait for the process to finish and check the exit code
    exitCode = process.waitFor();
    try {
      // make sure that the error thread exits
      errThread.join();
    } catch (InterruptedException ie) {
      LOG.warn("Interrupted while reading the error stream", ie);
    }
    completed.set(true);
    // the timeout thread handling
    // taken care in finally block
    if (exitCode != 0) {
      throw new ExitCodeException(exitCode, errMsg.toString());
    }
  } catch (InterruptedException ie) {
    throw new IOException(ie.toString());
  } finally {
    if (timeOutTimer != null) {
      timeOutTimer.cancel();
    }
    // close the input stream
    try {
      inReader.close();
    } catch (IOException ioe) {
      LOG.warn("Error while closing the input stream", ioe);
    }
    if (!completed.get()) {
      errThread.interrupt();
    }
    try {
      errReader.close();
    } catch (IOException ioe) {
      LOG.warn("Error while closing the error stream", ioe);
    }
    process.destroy();
    lastTime = Time.now();
  }
}
From source file: com.thoughtworks.go.server.dao.PipelineSqlMapDaoIntegrationTest.java

@Test
public void shouldLoadAllActivePipelinesEvenWhenThereIsStageStatusChange() throws Exception {
  PipelineConfig twistConfig = PipelineMother.twoBuildPlansWithResourcesAndMaterials("twist", "dev", "ft");
  goConfigDao.addPipeline(twistConfig, "pipelinesqlmapdaotest");
  Pipeline twistPipeline = dbHelper.newPipelineWithAllStagesPassed(twistConfig);
  PipelineConfig mingleConfig = PipelineMother.twoBuildPlansWithResourcesAndMaterials("mingle", "dev", "ft");
  goConfigDao.addPipeline(mingleConfig, "pipelinesqlmapdaotest");
  final Pipeline firstPipeline = dbHelper.newPipelineWithAllStagesPassed(mingleConfig);
  final Pipeline secondPipeline = dbHelper.newPipelineWithFirstStagePassed(mingleConfig);
  dbHelper.scheduleStage(secondPipeline, mingleConfig.get(1));
  Pipeline thirdPipeline = dbHelper.newPipelineWithFirstStageScheduled(mingleConfig);

  Thread stageStatusChanger = new Thread() {
    @Override
    public void run() {
      for (;;) {
        pipelineDao.stageStatusChanged(secondPipeline.findStage("dev"));
        if (super.isInterrupted()) {
          break;
        }
      }
    }
  };
  stageStatusChanger.setDaemon(true);
  stageStatusChanger.start();

  PipelineInstanceModels pipelineHistories = pipelineDao.loadActivePipelines();
  assertThat(pipelineHistories.size(), is(3));
  assertThat(pipelineHistories.get(0).getId(), is(thirdPipeline.getId()));
  assertThat(pipelineHistories.get(1).getId(), is(secondPipeline.getId()));
  assertThat(pipelineHistories.get(2).getId(), is(twistPipeline.getId()));
  assertThat(pipelineHistories.get(0).getBuildCause().getMaterialRevisions().isEmpty(), is(false));

  stageStatusChanger.interrupt();
}
From source file: org.pentaho.di.job.entries.hadoopjobexecutor.JobEntryHadoopJobExecutor.java

public Result execute(final Result result, int arg1) throws KettleException {
  result.setNrErrors(0);

  Log4jFileAppender appender = null;
  String logFileName = "pdi-" + this.getName(); //$NON-NLS-1$

  try {
    appender = LogWriter.createFileAppender(logFileName, true, false);
    LogWriter.getInstance().addAppender(appender);
    log.setLogLevel(parentJob.getLogLevel());
  } catch (Exception e) {
    logError(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.FailedToOpenLogFile", logFileName, //$NON-NLS-1$
        e.toString()));
    logError(Const.getStackTracker(e));
  }

  try {
    URL resolvedJarUrl = resolveJarUrl(jarUrl);
    if (log.isDetailed()) {
      logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.ResolvedJar",
          resolvedJarUrl.toExternalForm()));
    }

    HadoopShim shim = getHadoopConfiguration().getHadoopShim();

    if (isSimple) {
      String simpleLoggingIntervalS = environmentSubstitute(getSimpleLoggingInterval());
      int simpleLogInt = 60;
      try {
        simpleLogInt = Integer.parseInt(simpleLoggingIntervalS, 10);
      } catch (NumberFormatException e) {
        logError(BaseMessages.getString(PKG, "ErrorParsingLogInterval", simpleLoggingIntervalS, simpleLogInt));
      }

      final Class<?> mainClass = locateDriverClass(resolvedJarUrl, shim);

      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.UsingDriverClass",
            mainClass == null ? "null" : mainClass.getName()));
        logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.SimpleMode"));
      }

      final AtomicInteger threads = new AtomicInteger(1);
      final NoExitSecurityManager nesm = new NoExitSecurityManager(System.getSecurityManager());
      smStack.setSecurityManager(nesm);
      try {
        Runnable r = new Runnable() {
          public void run() {
            try {
              try {
                executeMainMethod(mainClass);
              } finally {
                restoreSecurityManager(threads, nesm);
              }
            } catch (NoExitSecurityManager.NoExitSecurityException ex) {
              // Only log if we're blocking and waiting for this to complete
              if (simpleBlocking) {
                logExitStatus(result, mainClass, ex);
              }
            } catch (InvocationTargetException ex) {
              if (ex.getTargetException() instanceof NoExitSecurityManager.NoExitSecurityException) {
                // Only log if we're blocking and waiting for this to complete
                if (simpleBlocking) {
                  logExitStatus(result, mainClass,
                      (NoExitSecurityManager.NoExitSecurityException) ex.getTargetException());
                }
              } else {
                throw new RuntimeException(ex);
              }
            } catch (Exception ex) {
              throw new RuntimeException(ex);
            }
          }
        };

        Thread t = new Thread(r);
        t.setDaemon(true);
        t.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
          @Override
          public void uncaughtException(Thread t, Throwable e) {
            restoreSecurityManager(threads, nesm);
            if (simpleBlocking) {
              // Only log if we're blocking and waiting for this to complete
              logError(BaseMessages.getString(JobEntryHadoopJobExecutor.class,
                  "JobEntryHadoopJobExecutor.ErrorExecutingClass", mainClass.getName()), e);
              result.setResult(false);
            }
          }
        });
        nesm.addBlockedThread(t);
        t.start();

        if (simpleBlocking) {
          // wait until the thread is done
          do {
            logDetailed(BaseMessages.getString(JobEntryHadoopJobExecutor.class,
                "JobEntryHadoopJobExecutor.Blocking", mainClass.getName()));
            t.join(simpleLogInt * 1000);
          } while (!parentJob.isStopped() && t.isAlive());
          if (t.isAlive()) {
            // Kill thread if it's still running. The job must have been stopped.
            t.interrupt();
          }
        }
      } finally {
        // If we're not performing simple blocking spawn a watchdog thread to restore the security manager when all
        // threads are complete
        if (!simpleBlocking) {
          Runnable threadWatchdog = new Runnable() {
            @Override
            public void run() {
              while (threads.get() > 0) {
                try {
                  Thread.sleep(100);
                } catch (InterruptedException e) {
                  /* ignore */
                }
              }
              restoreSecurityManager(threads, nesm);
            }
          };
          Thread watchdog = new Thread(threadWatchdog);
          watchdog.setDaemon(true);
          watchdog.start();
        }
      }
    } else {
      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobEntryHadoopJobExecutor.AdvancedMode"));
      }

      Configuration conf = shim.createConfiguration();
      FileSystem fs = shim.getFileSystem(conf);
      URL[] urls = new URL[] { resolvedJarUrl };
      URLClassLoader loader = new URLClassLoader(urls, shim.getClass().getClassLoader());

      String hadoopJobNameS = environmentSubstitute(hadoopJobName);
      conf.setJobName(hadoopJobNameS);

      String outputKeyClassS = environmentSubstitute(outputKeyClass);
      conf.setOutputKeyClass(loader.loadClass(outputKeyClassS));
      String outputValueClassS = environmentSubstitute(outputValueClass);
      conf.setOutputValueClass(loader.loadClass(outputValueClassS));

      if (mapperClass != null) {
        String mapperClassS = environmentSubstitute(mapperClass);
        Class<?> mapper = loader.loadClass(mapperClassS);
        conf.setMapperClass(mapper);
      }
      if (combinerClass != null) {
        String combinerClassS = environmentSubstitute(combinerClass);
        Class<?> combiner = loader.loadClass(combinerClassS);
        conf.setCombinerClass(combiner);
      }
      if (reducerClass != null) {
        String reducerClassS = environmentSubstitute(reducerClass);
        Class<?> reducer = loader.loadClass(reducerClassS);
        conf.setReducerClass(reducer);
      }
      if (inputFormatClass != null) {
        String inputFormatClassS = environmentSubstitute(inputFormatClass);
        Class<?> inputFormat = loader.loadClass(inputFormatClassS);
        conf.setInputFormat(inputFormat);
      }
      if (outputFormatClass != null) {
        String outputFormatClassS = environmentSubstitute(outputFormatClass);
        Class<?> outputFormat = loader.loadClass(outputFormatClassS);
        conf.setOutputFormat(outputFormat);
      }

      String hdfsHostnameS = environmentSubstitute(hdfsHostname);
      String hdfsPortS = environmentSubstitute(hdfsPort);
      String jobTrackerHostnameS = environmentSubstitute(jobTrackerHostname);
      String jobTrackerPortS = environmentSubstitute(jobTrackerPort);

      List<String> configMessages = new ArrayList<String>();
      shim.configureConnectionInformation(hdfsHostnameS, hdfsPortS, jobTrackerHostnameS, jobTrackerPortS, conf,
          configMessages);
      for (String m : configMessages) {
        logBasic(m);
      }

      String inputPathS = environmentSubstitute(inputPath);
      String[] inputPathParts = inputPathS.split(",");
      List<Path> paths = new ArrayList<Path>();
      for (String path : inputPathParts) {
        paths.add(fs.asPath(conf.getDefaultFileSystemURL(), path));
      }
      Path[] finalPaths = paths.toArray(new Path[paths.size()]);
      conf.setInputPaths(finalPaths);

      String outputPathS = environmentSubstitute(outputPath);
      conf.setOutputPath(fs.asPath(conf.getDefaultFileSystemURL(), outputPathS));

      // process user defined values
      for (UserDefinedItem item : userDefined) {
        if (item.getName() != null && !"".equals(item.getName()) && item.getValue() != null
            && !"".equals(item.getValue())) {
          String nameS = environmentSubstitute(item.getName());
          String valueS = environmentSubstitute(item.getValue());
          conf.set(nameS, valueS);
        }
      }

      conf.setJar(environmentSubstitute(jarUrl));

      String numMapTasksS = environmentSubstitute(numMapTasks);
      String numReduceTasksS = environmentSubstitute(numReduceTasks);

      int numM = 1;
      try {
        numM = Integer.parseInt(numMapTasksS);
      } catch (NumberFormatException e) {
        logError("Can't parse number of map tasks '" + numMapTasksS + "'. Setting num" + "map tasks to 1");
      }
      int numR = 1;
      try {
        numR = Integer.parseInt(numReduceTasksS);
      } catch (NumberFormatException e) {
        logError("Can't parse number of reduce tasks '" + numReduceTasksS + "'. Setting num" + "reduce tasks to 1");
      }

      conf.setNumMapTasks(numM);
      conf.setNumReduceTasks(numR);

      RunningJob runningJob = shim.submitJob(conf);

      String loggingIntervalS = environmentSubstitute(getLoggingInterval());
      int logIntv = 60;
      try {
        logIntv = Integer.parseInt(loggingIntervalS);
      } catch (NumberFormatException e) {
        logError(BaseMessages.getString(PKG, "ErrorParsingLogInterval", loggingIntervalS, logIntv));
      }

      if (blocking) {
        try {
          int taskCompletionEventIndex = 0;
          while (!parentJob.isStopped() && !runningJob.isComplete()) {
            if (logIntv >= 1) {
              printJobStatus(runningJob);
              taskCompletionEventIndex = logTaskMessages(runningJob, taskCompletionEventIndex);
              Thread.sleep(logIntv * 1000);
            } else {
              Thread.sleep(60000);
            }
          }

          if (parentJob.isStopped() && !runningJob.isComplete()) {
            // We must stop the job running on Hadoop
            runningJob.killJob();
            // Indicate this job entry did not complete
            result.setResult(false);
          }

          printJobStatus(runningJob);
          // Log any messages we may have missed while polling
          logTaskMessages(runningJob, taskCompletionEventIndex);
        } catch (InterruptedException ie) {
          logError(ie.getMessage(), ie);
        }

        // Entry is successful if the MR job is successful overall
        result.setResult(runningJob.isSuccessful());
      }
    }
  } catch (Throwable t) {
    t.printStackTrace();
    result.setStopped(true);
    result.setNrErrors(1);
    result.setResult(false);
    logError(t.getMessage(), t);
  }

  if (appender != null) {
    LogWriter.getInstance().removeAppender(appender);
    appender.close();

    ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, appender.getFile(), parentJob.getJobname(),
        getName());
    result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
  }

  return result;
}
From source file: gov.noaa.pfel.erddap.util.EDStatic.java

/**
 * This interrupts/kills all of the threads in runningThreads.
 * Erddap.destroy calls this when tomcat is stopped.
 */
public static void destroy() {
  long time = System.currentTimeMillis();
  try {
    String names[] = String2.toStringArray(runningThreads.keySet().toArray());
    String2.log("\nEDStatic.destroy will try to interrupt nThreads=" + names.length + "\n threadNames="
        + String2.toCSSVString(names));

    // shutdown Cassandra clusters/sessions
    EDDTableFromCassandra.shutdown();

    // interrupt all of them
    for (int i = 0; i < names.length; i++) {
      try {
        Thread thread = (Thread) runningThreads.get(names[i]);
        if (thread != null && thread.isAlive())
          thread.interrupt();
        else
          runningThreads.remove(names[i]);
      } catch (Throwable t) {
        String2.log(MustBe.throwableToString(t));
      }
    }

    // wait for threads to finish
    int waitedSeconds = 0;
    int maxSeconds = 600; // 10 minutes
    while (true) {
      boolean allDone = true;
      for (int i = 0; i < names.length; i++) {
        try {
          if (names[i] == null)
            continue; // it has already stopped
          Thread thread = (Thread) runningThreads.get(names[i]);
          if (thread != null && thread.isAlive()) {
            allDone = false;
            if (waitedSeconds > maxSeconds) {
              String2.log(" " + names[i] + " thread is being stop()ped!!!");
              thread.stop();
              runningThreads.remove(names[i]);
              names[i] = null;
            }
          } else {
            String2.log(" " + names[i] + " thread recognized the interrupt in " + waitedSeconds + " s");
            runningThreads.remove(names[i]);
            names[i] = null;
          }
        } catch (Throwable t) {
          String2.log(MustBe.throwableToString(t));
          allDone = false;
        }
      }
      if (allDone) {
        String2.log("EDStatic.destroy successfully interrupted all threads in " + waitedSeconds + " s");
        break;
      }
      if (waitedSeconds > maxSeconds) {
        String2.log("!!! EDStatic.destroy is done, but it had to stop() some threads.");
        break;
      }
      Math2.sleep(2000);
      waitedSeconds += 2;
    }

    // finally
    if (useLuceneSearchEngine)
      String2.log("stopping lucene...");

    try {
      if (luceneIndexSearcher != null)
        luceneIndexSearcher.close();
    } catch (Throwable t) {
    }
    luceneIndexSearcher = null;

    try {
      if (luceneIndexReader != null)
        luceneIndexReader.close();
    } catch (Throwable t) {
    }
    luceneIndexReader = null;
    luceneDatasetIDFieldCache = null;

    try {
      if (luceneIndexWriter != null)
        // indices will be thrown away, so don't make pending changes
        luceneIndexWriter.close(false);
    } catch (Throwable t) {
    }
    luceneIndexWriter = null;
  } catch (Throwable t) {
    String2.log(MustBe.throwableToString(t));
  }
}
From source file: com.streamsets.pipeline.stage.origin.hdfs.cluster.ClusterHDFSSourceIT.java

@Test(timeout = 30000)
public void testProduceDelimitedNoHeader() throws Exception {
  ClusterHdfsConfigBean conf = new ClusterHdfsConfigBean();
  conf.hdfsUri = miniDFS.getURI().toString();
  conf.hdfsDirLocations = Arrays.asList(dir.toUri().getPath());
  conf.hdfsConfigs = new HashMap<>();
  conf.hdfsKerberos = false;
  conf.hdfsConfDir = hadoopConfDir;
  conf.recursive = false;
  conf.produceSingleRecordPerMessage = false;
  conf.dataFormat = DataFormat.DELIMITED;
  conf.dataFormatConfig.csvFileFormat = CsvMode.CSV;
  conf.dataFormatConfig.csvHeader = CsvHeader.NO_HEADER;
  conf.dataFormatConfig.csvMaxObjectLen = 4096;
  conf.dataFormatConfig.csvRecordType = CsvRecordType.LIST;
  conf.dataFormatConfig.csvSkipStartLines = 0;

  SourceRunner sourceRunner = new SourceRunner.Builder(ClusterHdfsDSource.class, createSource(conf))
      .addOutputLane("lane").setExecutionMode(ExecutionMode.CLUSTER_BATCH).setResourcesDir(resourcesDir)
      .build();
  sourceRunner.runInit();

  List<Map.Entry> list = new ArrayList<>();
  list.add(new Pair("1", new String("A,B\na,b")));
  list.add(new Pair("2", new String("C,D\nc,d")));

  Thread th = createThreadForAddingBatch(sourceRunner, list);
  try {
    StageRunner.Output output = sourceRunner.runProduce(null, 5);

    String newOffset = output.getNewOffset();
    Assert.assertEquals("2", newOffset);
    List<Record> records = output.getRecords().get("lane");
    Assert.assertEquals(4, records.size());

    Record record = records.get(0);
    Assert.assertEquals("A", record.get().getValueAsList().get(0).getValueAsMap().get("value").getValueAsString());
    Assert.assertFalse(record.has("[0]/header"));
    Assert.assertEquals("B", record.get().getValueAsList().get(1).getValueAsMap().get("value").getValueAsString());
    Assert.assertFalse(record.has("[1]/header"));

    record = records.get(1);
    Assert.assertEquals("a", record.get().getValueAsList().get(0).getValueAsMap().get("value").getValueAsString());
    Assert.assertFalse(record.has("[0]/header"));
    Assert.assertEquals("b", record.get().getValueAsList().get(1).getValueAsMap().get("value").getValueAsString());
    Assert.assertFalse(record.has("[1]/header"));

    record = records.get(2);
    Assert.assertEquals("C", record.get().getValueAsList().get(0).getValueAsMap().get("value").getValueAsString());
    Assert.assertFalse(record.has("[0]/header"));
    Assert.assertEquals("D", record.get().getValueAsList().get(1).getValueAsMap().get("value").getValueAsString());
    Assert.assertFalse(record.has("[1]/header"));

    record = records.get(3);
    Assert.assertEquals("c", record.get().getValueAsList().get(0).getValueAsMap().get("value").getValueAsString());
    Assert.assertFalse(record.has("[0]/header"));
    Assert.assertEquals("d", record.get().getValueAsList().get(1).getValueAsMap().get("value").getValueAsString());
    Assert.assertFalse(record.has("[1]/header"));

    if (sourceRunner != null) {
      sourceRunner.runDestroy();
    }
  } finally {
    th.interrupt();
  }
}
From source file: org.commoncrawl.service.listcrawler.DataTransferAgent.java

static int uploadSingeFile(CCBridgeServerMapping mapping, FileSystem fs, Configuration conf, Path hdfsFilePath,
    String uploadName, EventLoop eventLoop) throws IOException {

  final FileStatus fileStatus = fs.getFileStatus(hdfsFilePath);
  LOG.info("Uploading:" + uploadName + " size:" + fileStatus.getLen() + " to:" + mapping._internalName);

  {
    // construct url
    URL fePostURL = new URL("http://" + mapping._externalName + ":8090/");
    LOG.info("POST URL IS:" + fePostURL.toString());

    // open input stream
    final FSDataInputStream is = fs.open(hdfsFilePath);
    final Semaphore blockingSemaphore = new Semaphore(0);
    NIOHttpConnection connection = null;
    try {
      // create connection
      connection = new NIOHttpConnection(fePostURL, eventLoop.getSelector(), eventLoop.getResolver(), null);
      // set listener
      connection.setListener(new Listener() {

        @Override
        public void HttpConnectionStateChanged(NIOHttpConnection theConnection, State oldState, State state) {
          LOG.info("Connection State Changed to:" + state.toString());
          if (state == State.DONE || state == State.ERROR) {
            //LOG.info("Connection Transition to Done or Error");
            //LOG.info("Response Headers:" + theConnection.getResponseHeaders().toString());
            blockingSemaphore.release();
          }
        }

        @Override
        public void HttpContentAvailable(NIOHttpConnection theConnection, NIOBufferList contentBuffer) {
          // TODO Auto-generated method stub
        }
      });

      // set headers
      connection.getRequestHeaders().reset();
      connection.getRequestHeaders().prepend("PUT /put?src=" + uploadName + " HTTP/1.1", null);
      connection.getRequestHeaders().set("Host", mapping._internalName + ":8090");
      connection.getRequestHeaders().set("Content-Length", Long.toString(fileStatus.getLen()));
      connection.getRequestHeaders().set("Connection", "keep-alive");
      connection.setPopulateDefaultHeaderItems(false);

      final LinkedBlockingDeque<BufferStruct> _loaderQueue = new LinkedBlockingDeque<BufferStruct>(20);
      final AtomicBoolean eof = new AtomicBoolean();
      final ByteBuffer sentinel = ByteBuffer.allocate(4096);
      sentinel.position(sentinel.position());

      final Thread loaderThread = new Thread(new Runnable() {

        int _id = 0;

        @Override
        public void run() {
          int bytesRead;
          byte incomingBuffer[] = new byte[4096 * 10];
          try {
            while ((bytesRead = is.read(incomingBuffer)) != -1) {
              ByteBuffer buffer = ByteBuffer.wrap(incomingBuffer, 0, bytesRead);
              buffer.position(bytesRead);
              //LOG.info("Loader Thread Read:" + bytesRead + " Buffer:" + ++_id);
              try {
                _loaderQueue.put(new BufferStruct(buffer, _id));
              } catch (InterruptedException e) {
                LOG.error(CCStringUtils.stringifyException(e));
                break;
              }
              incomingBuffer = new byte[4096 * 10];
            }
            try {
              _loaderQueue.put(new BufferStruct(sentinel, ++_id));
            } catch (InterruptedException e) {
            }
          } catch (IOException e) {
            LOG.error(CCStringUtils.stringifyException(e));
            return;
          }
        }
      });

      loaderThread.start();

      // set data source ...
      connection.setDataSource(new DataSource() {

        int bytesTransferred = 0;

        @Override
        public boolean read(NIOBufferList dataBuffer) throws IOException {
          if (eof.get())
            return true;
          //LOG.info("Connect read callback triggered");
          BufferStruct buffer = _loaderQueue.poll();
          if (buffer != null) {
            if (buffer._buffer != sentinel) {
              //LOG.info("Got Buffer:" + buffer._id);
              if (buffer._id == 1) {
                //LOG.info("Inital Buffer Bytes:" + new String(buffer._buffer.array(), 0, 10).toString());
              }
              dataBuffer.write(buffer._buffer);
              bytesTransferred += buffer._buffer.limit();
              //LOG.info("Read:" + buffer._buffer.limit() + " Transfered:" + bytesTransferred);
              return false;
            } else {
              //LOG.info("EOF Condition");
              dataBuffer.write(sentinel);
              eof.set(true);
              return true;
            }
          }
          return false;
        }
      });

      // open connection
      connection.open();

      // wait for connection to complete ...
      blockingSemaphore.acquireUninterruptibly();

      // kill loader thread
      loaderThread.interrupt();
      try {
        LOG.info("Waiting for Loader Thread");
        loaderThread.join();
        LOG.info("Done Waiting for Loader Thread");
      } catch (InterruptedException e) {
      }
    } finally {
      is.close();
      if (connection != null) {
        connection.close();
        LOG.info("Response Code for File:" + uploadName + "to Host: " + mapping._internalName + " is:"
            + connection.getResponseHeaders().getHttpResponseCode());
        return connection.getResponseHeaders().getHttpResponseCode();
        /*
        if (connection.getResponseHeaders().getHttpResponseCode() != 200) {
          throw new IOException("Failed to upload file:" + dataFile.getName() + " responseCode:"
              + connection.getResponseHeaders().getHttpResponseCode());
        }
        */
      }
    }
  }
  // something went wrong ???
  LOG.error("Failed to upload file:" + uploadName + " unknown response code");
  return 500;
}