List of usage examples for java.lang.InterruptedException.getMessage()
public String getMessage()
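Before the project-specific examples, here is a minimal, self-contained sketch of the common pattern (the class name, logger, and timings are invented for illustration, not taken from any of the projects below): catch InterruptedException around a blocking call, log e.getMessage() (which may be null when no detail message was supplied), and restore the thread's interrupt status.

import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;

public class InterruptedMessageExample {

    private static final Logger LOGGER = Logger.getLogger(InterruptedMessageExample.class.getName());

    public static void main(String[] args) {
        Thread worker = new Thread(() -> {
            try {
                // Blocking call that can be interrupted
                TimeUnit.SECONDS.sleep(10);
            } catch (InterruptedException e) {
                // getMessage() returns the exception's detail message (often null for a plain interrupt);
                // log it and restore the interrupt flag so callers can observe the interruption.
                LOGGER.log(Level.WARNING, "Sleep interrupted: {0}", e.getMessage());
                Thread.currentThread().interrupt();
            }
        });
        worker.start();
        worker.interrupt();
    }
}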
From source file: io.hops.hopsworks.common.dao.tensorflow.config.TensorBoardProcessMgr.java

/**
 * Start the TensorBoard process
 * @param project
 * @param user
 * @param hdfsUser
 * @param hdfsLogdir
 * @return
 * @throws IOException
 */
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public TensorBoardDTO startTensorBoard(Project project, Users user, HdfsUsers hdfsUser, String hdfsLogdir)
        throws IOException {
    String prog = settings.getHopsworksDomainDir() + "/bin/tensorboard.sh";
    Process process = null;
    Integer port = 0;
    BigInteger pid = null;
    String tbBasePath = settings.getStagingDir() + Settings.TENSORBOARD_DIRS + File.separator;
    String projectUserUniquePath = project.getName() + "_" + hdfsUser.getName();
    String tbPath = tbBasePath + DigestUtils.sha256Hex(projectUserUniquePath);
    String certsPath = "\"\"";

    File tbDir = new File(tbPath);
    if (tbDir.exists()) {
        for (File file : tbDir.listFiles()) {
            if (file.getName().endsWith(".pid")) {
                String pidContents = com.google.common.io.Files.readFirstLine(file, Charset.defaultCharset());
                try {
                    pid = BigInteger.valueOf(Long.parseLong(pidContents));
                    if (pid != null && ping(pid) == 0) {
                        killTensorBoard(pid);
                    }
                } catch (NumberFormatException nfe) {
                    LOGGER.log(Level.WARNING,
                            "Expected number in pidfile " + file.getAbsolutePath() + " got " + pidContents);
                }
            }
        }
        FileUtils.deleteDirectory(tbDir);
    }
    tbDir.mkdirs();

    DistributedFileSystemOps dfso = dfsService.getDfsOps();
    try {
        certsPath = tbBasePath + DigestUtils.sha256Hex(projectUserUniquePath + "_certs");
        File certsDir = new File(certsPath);
        certsDir.mkdirs();
        HopsUtils.materializeCertificatesForUserCustomDir(project.getName(), user.getUsername(),
                settings.getHdfsTmpCertDir(), dfso, certificateMaterializer, settings, certsPath);
    } catch (IOException ioe) {
        LOGGER.log(Level.SEVERE,
                "Failed in materializing certificates for " + hdfsUser + " in directory " + certsPath, ioe);
        HopsUtils.cleanupCertificatesForUserCustomDir(user.getUsername(), project.getName(),
                settings.getHdfsTmpCertDir(), certificateMaterializer, certsPath, settings);
    } finally {
        if (dfso != null) {
            dfsService.closeDfsClient(dfso);
        }
    }

    String anacondaEnvironmentPath = settings.getAnacondaProjectDir(project.getName());
    int retries = 3;

    while (retries > 0) {
        if (retries == 0) {
            throw new IOException(
                    "Failed to start TensorBoard for project=" + project.getName() + ", user=" + user.getUid());
        }

        // use pidfile to kill any running servers
        port = ThreadLocalRandom.current().nextInt(40000, 59999);

        String[] command = new String[] { "/usr/bin/sudo", prog, "start", hdfsUser.getName(), hdfsLogdir,
                tbPath, port.toString(), anacondaEnvironmentPath, settings.getHadoopVersion(), certsPath,
                settings.getJavaHome() };

        LOGGER.log(Level.INFO, Arrays.toString(command));

        ProcessBuilder pb = new ProcessBuilder(command);
        try {
            // Send both stdout and stderr to the same stream
            pb.redirectErrorStream(true);

            process = pb.start();

            synchronized (pb) {
                try {
                    // Wait until the launcher bash script has finished
                    process.waitFor(20L, TimeUnit.SECONDS);
                } catch (InterruptedException ex) {
                    LOGGER.log(Level.SEVERE, "Woken while waiting for the TensorBoard to start: {0}",
                            ex.getMessage());
                }
            }

            int exitValue = process.exitValue();
            String pidPath = tbPath + File.separator + port + ".pid";
            File pidFile = new File(pidPath);

            // Read the pid for TensorBoard server
            if (pidFile.exists()) {
                String pidContents = com.google.common.io.Files.readFirstLine(pidFile, Charset.defaultCharset());
                pid = BigInteger.valueOf(Long.parseLong(pidContents));
            }

            if (exitValue == 0 && pid != null) {
                int maxWait = 10;
                String logFilePath = tbPath + File.separator + port + ".log";
                File logFile = new File(logFilePath);

                while (maxWait > 0) {
                    String logFileContents = com.google.common.io.Files.readFirstLine(logFile,
                            Charset.defaultCharset());
                    // It is not possible to have a fixed wait time before showing the TB,
                    // we need to be sure it has started
                    if (logFile.length() > 0
                            && (logFileContents.contains("Loaded") | logFileContents.contains("Reloader")
                                    | logFileContents.contains("event"))
                            | maxWait == 1) {
                        Thread.currentThread().sleep(5000);
                        TensorBoardDTO tensorBoardDTO = new TensorBoardDTO();
                        String host = null;
                        try {
                            host = InetAddress.getLocalHost().getHostAddress();
                        } catch (UnknownHostException ex) {
                            Logger.getLogger(TensorBoardProcessMgr.class.getName()).log(Level.SEVERE, null, ex);
                        }
                        tensorBoardDTO.setEndpoint(host + ":" + port);
                        tensorBoardDTO.setPid(pid);
                        return tensorBoardDTO;
                    } else {
                        Thread.currentThread().sleep(1000);
                        maxWait--;
                    }
                }

                TensorBoardDTO tensorBoardDTO = new TensorBoardDTO();
                tensorBoardDTO.setPid(pid);
                String host = null;
                try {
                    host = InetAddress.getLocalHost().getHostAddress();
                } catch (UnknownHostException ex) {
                    Logger.getLogger(TensorBoardProcessMgr.class.getName()).log(Level.SEVERE, null, ex);
                }
                tensorBoardDTO.setEndpoint(host + ":" + port);
                return tensorBoardDTO;
            } else {
                LOGGER.log(Level.SEVERE,
                        "Failed starting TensorBoard got exitcode " + exitValue + " retrying on new port");
                if (pid != null) {
                    this.killTensorBoard(pid);
                }
                pid = null;
            }
        } catch (Exception ex) {
            LOGGER.log(Level.SEVERE, "Problem starting TensorBoard: {0}", ex);
            if (process != null) {
                process.destroyForcibly();
            }
        } finally {
            retries--;
        }
    }

    // Failed to start TensorBoard, make sure there is no process running for it! (This should not be needed)
    if (pid != null && this.ping(pid) == 0) {
        this.killTensorBoard(pid);
    }

    // Certificates cleanup in case they were materialized but no TB started successfully
    dfso = dfsService.getDfsOps();
    certsPath = tbBasePath + DigestUtils.sha256Hex(projectUserUniquePath + "_certs");
    File certsDir = new File(certsPath);
    certsDir.mkdirs();
    try {
        HopsUtils.cleanupCertificatesForUserCustomDir(user.getUsername(), project.getName(),
                settings.getHdfsTmpCertDir(), certificateMaterializer, certsPath, settings);
    } finally {
        if (dfso != null) {
            dfsService.closeDfsClient(dfso);
        }
    }
    return null;
}
From source file: info.magnolia.integrationtests.uitest.AbstractMagnoliaUITest.java

protected static void delay(final int delayInSeconds, final String motivation) {
    log.debug("Delaying for {}s. Motivation: {}", delayInSeconds, motivation);
    try {
        Thread.sleep(delayInSeconds * 1000);
    } catch (InterruptedException e) {
        fail(e.getMessage());
    }
}
From source file: org.cloudfoundry.maven.Logs.java

@Override
protected void doExecute() throws MojoExecutionException {
    try {
        getLog().info(String.format("Getting logs for '%s'", getAppname()));

        LoggingListener listener = new LoggingListener();
        getClient().streamLogs(getAppname(), listener);

        synchronized (listener) {
            try {
                listener.wait();
            } catch (InterruptedException e) {
                throw new MojoExecutionException("Interrupted while streaming logs", e);
            }
        }
    } catch (CloudFoundryException e) {
        if (HttpStatus.NOT_FOUND.equals(e.getStatusCode())) {
            throw new MojoExecutionException(String.format("Application '%s' does not exist", getAppname()), e);
        } else {
            throw new MojoExecutionException(String.format(
                    "Error getting logs for application '%s'. Error message: '%s'. Description: '%s'",
                    getAppname(), e.getMessage(), e.getDescription()), e);
        }
    }
}
From source file: test.nov21.configuration.AbstractPage.java

protected ExpectedCondition<WebElement> visibilityOfElementLocated(final By by) {
    return new ExpectedCondition<WebElement>() {
        @Override
        public WebElement apply(WebDriver driver) {
            try {
                Thread.sleep(3000);
            } catch (InterruptedException e) {
                LOG.error(e.getMessage());
            }
            WebElement element = getDriver().findElement(by);
            return element.isDisplayed() ? element : null;
        }
    };
}
From source file: au.org.ala.layers.util.BatchConsumer.java

@Override
public void run() {
    boolean repeat = true;
    String id = "";
    while (repeat) {
        String currentBatch = null;
        try {
            currentBatch = waitingBatchDirs.take();
            id = new File(currentBatch).getName();

            SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yy hh:mm:ss:SSS");
            String str = sdf.format(new Date());
            BatchProducer.logUpdateEntry(id, "started", "started", str, null);
            writeToFile(currentBatch + "status.txt", "started at " + str, true);
            writeToFile(currentBatch + "started.txt", str, true);

            String fids = readFile(currentBatch + "fids.txt");
            String points = readFile(currentBatch + "points.txt");
            String gridcache = readFile(currentBatch + "gridcache.txt");

            ArrayList<String> sample = null;
            HashMap[] pointSamples = null;
            if ("1".equals(gridcache)) {
                pointSamples = layerIntersectDao.sampling(points, 1);
            } else if ("2".equals(gridcache)) {
                pointSamples = layerIntersectDao.sampling(points, 2);
            } else {
                IntersectCallback callback = new ConsumerCallback(id);
                sample = layerIntersectDao.sampling(fids.split(","), splitStringToDoublesArray(points, ','),
                        callback);
            }

            // convert pointSamples to string array
            if (pointSamples != null) {
                Set columns = new LinkedHashSet();
                for (int i = 0; i < pointSamples.length; i++) {
                    columns.addAll(pointSamples[i].keySet());
                }

                // fids
                fids = "";
                for (Object o : columns) {
                    if (!fids.isEmpty()) {
                        fids += ",";
                    }
                    fids += o;
                }

                // columns
                ArrayList<StringBuilder> sb = new ArrayList<StringBuilder>();
                for (int i = 0; i < columns.size(); i++) {
                    sb.add(new StringBuilder());
                }
                for (int i = 0; i < pointSamples.length; i++) {
                    int pos = 0;
                    for (Object o : columns) {
                        sb.get(pos).append("\n").append(pointSamples[i].get(o));
                        pos++;
                    }
                }

                // format
                sample = new ArrayList<String>();
                for (int i = 0; i < sb.size(); i++) {
                    sample.add(sb.get(i).toString());
                }
            }

            System.out.println("start csv output at " + sdf.format(new Date()));
            BufferedOutputStream bos = new BufferedOutputStream(
                    new FileOutputStream(currentBatch + "sample.csv"));
            IntersectUtil.writeSampleToStream(splitString(fids, ','), splitString(points, ','), sample, bos);
            bos.flush();
            bos.close();
            System.out.println("finish csv output at " + sdf.format(new Date()));

            str = sdf.format(new Date());
            BatchProducer.logUpdateEntry(id, "finished", "finished", str, fids.split(",").length + 1);
            writeToFile(currentBatch + "status.txt", "finished at " + str, true);
            writeToFile(currentBatch + "finished.txt", str, true);
        } catch (InterruptedException e) {
            // thread stop request
            repeat = false;
            break;
        } catch (Exception e) {
            if (currentBatch != null) {
                try {
                    BatchProducer.logUpdateEntry(id, "error", "error", e.getMessage(), null);
                    writeToFile(currentBatch + "status.txt", "error " + e.getMessage(), true);
                    writeToFile(currentBatch + "error.txt", e.getMessage(), true);
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
            e.printStackTrace();
        }
        currentBatch = null;
    }
}
From source file: com.cfets.door.yarn.jboss.JBossApplicationMaster.java

private void finish() {
    for (Thread launchThread : launchThreads) {
        try {
            launchThread.join(10000);
        } catch (InterruptedException e) {
            LOG.info("Exception thrown in thread join: " + e.getMessage());
            e.printStackTrace();
        }
    }

    LOG.info("Application completed. Stopping running containers");
    nmClientAsync.stop();

    LOG.info("Application completed. Signalling finish to RM");

    FinalApplicationStatus appStatus;
    String appMessage = null;
    success = true;
    if (numFailedContainers.get() == 0 && numCompletedContainers.get() == numTotalContainers) {
        appStatus = FinalApplicationStatus.SUCCEEDED;
    } else {
        appStatus = FinalApplicationStatus.FAILED;
        appMessage = "Diagnostics." + ", total=" + numTotalContainers + ", completed="
                + numCompletedContainers.get() + ", allocated=" + numAllocatedContainers.get() + ", failed="
                + numFailedContainers.get();
        success = false;
    }
    try {
        resourceManager.unregisterApplicationMaster(appStatus, appMessage, null);
    } catch (YarnException ex) {
        LOG.log(Level.SEVERE, "Failed to unregister application", ex);
    } catch (IOException e) {
        LOG.log(Level.SEVERE, "Failed to unregister application", e);
    }

    done = true;
    resourceManager.stop();
}
From source file: com.mobeelizer.java.connection.MobeelizerConnectionServiceImpl.java

@Override
public MobeelizerOperationError waitUntilSyncRequestComplete(final String ticket) {
    try {
        for (int i = 0; i < 240; i++) {
            MobeelizerOperationStatus<String> checkStatusResult = executeGetAndGetContent("/checkStatus",
                    new String[] { "ticket", ticket });

            if (checkStatusResult.getError() != null) {
                return checkStatusResult.getError();
            }

            JSONObject json = new JSONObject(checkStatusResult.getContent());
            String status = json.getString("status");

            delegate.logInfo("Check task status: " + status);

            if ("REJECTED".toString().equals(status)) {
                String message = "Check task status success: " + status + " with result "
                        + json.getString("result") + " and message '" + json.getString("message") + "'";
                delegate.logInfo(message);
                return MobeelizerOperationErrorImpl.syncRejected(json.getString("result"),
                        json.getString("message"));
            } else if ("FINISHED".toString().equals(status)) {
                return null;
            }

            try {
                Thread.sleep(100 * i + 500);
            } catch (InterruptedException e) {
                return MobeelizerOperationErrorImpl.other(e.getMessage());
            }
        }
    } catch (JSONException e) {
        return MobeelizerOperationErrorImpl.exception(new IOException(e.getMessage()));
    }
    return null;
}
From source file: org.opencron.server.service.ExecuteService.java

/**
 * Run the jobs in the queue concurrently (SAMETIME run model);
 * returns true only if no job reported failure.
 */
private boolean executeSameTimeJob(final long groupId, final Queue<JobVo> jobQueue) {
    final List<Boolean> result = new ArrayList<Boolean>(0);

    Thread jobThread = new Thread(new Runnable() {
        @Override
        public void run() {
            for (final JobVo jobVo : jobQueue) {
                // launch each job in its own thread
                Thread thread = new Thread(new Runnable() {
                    public void run() {
                        result.add(doFlowJob(jobVo, groupId));
                    }
                });
                thread.start();
            }
        }
    });
    jobThread.start();

    // wait for the dispatcher thread that starts the jobs
    try {
        jobThread.join();
    } catch (InterruptedException e) {
        logger.error("[opencron] job rumModel with SAMETIME error:{}", e.getMessage());
    }

    return !result.contains(false);
}
From source file: com.aol.advertising.qiao.injector.file.watcher.QiaoFileManager.java

@Override
public void run() {
    while (running.get()) {
        try {
            FileOperationEvent event = getNextEvent();
            if (event != null) {
                switch (event.eventType) {
                case MOVE_FILE:
                    doneFileHandler.moveFileToDoneDirIfExists(event.filePath, event.checksum);
                    break;
                case RENAME_FILE:
                    renameFile(event.filePath, event.newfilePath);
                    break;
                default:
                    logger.error("invalid event type => " + event.eventType);
                }
                continue;
            }

            Path file = getNextFile();
            if (file == null) {
                CommonUtils.sleepQuietly(fileCheckDelayMillis);
                continue;
            }

            if (Files.notExists(file)) {
                if (logger.isDebugEnabled())
                    logger.debug("file " + file + " does not exist");
                continue;
            }

            long checksum = CommonUtils.checksumOptionalylUseFileLength(file.toFile(), this.checksumByteLength);
            if (isFileDone(file, checksum)) {
                doneFileHandler.moveFileToDoneDirIfExists(file, checksum);
            } else {
                if (logger.isDebugEnabled())
                    logger.debug("skipped " + file.toString() + " - not done");
            }
        } catch (InterruptedException e) {
            logger.info("interrupted");
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    logger.info(this.getClass().getSimpleName() + " terminated");
}
From source file: com.reactivetechnologies.analytics.core.handlers.ModelCombinerComponent.java

/**
 * Runs a cluster wide model collection, and generates a combined (ensembled/voted/evaluated) classifier model.
 * The generated model is persisted in database, only if it is different than the ones already present.
 * @return Persisted model Id, or "" if not persisted in this run
 * @throws EngineException
 */
public CombinerResult runTask() throws EngineException {
    log.info("[ensembleModelTask] task starting..");
    String modelId = "";
    CombinerResult result = CombinerResult.IGNORED;
    try {
        boolean done = tryMemberSnapshot(10, TimeUnit.MINUTES);
        if (done) {
            modelId = ensembleModels();
            if (modelId != null) {
                result = CombinerResult.MODEL_CREATED;
                result.setModelId(modelId);
            }
        } else {
            log.info("[ensembleModelTask] task ignored.. ");
        }
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        log.debug("", e);
    } catch (TimeoutException e) {
        log.warn("[ensembleModelTask] task timed out. Generated model may be inconsistent", e);
        result = CombinerResult.MODEL_CREATED;
    } catch (EngineException e) {
        if (e.getCause() instanceof DuplicateKeyException) {
            log.warn(e.getMessage());
            // log.debug(e.getMessage(), e.getCause());
            result = CombinerResult.MODEL_EXISTS;
            result.setModelId(e.getCause().getMessage());
        } else {
            throw e;
        }
    }
    return result;
}