List of usage examples for java.lang.InterruptedException getCause()
public synchronized Throwable getCause()
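getCause() is inherited from java.lang.Throwable: it returns the throwable that caused this exception, or null if the cause is nonexistent or unknown. In most of the examples below, getCause() is actually called on an ExecutionException that wraps the failure of a Future, while the InterruptedException branch handles interruption of the waiting thread. The following minimal sketch shows that common pattern; it is not taken from any of the projects below, and the task body and IllegalStateException are illustrative placeholders.

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class GetCauseExample {
    public static void main(String[] args) {
        ExecutorService executor = Executors.newSingleThreadExecutor();

        // A task that fails; the Future captures the exception it throws.
        Callable<String> task = () -> {
            throw new IllegalStateException("simulated task failure");
        };
        Future<String> future = executor.submit(task);

        try {
            future.get();
        } catch (InterruptedException e) {
            // The waiting thread itself was interrupted: restore the flag.
            Thread.currentThread().interrupt();
        } catch (ExecutionException e) {
            // getCause() (inherited from Throwable) returns the exception
            // thrown inside the task, here the IllegalStateException.
            Throwable cause = e.getCause();
            System.out.println("Task failed with: " + cause);
        } finally {
            executor.shutdown();
        }
    }
}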
From source file: org.talend.oozie.scheduler.controller.ExecuteJobCompositeController.java

private String doRunJob(final JobContext jobContext) {
    Display.getDefault().asyncExec(new Runnable() {

        @Override
        public void run() {
            final String jobName = jobContext.getJobName();
            Job runJob = new Job(jobName) {

                @Override
                protected IStatus run(IProgressMonitor monitor) {
                    StringBuffer output = new StringBuffer("");
                    IStatus status = Status.OK_STATUS;
                    try {
                        monitor.beginTask(TOozieOutputMessages.MSG_OUTPUT_STARTUP, 100);
                        // startupRemoteJob(output);
                        monitor.worked(10);
                        monitor.subTask(TOozieOutputMessages.MSG_OUTPUT_DEPLOYING);
                        updateAllEnabledOrNot(OozieJobProcessStatus.DEPLOYING, jobName);
                        deployJobOnHadoop(output, jobContext);
                        monitor.worked(30);
                        status = runRemoteJob(monitor, jobContext, output);
                        return status;
                    } catch (InterruptedException e) {
                        updateAllEnabledOrNot(OozieJobProcessStatus.FAILED, jobName);
                        output.append(outputLogWithPrefixDate(e.getMessage()));
                        updateOutputTextContents(output.toString(), jobName);
                        ExceptionHandler.process(e);
                    } catch (JobSubmissionException e) {
                        updateAllEnabledOrNot(OozieJobProcessStatus.FAILED, jobName);
                        output.append(outputLogWithPrefixDate(e.getMessage()));
                        output.append(outputLogWithPrefixDate(TOozieOutputMessages.MSG_ERROR_ERROR_SETTINGS));
                        if (e.getCause() instanceof OozieClientException) {
                            OozieClientException ooException = (OozieClientException) e.getCause();
                            output.append(outputLogWithPrefixDate(
                                    ooException.getErrorCode() + ". " + ooException.getMessage()));
                        } else {
                            output.append(outputLogWithPrefixDate(e.getCause().getMessage()));
                        }
                        updateOutputTextContents(output.toString(), jobName);
                    } catch (URISyntaxException e) {
                        updateAllEnabledOrNot(OozieJobProcessStatus.FAILED, jobName);
                        output.append(outputLogWithPrefixDate(e.getMessage()));
                        updateOutputTextContents(output.toString(), jobName);
                    } catch (OozieJobDeployException e) {
                        updateAllEnabledOrNot(OozieJobProcessStatus.FAILED, jobName);
                        output.append(outputLogWithPrefixDate(TOozieOutputMessages.MSG_OUTPUT_DEPLOY_FAILED));
                        output.append(outputLogWithPrefixDate(e.getMessage()));
                        output.append(outputLogWithPrefixDate(e.getCause().getMessage()));
                        updateOutputTextContents(output.toString(), jobName);
                    }
                    return Status.OK_STATUS;
                }
            };
            runJob.setUser(true);
            runJob.schedule();
        }
    });
    return null;
}
From source file: org.apache.hadoop.hbase.regionserver.HStore.java

/**
 * Creates an unsorted list of StoreFile loaded in parallel
 * from the given directory.
 * @throws IOException
 */
private List<StoreFile> loadStoreFiles() throws IOException {
    Collection<StoreFileInfo> files = fs.getStoreFiles(getColumnFamilyName());
    if (files == null || files.size() == 0) {
        return new ArrayList<StoreFile>();
    }

    // initialize the thread pool for opening store files in parallel..
    ThreadPoolExecutor storeFileOpenerThreadPool = this.region
            .getStoreFileOpenAndCloseThreadPool("StoreFileOpenerThread-" + this.getColumnFamilyName());
    CompletionService<StoreFile> completionService = new ExecutorCompletionService<StoreFile>(
            storeFileOpenerThreadPool);

    int totalValidStoreFile = 0;
    for (final StoreFileInfo storeFileInfo : files) {
        // open each store file in parallel
        completionService.submit(new Callable<StoreFile>() {
            @Override
            public StoreFile call() throws IOException {
                StoreFile storeFile = createStoreFileAndReader(storeFileInfo);
                return storeFile;
            }
        });
        totalValidStoreFile++;
    }

    ArrayList<StoreFile> results = new ArrayList<StoreFile>(files.size());
    IOException ioe = null;
    try {
        for (int i = 0; i < totalValidStoreFile; i++) {
            try {
                Future<StoreFile> future = completionService.take();
                StoreFile storeFile = future.get();
                long length = storeFile.getReader().length();
                this.storeSize += length;
                this.totalUncompressedBytes += storeFile.getReader().getTotalUncompressedBytes();
                if (LOG.isDebugEnabled()) {
                    LOG.debug("loaded " + storeFile.toStringDetailed());
                }
                results.add(storeFile);
            } catch (InterruptedException e) {
                if (ioe == null)
                    ioe = new InterruptedIOException(e.getMessage());
            } catch (ExecutionException e) {
                if (ioe == null)
                    ioe = new IOException(e.getCause());
            }
        }
    } finally {
        storeFileOpenerThreadPool.shutdownNow();
    }
    if (ioe != null) {
        // close StoreFile readers
        for (StoreFile file : results) {
            try {
                if (file != null)
                    file.closeReader(true);
            } catch (IOException e) {
                LOG.warn(e.getMessage());
            }
        }
        throw ioe;
    }

    return results;
}
From source file: com.cloudant.sync.datastore.DatastoreImpl.java

@Override
public DocumentRevision deleteDocumentFromRevision(final DocumentRevision rev) throws ConflictException {
    Preconditions.checkNotNull(rev, "DocumentRevision cannot be null");
    Preconditions.checkState(isOpen(), "Datastore is closed");

    try {
        DocumentRevision deletedRevision = queue.submitTransaction(new SQLQueueCallable<DocumentRevision>() {
            @Override
            public DocumentRevision call(SQLDatabase db) throws Exception {
                return deleteDocumentInQueue(db, rev.getId(), rev.getRevision());
            }
        }).get();

        if (deletedRevision != null) {
            eventBus.post(new DocumentDeleted(rev, deletedRevision));
        }

        return deletedRevision;
    } catch (InterruptedException e) {
        logger.log(Level.SEVERE, "Failed to delete document", e);
    } catch (ExecutionException e) {
        logger.log(Level.SEVERE, "Failed to delete document", e);
        if (e.getCause() != null) {
            if (e.getCause() instanceof ConflictException) {
                throw (ConflictException) e.getCause();
            }
        }
    }

    return null;
}
From source file: com.gazbert.bxbot.core.engine.TradingEngine.java

private void runMainControlLoop() {

    LOG.info(() -> "Starting Trading Engine...");

    while (keepAlive) {

        try {
            LOG.info(() -> "*** Starting next trade cycle... ***");

            // Emergency Stop Check MUST run at start of every trade cycle.
            if (isEmergencyStopLimitBreached()) {
                break;
            }

            // Execute the Trading Strategies
            for (final TradingStrategy tradingStrategy : tradingStrategiesToExecute) {
                LOG.info(() -> "Executing Trading Strategy ---> " + tradingStrategy.getClass().getSimpleName());
                tradingStrategy.execute();
            }

            LOG.info(() -> "*** Sleeping " + tradeExecutionInterval + "s til next trade cycle... ***");

            try {
                Thread.sleep(tradeExecutionInterval * 1000);
            } catch (InterruptedException e) {
                LOG.warn("Control Loop thread interrupted when sleeping before next trade cycle");
                Thread.currentThread().interrupt();
            }

        } catch (ExchangeNetworkException e) {
            /*
             * We have a network connection issue reported by Exchange Adapter when called directly from
             * Trading Engine. Current policy is to log it and sleep until next trade cycle.
             */
            final String WARNING_MSG = "A network error has occurred in Exchange Adapter! "
                    + "BX-bot will attempt next trade in " + tradeExecutionInterval + "s...";
            LOG.error(WARNING_MSG, e);

            try {
                Thread.sleep(tradeExecutionInterval * 1000);
            } catch (InterruptedException e1) {
                LOG.warn("Control Loop thread interrupted when sleeping before next trade cycle");
                Thread.currentThread().interrupt();
            }

        } catch (TradingApiException e) {
            /*
             * A serious issue has occurred in the Exchange Adapter.
             * Current policy is to log it, send email alert if required, and shutdown bot.
             */
            final String FATAL_ERROR_MSG = "A FATAL error has occurred in Exchange Adapter!";
            LOG.fatal(FATAL_ERROR_MSG, e);
            emailAlerter.sendMessage(CRITICAL_EMAIL_ALERT_SUBJECT,
                    buildCriticalEmailAlertMsgContent(FATAL_ERROR_MSG + DETAILS_ERROR_MSG_LABEL + e.getMessage()
                            + CAUSE_ERROR_MSG_LABEL + e.getCause(), e));
            keepAlive = false;

        } catch (StrategyException e) {
            /*
             * A serious issue has occurred in the Trading Strategy.
             * Current policy is to log it, send email alert if required, and shutdown bot.
             */
            final String FATAL_ERROR_MSG = "A FATAL error has occurred in Trading Strategy!";
            LOG.fatal(FATAL_ERROR_MSG, e);
            emailAlerter.sendMessage(CRITICAL_EMAIL_ALERT_SUBJECT,
                    buildCriticalEmailAlertMsgContent(FATAL_ERROR_MSG + DETAILS_ERROR_MSG_LABEL + e.getMessage()
                            + CAUSE_ERROR_MSG_LABEL + e.getCause(), e));
            keepAlive = false;

        } catch (Exception e) {
            /*
             * A serious and *unexpected* issue has occurred in the Exchange Adapter or Trading Strategy.
             * Current policy is to log it, send email alert if required, and shutdown bot.
             */
            final String FATAL_ERROR_MSG = "An unexpected FATAL error has occurred in Exchange Adapter or Trading Strategy!";
            LOG.fatal(FATAL_ERROR_MSG, e);
            emailAlerter.sendMessage(CRITICAL_EMAIL_ALERT_SUBJECT,
                    buildCriticalEmailAlertMsgContent(FATAL_ERROR_MSG + DETAILS_ERROR_MSG_LABEL + e.getMessage()
                            + CAUSE_ERROR_MSG_LABEL + e.getCause(), e));
            keepAlive = false;
        }
    }

    LOG.fatal("BX-bot is shutting down NOW!");
    synchronized (IS_RUNNING_MONITOR) {
        isRunning = false;
    }
}
From source file: org.apache.hadoop.hbase.regionserver.HStore.java

@Override
public ImmutableCollection<StoreFile> close() throws IOException {
    this.lock.writeLock().lock();
    try {
        // Clear so metrics doesn't find them.
        ImmutableCollection<StoreFile> result = storeEngine.getStoreFileManager().clearFiles();

        if (!result.isEmpty()) {
            // initialize the thread pool for closing store files in parallel.
            ThreadPoolExecutor storeFileCloserThreadPool = this.region
                    .getStoreFileOpenAndCloseThreadPool("StoreFileCloserThread-" + this.getColumnFamilyName());

            // close each store file in parallel
            CompletionService<Void> completionService = new ExecutorCompletionService<Void>(
                    storeFileCloserThreadPool);
            for (final StoreFile f : result) {
                completionService.submit(new Callable<Void>() {
                    @Override
                    public Void call() throws IOException {
                        f.closeReader(true);
                        return null;
                    }
                });
            }

            IOException ioe = null;
            try {
                for (int i = 0; i < result.size(); i++) {
                    try {
                        Future<Void> future = completionService.take();
                        future.get();
                    } catch (InterruptedException e) {
                        if (ioe == null) {
                            ioe = new InterruptedIOException();
                            ioe.initCause(e);
                        }
                    } catch (ExecutionException e) {
                        if (ioe == null)
                            ioe = new IOException(e.getCause());
                    }
                }
            } finally {
                storeFileCloserThreadPool.shutdownNow();
            }
            if (ioe != null)
                throw ioe;
        }
        LOG.info("Closed " + this);
        return result;
    } finally {
        this.lock.writeLock().unlock();
    }
}
From source file: org.apache.hadoop.hbase.regionserver.Store.java

/**
 * Creates an unsorted list of StoreFile loaded in parallel
 * from the given directory.
 * @throws IOException
 */
private List<StoreFile> loadStoreFiles() throws IOException {
    ArrayList<StoreFile> results = new ArrayList<StoreFile>();
    FileStatus files[] = getStoreFiles();

    if (files == null || files.length == 0) {
        return results;
    }
    // initialize the thread pool for opening store files in parallel..
    ThreadPoolExecutor storeFileOpenerThreadPool = this.region
            .getStoreFileOpenAndCloseThreadPool("StoreFileOpenerThread-" + this.family.getNameAsString());
    CompletionService<StoreFile> completionService = new ExecutorCompletionService<StoreFile>(
            storeFileOpenerThreadPool);

    int totalValidStoreFile = 0;
    for (int i = 0; i < files.length; i++) {
        // Skip directories.
        if (files[i].isDir()) {
            continue;
        }
        final Path p = files[i].getPath();
        // Check for empty hfile. Should never be the case but can happen
        // after data loss in hdfs for whatever reason (upgrade, etc.): HBASE-646
        // NOTE: that the HFileLink is just a name, so it's an empty file.
        if (!HFileLink.isHFileLink(p) && this.fs.getFileStatus(p).getLen() <= 0) {
            LOG.warn("Skipping " + p + " because its empty. HBASE-646 DATA LOSS?");
            continue;
        }

        // open each store file in parallel
        completionService.submit(new Callable<StoreFile>() {
            public StoreFile call() throws IOException {
                StoreFile storeFile = new StoreFile(fs, p, conf, cacheConf, family.getBloomFilterType(),
                        dataBlockEncoder, isAssistant());
                passSchemaMetricsTo(storeFile);
                storeFile.createReader();
                return storeFile;
            }
        });
        totalValidStoreFile++;
    }

    try {
        for (int i = 0; i < totalValidStoreFile; i++) {
            Future<StoreFile> future = completionService.take();
            StoreFile storeFile = future.get();
            long length = storeFile.getReader().length();
            this.storeSize += length;
            this.totalUncompressedBytes += storeFile.getReader().getTotalUncompressedBytes();
            if (LOG.isDebugEnabled()) {
                LOG.debug("loaded " + storeFile.toStringDetailed());
            }
            results.add(storeFile);
        }
    } catch (InterruptedException e) {
        throw new IOException(e);
    } catch (ExecutionException e) {
        throw new IOException(e.getCause());
    } finally {
        storeFileOpenerThreadPool.shutdownNow();
    }

    return results;
}
From source file: org.apache.hadoop.hbase.regionserver.Store.java

/**
 * Close all the readers
 *
 * We don't need to worry about subsequent requests because the HRegion holds
 * a write lock that will prevent any more reads or writes.
 *
 * @throws IOException
 */
ImmutableList<StoreFile> close() throws IOException {
    this.lock.writeLock().lock();
    try {
        ImmutableList<StoreFile> result = storefiles;

        // Clear so metrics doesn't find them.
        storefiles = ImmutableList.of();

        if (!result.isEmpty()) {
            // initialize the thread pool for closing store files in parallel.
            ThreadPoolExecutor storeFileCloserThreadPool = this.region.getStoreFileOpenAndCloseThreadPool(
                    "StoreFileCloserThread-" + this.family.getNameAsString());

            // close each store file in parallel
            CompletionService<Void> completionService = new ExecutorCompletionService<Void>(
                    storeFileCloserThreadPool);
            for (final StoreFile f : result) {
                completionService.submit(new Callable<Void>() {
                    public Void call() throws IOException {
                        f.closeReader(true);
                        return null;
                    }
                });
            }

            try {
                for (int i = 0; i < result.size(); i++) {
                    Future<Void> future = completionService.take();
                    future.get();
                }
            } catch (InterruptedException e) {
                throw new IOException(e);
            } catch (ExecutionException e) {
                throw new IOException(e.getCause());
            } finally {
                storeFileCloserThreadPool.shutdownNow();
            }
        }
        LOG.info("Closed " + this);
        return result;
    } finally {
        this.lock.writeLock().unlock();
    }
}
From source file: com.delphix.session.test.ServiceTest.java

private Throwable loginFail(ClientNexus client) {
    Throwable t = null;

    LoginFuture future = client.login();

    try {
        future.get();
        fail("login should have failed");
    } catch (InterruptedException e) {
        fail("login interrupted", e);
    } catch (ExecutionException e) {
        t = e.getCause();
    }

    return t;
}
From source file: com.delphix.session.test.ServiceTest.java

private void login(ClientNexus client) {
    LoginFuture future = client.login();

    try {
        future.get();
    } catch (InterruptedException e) {
        fail("login interrupted", e);
    } catch (ExecutionException e) {
        fail("login failed", e.getCause());
    }
}
From source file: com.delphix.session.test.ServiceTest.java

private void close(ClientNexus client) {
    CloseFuture future = client.close();

    try {
        future.get();
    } catch (InterruptedException e) {
        fail("interrupted while closing", e);
    } catch (ExecutionException e) {
        fail("failed to close", e.getCause());
    }
}