List of usage examples for java.lang.InterruptedException.getCause()
public synchronized Throwable getCause()
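getCause() is inherited from java.lang.Throwable and returns the throwable that caused this exception, or null if the cause is nonexistent or unknown. InterruptedException itself is rarely constructed with a cause; in the examples below, getCause() is mostly called on the ExecutionException thrown by Future.get() in order to recover or rethrow the original failure. The following minimal sketch illustrates that pattern (the class name and the failing task are hypothetical, not taken from any of the sources below):

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class GetCauseExample {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newSingleThreadExecutor();

        // Hypothetical task that always fails; Future.get() wraps the failure
        // in an ExecutionException.
        Callable<String> task = () -> {
            throw new IllegalStateException("task failed");
        };
        Future<String> future = executor.submit(task);

        try {
            future.get();
        } catch (ExecutionException e) {
            // getCause() returns the exception originally thrown inside the task.
            Throwable cause = e.getCause();
            System.out.println("Underlying cause: " + cause);
        } finally {
            executor.shutdown();
        }
    }
}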
From source file:Sandbox.ZipExtractor.ProcessZip.java
private void callableWriteToFile() throws FileNotFoundException, InterruptedException, ExecutionException {
    //private void callableWriteToFile(List<ZipFileEntry> partition, int partNo, ExecutorService refreshExecutor) {
    //    int corePoolSize = 100;
    //    int maxPoolSize = 500;
    //    long keepAliveTime = 5000L;
    //
    //    // Create thread pool for any Manual Refreshes.
    //    ThreadPoolExecutor refreshExecutor = new ThreadPoolExecutor(
    //            corePoolSize,
    //            maxPoolSize,
    //            keepAliveTime,
    //            TimeUnit.MILLISECONDS,
    //            new LinkedBlockingQueue<Runnable>());

    //ExecutorService executorServiceA = Executors.newFixedThreadPool(10);
    ExecutorService executorService = Executors.newCachedThreadPool();

    // List to keep track of all futures (results of Callable)
    List<Future<Boolean>> futures = new ArrayList<Future<Boolean>>();

    int counter = 0;

    for (ZipFileEntry zipFileEntry : practiceLicences) {
        //for (ZipFileEntry zipFileEntry : partition) {
        //System.out.println("\tPartition No: " + partNo + " Processing: " + ++counter);

        //Runnable worker = new WriteToFile("C:\\Temp\\App_Data\\TEST", zipFileEntry);
        //refreshExecutor.execute(worker);

        Callable<Boolean> worker = new WriteToFile("C:\\Temp\\App_Data\\TEST", zipFileEntry);
        Future<Boolean> submit = executorService.submit(worker);
        futures.add(submit);
    }

    // Process futures to create combined list
    for (Future<Boolean> future : futures) {
        try {
            if (future.get().equals(Boolean.FALSE)) {
                throw new FileNotFoundException("Failed to create file.");
            }
        } catch (InterruptedException e) {
            // Write error to log file then re-throw
            throw new InterruptedException(e.getMessage());
        } catch (ExecutionException e) {
            // Write error to log file then re-throw
            throw new ExecutionException(e.getMessage(), e.getCause());
        }
    }

    executorService.shutdown();

    //while (!refreshExecutor.isTerminated()) {
    //}

    //System.out.println("Finished all threads!");
}
From source file:ch.cyberduck.core.s3.S3MultipartCopyFeature.java
@Override
protected void copy(final Path source, final S3Object destination, final TransferStatus status)
        throws BackgroundException {
    try {
        final List<MultipartPart> completed = new ArrayList<MultipartPart>();
        // ID for the initiated multipart upload.
        final MultipartUpload multipart = session.getClient().multipartStartUpload(destination.getBucketName(),
                destination);
        if (log.isDebugEnabled()) {
            log.debug(String.format("Multipart upload started for %s with ID %s", multipart.getObjectKey(),
                    multipart.getUploadId()));
        }
        final long size = status.getLength();
        long remaining = size;
        long offset = 0;
        final List<Future<MultipartPart>> parts = new ArrayList<Future<MultipartPart>>();
        for (int partNumber = 1; remaining > 0; partNumber++) {
            // Last part can be less than 5 MB. Adjust part size.
            final Long length = Math.min(
                    Math.max((size / S3DefaultMultipartService.MAXIMUM_UPLOAD_PARTS), partsize), remaining);
            // Submit to queue
            parts.add(this.submit(source, multipart, partNumber, offset, length));
            remaining -= length;
            offset += length;
        }
        for (Future<MultipartPart> future : parts) {
            try {
                completed.add(future.get());
            } catch (InterruptedException e) {
                log.error("Part upload failed with interrupt failure");
                throw new ConnectionCanceledException(e);
            } catch (ExecutionException e) {
                log.warn(String.format("Part upload failed with execution failure %s", e.getMessage()));
                if (e.getCause() instanceof BackgroundException) {
                    throw (BackgroundException) e.getCause();
                }
                throw new BackgroundException(e.getCause());
            }
        }
        // Combining all the given parts into the final object. Processing of a Complete Multipart Upload
        // request could take several minutes to complete. Because a request could fail after the initial
        // 200 OK response has been sent, it is important that you check the response body to determine
        // whether the request succeeded.
        final MultipartCompleted complete = session.getClient().multipartCompleteUpload(multipart, completed);
        if (log.isDebugEnabled()) {
            log.debug(String.format("Completed multipart upload for %s with checksum %s",
                    complete.getObjectKey(), complete.getEtag()));
        }
    } catch (ServiceException e) {
        throw new S3ExceptionMappingService().map("Cannot copy {0}", e, source);
    } finally {
        pool.shutdown(false);
    }
}
From source file:dk.ange.octave.exec.OctaveExec.java
private RuntimeException getFromFuture(final Future<Void> future) {
    try {
        future.get();
    } catch (final InterruptedException e) {
        final String message = "InterruptedException should not happen";
        log.error(message, e);
        return new RuntimeException(message, e);
    } catch (final ExecutionException e) {
        if (e.getCause() instanceof OctaveException) {
            final OctaveException oe = (OctaveException) e.getCause();
            return reInstantiateException(oe);
        }
        // Can happen when there is an error in a OctaveWriter
        final String message = "ExecutionException should not happen";
        log.error(message, e);
        return new RuntimeException(message, e);
    } catch (final CancellationException e) {
        return e;
    } catch (final RuntimeException e) {
        final String message = "RuntimeException should not happen";
        log.error(message, e);
        return new RuntimeException(message, e);
    }
    return null;
}
From source file:com.mirth.connect.plugins.datatypes.delimited.DelimitedBatchReader.java
/**
 * Finds the next message in the input stream and returns it.
 *
 * @param in
 *            The input stream (it's a BufferedReader, because operations on
 *            it require in.mark()).
 * @param skipHeader
 *            Pass true to skip the configured number of header rows,
 *            otherwise false.
 * @return The next message, or null if there are no more messages.
 * @throws IOException
 * @throws InterruptedException
 */
public String getMessage(final BufferedReader in, final boolean skipHeader, final String batchScriptId)
        throws IOException, InterruptedException {
    char recDelim = reader.getRecordDelimiter().charAt(0);
    int ch;

    // If skipping the header, and the option is configured
    if (skipHeader && batchProperties.getBatchSkipRecords() > 0) {
        for (int i = 0; i < batchProperties.getBatchSkipRecords(); i++) {
            while ((ch = in.read()) != -1 && ((char) ch) != recDelim) {
            }
        }
    }

    StringBuilder message = new StringBuilder();

    if (batchProperties.isBatchSplitByRecord()) {
        // Each record is treated as a message
        reader.getRecord(in, message);
    } else if (StringUtils.isNotEmpty(batchProperties.getBatchMessageDelimiter())) {
        if (batchMessageDelimiter == null) {
            batchMessageDelimiter = StringUtil.unescape(batchProperties.getBatchMessageDelimiter());
        }

        // All records until the message delimiter (or end of input) is a
        // message.
        for (;;) {
            // Get the next record
            ArrayList<String> record = reader.getRecord(in, message);

            if (record == null) {
                break;
            }

            // If the next sequence of characters is the message delimiter
            String lookAhead = reader.peekChars(in, batchMessageDelimiter.length());

            if (lookAhead.equals(batchMessageDelimiter)) {
                // Consume it.
                for (int i = 0; i < batchMessageDelimiter.length(); i++) {
                    ch = reader.getChar(in, null);
                }

                // Append it if it is being included
                if (batchProperties.isBatchMessageDelimiterIncluded()) {
                    message.append(batchMessageDelimiter);
                }

                break;
            }
        }
    } else if (StringUtils.isNotEmpty(batchProperties.getBatchGroupingColumn())) {
        // Each message is a collection of records with the same value in
        // the specified column. The end of the current message occurs when
        // a transition in the value of the specified column occurs.

        // Prime the pump: get the first record, and save the grouping column.
        ArrayList<String> record = reader.getRecord(in, message);

        if (record != null) {
            if (groupingColumnIndex == null) {
                updateGroupingColumnIndex(batchProperties.getBatchGroupingColumn(),
                        serializationProperties.getColumnNames());
            }

            String lastColumnValue = record.get(groupingColumnIndex);

            // Read records until the value in the grouping column changes
            // or there are no more records
            for (;;) {
                StringBuilder recordText = new StringBuilder();
                record = reader.getRecord(in, recordText);

                if (record == null) {
                    break;
                }

                if (!record.get(groupingColumnIndex).equals(lastColumnValue)) {
                    reader.ungetRecord(record, recordText.toString());
                    break;
                }

                message.append(recordText.toString());
            }
        }
    } else if (StringUtils.isNotEmpty(batchProperties.getBatchScript())) {
        try {
            final int batchSkipRecords = batchProperties.getBatchSkipRecords();

            String result = JavaScriptUtil.execute(new JavaScriptTask<String>() {
                @Override
                public String call() throws Exception {
                    Script compiledScript = CompiledScriptCache.getInstance().getCompiledScript(batchScriptId);

                    if (compiledScript == null) {
                        logger.error("Batch script could not be found in cache");
                        return null;
                    } else {
                        Logger scriptLogger = Logger
                                .getLogger(ScriptController.BATCH_SCRIPT_KEY.toLowerCase());
                        Scriptable scope = JavaScriptScopeUtil.getBatchProcessorScope(scriptLogger,
                                batchScriptId,
                                getScopeObjects(in, serializationProperties, skipHeader, batchSkipRecords));
                        return Context.toString(executeScript(compiledScript, scope));
                    }
                }
            });

            if (result != null) {
                message.append(result);
            }
        } catch (InterruptedException e) {
            throw e;
        } catch (JavaScriptExecutorException e) {
            logger.error(e.getCause());
        } catch (Throwable e) {
            logger.error(e);
        }
    } else {
        // There is no batching method configured. Treat the entire input
        // stream as the message.
        logger.warn("No batch splitting method configured (processing entire input as one message)");

        while ((ch = in.read()) != -1) {
            message.append((char) ch);
        }
    }

    String result = message.toString();

    if (result.length() == 0) {
        return null;
    } else {
        return result;
    }
}
From source file:com.anrisoftware.prefdialog.miscswing.docks.dockingframes.core.DockingFramesDock.java
private void doLoadLayout(String name, InputStream stream, PropertyChangeListener... listeners)
        throws LayoutInterruptedException, LayoutLoadingException {
    try {
        SwingWorker<InputStream, InputStream> worker = loadFactory.create(layoutListeners, this, name,
                control, stream);
        for (PropertyChangeListener l : listeners) {
            worker.addPropertyChangeListener(l);
        }
        worker.execute();
        worker.get();
    } catch (InterruptedException e) {
        throw new LayoutInterruptedException(name, e);
    } catch (ExecutionException e) {
        throw new LayoutLoadingException(name, e.getCause());
    }
}
From source file:org.apache.distributedlog.fs.DLFileSystem.java
@Override
public boolean rename(Path src, Path dst) throws IOException {
    String srcLog = getStreamName(src);
    String dstLog = getStreamName(dst);
    try {
        namespace.renameLog(srcLog, dstLog).get();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new DLInterruptedException("Interrupted at renaming " + srcLog + " to " + dstLog, e);
    } catch (ExecutionException e) {
        if (e.getCause() instanceof IOException) {
            throw (IOException) e.getCause();
        } else {
            throw new IOException("Failed to rename " + srcLog + " to " + dstLog, e.getCause());
        }
    }
    return true;
}
From source file:com.sybase365.mobiliser.custom.project.channels.HttpChannelEnd.java
@SuppressWarnings("unchecked") @Override//from w w w .ja va2 s . com public void processRequest(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException { LOG.debug("Incoming {} request", request.getMethod()); checkAndPrepare(request, response, false); final MultiValueMap<String, String> result = (MultiValueMap<String, String>) this.converter.read(null, new ServletServerHttpRequest(request)); final List<String> textList = result.get("text"); final List<String> fromList = result.get("from"); final List<String> toList = result.get("to"); final List<String> typeList = result.get("type"); if (textList == null || textList.isEmpty()) { throw new MissingServletRequestParameterException("text", "string"); } if (fromList == null || fromList.isEmpty()) { throw new MissingServletRequestParameterException("from", "string"); } if (toList == null || toList.isEmpty()) { throw new MissingServletRequestParameterException("to", "string"); } final String type; if (null == typeList || typeList.isEmpty()) { type = "sms"; } else { type = typeList.get(0); } final Message req = this.messagingEngine.parseSimpleTextMessage(type, textList.get(0)); req.setSender(fromList.get(0)); req.setRecipient(toList.get(0)); if (LOG.isDebugEnabled()) { LOG.debug("{} message received for {} from {}", new Object[] { type, req.getRecipient(), req.getSender() }); } final Future<Message> responseMessage = this.receiveCallback.receiveAndRespondMessage(req, this.channelId, this.incomingChannelId); if (LOG.isDebugEnabled()) { LOG.debug("Handed off message to {} for {} awaiting response", this.receiveCallback, this.incomingChannelId); } final Message message; try { message = responseMessage.get(); if (message == null) { LOG.warn("Timed out waiting for response from {}", responseMessage); throw new NestedServletException("Timed out waiting for message"); } } catch (final InterruptedException e) { Thread.currentThread().interrupt(); // reset flag throw new NestedServletException("Interrupted during processing", e); } catch (final ExecutionException e) { if (e.getCause() instanceof InterruptedException) { throw new NestedServletException( // NOSONAR "Interrupted during processing", e.getCause()); } throw new NestedServletException("Processing message failed", // NOSONAR e.getCause()); } LOG.debug("Writing response back to client"); final LinkedMultiValueMap<String, Object> responseMap = new LinkedMultiValueMap<String, Object>(); responseMap.add("from", message.getSender().getAddress()); responseMap.add("to", message.getRecipient().getAddress()); if (message instanceof SmsMessage) { responseMap.add("text", new String(((SmsMessage) message).getText().getContent(), ((SmsMessage) message).getText().getCharset())); } else if (message instanceof UssdTextMessage) { responseMap.add("text", new String(((UssdTextMessage) message).getText().getContent(), ((UssdTextMessage) message).getText().getCharset())); } this.converter.write(responseMap, this.mediaType, new ServletServerHttpResponse(response)); }
From source file:ch.algotrader.service.ib.IBNativeHistoricalDataServiceImpl.java
private List<Bar> getBarsBlocking(final Promise<List<Bar>> promise) {
    try {
        int requestTimeout = this.iBConfig.getRequestTimeout();
        return promise.get(requestTimeout, TimeUnit.SECONDS);
    } catch (InterruptedException ex) {
        Thread.currentThread().interrupt();
        throw new ServiceException(ex);
    } catch (TimeoutException ex) {
        throw new ExternalServiceException("Service request timeout");
    } catch (ExecutionException ex) {
        throw IBNativeSupport.rethrow(ex.getCause());
    }
}
From source file:org.codelibs.fess.crawler.client.http.HcHttpClientTest.java
public void test_doGet_accessTimeoutTarget() {
    HcHttpClient client = new HcHttpClient() {
        @Override
        protected ResponseData processHttpMethod(final String url, final HttpUriRequest httpRequest) {
            try {
                Thread.sleep(10000);
            } catch (InterruptedException e) {
                throw new CrawlingAccessException(e);
            }
            return null;
        }
    };
    client.setAccessTimeout(1);
    try {
        client.doGet("http://localhost/");
        fail();
    } catch (CrawlingAccessException e) {
        assertTrue(e.getCause() instanceof InterruptedException);
    }
}
From source file:org.codelibs.fess.crawler.client.http.HcHttpClientTest.java
public void test_doHead_accessTimeoutTarget() {
    HcHttpClient client = new HcHttpClient() {
        @Override
        protected ResponseData processHttpMethod(final String url, final HttpUriRequest httpRequest) {
            try {
                Thread.sleep(10000);
            } catch (InterruptedException e) {
                throw new CrawlingAccessException(e);
            }
            return null;
        }
    };
    client.setAccessTimeout(1);
    try {
        client.doHead("http://localhost/");
        fail();
    } catch (CrawlingAccessException e) {
        assertTrue(e.getCause() instanceof InterruptedException);
    }
}