List of usage examples for the java.util.concurrent.CompletionException constructor
public CompletionException(Throwable cause)
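This constructor wraps a cause (usually a checked exception) in an unchecked CompletionException so the failure can escape a lambda that is not allowed to throw checked exceptions, such as the Supplier or Runnable passed to CompletableFuture. All of the examples below follow that pattern. A minimal, self-contained sketch of the idea (loadGreeting is a hypothetical helper used only for illustration):

import java.io.IOException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;

public class CompletionExceptionDemo {
    public static void main(String[] args) {
        CompletableFuture<String> future = CompletableFuture.supplyAsync(() -> {
            try {
                return loadGreeting();                // a Supplier cannot throw checked exceptions
            } catch (IOException e) {
                throw new CompletionException(e);     // wrap the checked cause in an unchecked CompletionException
            }
        });

        try {
            future.join();
        } catch (CompletionException e) {
            // join() rethrows the CompletionException; the original failure is its cause.
            System.out.println("failed: " + e.getCause());   // failed: java.io.IOException: boom
        }
    }

    // Hypothetical operation that fails with a checked exception.
    private static String loadGreeting() throws IOException {
        throw new IOException("boom");
    }
}

Calling join() on a future that failed this way rethrows the CompletionException, while get() throws an ExecutionException; in both cases getCause() yields the original failure.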
From source file:io.pravega.controller.store.stream.ZKStreamMetadataStore.java
@Override
public CompletableFuture<Void> addUpdateStreamForAutoStreamCut(final String scope, final String stream,
        final RetentionPolicy retentionPolicy, final OperationContext context, final Executor executor) {
    Preconditions.checkNotNull(retentionPolicy);
    int bucket = getBucket(scope, stream);
    String retentionPath = String.format(ZKStoreHelper.RETENTION_PATH, bucket, encodedScopedStreamName(scope, stream));
    byte[] serialize = SerializationUtils.serialize(retentionPolicy);

    return storeHelper.getData(retentionPath)
            .exceptionally(e -> {
                if (e instanceof StoreException.DataNotFoundException) {
                    return null;
                } else {
                    throw new CompletionException(e);
                }
            })
            .thenCompose(data -> {
                if (data == null) {
                    return storeHelper.createZNodeIfNotExist(retentionPath, serialize);
                } else {
                    return storeHelper.setData(retentionPath, new Data<>(serialize, data.getVersion()));
                }
            });
}
From source file:io.pravega.controller.store.stream.ZKStreamMetadataStore.java
@Override
public CompletableFuture<Void> removeStreamFromAutoStreamCut(final String scope, final String stream,
        final OperationContext context, final Executor executor) {
    int bucket = getBucket(scope, stream);
    String retentionPath = String.format(ZKStoreHelper.RETENTION_PATH, bucket, encodedScopedStreamName(scope, stream));

    return storeHelper.deleteNode(retentionPath)
            .exceptionally(e -> {
                if (e instanceof StoreException.DataNotFoundException) {
                    return null;
                } else {
                    throw new CompletionException(e);
                }
            });
}
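Both ZooKeeper examples above use exceptionally(...) the same way: one expected failure type is mapped to a benign value, and every other failure is rethrown as a CompletionException so the returned future still completes exceptionally. A minimal, self-contained sketch of that shape with hypothetical names (lookup, lookupOrNull); note that a dependent stage may see the raw exception or a CompletionException wrapper, so the sketch unwraps defensively:

import java.util.NoSuchElementException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;

public class ExceptionallySketch {
    // Hypothetical async lookup that fails with NoSuchElementException when nothing is found.
    static CompletableFuture<String> lookup(String key) {
        return CompletableFuture.supplyAsync(() -> {
            if (key.isEmpty()) {
                throw new NoSuchElementException("no value for empty key");
            }
            return "value-of-" + key;
        });
    }

    // Treat "not found" as an expected outcome (null); rethrow anything else wrapped
    // in CompletionException so downstream stages still observe the failure.
    static CompletableFuture<String> lookupOrNull(String key) {
        return lookup(key).exceptionally(e -> {
            Throwable cause = (e instanceof CompletionException && e.getCause() != null) ? e.getCause() : e;
            if (cause instanceof NoSuchElementException) {
                return null;
            }
            throw new CompletionException(cause);
        });
    }

    public static void main(String[] args) {
        System.out.println(lookupOrNull("demo").join());  // value-of-demo
        System.out.println(lookupOrNull("").join());      // null
    }
}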
From source file:eu.interedition.collatex.tools.CollationServer.java
public void service(Request request, Response response) throws Exception {
    final Deque<String> path = path(request);
    if (path.isEmpty() || !"collate".equals(path.pop())) {
        response.sendError(404);
        return;
    }

    final SimpleCollation collation = JsonProcessor.read(request.getInputStream());
    if (maxCollationSize > 0) {
        for (SimpleWitness witness : collation.getWitnesses()) {
            final int witnessLength = witness.getTokens().stream()
                    .filter(t -> t instanceof SimpleToken)
                    .map(t -> (SimpleToken) t)
                    .mapToInt(t -> t.getContent().length())
                    .sum();
            if (witnessLength > maxCollationSize) {
                response.sendError(413, "Request Entity Too Large");
                return;
            }
        }
    }

    response.suspend(60, TimeUnit.SECONDS, new EmptyCompletionHandler<>());
    collationThreads.submit(() -> {
        try {
            final VariantGraph graph = new VariantGraph();
            collation.collate(graph);

            // CORS support
            response.setHeader("Access-Control-Allow-Origin",
                    Optional.ofNullable(request.getHeader("Origin")).orElse("*"));
            response.setHeader("Access-Control-Allow-Methods",
                    Optional.ofNullable(request.getHeader("Access-Control-Request-Method"))
                            .orElse("GET, POST, HEAD, OPTIONS"));
            response.setHeader("Access-Control-Allow-Headers",
                    Optional.ofNullable(request.getHeader("Access-Control-Request-Headers"))
                            .orElse("Content-Type, Accept, X-Requested-With"));
            response.setHeader("Access-Control-Max-Age", "86400");
            response.setHeader("Access-Control-Allow-Credentials", "true");

            final String clientAccepts = Optional.ofNullable(request.getHeader(Header.Accept)).orElse("");

            if (clientAccepts.contains("text/plain")) {
                response.setContentType("text/plain");
                response.setCharacterEncoding("utf-8");
                try (final Writer out = response.getWriter()) {
                    new SimpleVariantGraphSerializer(graph).toDot(out);
                }
                response.resume();
            } else if (clientAccepts.contains("application/tei+xml")) {
                XMLStreamWriter xml = null;
                try {
                    response.setContentType("application/tei+xml");
                    try (OutputStream responseStream = response.getOutputStream()) {
                        xml = XMLOutputFactory.newInstance().createXMLStreamWriter(responseStream);
                        xml.writeStartDocument();
                        new SimpleVariantGraphSerializer(graph).toTEI(xml);
                        xml.writeEndDocument();
                    } finally {
                        if (xml != null) {
                            xml.close();
                        }
                    }
                    response.resume();
                } catch (XMLStreamException e) {
                    e.printStackTrace();
                }
            } else if (clientAccepts.contains("application/graphml+xml")) {
                XMLStreamWriter xml = null;
                try {
                    response.setContentType("application/graphml+xml");
                    try (OutputStream responseStream = response.getOutputStream()) {
                        xml = XMLOutputFactory.newInstance().createXMLStreamWriter(responseStream);
                        xml.writeStartDocument();
                        new SimpleVariantGraphSerializer(graph).toGraphML(xml);
                        xml.writeEndDocument();
                    } finally {
                        if (xml != null) {
                            xml.close();
                        }
                    }
                    response.resume();
                } catch (XMLStreamException e) {
                    e.printStackTrace();
                }
            } else if (clientAccepts.contains("image/svg+xml")) {
                if (dotPath == null) {
                    response.sendError(204);
                    response.resume();
                } else {
                    final StringWriter dot = new StringWriter();
                    new SimpleVariantGraphSerializer(graph).toDot(dot);

                    final Process dotProc = new ProcessBuilder(dotPath, "-Grankdir=LR", "-Gid=VariantGraph", "-Tsvg").start();
                    final StringWriter errors = new StringWriter();

                    CompletableFuture.allOf(
                            CompletableFuture.runAsync(() -> {
                                final char[] buf = new char[8192];
                                try (final Reader errorStream = new InputStreamReader(dotProc.getErrorStream())) {
                                    int len;
                                    while ((len = errorStream.read(buf)) >= 0) {
                                        errors.write(buf, 0, len);
                                    }
                                } catch (IOException e) {
                                    throw new CompletionException(e);
                                }
                            }, processThreads),
                            CompletableFuture.runAsync(() -> {
                                try (final Writer dotProcStream = new OutputStreamWriter(dotProc.getOutputStream(), "UTF-8")) {
                                    dotProcStream.write(dot.toString());
                                } catch (IOException e) {
                                    throw new CompletionException(e);
                                }
                            }, processThreads),
                            CompletableFuture.runAsync(() -> {
                                response.setContentType("image/svg+xml");
                                final byte[] buf = new byte[8192];
                                try (final InputStream in = dotProc.getInputStream();
                                     final OutputStream out = response.getOutputStream()) {
                                    int len;
                                    while ((len = in.read(buf)) >= 0) {
                                        out.write(buf, 0, len);
                                    }
                                } catch (IOException e) {
                                    throw new CompletionException(e);
                                }
                            }, processThreads),
                            CompletableFuture.runAsync(() -> {
                                try {
                                    if (!dotProc.waitFor(60, TimeUnit.SECONDS)) {
                                        throw new CompletionException(new RuntimeException(
                                                "dot processing took longer than 60 seconds, process was timed out."));
                                    }
                                    if (dotProc.exitValue() != 0) {
                                        throw new CompletionException(new IllegalStateException(errors.toString()));
                                    }
                                } catch (InterruptedException e) {
                                    throw new CompletionException(e);
                                }
                            }, processThreads)
                    ).exceptionally(t -> {
                        t.printStackTrace();
                        return null;
                    }).thenRunAsync(response::resume, processThreads);
                }
            } else {
                response.setContentType("application/json");
                try (final OutputStream responseStream = response.getOutputStream()) {
                    JsonProcessor.write(graph, responseStream);
                }
                response.resume();
            }
        } catch (IOException e) {
            // FIXME: ignored
        }
    });
}
From source file:io.pravega.controller.server.eventProcessor.ControllerEventProcessors.java
private CompletableFuture<Void> handleOrphanedReaders(final EventProcessorGroup<? extends ControllerEvent> group,
                                                      final Supplier<Set<String>> processes) {
    return withRetriesAsync(() -> CompletableFuture.supplyAsync(() -> {
        try {
            return group.getProcesses();
        } catch (CheckpointStoreException e) {
            if (e.getType().equals(CheckpointStoreException.Type.NoNode)) {
                return Collections.<String>emptySet();
            }
            throw new CompletionException(e);
        }
    }, executor), RETRYABLE_PREDICATE, Integer.MAX_VALUE, executor)
            .thenComposeAsync(groupProcesses -> withRetriesAsync(() -> CompletableFuture.supplyAsync(() -> {
                try {
                    return new ImmutablePair<>(processes.get(), groupProcesses);
                } catch (Exception e) {
                    log.error(String.format("Error fetching current processes%s", group.toString()), e);
                    throw new CompletionException(e);
                }
            }, executor), RETRYABLE_PREDICATE, Integer.MAX_VALUE, executor))
            .thenComposeAsync(pair -> {
                Set<String> activeProcesses = pair.getLeft();
                Set<String> registeredProcesses = pair.getRight();

                if (registeredProcesses == null || registeredProcesses.isEmpty()) {
                    return CompletableFuture.completedFuture(null);
                }

                if (activeProcesses != null) {
                    registeredProcesses.removeAll(activeProcesses);
                }

                List<CompletableFuture<Void>> futureList = new ArrayList<>();
                for (String process : registeredProcesses) {
                    futureList.add(withRetriesAsync(() -> CompletableFuture.runAsync(() -> {
                        try {
                            group.notifyProcessFailure(process);
                        } catch (CheckpointStoreException e) {
                            log.error(String.format(
                                    "Error notifying failure of process=%s in event processor group %s",
                                    process, group.toString()), e);
                            throw new CompletionException(e);
                        }
                    }, executor), RETRYABLE_PREDICATE, Integer.MAX_VALUE, executor));
                }
                return FutureHelpers.allOf(futureList);
            });
}
From source file:io.sqp.client.impl.SqpConnectionImpl.java
CompletableFuture<Void> send(SqpMessage msg, ResponseHandler responseHandler) {
    // TODO: use a timeout for response handler!
    if (responseHandler != null) {
        _messageHandler.addResponseHandler(responseHandler);
    }
    // TODO: optionally depend on previous future, so consecutive operations aren't executed if one fails?
    // as we always use the same, single threaded ExecutionService to run this send operation, all operations
    // are queued. This allows async-operations while assuring their order is kept
    return CompletableFuture.runAsync(() -> {
        try {
            if (_config.getProtocolFormat() == DataFormat.Binary) {
                _messageEncoder.encode(_endpoint.getSendStream(), DataFormat.Binary, msg);
            } else {
                _messageEncoder.encode(_endpoint.getSendWriter(), msg);
            }
        } catch (IOException e) {
            throw new CompletionException(new SqpIOException(e));
        }
    }, _sendingService).exceptionally(new FailHandler(this));
}
From source file:io.sqp.client.impl.SqpConnectionImpl.java
CompletableFuture<Void> send(InputStream stream) {
    // TODO: optionally depend on previous future, so consecutive operations aren't executed if one fails?
    int bufSize = Math.min(MAX_MSG_BUFFER_SIZE, _session.getMaxBinaryMessageBufferSize());
    return CompletableFuture.runAsync(() -> {
        byte[] buffer = new byte[bufSize];
        int bytesRead;
        try (OutputStream output = _endpoint.getSendStream()) {
            while ((bytesRead = stream.read(buffer)) > 0) {
                output.write(buffer, 0, bytesRead);
            }
        } catch (IOException e) {
            throw new CompletionException(new SqpIOException(e));
        }
    }, _sendingService).exceptionally(new FailHandler(this));
}
From source file:io.sqp.client.impl.SqpConnectionImpl.java
CompletableFuture<Void> send(Reader reader) {
    int bufSize = Math.min(MAX_MSG_BUFFER_SIZE, _session.getMaxTextMessageBufferSize());
    return CompletableFuture.runAsync(() -> {
        char[] buffer = new char[bufSize];
        int bytesRead;
        try (Writer output = _endpoint.getSendWriter()) {
            while ((bytesRead = reader.read(buffer)) > 0) {
                output.write(buffer, 0, bytesRead);
            }
        } catch (IOException e) {
            throw new CompletionException(new SqpIOException(e));
        }
    }, _sendingService).exceptionally(new FailHandler(this));
}
From source file:com.xylocore.cassandra.query.TableScanQuery.java
/**
 * FILLIN
 *
 * @param aExecutionContext
 * @param aParameters
 *
 * @return
 */
public CompletableFuture<Void> execute(TableScanQueryExecutionContext<T> aExecutionContext,
                                       Map<String, Object> aParameters) {
    CompletableFuture<Void> myPartitionFuture = null;

    if (clusteringQueryNeeded) {
        Consumer<List<PartitionKeyInfo>> myPartitionKeyProcessor = (myPartitionKeyInfos) -> {
            ClusterQueryState myState = (ClusterQueryState) myPartitionKeyInfos.get(0);
            CompletableFuture<Void> myClusterFuture = clusterPagedQuery.execute(aExecutionContext, myState.getPartitionKeys());
            try {
                myClusterFuture.get();
            } catch (Exception myException) {
                throw new CompletionException(myException);
            }
        };

        PagedQueryExecutionContext<PartitionKeyInfo> myPartitionExecutionContext =
                PagedQueryExecutionContextBuilder.builder(PartitionKeyInfo.class)
                        .entityCreator(() -> { return new ClusterQueryState(); })
                        .reuseEntity(true)
                        .entityExtractor(this::partitionKeyExtractor)
                        .entityFilter(aExecutionContext.getPartitionKeyFilter())
                        .entityProcessor(myPartitionKeyProcessor)
                        .build();

        myPartitionFuture = partitionPagedQuery.execute(myPartitionExecutionContext);
    } else {
        myPartitionFuture = standalonePagedQuery.execute(aExecutionContext);
    }

    return myPartitionFuture;
}
From source file:io.pravega.controller.store.stream.PersistentStreamBase.java
@Override
public CompletableFuture<TxnStatus> checkTransactionStatus(final UUID txId) {
    return verifyLegalState().thenCompose(v -> getTransactionEpoch(txId).handle((epoch, ex) -> {
        if (ex != null && ExceptionHelpers.getRealException(ex) instanceof DataNotFoundException) {
            return null;
        } else if (ex != null) {
            throw new CompletionException(ex);
        }
        return epoch;
    }).thenCompose(x -> {
        if (x == null) {
            return getCompletedTxnStatus(txId);
        } else {
            return checkTransactionStatus(x, txId);
        }
    }));
}
From source file:io.pravega.controller.store.stream.PersistentStreamBase.java
private CompletableFuture<TxnStatus> checkTransactionStatus(final int epoch, final UUID txId) {
    return verifyLegalState().thenCompose(v -> getActiveTx(epoch, txId).handle((ok, ex) -> {
        if (ex != null && ExceptionHelpers.getRealException(ex) instanceof DataNotFoundException) {
            return TxnStatus.UNKNOWN;
        } else if (ex != null) {
            throw new CompletionException(ex);
        }
        return ActiveTxnRecord.parse(ok.getData()).getTxnStatus();
    }).thenCompose(x -> {
        if (x.equals(TxnStatus.UNKNOWN)) {
            return getCompletedTxnStatus(txId);
        } else {
            return CompletableFuture.completedFuture(x);
        }
    }));
}
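The last two examples use handle(...) instead of exceptionally(...): one expected failure type is translated into a sentinel value, and any other failure is rethrown as a CompletionException so later stages in the chain still complete exceptionally. A minimal, self-contained sketch of the same shape with hypothetical names (fetchStatus, statusOrUnknown):

import java.util.NoSuchElementException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;

public class HandleSketch {
    enum Status { UNKNOWN, OPEN }

    // Hypothetical async lookup of a record's status.
    static CompletableFuture<Status> fetchStatus(String id) {
        return CompletableFuture.supplyAsync(() -> {
            if (id.isEmpty()) {
                throw new NoSuchElementException("no record " + id);
            }
            return Status.OPEN;
        });
    }

    // Map "record missing" to Status.UNKNOWN; rethrow any other failure so the
    // returned future still completes exceptionally.
    static CompletableFuture<Status> statusOrUnknown(String id) {
        return fetchStatus(id).handle((status, ex) -> {
            if (ex != null) {
                Throwable cause = (ex instanceof CompletionException && ex.getCause() != null) ? ex.getCause() : ex;
                if (cause instanceof NoSuchElementException) {
                    return Status.UNKNOWN;
                }
                throw new CompletionException(cause);
            }
            return status;
        });
    }

    public static void main(String[] args) {
        System.out.println(statusOrUnknown("txn-1").join()); // OPEN
        System.out.println(statusOrUnknown("").join());      // UNKNOWN
    }
}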