List of usage examples for java.util.concurrent.CancellationException
public CancellationException()
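The no-argument constructor creates a CancellationException without a detail message. The common pattern in the examples below is for a long-running task to poll a cancellation flag and throw the exception when cancellation has been requested. A minimal, self-contained sketch of that pattern (the CancellableWorker class and its field names are illustrative only, not taken from any of the projects listed here):

import java.util.concurrent.CancellationException;
import java.util.concurrent.atomic.AtomicBoolean;

public class CancellableWorker {

    private final AtomicBoolean cancelRequested = new AtomicBoolean(false);

    public void requestCancel() {
        cancelRequested.set(true);
    }

    /** Processes items, aborting with a CancellationException if a cancel was requested. */
    public int processAll(Iterable<String> items) {
        int processed = 0;
        for (String item : items) {
            if (cancelRequested.get()) {
                // Signal to the caller that the work was deliberately abandoned, not that it failed.
                throw new CancellationException();
            }
            // ... process the item ...
            processed++;
        }
        return processed;
    }
}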
From source file:org.gitools.ui.app.fileimport.wizard.text.CommandConvertAndLoadCsvFile.java
@Override
protected IResource loadResource(IProgressMonitor progressMonitor) {
    try {
        reader.setPreviewMode(false);

        if (reader.getFileHeaders().size() < 3) {
            throw new PersistenceException("At least 3 fields expected on one line.");
        }

        ReaderAssistant assistant = reader.getReaderAssistant();
        MatrixLayer[] layers = assistant.getHeatmapLayers();
        HashMatrix resultsMatrix = new HashMatrix(new MatrixLayers<>(layers), ROWS, COLUMNS);

        // Read the file body
        while (reader.readNext()) {
            if (progressMonitor.isCancelled()) {
                throw new CancellationException();
            }

            // Write the read line into the matrix
            assistant.fillMatrix(resultsMatrix);
        }

        reader.close();
        return resultsMatrix;
    } catch (Exception e) {
        throw new PersistenceException(e);
    } finally {
        IOUtils.closeQuietly(reader);
    }
}
From source file:com.diversityarrays.kdxplore.trialdesign.JobRunningTask.java
@Override
public Either<String, AlgorithmRunResult> generateResult(Closure<Void> arg0) throws Exception {
    AlgorithmRunResult result = new AlgorithmRunResult(algorithmName, algorithmFolder);

    ProcessBuilder pb = new ProcessBuilder(command);

    File tempAlgorithmOutputFile = new File(algorithmFolder, "stdout.txt");
    File tempAlgorithmErrorFile = new File(algorithmFolder, "stderr.txt");

    //pb.redirectErrorStream(true);
    tempAlgorithmErrorFile.createNewFile();
    tempAlgorithmOutputFile.createNewFile();
    pb.redirectOutput(tempAlgorithmOutputFile);
    pb.redirectError(tempAlgorithmErrorFile);

    Process process = pb.start();
    while (!process.waitFor(1000, TimeUnit.MILLISECONDS)) {
        if (backgroundRunner.isCancelRequested()) {
            process.destroy();
            throw new CancellationException();
        }
    }

    int exitCode = process.exitValue();
    if (exitCode != 0) {
        String errtxt = Algorithms.readContent("Error Output: (code=" + exitCode + ")",
                new FileInputStream(tempAlgorithmErrorFile));
        return Either.left(errtxt);
    }

    if (!kdxploreOutputFile.exists()) {
        return Either.left("Missing output file: " + kdxploreOutputFile.getPath());
    }

    result.addTrialEntries(kdxploreOutputFile, userTrialEntries);

    return Either.right(result);
}
From source file:org.springside.modules.utils.concurrent.type.BasicFuture.java
private T getResult() throws ExecutionException {
    if (this.ex != null) {
        throw new ExecutionException(this.ex);
    }
    if (cancelled) {
        throw new CancellationException();
    }
    return this.result;
}
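A Future implementation along these lines normally pairs getResult() with a cancel() method that sets the cancelled flag and wakes up blocked waiters, so that get() then throws CancellationException. A rough sketch of what such a counterpart could look like, assuming completed and cancelled boolean fields guarded by the object monitor (this is not the actual BasicFuture source):

public synchronized boolean cancel(boolean mayInterruptIfRunning) {
    if (this.completed) {
        // A result or exception was already delivered; cancellation comes too late.
        return false;
    }
    this.completed = true;
    this.cancelled = true;   // getResult() will now throw CancellationException
    notifyAll();             // wake up threads blocked waiting for the result
    return true;
}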
From source file:org.diorite.impl.scheduler.DioriteFuture.java
@Override
public synchronized T get(long timeout, final TimeUnit unit)
        throws InterruptedException, ExecutionException, TimeoutException {
    timeout = unit.toMillis(timeout);
    long period = this.getPeriod();
    long timestamp = (timeout > 0) ? System.currentTimeMillis() : 0;
    while (true) {
        if ((period == STATE_SINGLE) || (period == STATE_FUTURE)) {
            this.wait(timeout);
            period = this.getPeriod();
            if ((period == -STATE_SINGLE) || (period == STATE_FUTURE)) {
                if (timeout == 0) {
                    continue;
                }
                timeout += timestamp - (timestamp = System.currentTimeMillis());
                if (timeout > 0) {
                    continue;
                }
                throw new TimeoutException();
            }
        }
        if (period == -STATE_CANCEL) {
            throw new CancellationException();
        }
        if (period == STATE_FUTURE_DONE) {
            if (this.exception == null) {
                return this.value;
            }
            throw new ExecutionException(this.exception);
        }
        throw new IllegalStateException("Expected from -1 to -4, but got " + period);
    }
}
From source file:org.gitools.analysis.clustering.hierarchical.HierarchicalClusterer.java
public HierarchicalCluster cluster(IMatrix matrix, IMatrixLayer<Double> layer,
        IMatrixDimension clusterDimension, IMatrixDimension aggregationDimension,
        IProgressMonitor monitor) {

    Map<String, HierarchicalCluster> clusters = new HashMap<>(clusterDimension.size());
    SortedSet<ClusterPair> linkages = new ConcurrentSkipListSet<>();

    // Aggregate all the values to sort the clusters by weight
    monitor.begin("Aggregating values...", clusterDimension.size());
    final Map<String, Double> aggregation = new HashMap<>(clusterDimension.size());
    Set<String> allNullValues = new HashSet<>();
    IMatrixPosition position = matrix.newPosition();
    for (String id : position.iterate(clusterDimension)) {
        Double value = aggregator.aggregate(position.iterate(layer, aggregationDimension));

        if (value != null) {
            aggregation.put(id, value);
        } else {
            allNullValues.add(id);
        }
    }

    // First sort the clustering dimension to show the clusters ordered by weight at the end
    if (clusterDimension instanceof IMatrixViewDimension) {
        IMatrixViewDimension sortDimension = (IMatrixViewDimension) clusterDimension;
        sortDimension.sort(new Comparator<String>() {
            @Override
            public int compare(String o1, String o2) {
                return SortDirection.ASCENDING.compare(aggregation.get(o1), aggregation.get(o2));
            }
        });
    }

    // Calculate all the distances
    IMatrixPosition position1 = matrix.newPosition();
    IMatrixPosition position2 = matrix.newPosition();
    monitor.begin("Calculating distances...", clusterDimension.size());
    for (String id1 : position1.iterate(clusterDimension)) {

        // Check user cancel action
        monitor.worked(1);
        if (monitor.isCancelled()) {
            throw new CancellationException();
        }

        // Skip all null values
        if (allNullValues.contains(id1)) {
            continue;
        }

        HierarchicalCluster cluster1 = newCluster(clusters, id1);
        cluster1.setWeight(aggregation.get(id1));

        for (String id2 : position2.iterate(clusterDimension.from(id1))) {

            // Skip equal ids
            if (id1.equals(id2)) {
                continue;
            }

            // Skip all null columns
            if (allNullValues.contains(id2)) {
                continue;
            }

            Double distance = measure.compute(position1.iterate(layer, aggregationDimension),
                    position2.iterate(layer, aggregationDimension));

            HierarchicalCluster cluster2 = newCluster(clusters, id2);
            linkages.add(new ClusterPair(distance, cluster1, cluster2));
        }
    }

    // Create the clusters agglomerating nodes by the nearest distances
    HierarchyBuilder builder = new HierarchyBuilder(newHashSet(clusters.values()), linkages);
    builder.agglomerate(linkageStrategy, monitor, clusterDimension.size());

    // Set cluster names ordered by weight
    HierarchicalCluster root = builder.getRootCluster();
    root.setName("");
    Color color = nameClusters(root.getChildren(), 1);
    root.setColor(color.getRGB());
    root.setName("root");

    return root;
}
From source file:org.gitools.matrix.format.TdmMatrixFormat.java
@Override
protected IMatrix readResource(IResourceLocator resourceLocator, IProgressMonitor progressMonitor)
        throws PersistenceException {
    try {
        MTabixIndex index = readMtabixIndex(resourceLocator, progressMonitor);
        InputStream in = resourceLocator.openInputStream(progressMonitor);
        CSVReader parser = new CSVReader(new InputStreamReader(in));

        String[] header = parser.readNext();
        if (header.length < 3) {
            throw new PersistenceException("At least 3 fields expected on one line.");
        }

        MatrixLayer<Double>[] layers = new MatrixLayer[header.length - 2];
        for (int i = 2; i < header.length; i++) {
            layers[i - 2] = new MatrixLayer<>(header[i], Double.class);
        }

        if (index != null) {
            in.close();
            return new MTabixMatrix(index, new MatrixLayers<MatrixLayer>(layers), ROWS, COLUMNS);
        }

        // Load the whole matrix into memory
        HashMatrix resultsMatrix = new HashMatrix(new MatrixLayers<MatrixLayer>(layers), ROWS, COLUMNS);

        // Read the body
        String[] fields;
        while ((fields = parser.readNext()) != null) {
            if (progressMonitor.isCancelled()) {
                throw new CancellationException();
            }

            checkLine(fields, header, parser.getLineNumber());

            final String columnId = fields[0];
            final String rowId = fields[1];

            for (int i = 2; i < fields.length; i++) {
                Double value = DoubleTranslator.get().stringToValue(fields[i]);
                resultsMatrix.set(layers[i - 2], value, rowId, columnId);
            }
        }

        in.close();
        return resultsMatrix;
    } catch (Exception e) {
        throw new PersistenceException(e);
    }
}
From source file:com.verigreen.common.concurrency.timeboundedexecuter.TimeBoundedExecuter.java
private <T> T loopWhileStillNotFinished(final Action<T> actionDelegate, long timeBoundInMillis,
        Future<T> future, TimeBoundedPolicy policy) {
    int times = 1;
    while (true) {
        try {
            return future.get(timeBoundInMillis, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            // Was interrupted; cancel the execution.
            future.cancel(true);
            throw new CancellationException();
        } catch (ExecutionException e) {
            // Execution ended with an exception.
            _logger.error("Catastrophic failure when executing a TimeBoundedThread! Exception details: "
                    + e.toString(), e);
            throw new RuntimeException(e);
        } catch (TimeoutException e) {
            // Timed out
            reportAndActAccordingToTimeoutOption(actionDelegate, timeBoundInMillis, future, times, policy);
            times += 1;
        } catch (CancellationException e) {
            // Was cancelled
            throw e;
        }
    }
}
From source file:com.ning.http.client.providers.apache.ApacheResponseFuture.java
public boolean cancel(boolean mayInterruptIfRunning) {
    if (!cancelled.get() && innerFuture != null) {
        method.abort();
        try {
            asyncHandler.onThrowable(new CancellationException());
        } catch (Throwable t) {
            logger.debug("asyncHandler.onThrowable", t);
        }
        cancelled.set(true);
        if (reaperFuture != null) {
            reaperFuture.cancel(true);
        }
        super.done();
        return innerFuture.cancel(mayInterruptIfRunning);
    } else {
        super.done();
        return false;
    }
}
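On the consuming side, code that holds a cancelled Future observes the same condition as a CancellationException thrown from get(). The following sketch uses only the standard ExecutorService API to show that contract; it is not tied to the Apache provider above:

import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class CancellationExample {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newSingleThreadExecutor();

        // A long-running task; cancel(true) interrupts it.
        Future<String> future = executor.submit(() -> {
            Thread.sleep(10_000);
            return "done";
        });

        future.cancel(true);

        try {
            future.get();
        } catch (CancellationException e) {
            // get() throws CancellationException once the future has been cancelled
            System.out.println("Task was cancelled");
        } catch (ExecutionException e) {
            System.out.println("Task failed: " + e.getCause());
        } finally {
            executor.shutdownNow();
        }
    }
}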
From source file:cn.ctyun.amazonaws.services.s3.transfer.internal.UploadMonitor.java
/**
 * Polls for a result from a multipart upload and either returns it if
 * complete, or reschedules to poll again later if not.
 */
private UploadResult poll() throws InterruptedException {
    for (Future<PartETag> f : futures) {
        if (!f.isDone()) {
            reschedule();
            return null;
        }
    }

    for (Future<PartETag> f : futures) {
        if (f.isCancelled()) {
            throw new CancellationException();
        }
    }

    return completeMultipartUpload();
}
From source file:com.amazonaws.services.simpleworkflow.flow.worker.WorkflowClockImpl.java
void handleTimerCanceled(HistoryEvent event) {
    TimerCanceledEventAttributes attributes = event.getTimerCanceledEventAttributes();
    String timerId = attributes.getTimerId();
    if (decisions.handleTimerCanceled(event)) {
        OpenRequestInfo<?, ?> scheduled = scheduledTimers.remove(timerId);
        if (scheduled != null) {
            ExternalTaskCompletionHandle completionHandle = scheduled.getCompletionHandle();
            CancellationException exception = new CancellationException();
            completionHandle.fail(exception);
        }
    } else {
        log.debug("handleTimerCanceled not complete");
    }
}