List of usage examples for java.util.concurrent ExecutionException toString
public String toString()
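Each example below calls toString() on a caught java.util.concurrent.ExecutionException, usually while handling the result of Future.get(). Throwable.toString() returns the exception class name followed by its message, and because ExecutionException wraps the original failure, the message normally contains the cause's own toString(). A minimal, self-contained sketch of that pattern (the class name ExecutionExceptionToStringDemo and the IllegalStateException message are illustrative, not taken from the examples below):

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class ExecutionExceptionToStringDemo {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // A task that always fails; the thrown exception becomes the ExecutionException's cause.
        Callable<String> failingTask = () -> {
            throw new IllegalStateException("task failed");
        };
        Future<String> future = executor.submit(failingTask);
        try {
            future.get();
        } catch (ExecutionException e) {
            // Prints something like:
            // java.util.concurrent.ExecutionException: java.lang.IllegalStateException: task failed
            System.out.println(e.toString());
        } finally {
            executor.shutdown();
        }
    }
}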
From source file:com.streamsets.pipeline.stage.destination.hdfs.writer.DefaultFsHelper.java
@Override
public Path getPath(FileSystem fs, Date recordDate, Record record) throws StageException, IOException {
    // runUuid is fixed for the current pipeline run. It avoids collisions with other SDCs running the
    // same/similar pipeline
    try {
        return dirPathCache.get(recordWriterManager.getDirPath(recordDate, record));
    } catch (ExecutionException ex) {
        if (ex.getCause() instanceof StageException) {
            throw (StageException) ex.getCause();
        } else {
            throw new StageException(Errors.HADOOPFS_24, ex.toString(), ex);
        }
    }
}
From source file:it.anyplace.sync.discovery.utils.AddressRanker.java
private void testAndRankAndWait() throws InterruptedException {
    logger.trace("testing and ranking peer addresses");
    List<Future<DeviceAddress>> futures = Lists.newArrayList();
    for (final DeviceAddress deviceAddress : preprocessDeviceAddresses(sourceAddresses)) {
        futures.add(executorService.submit(new Callable<DeviceAddress>() {
            @Override
            public DeviceAddress call() {
                return testAndRank(deviceAddress);
            }
        }));
    }
    for (Future<DeviceAddress> future : futures) {
        try {
            DeviceAddress deviceAddress = future.get(TCP_CONNECTION_TIMEOUT * 2, TimeUnit.MILLISECONDS);
            if (deviceAddress != null) {
                targetAddresses.add(deviceAddress);
            }
        } catch (ExecutionException ex) {
            throw new RuntimeException(ex);
        } catch (TimeoutException ex) {
            logger.warn("test address timeout : {}", ex.toString());
        }
    }
    Collections.sort(targetAddresses, Ordering.natural().onResultOf(new Function<DeviceAddress, Comparable>() {
        @Override
        public Comparable apply(DeviceAddress a) {
            return a.getScore();
        }
    }));
}
From source file:com.streamsets.pipeline.stage.processor.kv.redis.RedisLookupProcessor.java
private void doBatchLookup(Batch batch, BatchMaker batchMaker) throws StageException {
    List<Map<String, Pair<String, DataType>>> mapList = getMap(batch);
    Set<Pair<String, DataType>> keys = getKeys(mapList);
    Iterator<Record> records;
    Map<Pair<String, DataType>, LookupValue> values;

    try {
        values = cache.getAll(keys);
    } catch (ExecutionException e) {
        LOG.error("Failed to fetch values from cache: {}", e.toString(), e);
        // Send whole batch to error
        records = batch.getRecords();
        while (records.hasNext()) {
            Record record = records.next();
            error.onError(new OnRecordErrorException(record, Errors.LOOKUP_02, e.toString()));
        }
        return;
    }

    records = batch.getRecords();
    Record record;
    int index = 0;
    Map<String, Pair<String, DataType>> map;
    while (records.hasNext()) {
        record = records.next();
        map = mapList.get(index);
        // Now we have to get the key for each key configuration
        for (RedisLookupParameterConfig parameters : conf.lookups) {
            Pair<String, DataType> key = map.get(parameters.outputFieldPath);
            LookupValue value = values.get(key);
            updateRecord(value, parameters.outputFieldPath, record);
        }
        batchMaker.addRecord(record);
        index++;
    }
}
From source file:com.googlecode.concurrentlinkedhashmap.MultiThreadedTest.java
private void executeWithTimeOut(ConcurrentLinkedHashMap<?, ?> map, Callable<Long> task) {
    ExecutorService es = Executors.newSingleThreadExecutor();
    Future<Long> future = es.submit(task);
    try {
        long timeNS = future.get(timeOut, SECONDS);
        debug("\nExecuted in %d second(s)", NANOSECONDS.toSeconds(timeNS));
        assertThat(map, is(valid()));
    } catch (ExecutionException e) {
        fail("Exception during test: " + e.toString(), e);
    } catch (TimeoutException e) {
        handleTimout(map, es, e);
    } catch (InterruptedException e) {
        fail("", e);
    }
}
From source file:com.streamsets.datacollector.stagelibrary.ClassLoaderStageLibraryTask.java
@Override
public List<StageDefinition> getStages() {
    try {
        return (LocaleInContext.get() == null) ? stageList : localizedStageList.get(LocaleInContext.get());
    } catch (ExecutionException ex) {
        LOG.warn("Error loading locale '{}', {}", LocaleInContext.get(), ex.toString(), ex);
        return stageList;
    }
}
From source file:fr.inria.lille.repair.nopol.NoPol.java
/**
 * Method used as a proxy for runNopolProcessor to handle timeout
 */
private List<Patch> executeNopolProcessor(final List<TestResult> tests, final SourceLocation sourceLocation,
        final SpoonedClass spoonCl, final NopolProcessor nopolProcessor) {
    final ExecutorService executor = Executors.newSingleThreadExecutor();
    final Future nopolExecution = executor.submit(new Callable() {
        @Override
        public Object call() throws Exception {
            return runNopolProcessor(tests, sourceLocation, spoonCl, nopolProcessor);
        }
    });
    try {
        executor.shutdown();
        return (List) nopolExecution.get(nopolContext.getMaxTimeEachTypeOfFixInMinutes(), TimeUnit.MINUTES);
    } catch (ExecutionException exception) {
        LoggerFactory.getLogger(this.getClass()).error("Error ExecutionException " + exception.toString());
        return Collections.emptyList();
    } catch (InterruptedException exception) {
        LoggerFactory.getLogger(this.getClass()).error("Repair interrupted");
        return Collections.emptyList();
    } catch (TimeoutException exception) {
        LoggerFactory.getLogger(this.getClass()).error("Timeout: execution time > "
                + nopolContext.getMaxTimeEachTypeOfFixInMinutes() + " " + TimeUnit.MINUTES, exception);
        return Collections.emptyList();
    }
}
From source file:at.itbh.bev.rest.client.BevRestClient.java
/**
 * Query the ReST endpoint using the command line arguments
 *
 * @param args
 *            the command line arguments
 */
public void query(String[] args) {
    BevQueryExecutor executor = null;
    ResteasyClientBuilder clientBuilder = new ResteasyClientBuilder();
    try {
        // parse the command line arguments
        CommandLine line = parser.parse(options, args);

        int threadPoolSize = 1;
        if (line.hasOption("t")) {
            threadPoolSize = Integer.parseInt(line.getOptionValue("t"));
        }

        String postalCode = null;
        String place = null;
        String addressLine = null;
        String houseId = null;
        String radius = null;
        String longitude = null;
        String latitude = null;
        String separator = ";";
        boolean enforceUnique = false;

        if (line.hasOption("z")) {
            postalCode = line.getOptionValue("z");
        }
        if (line.hasOption("p")) {
            place = line.getOptionValue("p");
        }
        if (line.hasOption("a")) {
            addressLine = line.getOptionValue("a");
        }
        if (line.hasOption("i")) {
            houseId = line.getOptionValue("i");
        }
        if (line.hasOption("radius")) {
            radius = line.getOptionValue("radius");
        }
        if (line.hasOption("longitude")) {
            longitude = line.getOptionValue("longitude");
        }
        if (line.hasOption("latitude")) {
            latitude = line.getOptionValue("latitude");
        }
        if (line.hasOption("s")) {
            separator = line.getOptionValue("s");
        }
        if (line.hasOption("h")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("java -jar BevRestClient.jar", options, true);
            System.exit(0);
        }
        if (line.hasOption("u")) {
            enforceUnique = true;
        }
        if (line.hasOption("disable-certificate-validation")) {
            clientBuilder.disableTrustManager();
        }
        if (!line.hasOption("proxy-port") && line.hasOption("proxy-host")) {
            throw new ParseException(
                    "The option --proxy-host is only allowed in combination with the option --proxy-port.");
        }
        if (line.hasOption("proxy-port") && !line.hasOption("proxy-host")) {
            throw new ParseException(
                    "The option --proxy-port is only allowed in combination with the option --proxy-host.");
        }
        if (line.hasOption("proxy-host") && line.hasOption("proxy-port")) {
            clientBuilder.defaultProxy(line.getOptionValue("proxy-host"),
                    Integer.parseInt(line.getOptionValue("proxy-port")));
        }

        OutputStreamWriter output;
        if (line.hasOption("o")) {
            output = new OutputStreamWriter(new FileOutputStream(line.getOptionValue("o")));
        } else {
            output = new OutputStreamWriter(System.out);
        }

        // avoid concurrent access exceptions in the Apache http client
        clientBuilder.connectionPoolSize(threadPoolSize);
        executor = new BevQueryExecutor(clientBuilder.build(), line.getOptionValue("r"), threadPoolSize);

        CsvPreference csvPreference = new CsvPreference.Builder('"',
                Objects.toString(line.getOptionValue("s"), ";").toCharArray()[0],
                System.getProperty("line.separator")).build();
        csvWriter = new CsvMapWriter(output, csvPreference, true);

        if (line.hasOption("b")) {
            ICsvMapReader mapReader = null;
            try {
                FileReader fileReader = new FileReader(line.getOptionValue("b"));
                mapReader = new CsvMapReader(fileReader, csvPreference);

                // calculate the output header (field names)
                final String[] header = mapReader.getHeader(true);
                ArrayList<String> tempFields = new ArrayList<>(Arrays.asList(defaultFieldNames));
                for (int i = 0; i < header.length; i++) {
                    if (!tempFields.contains(header[i])) {
                        tempFields.add(header[i]);
                    }
                }
                fieldNames = tempFields.toArray(new String[] {});

                Map<String, String> inputData;
                List<Future<List<BevQueryResult>>> queryResults = new ArrayList<>();
                while ((inputData = mapReader.read(header)) != null) {
                    queryResults.add(executor.query(inputData.get(INPUT_POSTAL_CODE), inputData.get(INPUT_PLACE),
                            inputData.get(INPUT_ADDRESS_LINE), inputData.get(INPUT_HOUSE_ID),
                            inputData.get(INPUT_LONGITUDE), inputData.get(INPUT_LATITUDE),
                            inputData.get(INPUT_RADIUS),
                            inputData.get(INPUT_ENFORCE_UNIQUE) == null ? false
                                    : Boolean.parseBoolean(inputData.get(INPUT_ENFORCE_UNIQUE)),
                            inputData));
                }

                csvWriter.writeHeader(fieldNames);
                for (Future<List<BevQueryResult>> queryResult : queryResults) {
                    List<BevQueryResult> results = queryResult.get();
                    outputResults(separator, results);
                }
            } finally {
                if (mapReader != null) {
                    mapReader.close();
                }
            }
        } else {
            fieldNames = defaultFieldNames;
            Map<String, String> inputData = new HashMap<String, String>();
            Future<List<BevQueryResult>> queryResult = executor.query(postalCode, place, addressLine, houseId,
                    longitude, latitude, radius, enforceUnique, inputData);
            try {
                List<BevQueryResult> results = queryResult.get();
                if (enforceUnique && results.size() == 1) {
                    if (!results.get(0).getFoundMatch())
                        throw new Exception("No unique result found.");
                }
                outputResults(separator, results);
            } catch (ExecutionException e) {
                throw e.getCause();
            }
        }
    } catch (ParseException exp) {
        System.out.println(exp.getMessage());
        System.out.println();
        // automatically generate the help statement
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar BevRestClient.jar", options, true);
        System.exit(-2);
    } catch (BevRestException e) {
        System.err.println(e.toString());
        System.exit(e.getErrorCode());
    } catch (JsonProcessingException e) {
        System.err.println(e.toString());
        System.exit(-3);
    } catch (IOException e) {
        System.err.println(e.toString());
        System.exit(-4);
    } catch (Throwable t) {
        System.err.println(t.toString());
        t.printStackTrace();
        System.exit(-1);
    } finally {
        if (csvWriter != null) {
            try {
                csvWriter.close();
            } catch (IOException e) {
                e.printStackTrace();
                System.exit(-1);
            }
        }
        if (executor != null) {
            executor.dispose();
        }
    }
}
From source file:com.dilmus.dilshad.scabi.core.async.DComputeAsyncRun.java
void get() {
    HttpResponse httpResponse = null;
    String result = null;
    if (m_futureHttpResponse != null) {
        try {
            httpResponse = DComputeNoBlock.get(m_futureHttpResponse);
            result = DComputeNoBlock.getResult(httpResponse);
            log.debug("get() result : {}", result);
            m_computeNB.decCountRequests();
            synchronized (m_config) {
                m_config.setResult(m_SU, result);
            }
            m_futureHttpResponse = null;
        } catch (ExecutionException e) {
            if (e.getCause() != null) {
                if (e.getCause() instanceof ConnectException
                        || e.getCause() instanceof ConnectionClosedException
                        || e.getCause() instanceof IllegalStateException
                        || e.getCause() instanceof SocketException
                        || e.getCause() instanceof NoHttpResponseException) {
                    log.debug("get() e.getCause() : {}", e.getCause().toString());
                    log.debug("get() Exception : {}", e.toString());
                    // computeNB is faulty only in the case of Network Exception/ConnectException
                    m_computeNB.setFaulty(true);
                    result = DMJson.error(DMUtil.clientErrMsg(e));
                    // m_isError is set only in the case of a network exception;
                    // only in this case is a retry attempted, if maxRetry > 0 is set by the user
                    m_isError = true;
                    m_computeNB.decCountRequests();
                    synchronized (m_config) {
                        m_config.appendResult(m_SU, result);
                    }
                    m_futureHttpResponse = null;
                } else {
                    log.debug("get() ExecutionException : {}", e.toString());
                    // m_computeNB is faulty only in the case of ClientProtocolException/NetworkException,
                    // which is already handled above
                    // m_computeNB.setFaulty(true);
                    log.debug("run() m_SU : {}", m_SU);
                    String errorJson = DMJson.error(DMUtil.clientErrMsg(e));
                    m_computeNB.decCountRequests();
                    synchronized (m_config) {
                        m_config.appendResult(m_SU, errorJson);
                    }
                    m_futureHttpResponse = null;
                } // End if
            } else {
                log.debug("get() ExecutionException : {}", e.toString());
                // m_computeNB is faulty only in the case of ClientProtocolException/NetworkException,
                // which is already handled above
                // m_computeNB.setFaulty(true);
                log.debug("run() m_SU : {}", m_SU);
                String errorJson = DMJson.error(DMUtil.clientErrMsg(e));
                m_computeNB.decCountRequests();
                synchronized (m_config) {
                    m_config.appendResult(m_SU, errorJson);
                }
                m_futureHttpResponse = null;
            } // End if
        } catch (InterruptedException | TimeoutException | ParseException | IOException e) {
            //e.printStackTrace();
            log.debug("get() Exception : {}", e.toString());
            // m_computeNB is faulty only in the case of ClientProtocolException/NetworkException,
            // which is already handled above
            // m_computeNB.setFaulty(true);
            log.debug("run() m_SU : {}", m_SU);
            result = DMJson.error(DMUtil.clientErrMsg(e));
            m_computeNB.decCountRequests();
            synchronized (m_config) {
                m_config.appendResult(m_SU, result);
            }
            m_futureHttpResponse = null;
        } // try-catch
    } else {
        //log.debug("get() Inside else block (null == futureHttpResponse)");
        result = DMJson.error(
                "DComputeAsyncRun:get() Client Side Issue : get() crun futureHttpResponse is null. Split no. : "
                        + m_SU);
        m_computeNB.decCountRequests();
        synchronized (m_config) {
            //if (false == config.isResultSet(crun.getSU()))
            m_config.appendResult(m_SU, result);
        }
        // if futureHttpResponse is null, an exception should have been caught in crun.run() above
        // TODO check if setError() on crun here is needed to enable retry for this crun
    } // End if

    m_isDone = true;
    m_isRetrySubmitted = false;
    if (false == m_isRunOnce) {
        m_isRunOnce = true;
    }
}