List of usage examples for the java.util.concurrent.FutureTask constructor
public FutureTask(Callable<V> callable)
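Before the longer real-world examples below, here is a minimal, self-contained sketch of the constructor on its own; the class name and the computed value are illustrative only, not taken from any of the projects listed here.

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;

public class FutureTaskBasics {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        // Wrap a Callable in a FutureTask; the task computes a value when run.
        FutureTask<Integer> task = new FutureTask<Integer>(new Callable<Integer>() {
            public Integer call() {
                return 6 * 7;
            }
        });

        // FutureTask implements Runnable, so it can be handed to a Thread
        // (or to an ExecutorService, as most of the examples below do).
        new Thread(task).start();

        // get() blocks until call() has completed and returns its result.
        System.out.println("Result: " + task.get());
    }
}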
From source file:uk.ac.kcl.tika.parsers.TesseractOCRParser.java
/**
 * Run external tesseract-ocr process.
 *
 * @param input
 *          File to be ocred
 * @param output
 *          File to collect ocr result
 * @param config
 *          Configuration of tesseract-ocr engine
 * @throws TikaException
 *           if the extraction timed out
 * @throws IOException
 *           if an input error occurred
 */
private void doOCR(File input, File output, TesseractOCRConfig config) throws IOException, TikaException {
    String[] cmd = { config.getTesseractPath() + getTesseractProg(), input.getPath(), output.getPath(),
            "-l", config.getLanguage(), "-psm", config.getPageSegMode() };

    ProcessBuilder pb = new ProcessBuilder(cmd);
    setEnv(config, pb);
    final Process process = pb.start();

    process.getOutputStream().close();
    InputStream out = process.getInputStream();
    InputStream err = process.getErrorStream();

    logStream("OCR MSG", out, input);
    logStream("OCR ERROR", err, input);

    FutureTask<Integer> waitTask = new FutureTask<Integer>(new Callable<Integer>() {
        public Integer call() throws Exception {
            return process.waitFor();
        }
    });

    Thread waitThread = new Thread(waitTask);
    waitThread.start();

    try {
        waitTask.get(config.getTimeout(), TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        waitThread.interrupt();
        process.destroy();
        Thread.currentThread().interrupt();
        throw new TikaException("TesseractOCRParser interrupted", e);
    } catch (ExecutionException e) {
        // should not be thrown
    } catch (TimeoutException e) {
        waitThread.interrupt();
        process.destroy();
        throw new TikaException("TesseractOCRParser timeout", e);
    }
}
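This example uses the FutureTask purely to put a timeout around Process.waitFor(): the blocking wait runs on its own thread, waitTask.get(config.getTimeout(), TimeUnit.SECONDS) bounds how long the parser waits, and on timeout or interruption the external tesseract process is destroyed.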
From source file:com.test.test.ClientServerTest.java
@Test
public void testMutliThreadProxyClient() throws Exception {
    // Number of threads
    final int size = 20;
    LOG.debug("clientSimple1:" + clientSimple);

    List<IServiceSimple> serviceSimpleList = new ArrayList<IServiceSimple>();
    for (int i = 0; i < size; i++) {
        IServiceSimple proxyService = clientSimple.getProxy(IServiceSimple.class);
        LOG.debug("proxyService:" + proxyService);
        serviceSimpleList.add(proxyService);
    }

    List<ClientCallable> clientCallableList = new ArrayList<ClientCallable>();
    for (int i = 0; i < size; i++) {
        clientCallableList.add(new ClientCallable(serviceSimpleList.get(i), i));
    }

    List<FutureTask<String>> futureTaskList = new ArrayList<FutureTask<String>>();
    for (ClientCallable clientCallable : clientCallableList) {
        futureTaskList.add(new FutureTask<String>(clientCallable));
    }

    long beginTime = System.currentTimeMillis();
    ExecutorService executor = Executors.newFixedThreadPool(futureTaskList.size());
    for (FutureTask<String> futureTask : futureTaskList) {
        executor.execute(futureTask);
    }

    boolean ready = false;
    int[] dones = new int[futureTaskList.size()];
    String[] writes = new String[futureTaskList.size()];
    int indexValue = 0;
    while (!ready) {
        int count = 0;
        indexValue = 0;
        for (FutureTask<String> futureTask : futureTaskList) {
            if (futureTask.isDone() & dones[indexValue] == 0) {
                writes[indexValue] = futureTask.get();
                dones[indexValue] = 1;
            }
            indexValue++;
        }
        for (int k = 0; k < dones.length; k++) {
            if (dones[k] == 1) {
                count++;
            }
        }
        if (count == futureTaskList.size()) {
            ready = true;
        }
        // Thread.sleep(500);
    }

    LOG.debug("\n\n\n ====== DONE ====== ");
    LOG.debug(" time:" + (System.currentTimeMillis() - beginTime) + "ms\n\n");
    executor.shutdown();

    for (int i = 0; i < writes.length; i++) {
        LOG.debug("- " + writes[i]);
    }
    LOG.debug("\n\n\n ====== DONE ====== \n\n");

    Thread.sleep(20000);

    LOG.debug("\n\n\n\n+++++++++++++++++++++++++");
    LOG.debug("New system:");
    IServiceSimple proxyService2 = clientSimple.getProxy(IServiceSimple.class);
    proxyService2.functionNumber1("1", "1");
}
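Here each FutureTask wraps a ClientCallable and is executed on a fixed thread pool sized to the number of tasks; completion is detected by polling isDone() in a loop and collecting each result with get() once that task reports done.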
From source file:org.sakaiproject.tool.impl.SessionComponentRegressionTest.java
/**
 * Verifies that a session created as a side-effect of
 * {@link SessionComponent#getCurrentSession()} is available in a
 * thread-scoped cache, i.e. that a call to {@link SessionComponent#getCurrentSession()}
 * on a different, sessionless thread receives a different session. This is
 * distinct from {@link #testGetCurrentSessionLazilyCreatesTransientSession()},
 * which is concerned with whether or not lazily created "current" sessions
 * are findable by ID (they are <em>not</em>).
 *
 * @throws TimeoutException
 * @throws ExecutionException
 * @throws InterruptedException
 */
public void testGetCurrentSessionCachesLazilyCreatedThreadScopedSession()
        throws InterruptedException, ExecutionException, TimeoutException {
    expectLazyCurrentSessionCreation("1234546789");
    final Session session = sessionComponent.getCurrentSession();
    assertNotNull("Should have allocated a new session", session);

    // Since we control the return value of the "gets" on threadLocalManager,
    // the important bit here is that the "get" expectation defined immediately
    // below is satisfied, much less so the "sameness" assertion following that.
    // The same basic point holds for the asynch further on down.
    checking(new Expectations() {
        {
            one(threadLocalManager).get(with(equal(SessionComponent.CURRENT_SESSION)));
            will(returnValue(session));
        }
    });
    assertSame("A thread should always receive the same \"current\" session", session,
            sessionComponent.getCurrentSession());

    // other threads should get different "current" Session objects
    final SessionHolder sessionHolder = new SessionHolder();
    expectLazyCurrentSessionCreation(sessionHolder, "987654321");
    FutureTask<Session> asynchGet = new FutureTask<Session>(new Callable<Session>() {
        public Session call() {
            return sessionComponent.getCurrentSession();
        }
    });
    new Thread(asynchGet).start();
    assertNotSame("Should have allocated a different \"current\" session for other thread", session,
            asynchGet.get(1, TimeUnit.SECONDS));
}
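The test runs getCurrentSession() on a second thread by starting a Thread on the FutureTask, then calls asynchGet.get(1, TimeUnit.SECONDS) so a hung lookup fails the test with a TimeoutException rather than blocking it forever.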
From source file:org.apache.kylin.provision.BuildCubeWithStream.java
public void build() throws Exception {
    clearSegment(cubeName);

    new Thread(new Runnable() {
        @Override
        public void run() {
            SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
            f.setTimeZone(TimeZone.getTimeZone("GMT"));
            long dateStart = 0;
            try {
                dateStart = f.parse("2012-01-01").getTime();
            } catch (ParseException e) {
            }
            Random rand = new Random();
            while (generateData == true) {
                long dateEnd = dateStart + 7 * 24 * 3600000;
                try {
                    generateStreamData(dateStart, dateEnd, rand.nextInt(100));
                    dateStart = dateEnd;
                    sleep(rand.nextInt(rand.nextInt(30)) * 1000); // wait random time
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            generateDataDone = true;
        }
    }).start();

    ExecutorService executorService = Executors.newCachedThreadPool();
    List<FutureTask<ExecutableState>> futures = Lists.newArrayList();
    for (int i = 0; i < BUILD_ROUND; i++) {
        if (i == (BUILD_ROUND - 1)) {
            // stop generating message to kafka
            generateData = false;
            int waittime = 0;
            while (generateDataDone == false && waittime < 100) {
                Thread.sleep(1000);
                waittime++;
            }
            if (generateDataDone == false) {
                // ensure all messages have been flushed.
                throw new IllegalStateException("Timeout when wait all messages be sent to Kafka");
            }
        } else {
            Thread.sleep(30 * 1000); // wait for new messages
        }

        FutureTask futureTask = new FutureTask(new Callable<ExecutableState>() {
            @Override
            public ExecutableState call() {
                ExecutableState result = null;
                try {
                    result = buildSegment(cubeName, 0, Long.MAX_VALUE);
                } catch (Exception e) {
                    // previous build hasn't been started, or other case.
                    e.printStackTrace();
                }
                return result;
            }
        });

        executorService.submit(futureTask);
        futures.add(futureTask);
    }

    generateData = false;
    executorService.shutdown();

    int succeedBuild = 0;
    for (int i = 0; i < futures.size(); i++) {
        ExecutableState result = futures.get(i).get(20, TimeUnit.MINUTES);
        logger.info("Checking building task " + i + " whose state is " + result);
        Assert.assertTrue(
                result == null || result == ExecutableState.SUCCEED || result == ExecutableState.DISCARDED);
        if (result == ExecutableState.SUCCEED)
            succeedBuild++;
    }
    logger.info(succeedBuild + " build jobs have been successfully completed.");

    List<CubeSegment> segments = cubeManager.getCube(cubeName).getSegments(SegmentStatusEnum.READY);
    Assert.assertTrue(segments.size() == succeedBuild);

    if (fastBuildMode == false) {
        long endOffset = segments.get(segments.size() - 1).getSourceOffsetEnd();
        // merge
        ExecutableState result = mergeSegment(cubeName, 0, endOffset);
        Assert.assertTrue(result == ExecutableState.SUCCEED);

        segments = cubeManager.getCube(cubeName).getSegments();
        Assert.assertTrue(segments.size() == 1);

        CubeSegment toRefreshSeg = segments.get(0);
        refreshSegment(cubeName, toRefreshSeg.getSourceOffsetStart(), toRefreshSeg.getSourceOffsetEnd());

        segments = cubeManager.getCube(cubeName).getSegments();
        Assert.assertTrue(segments.size() == 1);
    }

    logger.info("Build is done");
}
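Each build round is wrapped in a FutureTask around a Callable<ExecutableState> and submitted to a cached thread pool; the tasks are also kept in a list so the test can later call get(20, TimeUnit.MINUTES) on each one and assert on the resulting job state.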
From source file:org.apache.axis2.jaxws.server.dispatcher.ProviderDispatcher.java
public void invokeOneWay(MessageContext request) {
    if (log.isDebugEnabled()) {
        log.debug("Preparing to invoke javax.xml.ws.Provider based endpoint");
        log.debug("Invocation pattern: one way");
    }

    initialize(request);

    providerInstance = getProviderInstance();

    Object param = createRequestParameters(request);

    if (log.isDebugEnabled()) {
        Class providerType = getProviderType();
        final Object input = providerType.cast(param);
        log.debug("Invoking Provider<" + providerType.getName() + ">");
        if (input != null) {
            log.debug("Parameter type: " + input.getClass().getName());
        } else {
            log.debug("Parameter is NULL");
        }
    }

    ExecutorFactory ef = (ExecutorFactory) FactoryRegistry.getFactory(ExecutorFactory.class);
    Executor executor = ef.getExecutorInstance(ExecutorFactory.SERVER_EXECUTOR);

    // If the property has been set to disable thread switching, then we can
    // do so by using a SingleThreadedExecutor instance to continue processing
    // work on the existing thread.
    Boolean disable = (Boolean) request.getProperty(ServerConstants.SERVER_DISABLE_THREAD_SWITCH);
    if (disable != null && disable.booleanValue()) {
        if (log.isDebugEnabled()) {
            log.debug("Server side thread switch disabled. Setting Executor to the SingleThreadedExecutor.");
        }
        executor = new SingleThreadedExecutor();
    }

    Method m = getJavaMethod();
    Object[] params = new Object[] { param };

    EndpointInvocationContext eic = (EndpointInvocationContext) request.getInvocationContext();
    ClassLoader cl = Thread.currentThread().getContextClassLoader();

    AsyncInvocationWorker worker = new AsyncInvocationWorker(m, params, cl, eic);
    FutureTask task = new FutureTask<AsyncInvocationWorker>(worker);
    executor.execute(task);

    return;
}
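For this one-way invocation the FutureTask wrapping the AsyncInvocationWorker is simply handed to the executor and the method returns immediately; nothing ever calls get(), since no response is owed to the client.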
From source file:org.alfresco.repo.rendition.executer.AbstractTransformationRenderingEngine.java
@Override
protected void render(RenderingContext context) {
    ContentReader contentReader = context.makeContentReader();
    // There will have been an exception if there is no content data so contentReader is not null.
    String sourceUrl = contentReader.getContentUrl();
    String sourceMimeType = contentReader.getMimetype();
    String targetMimeType = getTargetMimeType(context);

    // The child NodeRef gets created here
    TransformationOptions options = getTransformOptions(context);

    // Log the following getTransform() as trace so we can see the wood for the trees
    ContentTransformer transformer;
    boolean orig = TransformerDebug.setDebugOutput(false);
    try {
        transformer = this.contentService.getTransformer(sourceUrl, sourceMimeType, contentReader.getSize(),
                targetMimeType, options);
    } finally {
        TransformerDebug.setDebugOutput(orig);
    }

    if (null == transformer) {
        // There's no transformer available for the requested rendition!
        throw new RenditionServiceException(
                String.format(TRANSFORMER_NOT_EXISTS_MESSAGE_PATTERN, sourceMimeType, targetMimeType));
    }

    if (!transformer.isTransformable(sourceMimeType, contentReader.getSize(), targetMimeType, options)) {
        throw new RenditionServiceException(
                String.format(NOT_TRANSFORMABLE_MESSAGE_PATTERN, sourceMimeType, targetMimeType));
    }

    long startTime = new Date().getTime();
    boolean actionCancelled = false;
    boolean actionCompleted = false;

    // Cache the execution summary to get details later
    ExecutionSummary executionSummary = null;
    try {
        executionSummary = getExecutionSummary(context);
    } catch (ActionServiceException e) {
        if (logger.isInfoEnabled()) {
            logger.info("Cancelling of multiple concurrent action instances "
                    + "currently unsupported, this action can't be cancelled");
        }
    }

    // Call the transform in a different thread so we can move on if cancelled
    FutureTask<ContentWriter> transformTask = new FutureTask<ContentWriter>(new TransformationCallable(
            contentReader, targetMimeType, options, context, AuthenticationUtil.getFullyAuthenticatedUser()));
    getExecutorService().execute(transformTask);

    // Start checking for cancellation or timeout
    while (true) {
        try {
            Thread.sleep(CANCELLED_ACTION_POLLING_INTERVAL);
            if (transformTask.isDone()) {
                actionCompleted = true;
                break;
            }
            // Check timeout in case transformer doesn't obey it
            if (options.getTimeoutMs() > 0 && new Date().getTime() - startTime
                    > (options.getTimeoutMs() + CANCELLED_ACTION_POLLING_INTERVAL)) {
                // We hit a timeout, let the transform thread continue but results will be ignored
                if (logger.isDebugEnabled()) {
                    logger.debug("Transformation did not obey timeout limit, "
                            + "rendition action is moving on");
                }
                break;
            }
            if (executionSummary != null) {
                ExecutionDetails executionDetails = actionTrackingService.getExecutionDetails(executionSummary);
                if (executionDetails != null) {
                    actionCancelled = executionDetails.isCancelRequested();
                    if (actionCancelled) {
                        if (logger.isDebugEnabled()) {
                            logger.debug("Cancelling transformation");
                        }
                        transformTask.cancel(true);
                        break;
                    }
                }
            }
        } catch (InterruptedException e) {
            // entire thread was asked to stop
            actionCancelled = true;
            transformTask.cancel(true);
            break;
        }
    }

    if (actionCancelled) {
        throw new RenditionCancelledException("Rendition action cancelled");
    }

    if (!actionCompleted && !actionCancelled) {
        throw new RenditionServiceException("Transformation failed to obey timeout limit");
    }

    if (actionCompleted) {
        // Copy content from temp writer to real writer
        ContentWriter writer = context.makeContentWriter();
        try {
            // We should not need another timeout here, things should be ready for us
            ContentWriter tempTarget = transformTask.get();
            if (tempTarget == null) {
                // We should never be in this state, but just in case
                throw new RenditionServiceException("Target of transformation not present");
            }
            writer.putContent(tempTarget.getReader().getContentInputStream());
        } catch (ExecutionException e) {
            // Unwrap our cause and throw that
            Throwable transformException = e.getCause();
            if (transformException instanceof RuntimeException) {
                throw (RuntimeException) e.getCause();
            }
            throw new RenditionServiceException(TRANSFORMING_ERROR_MESSAGE + e.getCause().getMessage(),
                    e.getCause());
        } catch (InterruptedException e) {
            // We were asked to stop
            transformTask.cancel(true);
        }
    }
}
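Rather than blocking in get(), this rendering engine polls transformTask.isDone() on an interval so it can honour cancellation requests and its own timeout, calling transformTask.cancel(true) when either occurs, and only fetches the result with get() once the task is known to be done. A stripped-down, self-contained version of that poll-and-cancel loop looks roughly like the sketch below; the task body and the timings are placeholders, not the rendition engine's real values.

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;

public class PollAndCancel {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        FutureTask<String> work = new FutureTask<String>(new Callable<String>() {
            public String call() throws Exception {
                Thread.sleep(2000); // pretend to be a slow transformation
                return "transformed";
            }
        });
        executor.execute(work);

        long deadline = System.currentTimeMillis() + 500; // deliberately short timeout
        while (!work.isDone()) {
            Thread.sleep(100); // polling interval
            if (System.currentTimeMillis() > deadline) {
                work.cancel(true); // interrupts the sleeping task
                break;
            }
        }

        if (work.isDone() && !work.isCancelled()) {
            System.out.println(work.get());
        } else {
            System.out.println("gave up waiting");
        }
        executor.shutdown();
    }
}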
From source file:com.intuit.tank.perfManager.workLoads.JobManager.java
private FutureTask<AgentData> sendCommand(final AgentData agent, final WatsAgentCommand cmd,
        final boolean retry) {
    FutureTask<AgentData> future = new FutureTask<AgentData>(new Callable<AgentData>() {
        public AgentData call() {
            int retries = retry ? MAX_RETRIES : 0;
            String url = agent.getInstanceUrl() + cmd.getPath();
            while (retries >= 0) {
                retries--;
                try {
                    LOG.info("Sending command " + cmd + "to url " + url);
                    new URL(url).getContent();
                    break;
                } catch (Exception e) {
                    LOG.error("Error sending command " + cmd.name() + ": " + e, e);
                    // look up public ip
                    if (!config.getStandalone()) {
                        AmazonInstance amazonInstance = new AmazonInstance(null, agent.getRegion());
                        String dns = amazonInstance.findPublicName(agent.getInstanceId());
                        if (dns != null) {
                            url = "http://" + dns + ":" + new TankConfig().getAgentConfig().getAgentPort();
                        }
                    }
                    if (retries >= 0) {
                        try {
                            Thread.sleep(RETRY_SLEEP);
                        } catch (InterruptedException e1) {
                            LOG.error("interrupted: " + e1);
                        }
                        continue;
                    }
                    return agent;
                }
            }
            return null;
        }
    });
    executor.execute(future);
    return future;
}
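The FutureTask itself is the return value here: sendCommand() starts the retrying HTTP call on the shared executor and hands the future back, so the caller decides when, or whether, to block for the AgentData result (non-null only when the command ultimately failed).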
From source file:bi.meteorite.util.ITestBootstrap.java
protected String executeCommand(final String command, final Long timeout, final Boolean silent) {
    String response;
    final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    final PrintStream printStream = new PrintStream(byteArrayOutputStream);
    final CommandProcessor commandProcessor = getOsgiService(CommandProcessor.class);
    final CommandSession commandSession = commandProcessor.createSession(System.in, printStream, System.err);

    FutureTask<String> commandFuture = new FutureTask<String>(new Callable<String>() {
        public String call() {
            try {
                if (!silent) {
                    System.err.println(command);
                }
                commandSession.execute(command);
            } catch (Exception e) {
                e.printStackTrace(System.err);
            }
            printStream.flush();
            return byteArrayOutputStream.toString();
        }
    });

    try {
        executor.submit(commandFuture);
        response = commandFuture.get(timeout, TimeUnit.MILLISECONDS);
    } catch (Exception e) {
        e.printStackTrace(System.err);
        response = "SHELL COMMAND TIMED OUT: ";
    }

    return response;
}
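Wrapping the shell invocation in a FutureTask<String> lets the captured console output be retrieved with get(timeout, TimeUnit.MILLISECONDS), so a hanging command is reported as "SHELL COMMAND TIMED OUT" instead of blocking the test.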
From source file:uk.co.tfd.symplectic.harvester.SymplecticFetch.java
/**
 * Executes the task
 *
 * @param baseUrl
 * @throws UnsupportedEncodingException
 * @throws IOException
 *           error processing search
 * @throws TransformerException
 * @throws TransformerFactoryConfigurationError
 * @throws ParserConfigurationException
 * @throws SAXException
 * @throws DOMException
 * @throws NoSuchAlgorithmException
 * @throws AtomEntryLoadException
 */
private void execute() throws DOMException, NoSuchAlgorithmException, UnsupportedEncodingException, IOException,
        SAXException, ParserConfigurationException, TransformerFactoryConfigurationError, TransformerException {
    ProgressTracker progress = null;
    try {
        progress = new JDBCProgressTrackerImpl(rh, limitListPages, updateLists, objectTypes,
                excludedRelationshipObjectTypes);
    } catch (SQLException e) {
        LOGGER.info(e.getMessage(), e);
        progress = new FileProgressTrackerImpl("loadstate", rh, limitListPages, updateLists, objectTypes,
                excludedRelationshipObjectTypes);
    } catch (IOException e) {
        LOGGER.info(e.getMessage(), e);
        progress = new FileProgressTrackerImpl("loadstate", rh, limitListPages, updateLists, objectTypes,
                excludedRelationshipObjectTypes);
    }

    // re-scan relationships to extract API objects
    // reScanRelationships(progress);
    // baseUrl + "/objects?categories=users&groups=3"
    progress.toload(seedQueryUrl, new APIObjects(rh, "users", progress, limitListPages, objectTypes));
    // progress.toload(baseUrl+"publication", new APIObjects(rh, "publications", progress));

    int i = 0;
    int threadPoolSize = 20;
    ExecutorService executorService = Executors.newFixedThreadPool(threadPoolSize);
    final ConcurrentHashMap<String, FutureTask<String>> worklist = new ConcurrentHashMap<String, FutureTask<String>>();
    while (i < maxUrlFetch) {
        Entry<String, AtomEntryLoader> next = progress.next();
        if (next == null) {
            int startingWorklistSize = worklist.size();
            while (worklist.size() > 0 && worklist.size() >= startingWorklistSize) {
                consumeTasks(worklist, progress);
                if (worklist.size() >= startingWorklistSize) {
                    try {
                        Thread.sleep(500);
                    } catch (InterruptedException e) {
                    }
                }
            }
            if (!progress.hasPending() && worklist.size() == 0) {
                break; // there are none left to come, the workers are empty, and so is pending
            }
        } else {
            final AtomEntryLoader loader = next.getValue();
            final String key = next.getKey();
            FutureTask<String> task = new FutureTask<String>(new Callable<String>() {
                @Override
                public String call() throws Exception {
                    try {
                        loader.loadEntry(key);
                    } catch (Exception e) {
                        LOGGER.error(e.getMessage(), e);
                    }
                    return "Done Loading " + key;
                }
            });
            worklist.put(key, task);
            executorService.execute(task);
            i++;
            // dont overfill the queue
            while (worklist.size() > threadPoolSize * 2) {
                consumeTasks(worklist, progress);
                if (worklist.size() > threadPoolSize) {
                    try {
                        Thread.sleep(500);
                    } catch (InterruptedException e) {
                    }
                }
            }
        }
    }

    while (worklist.size() > 0) {
        consumeTasks(worklist, progress);
        Thread.yield();
    }
    executorService.shutdown();

    LOGGER.info("End ToDo list contains {} urls ", progress.pending());
    progress.dumpLoaded();
    progress.checkpoint();
}
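This harvester keeps its in-flight FutureTask<String> instances in a ConcurrentHashMap keyed by URL so it can throttle submission (never more than twice the pool size outstanding) and drain completed tasks through consumeTasks() before queueing more work.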
From source file:SwingWorker.java
/**
 * Constructs this {@code SwingWorker}.
 */
public SwingWorker() {
    Callable<T> callable = new Callable<T>() {
        public T call() throws Exception {
            setState(StateValue.STARTED);
            return doInBackground();
        }
    };

    future = new FutureTask<T>(callable) {
        @Override
        protected void done() {
            doneEDT();
            setState(StateValue.DONE);
        }
    };

    state = StateValue.PENDING;
    propertyChangeSupport = new SwingPropertyChangeSupport(this, true);
    doProcess = null;
    doNotifyProgressChange = null;
}
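SwingWorker shows another way to use the constructor: the FutureTask is subclassed anonymously so that done() can push completion handling back onto the worker once the Callable, which marks the STARTED state and runs doInBackground(), has finished.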