List of usage examples for java.util.concurrent.ExecutorService.execute
void execute(Runnable command);
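Before the project-specific examples below, here is a minimal, self-contained sketch of the pattern they all share: create a pool with Executors, hand Runnable tasks to execute(), then call shutdown() and awaitTermination(). The class and task names in this sketch are illustrative only and do not come from any of the projects listed.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ExecuteExample {
    public static void main(String[] args) throws InterruptedException {
        // Fixed pool: at most two tasks run concurrently; the rest queue up.
        ExecutorService pool = Executors.newFixedThreadPool(2);
        for (int i = 0; i < 4; i++) {
            final int taskId = i;
            // execute() schedules the Runnable and returns immediately.
            // Unlike submit(), it returns no Future, so failures only surface
            // through the thread's uncaught-exception handler.
            pool.execute(new Runnable() {
                @Override
                public void run() {
                    System.out.println("Task " + taskId + " running on " + Thread.currentThread().getName());
                }
            });
        }
        pool.shutdown();                              // stop accepting new tasks
        pool.awaitTermination(10, TimeUnit.SECONDS);  // wait for queued tasks to finish
    }
}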
From source file:com.wavemaker.tools.apidocs.tools.spring.SpringSwaggerParserTest.java
@Test
public void testMultiThread() throws InterruptedException {
    ExecutorService service = Executors.newFixedThreadPool(4);
    List<Class<?>> controllerClasses = new ArrayList<>();
    controllerClasses.add(VacationController.class);
    controllerClasses.add(UserController.class);
    controllerClasses.add(DepartmentController.class);
    for (final Class<?> controllerClass : controllerClasses) {
        service.execute(new Runnable() {
            public void run() {
                Swagger swagger;
                try {
                    swagger = runForSingleClass(controllerClass);
                } catch (SwaggerParserException e) {
                    throw new RuntimeException("Exception while parsing class:" + controllerClass.getName(), e);
                }
                Assert.assertNotNull(swagger);
                assertEquals(1, swagger.getTags().size());
                assertEquals(controllerClass.getName(), swagger.getTags().get(0).getFullyQualifiedName());
                try {
                    writeToFile(swagger, "class_" + controllerClass.getSimpleName() + ".json");
                } catch (IOException e) {
                    throw new RuntimeException("Error while writing to file", e);
                }
            }
        });
    }
    service.shutdown();
    service.awaitTermination(10, TimeUnit.SECONDS);
}
From source file:info.pancancer.arch3.test.TestWorker.java
@Test
public void testWorker_endlessFromConfig() throws Exception {
    HierarchicalINIConfiguration configObj = new HierarchicalINIConfiguration();
    configObj.addProperty("rabbit.rabbitMQQueueName", "seqware");
    configObj.addProperty("rabbit.rabbitMQHost", "localhost");
    configObj.addProperty("rabbit.rabbitMQUser", "guest");
    configObj.addProperty("rabbit.rabbitMQPass", "guest");
    configObj.addProperty("worker.heartbeatRate", "2.5");
    configObj.addProperty("worker.max-runs", "1");
    configObj.addProperty("worker.preworkerSleep", "1");
    configObj.addProperty("worker.postworkerSleep", "1");
    configObj.addProperty("worker.endless", "true");
    configObj.addProperty("worker.hostUserName", System.getProperty("user.name"));
    byte[] body = setupMessage();
    Delivery testDelivery = new Delivery(mockEnvelope, mockProperties, body);
    setupMockQueue(testDelivery);
    Mockito.when(Utilities.parseConfig(anyString())).thenReturn(configObj);

    // Because the cleanup code calls resultHandler.waitFor(), we need to actually execute something,
    // even if it does nothing.
    Mockito.doNothing().when(mockExecutor).execute(any(CommandLine.class),
            any(DefaultExecuteResultHandler.class));
    // This mocks the cleanup command - we don't really want to execute the command that deletes the
    // contents of /datastore, at least not when unit testing on a workstation!
    PowerMockito.whenNew(DefaultExecutor.class).withNoArguments().thenReturn(mockExecutor);
    Mockito.when(mockExecHandler.hasResult()).thenReturn(true);
    PowerMockito.whenNew(DefaultExecuteResultHandler.class).withNoArguments().thenReturn(mockExecHandler);

    final FutureTask<String> tester = new FutureTask<>(new Callable<String>() {
        @Override
        public String call() {
            LOG.info("tester thread started");
            try {
                Worker.main(new String[] { "--config", "src/test/resources/workerConfig.ini", "--uuid",
                        "vm123456", "--pidFile", "/var/run/arch3_worker.pid" });
            } catch (CancellationException | InterruptedException e) {
                LOG.error("Exception caught: " + e.getMessage());
                return e.getMessage();
            } catch (Exception e) {
                e.printStackTrace();
                fail("Unexpected exception");
                return null;
            } finally {
                Mockito.verify(mockAppender, Mockito.atLeastOnce()).doAppend(argCaptor.capture());
                String s = appendEventsIntoString(argCaptor.getAllValues());
                return s;
            }
        }
    });

    final Thread killer = new Thread(new Runnable() {
        @Override
        public void run() {
            LOG.info("killer thread started");
            try {
                // The endless worker will not end on its own (because it's endless), so we need to wait a
                // little bit (2.5 seconds) and then kill it as if it were killed by the command-line script
                // (kill_worker_daemon.sh).
                Thread.sleep(2500);
            } catch (InterruptedException e) {
                e.printStackTrace();
                LOG.error(e.getMessage());
            }
            tester.cancel(true);
        }
    });

    ExecutorService es = Executors.newFixedThreadPool(2);
    es.execute(tester);
    es.execute(killer);

    try {
        tester.get();
    } catch (CancellationException e) {
        Mockito.verify(mockAppender, Mockito.atLeastOnce()).doAppend(argCaptor.capture());
        List<LoggingEvent> tmpList = new LinkedList<LoggingEvent>(argCaptor.getAllValues());
        String output = this.appendEventsIntoString(tmpList);
        assertTrue("--endless flag was detected and set",
                output.contains("The \"--endless\" flag was set, this worker will run endlessly!"));
        int numJobsPulled = StringUtils.countMatches(output, " WORKER IS PREPARING TO PULL JOB FROM QUEUE ");
        LOG.info("Number of jobs attempted: " + numJobsPulled);
        assertTrue("number of jobs attempted > 1", numJobsPulled > 1);
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}
From source file:org.commonjava.util.partyline.JoinableFileTest.java
@Test
public void writeToFile() throws Exception {
    final ExecutorService execs = Executors.newFixedThreadPool(1);
    final CountDownLatch latch = new CountDownLatch(1);
    final File tempFile = temp.newFile();
    String threadName = "writer" + writers++;
    final JoinableFile stream = new JoinableFile(tempFile,
            new LockOwner(tempFile.getAbsolutePath(), name.getMethodName(), LockLevel.write), true);

    execs.execute(() -> {
        Thread.currentThread().setName(threadName);
        new TimedFileWriter(stream, 0, latch).run();
    });

    System.out.println("Waiting for " + name.getMethodName() + " threads to complete.");
    latch.await();

    final File file = new File(stream.getPath());
    System.out.println("File length: " + file.length());

    final List<String> lines = FileUtils.readLines(file);
    System.out.println(lines);

    assertThat(lines.size(), equalTo(COUNT));
}
From source file:org.codice.pubsub.stomp.SubscriptionQueryMessageListener.java
public void initialize() {
    // Starts this class in a thread
    ExecutorService executor = Executors.newFixedThreadPool(NTHREDS);
    SubscriptionQueryMessageListener lsnr = new SubscriptionQueryMessageListener(catalogFramework, bundleContext,
            stompHost, stompPort, destTopicName, queryAndSend);
    Runnable worker = lsnr;
    executor.execute(worker);
}
From source file:org.commonjava.util.partyline.JoinableFileManagerTest.java
@Test
public void twoFileReaders_CleanupFileEntryOnLastClose() throws Exception {
    String src = "This is a test";
    File f = temp.newFile();
    FileUtils.write(f, src);

    int count = 2;
    CountDownLatch start = new CountDownLatch(count);
    CountDownLatch end = new CountDownLatch(count);
    Logger logger = LoggerFactory.getLogger(getClass());
    ExecutorService executor = Executors.newCachedThreadPool();

    for (int i = 0; i < count; i++) {
        logger.info("Starting: {}", i);
        executor.execute(() -> {
            logger.info("Signaling thread: {} has started", Thread.currentThread().getName());
            start.countDown();
            try {
                logger.info("Waiting for other thread(s) to start...");
                start.await(3, TimeUnit.SECONDS);
                assertThat("Threads did not start correctly!", start.getCount(), equalTo(0L));

                logger.info("Opening: {}", f);
                try (InputStream in = mgr.openInputStream(f)) {
                    assertThat(IOUtils.toString(in), equalTo(src));
                } catch (IOException e) {
                    e.printStackTrace();
                    fail("Cannot open: " + f);
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
                fail("Interrupted");
            } finally {
                logger.info("Signaling thread: {} has ended", Thread.currentThread().getName());
                end.countDown();
            }
        });
    }

    logger.info("Waiting for end of threads");
    end.await(5, TimeUnit.SECONDS);
    assertThat("Threads did not end correctly!", end.getCount(), equalTo(0L));

    AtomicInteger counter = new AtomicInteger(0);
    mgr.getFileTree().forAll(entry -> true, entry -> counter.incrementAndGet());
    assertThat("FileEntry instance was not removed after closing!", counter.get(), equalTo(0));
}
From source file:org.commonjava.util.partyline.JoinableFileTest.java
@Test
public void joinFileWriteContinueAfterInputStreamClose() throws Exception {
    final ExecutorService execs = Executors.newFixedThreadPool(1);
    final CountDownLatch latch = new CountDownLatch(1);
    final File tempFile = temp.newFile();
    String threadName = "writer" + writers++;
    final JoinableFile stream = new JoinableFile(tempFile,
            new LockOwner(tempFile.getAbsolutePath(), name.getMethodName(), LockLevel.write), true);

    execs.execute(() -> {
        Thread.currentThread().setName(threadName);
        new TimedFileWriter(stream, 1, latch).run();
    });

    InputStream inStream = stream.joinStream();
    InputStream inStream2 = stream.joinStream();

    Thread.sleep(1000);

    inStream.close();
    inStream2.close();
    System.out.println(
            "All input stream closed. Waiting for " + name.getMethodName() + " writer thread to complete.");

    latch.await();

    final File file = new File(stream.getPath());
    System.out.println("File length: " + file.length());

    final List<String> lines = FileUtils.readLines(file);
    System.out.println(lines);

    assertThat(lines.size(), equalTo(COUNT));
}
From source file:com.linkedin.pinot.tools.admin.command.CreateSegmentCommand.java
@Override
public boolean execute() throws Exception {
    LOGGER.info("Executing command: {}", toString());

    // Load the generator config if it exists.
    final SegmentGeneratorConfig segmentGeneratorConfig;
    if (_generatorConfigFile != null) {
        segmentGeneratorConfig = new ObjectMapper().readValue(new File(_generatorConfigFile),
                SegmentGeneratorConfig.class);
    } else {
        segmentGeneratorConfig = new SegmentGeneratorConfig();
    }

    // Load config from segment generator config.
    String configDataDir = segmentGeneratorConfig.getDataDir();
    if (_dataDir == null) {
        if (configDataDir == null) {
            throw new RuntimeException("Must specify dataDir.");
        }
        _dataDir = configDataDir;
    } else {
        if (configDataDir != null && !configDataDir.equals(_dataDir)) {
            LOGGER.warn("Find dataDir conflict in command line and config file, use config in command line: {}",
                    _dataDir);
        }
    }

    FileFormat configFormat = segmentGeneratorConfig.getFormat();
    if (_format == null) {
        if (configFormat == null) {
            throw new RuntimeException("Format cannot be null in config file.");
        }
        _format = configFormat;
    } else {
        if (configFormat != _format && configFormat != FileFormat.AVRO) {
            LOGGER.warn("Find format conflict in command line and config file, use config in command line: {}",
                    _format);
        }
    }

    String configOutDir = segmentGeneratorConfig.getOutDir();
    if (_outDir == null) {
        if (configOutDir == null) {
            throw new RuntimeException("Must specify outDir.");
        }
        _outDir = configOutDir;
    } else {
        if (configOutDir != null && !configOutDir.equals(_outDir)) {
            LOGGER.warn("Find outDir conflict in command line and config file, use config in command line: {}",
                    _outDir);
        }
    }

    if (segmentGeneratorConfig.isOverwrite()) {
        _overwrite = true;
    }

    String configTableName = segmentGeneratorConfig.getTableName();
    if (_tableName == null) {
        if (configTableName == null) {
            throw new RuntimeException("Must specify tableName.");
        }
        _tableName = configTableName;
    } else {
        if (configTableName != null && !configTableName.equals(_tableName)) {
            LOGGER.warn(
                    "Find tableName conflict in command line and config file, use config in command line: {}",
                    _tableName);
        }
    }

    String configSegmentName = segmentGeneratorConfig.getSegmentName();
    if (_segmentName == null) {
        if (configSegmentName == null) {
            throw new RuntimeException("Must specify segmentName.");
        }
        _segmentName = configSegmentName;
    } else {
        if (configSegmentName != null && !configSegmentName.equals(_segmentName)) {
            LOGGER.warn(
                    "Find segmentName conflict in command line and config file, use config in command line: {}",
                    _segmentName);
        }
    }

    // Filter out all input files.
    File dir = new File(_dataDir);
    if (!dir.exists() || !dir.isDirectory()) {
        throw new RuntimeException("Data directory " + _dataDir + " not found.");
    }

    File[] files = dir.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.toLowerCase().endsWith(_format.toString().toLowerCase());
        }
    });
    if ((files == null) || (files.length == 0)) {
        throw new RuntimeException("Data directory " + _dataDir + " does not contain "
                + _format.toString().toUpperCase() + " files.");
    }

    // Make sure output directory does not already exist, or can be overwritten.
    File outDir = new File(_outDir);
    if (outDir.exists()) {
        if (!_overwrite) {
            throw new IOException("Output directory " + _outDir + " already exists.");
        } else {
            FileUtils.deleteDirectory(outDir);
        }
    }

    // Set other generator configs from command line.
    segmentGeneratorConfig.setDataDir(_dataDir);
    segmentGeneratorConfig.setFormat(_format);
    segmentGeneratorConfig.setOutDir(_outDir);
    segmentGeneratorConfig.setOverwrite(_overwrite);
    segmentGeneratorConfig.setTableName(_tableName);
    segmentGeneratorConfig.setSegmentName(_segmentName);
    if (_schemaFile != null) {
        if (segmentGeneratorConfig.getSchemaFile() != null
                && !segmentGeneratorConfig.getSchemaFile().equals(_schemaFile)) {
            LOGGER.warn(
                    "Find schemaFile conflict in command line and config file, use config in command line: {}",
                    _schemaFile);
        }
        segmentGeneratorConfig.setSchemaFile(_schemaFile);
    }
    if (_readerConfigFile != null) {
        if (segmentGeneratorConfig.getReaderConfigFile() != null
                && !segmentGeneratorConfig.getReaderConfigFile().equals(_readerConfigFile)) {
            LOGGER.warn(
                    "Find readerConfigFile conflict in command line and config file, use config in command line: {}",
                    _readerConfigFile);
        }
        segmentGeneratorConfig.setReaderConfigFile(_readerConfigFile);
    }
    if (_enableStarTreeIndex) {
        segmentGeneratorConfig.setEnableStarTreeIndex(true);
    }
    if (_starTreeIndexSpecFile != null) {
        if (segmentGeneratorConfig.getStarTreeIndexSpecFile() != null
                && !segmentGeneratorConfig.getStarTreeIndexSpecFile().equals(_starTreeIndexSpecFile)) {
            LOGGER.warn(
                    "Find starTreeIndexSpecFile conflict in command line and config file, use config in command line: {}",
                    _starTreeIndexSpecFile);
        }
        segmentGeneratorConfig.setStarTreeIndexSpecFile(_starTreeIndexSpecFile);
    }

    ExecutorService executor = Executors.newFixedThreadPool(_numThreads);
    int cnt = 0;
    for (final File file : files) {
        final int segCnt = cnt;
        executor.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    SegmentGeneratorConfig config = new SegmentGeneratorConfig(segmentGeneratorConfig);
                    config.setInputFilePath(file.getAbsolutePath());
                    config.setSegmentName(_segmentName + "_" + segCnt);
                    config.loadConfigFiles();
                    final SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
                    driver.init(config);
                    driver.build();
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        });
        cnt += 1;
    }
    executor.shutdown();
    return executor.awaitTermination(1, TimeUnit.HOURS);
}
From source file:org.apache.zeppelin.hopshive.HopsHiveInterpreter.java
private SqlCompleter createOrUpdateSqlCompleter(SqlCompleter sqlCompleter, final Connection connection,
        String propertyKey, final String buf, final int cursor) {
    String schemaFiltersKey = String.format("%s.%s", propertyKey, COMPLETER_SCHEMA_FILTERS_KEY);
    String sqlCompleterTtlKey = String.format("%s.%s", propertyKey, COMPLETER_TTL_KEY);
    final String schemaFiltersString = getProperty(schemaFiltersKey);
    int ttlInSeconds = Integer
            .valueOf(StringUtils.defaultIfEmpty(getProperty(sqlCompleterTtlKey), DEFAULT_COMPLETER_TTL));
    final SqlCompleter completer;
    if (sqlCompleter == null) {
        completer = new SqlCompleter(ttlInSeconds);
    } else {
        completer = sqlCompleter;
    }

    ExecutorService executorService = Executors.newFixedThreadPool(1);
    executorService.execute(new Runnable() {
        @Override
        public void run() {
            completer.createOrUpdateFromConnection(connection, schemaFiltersString, buf, cursor);
        }
    });

    executorService.shutdown();

    try {
        // protection to release connection
        executorService.awaitTermination(3, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
    }
    return completer;
}
From source file:org.apache.hive.hcatalog.templeton.tool.LaunchMapper.java
private KeepAlive startCounterKeepAlive(ExecutorService pool, Context context) throws IOException {
    KeepAlive k = new KeepAlive(context);
    pool.execute(k);
    return k;
}
From source file:org.latticesoft.app.ThreadCommand.java
public Object execute(Object o) throws CommandException {
    Map map = null;
    if (o != null && o instanceof Map) {
        map = (Map) o;
    } else {
        map = new HashMap();
    }
    try {
        ExecutorService es = Executors.newFixedThreadPool(threadCount);
        if (log.isInfoEnabled()) {
            log.info("JobSize: " + this.jobs.size());
        }
        for (int i = 0; i < jobs.size(); i++) {
            Runnable r = (Runnable) jobs.get(i);
            if (r instanceof BeanCommand) {
                BeanCommand cmd = (BeanCommand) r;
                cmd.setExecuteParam(map);
            }
            es.execute(r);
        }
        if (this.waitFor && this.timeout > 0 && this.timeoutUnit != null) {
            es.shutdown();
            long l = System.currentTimeMillis();
            if (log.isInfoEnabled()) {
                log.info("Begin of waiting...");
            }
            es.awaitTermination(this.timeout, this.timeoutUnit);
            long l2 = System.currentTimeMillis();
            if (log.isInfoEnabled()) {
                log.info("End of waiting..." + (l2 - l));
            }
        }
        //es.shutdown();
    } catch (Exception e) {
        if (log.isErrorEnabled()) {
            log.error(e);
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("end!");
    }
    return map;
}