List of usage examples for java.util.concurrent CompletionService submit
Future<V> submit(Callable<V> task);
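Every example below follows the same pattern: wrap an ExecutorService in an ExecutorCompletionService, submit() a batch of Callables, then take() the results as they complete. A minimal, self-contained sketch of that pattern (the class name and the squaring task are invented purely for illustration):

import java.util.concurrent.*;

public class SubmitExample {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        CompletionService<Integer> service = new ExecutorCompletionService<>(pool);
        // submit() accepts any Callable<V> and returns a Future<V>
        for (int i = 0; i < 4; i++) {
            final int n = i;
            service.submit(() -> n * n);
        }
        // take() blocks until some submitted task completes; results arrive
        // in completion order, not submission order
        for (int i = 0; i < 4; i++) {
            System.out.println(service.take().get());
        }
        pool.shutdown();
    }
}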
From source file:WordLengthCallable.java
public static void main(String[] args) throws Exception {
    final int THREAD_COUNT = 4;
    ExecutorService execService = Executors.newFixedThreadPool(THREAD_COUNT);
    CompletionService<Integer> completionService = new ExecutorCompletionService<>(execService);
    for (int i = 0; i < THREAD_COUNT; i++) {
        completionService.submit(new WordLengthCallable());
    }
    execService.shutdown();
    // Take exactly one result per submitted task; polling isTerminated() here
    // could exit before every result has been consumed.
    for (int i = 0; i < THREAD_COUNT; i++) {
        int result = completionService.take().get();
        System.out.println("Result is: " + result);
    }
    System.out.println("done!");
}
From source file:com.siva.javamultithreading.MultiThreadExecutor.java
public static void main(String[] args) throws ExecutionException, IOException {
    // Populate the data
    List<DomainObject> list = new ArrayList<>();
    DomainObject object = null;
    for (int i = 0; i < 230000; i++) {
        object = new DomainObject();
        object.setId("ID" + i);
        object.setName("NAME" + i);
        object.setComment("COMMENT" + i);
        list.add(object);
    }

    int maxNoOfRows = 40000;
    int noOfThreads = 1;
    int remaining = 0;
    if (list.size() > maxNoOfRows) {
        noOfThreads = list.size() / maxNoOfRows;
        remaining = list.size() % maxNoOfRows;
        if (remaining > 0) {
            noOfThreads++;
        }
    }
    List<List<DomainObject>> dos = ListUtils.partition(list, maxNoOfRows);

    ExecutorService threadPool = Executors.newFixedThreadPool(noOfThreads);
    CompletionService<HSSFWorkbook> pool = new ExecutorCompletionService<>(threadPool);

    // Excel creation through multiple threads
    long startTime = System.currentTimeMillis();
    for (List<DomainObject> listObj : dos) {
        pool.submit(new ExcelChunkSheetWriter(listObj));
    }

    HSSFWorkbook hSSFWorkbook = null;
    HSSFWorkbook book = new HSSFWorkbook();
    HSSFSheet sheet = book.createSheet("Report");
    try {
        // one take() per submitted chunk
        for (int i = 0; i < dos.size(); i++) {
            hSSFWorkbook = pool.take().get();
            System.out.println(
                    "sheet row count : sheet.PhysicalNumberOfRows() = " + sheet.getPhysicalNumberOfRows());
            int currentCount = sheet.getPhysicalNumberOfRows();
            int incomingCount = hSSFWorkbook.getSheetAt(0).getPhysicalNumberOfRows();
            if ((currentCount + incomingCount) > 60000) {
                sheet = book.createSheet("Report" + i);
            }
            ExcelUtil.copySheets(book, sheet, hSSFWorkbook.getSheetAt(0));
        }
    } catch (InterruptedException ex) {
        Logger.getLogger(MultiThreadExecutor.class.getName()).log(Level.SEVERE, null, ex);
    } catch (ExecutionException ex) {
        Logger.getLogger(MultiThreadExecutor.class.getName()).log(Level.SEVERE, null, ex);
    }

    try {
        writeFile(book, new FileOutputStream("Report.xls"));
    } catch (Exception e) {
        e.printStackTrace();
    }

    //System.out.println("No of Threads : " + noOfThreads + " Size : " + list.size() + " remaining : " + remaining);
    long endTime = System.currentTimeMillis();
    System.out.println("Time taken: " + (endTime - startTime) + " ms");
    threadPool.shutdown();
    //startProcess();
}
From source file:Main.java
public static Set<String> findMatches(List<String> searchList, Set<String> targetSet)
        throws InterruptedException, ExecutionException {
    Set<String> locatedMatchSet = new HashSet<String>();

    int threadCount = Runtime.getRuntime().availableProcessors();
    List<List<String>> partitionList = getChunkList(searchList, threadCount);

    if (partitionList.size() == 1) {
        // if we only have one "chunk" then don't bother with a thread-pool
        locatedMatchSet = new ListSearcher(searchList, targetSet).call();
    } else {
        ExecutorService executor = Executors.newFixedThreadPool(threadCount);
        CompletionService<Set<String>> completionService =
                new ExecutorCompletionService<Set<String>>(executor);
        for (List<String> chunkList : partitionList) {
            completionService.submit(new ListSearcher(chunkList, targetSet));
        }
        for (int x = 0; x < partitionList.size(); x++) {
            Set<String> threadMatchSet = completionService.take().get();
            locatedMatchSet.addAll(threadMatchSet);
        }
        executor.shutdown();
    }
    return locatedMatchSet;
}
From source file:com.siva.javamultithreading.MultiThreadExecutor.java
/**
 * This is a sample.
 */
private static void startProcess() {
    ExecutorService threadPool = Executors.newFixedThreadPool(4);
    CompletionService<HSSFWorkbook> pool = new ExecutorCompletionService<>(threadPool);

    // Excel creation through multiple threads
    long startTime = System.currentTimeMillis();
    pool.submit(new ExcelChunkSheetWriter(0, 1000));
    pool.submit(new ExcelChunkSheetWriter(1001, 20000));
    pool.submit(new ExcelChunkSheetWriter(2, 3000));
    pool.submit(new ExcelChunkSheetWriter(3, 40000));
    pool.submit(new ExcelChunkSheetWriter(4, 50000));

    HSSFWorkbook hSSFWorkbook = null;
    HSSFWorkbook book = new HSSFWorkbook();
    HSSFSheet sheet = book.createSheet("Report");
    try {
        // one take() per submitted task
        for (int i = 0; i < 5; i++) {
            hSSFWorkbook = pool.take().get();
            System.out.println(
                    "sheet row count : sheet.PhysicalNumberOfRows() = " + sheet.getPhysicalNumberOfRows());
            int currentCount = sheet.getPhysicalNumberOfRows();
            int incomingCount = hSSFWorkbook.getSheetAt(0).getPhysicalNumberOfRows();
            if ((currentCount + incomingCount) > 60000) {
                sheet = book.createSheet("Report" + i);
            }
            ExcelUtil.copySheets(book, sheet, hSSFWorkbook.getSheetAt(0));
        }
    } catch (InterruptedException ex) {
        Logger.getLogger(MultiThreadExecutor.class.getName()).log(Level.SEVERE, null, ex);
    } catch (ExecutionException ex) {
        Logger.getLogger(MultiThreadExecutor.class.getName()).log(Level.SEVERE, null, ex);
    }

    try {
        writeFile(book, new FileOutputStream("Report.xls"));
    } catch (Exception e) {
        e.printStackTrace();
    }

    /*
    FileOutputStream fos = new FileOutputStream("all.zip");
    ZipOutputStream zos = new ZipOutputStream(fos);
    for (int i = 0; i < 5; i++) {
        try {
            hSSFWorkbook = pool.take().get();
            ZipEntry ze = new ZipEntry("Excel" + i + ".xls");
            zos.putNextEntry(ze);
            hSSFWorkbook.write(zos);
            zos.closeEntry();
        } catch (InterruptedException ex) {
            Logger.getLogger(MultiThreadExecutor.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    zos.close();
    */

    long endTime = System.currentTimeMillis();
    System.out.println("Time taken: " + (endTime - startTime) + " ms");
    threadPool.shutdown();
}
From source file:org.apache.hadoop.hbase.util.ModifyRegionUtils.java
/**
 * Create a new set of regions on the specified file-system.
 * NOTE: you should add the regions to hbase:meta after this operation.
 *
 * @param exec Thread Pool Executor
 * @param conf {@link Configuration}
 * @param rootDir Root directory for HBase instance
 * @param tableDir table directory
 * @param hTableDescriptor description of the table
 * @param newRegions {@link HRegionInfo} that describes the regions to create
 * @param task {@link RegionFillTask} custom code to populate region after creation
 * @throws IOException
 */
public static List<HRegionInfo> createRegions(final ThreadPoolExecutor exec, final Configuration conf,
        final Path rootDir, final Path tableDir, final HTableDescriptor hTableDescriptor,
        final HRegionInfo[] newRegions, final RegionFillTask task) throws IOException {
    if (newRegions == null) return null;
    int regionNumber = newRegions.length;
    CompletionService<HRegionInfo> completionService = new ExecutorCompletionService<HRegionInfo>(exec);
    List<HRegionInfo> regionInfos = new ArrayList<HRegionInfo>();
    for (final HRegionInfo newRegion : newRegions) {
        completionService.submit(new Callable<HRegionInfo>() {
            @Override
            public HRegionInfo call() throws IOException {
                return createRegion(conf, rootDir, tableDir, hTableDescriptor, newRegion, task);
            }
        });
    }
    try {
        // wait for all regions to finish creation
        for (int i = 0; i < regionNumber; i++) {
            Future<HRegionInfo> future = completionService.take();
            HRegionInfo regionInfo = future.get();
            regionInfos.add(regionInfo);
        }
    } catch (InterruptedException e) {
        LOG.error("Caught " + e + " during region creation");
        throw new InterruptedIOException(e.getMessage());
    } catch (ExecutionException e) {
        throw new IOException(e);
    }
    return regionInfos;
}
From source file:uniol.apt.analysis.synthesize.FindWords.java
static private <T> int submitTasks(ForkJoinPool executor, CompletionService<T> completion,
        Iterator<Callable<T>> jobGenerator) {
    int submitted = 0;
    while (jobGenerator.hasNext() && executor.getQueuedSubmissionCount() < TARGET_JOB_QUEUE_SIZE) {
        completion.submit(jobGenerator.next());
        submitted++;
    }
    return submitted;
}
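The helper only fills the executor's submission queue up to TARGET_JOB_QUEUE_SIZE, so a caller has to alternate between refilling and draining completed results. A hypothetical driver loop, not part of the FindWords source, might look like this inside a method that shares the same <T> parameter:

// Hypothetical driver: keep the submission queue topped up while consuming results.
int pending = submitTasks(executor, completion, jobGenerator);
while (pending > 0) {
    T result = completion.take().get();   // blocks until one submitted job finishes
    pending--;
    // ... process result, possibly generating follow-up jobs ...
    pending += submitTasks(executor, completion, jobGenerator);
}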
From source file:com.linkedin.pinot.common.http.MultiGetRequest.java
/**
 * GET urls in parallel using the executor service.
 * @param urls absolute URLs to GET
 * @param timeoutMs timeout in milliseconds for each GET request
 * @return instance of CompletionService. The completion service will provide
 *         results as they arrive. The order is NOT the same as the order of the URLs.
 */
public CompletionService<GetMethod> execute(@Nonnull List<String> urls, final int timeoutMs) {
    Preconditions.checkNotNull(urls);
    Preconditions.checkArgument(timeoutMs > 0, "Timeout value for multi-get must be greater than 0");
    CompletionService<GetMethod> completionService = new ExecutorCompletionService<>(executor);
    for (final String url : urls) {
        completionService.submit(new Callable<GetMethod>() {
            @Override
            public GetMethod call() throws Exception {
                HttpClient client = new HttpClient(connectionManager);
                GetMethod getMethod = new GetMethod(url);
                getMethod.getParams().setSoTimeout(timeoutMs);
                // if all connections in the connection manager are busy this will wait to retrieve a connection;
                // set the time to wait to retrieve a connection from the connection manager
                client.getParams().setConnectionManagerTimeout(timeoutMs);
                client.executeMethod(getMethod);
                return getMethod;
            }
        });
    }
    return completionService;
}
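Since execute() returns the CompletionService rather than the responses, the caller drains it with take()/get(). A minimal consumption sketch, where multiGet, urls, the 1000 ms timeout, and the omitted exception handling are assumptions rather than part of MultiGetRequest:

// Hypothetical caller: drain one completed GET per submitted URL.
// (Assumes the enclosing method declares throws Exception.)
CompletionService<GetMethod> service = multiGet.execute(urls, 1000);
for (int i = 0; i < urls.size(); i++) {
    GetMethod get = service.take().get();   // blocks until some request finishes
    try {
        System.out.println(get.getStatusCode() + " " + get.getResponseBodyAsString());
    } finally {
        get.releaseConnection();             // return the connection to the manager
    }
}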
From source file:org.mule.module.db.performance.LoadGenerator.java
public void generateLoad(final LoadTask loadTask) throws InterruptedException, ExecutionException {
    Collection<Callable<Integer>> solvers = new ArrayList<Callable<Integer>>(getThreadCount());
    for (int i = 1; i <= getThreadCount(); i++) {
        solvers.add(new Callable<Integer>() {
            public Integer call() throws Exception {
                for (int message = 1; message <= getMessagesPerThread(); message++) {
                    try {
                        loadTask.execute(message);
                    } catch (Exception e) {
                        // Ignore and continue
                        logger.error("Error sending message: " + e.getMessage());
                    }
                    Thread.sleep(getMessageDelay());
                }
                return getMessagesPerThread();
            }
        });
    }

    ExecutorService exec = Executors.newFixedThreadPool(getThreadCount());
    CompletionService<Integer> executorCompletionService = new ExecutorCompletionService<Integer>(exec);
    for (Callable<Integer> s : solvers) {
        executorCompletionService.submit(s);
    }

    Integer count = 0;
    for (int i = 0; i < getThreadCount(); ++i) {
        count = count + executorCompletionService.take().get();
        logger.info("Current row processed count: " + count);
    }
    // allow the pool threads to exit once all tasks are done
    exec.shutdown();
    logger.info("Load generation completed");
}
From source file:com.sitewhere.test.MultithreadedRestTest.java
@Test
public void doRestTest() throws Exception {
    java.util.logging.Logger.getLogger("org.apache.http.wire").setLevel(java.util.logging.Level.FINEST);
    java.util.logging.Logger.getLogger("org.apache.http.headers").setLevel(java.util.logging.Level.FINEST);
    System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.SimpleLog");
    System.setProperty("org.apache.commons.logging.simplelog.showdatetime", "true");
    System.setProperty("org.apache.commons.logging.simplelog.log.httpclient.wire", "ERROR");
    System.setProperty("org.apache.commons.logging.simplelog.log.org.apache.http", "ERROR");
    System.setProperty("org.apache.commons.logging.simplelog.log.org.apache.http.headers", "ERROR");

    ExecutorService executor = Executors.newFixedThreadPool(numThreads);
    CompletionService<SiteWhereClientTester.TestResults> completionService =
            new ExecutorCompletionService<SiteWhereClientTester.TestResults>(executor);
    for (int i = 0; i < numThreads; i++) {
        completionService.submit(
                new SiteWhereClientTester("90389b40-7c25-401b-bf72-98673913d59e", 100, updateState));
    }
    for (int i = 0; i < numThreads; ++i) {
        completionService.take().get();
    }
}
From source file:cn.clxy.codes.upload.UploadFileService.java
private void doUpload(final List<Integer> indexes) {
    log.debug("Start! ===--------------------");
    BlockingQueue<Part> parts = new ArrayBlockingQueue<Part>(Config.MAX_READ);
    CompletionService<String> cs = new ExecutorCompletionService<String>(executor);

    log.debug("Reading started.");
    cs.submit(new ReadTask(file, indexes, parts));

    log.debug("Uploading started.");
    for (int i = 0; i < Config.MAX_UPLOAD; i++) {
        cs.submit(new UploadTask("upload." + i, uploader, parts));
    }

    // Wait until all are done; total count = MAX_UPLOAD + 1 (one ReadTask plus MAX_UPLOAD UploadTasks).
    for (int i = 0; i <= Config.MAX_UPLOAD; i++) {
        Future<String> future = null;
        try {
            future = cs.take();
            checkFuture(future);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    // Notify the server that all uploads are done.
    Future<String> result = executor.submit(new NotifyTask(file, uploader));
    checkFuture(result);
    log.debug("End! ===--------------------");
}