Usage examples for java.util.concurrent.ExecutorService.execute(Runnable)
void execute(Runnable command);
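execute submits a Runnable for asynchronous execution in a pool thread, a new thread, or the calling thread, at the executor's discretion; unlike submit, it returns no Future. Before the real-world examples below, here is a minimal, self-contained sketch (not taken from any of the projects listed on this page) of the common execute/shutdown/awaitTermination lifecycle:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ExecuteExample {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 10; i++) {
            final int taskId = i; // copy loop variable so the lambda captures an effectively final value
            // execute() schedules the Runnable for asynchronous execution and does not return a Future
            pool.execute(() -> System.out.println("Task " + taskId + " on " + Thread.currentThread().getName()));
        }
        pool.shutdown();                              // stop accepting new tasks
        pool.awaitTermination(10, TimeUnit.SECONDS);  // wait for already-submitted tasks to finish
    }
}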
From source file:byps.test.servlet.MyServerIF.java
@Override
public int callClientParallel(int nbOfCalls) throws RemoteException {
    if (log.isDebugEnabled())
        log.debug("callClientParallel(" + nbOfCalls);
    final ClientIF clientIF = getClientIF();
    final AtomicInteger ret = new AtomicInteger(0);
    ExecutorService tpool = Executors.newCachedThreadPool();
    for (int i = 0; i < nbOfCalls; i++) {
        Runnable run = new Runnable() {
            public void run() {
                try {
                    if (log.isDebugEnabled())
                        log.debug("clientIF.incrementInt(");
                    int v = clientIF.incrementInt(0);
                    if (log.isDebugEnabled())
                        log.debug(")clientIF.incrementInt");
                    ret.addAndGet(v);
                } catch (Exception e) {
                    log.error(e);
                }
            }
        };
        tpool.execute(run);
    }
    tpool.shutdown();
    try {
        tpool.awaitTermination(10, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        throw new BException(BExceptionC.CANCELLED, e.toString(), e);
    }
    if (log.isDebugEnabled())
        log.debug(")callClientParallel");
    return ret.get();
}
From source file:com.trellmor.berrymotes.sync.EmoteDownloader.java
public void start(SyncResult syncResult) {
    Log.info("EmoteDownload started");
    this.updateNetworkInfo();
    mSyncResult = syncResult;
    if (!mIsConnected) {
        Log.error("Network not available");
        syncResult.stats.numIoExceptions++;
        return;
    }
    // Registers BroadcastReceiver to track network connection changes.
    IntentFilter filter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
    NetworkReceiver receiver = new NetworkReceiver();
    mContext.registerReceiver(receiver, filter);
    ExecutorService executor = Executors.newFixedThreadPool(THREAD_COUNT);
    mHttpClient = AndroidHttpClient.newInstance(USER_AGENT);
    try {
        String[] subreddits = getSubreddits();
        for (String subreddit : subreddits) {
            if (mSubreddits.isChecked(subreddit)) {
                Runnable subredditEmoteDownloader = new SubredditEmoteDownloader(mContext, this, subreddit);
                executor.execute(subredditEmoteDownloader);
            } else {
                // Delete this subreddit
                deleteSubreddit(subreddit, mContentResolver);
                // Reset last download date
                SharedPreferences.Editor settings = PreferenceManager.getDefaultSharedPreferences(mContext).edit();
                settings.remove(SettingsActivity.KEY_SYNC_LAST_MODIFIED + subreddit);
                settings.commit();
            }
        }
        executor.shutdown();
        executor.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
    } catch (URISyntaxException e) {
        Log.error("Emotes URL is malformed", e);
        synchronized (mSyncResult) {
            mSyncResult.stats.numParseExceptions++;
            if (mSyncResult.delayUntil < 60 * 60)
                mSyncResult.delayUntil = 60 * 60;
        }
        return;
    } catch (IOException e) {
        Log.error("Error reading from network: " + e.getMessage(), e);
        synchronized (mSyncResult) {
            mSyncResult.stats.numIoExceptions++;
            if (mSyncResult.delayUntil < 30 * 60)
                mSyncResult.delayUntil = 30 * 60;
        }
        return;
    } catch (InterruptedException e) {
        synchronized (mSyncResult) {
            syncResult.moreRecordsToGet = true;
        }
        Log.info("Sync interrupted");
        executor.shutdownNow();
        try {
            executor.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
        } catch (InterruptedException e2) {
        }
        Thread.currentThread().interrupt();
    } finally {
        Log.info("Deleted emotes: " + Long.toString(mSyncResult.stats.numDeletes));
        Log.info("Added emotes: " + Long.toString(mSyncResult.stats.numInserts));
        // Unregisters BroadcastReceiver at the end
        mContext.unregisterReceiver(receiver);
        mHttpClient.close();
    }
    Log.info("EmoteDownload finished");
}
From source file:mitm.common.tools.SendMail.java
private void sendMultiThreaded(final MailTransport mailSender, final MimeMessage message,
        final Address[] recipients) throws InterruptedException {
    ExecutorService threadPool = Executors.newCachedThreadPool();
    final Semaphore semaphore = new Semaphore(threads, true);
    final long startTime = System.currentTimeMillis();
    for (int i = 1; i <= count; i++) {
        long threadStart = System.currentTimeMillis();
        semaphore.acquireUninterruptibly();
        threadPool.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    MimeMessage clone = MailUtils.cloneMessage(message);
                    int sent = sentCount.incrementAndGet();
                    if (uniqueFrom) {
                        Address[] froms = clone.getFrom();
                        if (froms != null && froms.length > 0) {
                            clone.setFrom(new InternetAddress(sent + EmailAddressUtils.getEmailAddress(froms[0])));
                        }
                    }
                    mailSender.sendMessage(clone, recipients);
                    long timePassed = DateTimeUtils.millisecondsToSeconds(System.currentTimeMillis() - startTime);
                    StrBuilder sb = new StrBuilder();
                    sb.append("Message\t" + sent + "\tsent.");
                    if (timePassed > 0) {
                        float msgPerSec = (float) sent / timePassed;
                        sb.append("\tmessages/second\t" + String.format("%.2f", msgPerSec));
                    }
                    logger.info(sb.toString());
                } catch (MessagingException e) {
                    logger.error("Error sending message.", e);
                } finally {
                    semaphore.release();
                }
            }
        });
        if (forceQuit.get()) {
            break;
        }
        if (throtllingSemaphore != null) {
            /* for throttling the sending of emails */
            throtllingSemaphore.acquire();
        } else {
            /* no throttling so use delay */
            long sleepTime = delay - (System.currentTimeMillis() - threadStart);
            if (sleepTime > 0) {
                Thread.sleep(sleepTime);
            }
        }
    }
    threadPool.shutdown();
    threadPool.awaitTermination(30, TimeUnit.SECONDS);
    waitForReceiveThreads();
    logger.info("Total sent: " + sentCount.intValue() + ". Total time: "
            + DateTimeUtils.millisecondsToSeconds(System.currentTimeMillis() - startTime) + " (sec.)");
}
From source file:com.esri.cordova.geolocation.AdvancedGeolocation.java
private void startLocation() {
    // Misc. note: If you see the message "Attempted to send a second callback for ID:" then you need
    // to make sure to set pluginResult.setKeepCallback(true);

    // We want to prevent multiple instances of controllers from running!
    if (_gpsController != null || _networkLocationController != null || _cellLocationController != null) {
        stopLocation();
    }

    final boolean networkEnabled = isInternetConnected(_cordovaActivity.getApplicationContext());
    ExecutorService threadPool = cordova.getThreadPool();

    if (_providers.equalsIgnoreCase(PROVIDERS_ALL)) {
        _gpsController = new GPSController(_cordova, _callbackContext, _minDistance, _minTime, _useCache,
                _returnSatelliteData, _buffer, _bufferSize);
        threadPool.execute(_gpsController);

        _networkLocationController = new NetworkLocationController(_cordova, _callbackContext, _minDistance,
                _minTime, _useCache, _buffer, _bufferSize);
        threadPool.execute(_networkLocationController);

        // Reference: https://developer.android.com/reference/android/telephony/TelephonyManager.html#getAllCellInfo()
        // Reference: https://developer.android.com/reference/android/telephony/CellIdentityWcdma.html (added at API 18)
        if (Build.VERSION.SDK_INT < MIN_API_LEVEL) {
            cellDataNotAllowed();
        } else {
            _cellLocationController = new CellLocationController(networkEnabled, _signalStrength, _cordova,
                    _callbackContext);
            threadPool.execute(_cellLocationController);
        }
    }

    if (_providers.equalsIgnoreCase(PROVIDERS_SOME)) {
        _gpsController = new GPSController(_cordova, _callbackContext, _minDistance, _minTime, _useCache,
                _returnSatelliteData, _buffer, _bufferSize);
        threadPool.execute(_gpsController);

        _networkLocationController = new NetworkLocationController(_cordova, _callbackContext, _minDistance,
                _minTime, _useCache, _buffer, _bufferSize);
        threadPool.execute(_networkLocationController);
    }

    if (_providers.equalsIgnoreCase(PROVIDERS_GPS)) {
        _gpsController = new GPSController(_cordova, _callbackContext, _minDistance, _minTime, _useCache,
                _returnSatelliteData, _buffer, _bufferSize);
        threadPool.execute(_gpsController);
    }

    if (_providers.equalsIgnoreCase(PROVIDERS_NETWORK)) {
        _networkLocationController = new NetworkLocationController(_cordova, _callbackContext, _minDistance,
                _minTime, _useCache, _buffer, _bufferSize);
        threadPool.execute(_networkLocationController);
    }

    if (_providers.equalsIgnoreCase(PROVIDERS_CELL)) {
        // Reference: https://developer.android.com/reference/android/telephony/TelephonyManager.html#getAllCellInfo()
        // Reference: https://developer.android.com/reference/android/telephony/CellIdentityWcdma.html
        if (Build.VERSION.SDK_INT < MIN_API_LEVEL) {
            cellDataNotAllowed();
        } else {
            _cellLocationController = new CellLocationController(networkEnabled, _signalStrength, _cordova,
                    _callbackContext);
            threadPool.execute(_cellLocationController);
        }
    }
}
From source file:com.liferay.sync.engine.document.library.handler.DownloadFileHandler.java
protected void copyFile(final SyncFile syncFile, Path filePath, InputStream inputStream, boolean append)
        throws Exception {
    OutputStream outputStream = null;
    Watcher watcher = WatcherManager.getWatcher(getSyncAccountId());
    try {
        Path tempFilePath = FileUtil.getTempFilePath(syncFile);
        boolean exists = FileUtil.exists(filePath);
        if (append) {
            outputStream = Files.newOutputStream(tempFilePath, StandardOpenOption.APPEND);
            IOUtils.copyLarge(inputStream, outputStream);
        } else {
            if (exists && (boolean) getParameterValue("patch")) {
                if (_logger.isDebugEnabled()) {
                    _logger.debug("Patching {}", syncFile.getFilePathName());
                }
                Files.copy(filePath, tempFilePath, StandardCopyOption.REPLACE_EXISTING);
                IODeltaUtil.patch(tempFilePath, inputStream);
            } else {
                Files.copy(inputStream, tempFilePath, StandardCopyOption.REPLACE_EXISTING);
            }
        }
        watcher.addDownloadedFilePathName(filePath.toString());
        if (GetterUtil.getBoolean(syncFile.getLocalExtraSettingValue("restoreEvent"))) {
            syncFile.unsetLocalExtraSetting("restoreEvent");
            syncFile.setUiEvent(SyncFile.UI_EVENT_RESTORED_REMOTE);
        } else if (exists) {
            syncFile.setUiEvent(SyncFile.UI_EVENT_DOWNLOADED_UPDATE);
        } else {
            syncFile.setUiEvent(SyncFile.UI_EVENT_DOWNLOADED_NEW);
        }
        FileKeyUtil.writeFileKey(tempFilePath, String.valueOf(syncFile.getSyncFileId()), false);
        FileUtil.setModifiedTime(tempFilePath, syncFile.getModifiedTime());
        if (MSOfficeFileUtil.isLegacyExcelFile(filePath)) {
            syncFile.setLocalExtraSetting("lastSavedDate", MSOfficeFileUtil.getLastSavedDate(tempFilePath));
        }
        Files.move(tempFilePath, filePath, StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING);
        ExecutorService executorService = SyncEngine.getExecutorService();
        Runnable runnable = new Runnable() {
            @Override
            public void run() {
                IODeltaUtil.checksums(syncFile);
                syncFile.setState(SyncFile.STATE_SYNCED);
                SyncFileService.update(syncFile);
            }
        };
        executorService.execute(runnable);
    } catch (FileSystemException fse) {
        if (fse instanceof AccessDeniedException) {
            _logger.error(fse.getMessage(), fse);
            syncFile.setState(SyncFile.STATE_ERROR);
            syncFile.setUiEvent(SyncFile.UI_EVENT_ACCESS_DENIED_LOCAL);
            SyncFileService.update(syncFile);
            return;
        } else if (fse instanceof NoSuchFileException) {
            if (isEventCancelled()) {
                SyncFileService.deleteSyncFile(syncFile);
                return;
            }
        }
        watcher.removeDownloadedFilePathName(filePath.toString());
        String message = fse.getMessage();
        _logger.error(message, fse);
        syncFile.setState(SyncFile.STATE_ERROR);
        if (message.contains("File name too long")) {
            syncFile.setUiEvent(SyncFile.UI_EVENT_FILE_NAME_TOO_LONG);
        }
        SyncFileService.update(syncFile);
    } finally {
        StreamUtil.cleanUp(outputStream);
    }
}
From source file:pt.ua.tm.neji.evaluation.craft.statistics.FolderBatchExecutor.java
public void run(final Context context) throws NejiException {
    logger.info("Initializing context...");
    context.initialize();
    logger.info("Installing multi-threading support...");
    context.addMultiThreadingSupport(numThreads);
    ExecutorService executor;
    logger.info("Starting thread pool with support for {} threads...", numThreads);
    executor = Executors.newFixedThreadPool(numThreads);
    StopWatch timer = new StopWatch();
    timer.start();
    File inputFolder = new File(inputFolderPath);
    File[] files = inputFolder.listFiles(new FileUtil.Filter(new String[] { "txt" }));
    for (File file : files) {
        // File a1File = new File(file.getAbsolutePath().replaceAll(".txt", ".ann"));
        File a1File = new File(file.getAbsolutePath().replaceAll(".txt", ".a1"));
        Processor processor = getDocumentProcessor(file, a1File, context);
        // Process entry
        executor.execute(processor);
    }
    executor.shutdown();
    try {
        executor.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
    logger.info("Stopped thread pool.");
    logger.info("Terminating context...");
    context.terminate();
    timer.stop();
    logger.info("Processed {} files in {}", processedCorpora.size(), timer.toString());
}
From source file:com.norconex.collector.core.crawler.AbstractCrawler.java
protected void processReferences(final ICrawlDataStore refStore, final JobStatusUpdater statusUpdater,
        final JobSuite suite, final boolean delete) {
    int numThreads = getCrawlerConfig().getNumThreads();
    final CountDownLatch latch = new CountDownLatch(numThreads);
    ExecutorService pool = Executors.newFixedThreadPool(numThreads);
    for (int i = 0; i < numThreads; i++) {
        final int threadIndex = i + 1;
        LOG.debug(getId() + ": Crawler thread #" + threadIndex + " started.");
        pool.execute(new ProcessReferencesRunnable(suite, statusUpdater, refStore, delete, latch));
    }
    try {
        latch.await();
        pool.shutdown();
    } catch (InterruptedException e) {
        throw new CollectorException(e);
    }
}
From source file:org.springframework.amqp.rabbit.core.RabbitTemplatePublisherCallbacksIntegrationTests.java
@Test
public void testConfirmReceivedAfterPublisherCallbackChannelScheduleClose() throws Exception {
    final CountDownLatch latch = new CountDownLatch(40);
    templateWithConfirmsEnabled.setConfirmCallback((correlationData, ack, cause) -> latch.countDown());
    ExecutorService executorService = Executors.newCachedThreadPool();
    for (int i = 0; i < 20; i++) {
        executorService.execute(() -> {
            templateWithConfirmsEnabled.convertAndSend(ROUTE, (Object) "message", new CorrelationData("abc"));
            templateWithConfirmsEnabled.convertAndSend("BAD_ROUTE", (Object) "bad", new CorrelationData("cba"));
        });
    }
    assertTrue(latch.await(10, TimeUnit.SECONDS));
    assertNull(templateWithConfirmsEnabled.getUnconfirmed(-1));
}
From source file:org.springframework.amqp.rabbit.core.RabbitTemplatePublisherCallbacksIntegrationTests.java
@Test
public void testPublisherConfirmNotReceivedMultiThreads() throws Exception {
    ConnectionFactory mockConnectionFactory = mock(ConnectionFactory.class);
    Connection mockConnection = mock(Connection.class);
    Channel mockChannel1 = mock(Channel.class);
    Channel mockChannel2 = mock(Channel.class);
    when(mockChannel1.isOpen()).thenReturn(true);
    when(mockChannel2.isOpen()).thenReturn(true);
    when(mockChannel1.getNextPublishSeqNo()).thenReturn(1L, 2L, 3L, 4L);
    when(mockChannel2.getNextPublishSeqNo()).thenReturn(1L, 2L, 3L, 4L);
    when(mockConnectionFactory.newConnection(any(ExecutorService.class), anyString())).thenReturn(mockConnection);
    when(mockConnection.isOpen()).thenReturn(true);

    PublisherCallbackChannelImpl channel1 = new PublisherCallbackChannelImpl(mockChannel1);
    PublisherCallbackChannelImpl channel2 = new PublisherCallbackChannelImpl(mockChannel2);
    when(mockConnection.createChannel()).thenReturn(channel1).thenReturn(channel2);

    CachingConnectionFactory ccf = new CachingConnectionFactory(mockConnectionFactory);
    ccf.setPublisherConfirms(true);
    ccf.setChannelCacheSize(3);
    final RabbitTemplate template = new RabbitTemplate(ccf);

    final AtomicBoolean confirmed = new AtomicBoolean();
    template.setConfirmCallback((correlationData, ack, cause) -> confirmed.set(true));

    // Hold up the first thread so we get two channels
    final CountDownLatch threadLatch = new CountDownLatch(1);
    final CountDownLatch threadSentLatch = new CountDownLatch(1);

    // Thread 1
    ExecutorService exec = Executors.newSingleThreadExecutor();
    exec.execute(() -> template.execute(channel -> {
        try {
            threadLatch.await(10, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        template.doSend(channel, "", ROUTE,
                new SimpleMessageConverter().toMessage("message", new MessageProperties()), false,
                new CorrelationData("def"));
        threadSentLatch.countDown();
        return null;
    }));

    // Thread 2
    template.convertAndSend(ROUTE, (Object) "message", new CorrelationData("abc")); // channel y
    threadLatch.countDown();
    assertTrue(threadSentLatch.await(5, TimeUnit.SECONDS));
    assertEquals(2, template.getUnconfirmedCount());
    Collection<CorrelationData> unconfirmed = template.getUnconfirmed(-1);
    assertEquals(2, unconfirmed.size());
    assertEquals(0, template.getUnconfirmedCount());
    Set<String> ids = new HashSet<String>();
    Iterator<CorrelationData> iterator = unconfirmed.iterator();
    ids.add(iterator.next().getId());
    ids.add(iterator.next().getId());
    assertTrue(ids.remove("abc"));
    assertTrue(ids.remove("def"));
    assertFalse(confirmed.get());
    DirectFieldAccessor dfa = new DirectFieldAccessor(template);
    Map<?, ?> pendingConfirms = (Map<?, ?>) dfa.getPropertyValue("publisherConfirmChannels");
    assertThat(pendingConfirms.size(), greaterThan(0)); // might use 2 or only 1 channel
    exec.shutdown();
    assertTrue(exec.awaitTermination(10, TimeUnit.SECONDS));
    ccf.destroy();
    assertEquals(0, pendingConfirms.size());
}
From source file:org.deri.iris.performance.IRISPerformanceTest.java
/**
 * Executes a set of datalog queries using the given configuration
 * @param queries The set of Datalog queries
 * @param config The configuration for the test suite
 * @return a list of IRISTestCase objects with the result of the test campaign
 */
public List<IRISTestCase> executeTests(final List<String> queries, final TestConfiguration config) {

    // Get the logger
    LOGGER = Logger.getLogger(IRISPerformanceTest.class.getName());

    // Construct a valid IRIS+- program using the queries and the configuration file
    String program = "";

    // add the query and its IRIS execution command to the program
    program += "/// Query ///\n";
    for (final String s : queries) {
        program += s + "\n";
        program += "?-" + s.substring(0, s.indexOf(":-")) + ".\n";
    }
    program += "\n";

    // If reasoning is enabled, add the TBOX to the program
    program += "/// TBox ///\n";
    if (config.getReasoning()) {
        String tboxPath = config.getTestHomePath() + "/" + config.getDataset() + "/tbox";
        if (config.getExpressiveness().compareTo("RDFS") == 0) {
            tboxPath += "/rdfs";
        }
        if (config.getExpressiveness().compareTo("OWL-QL") == 0) {
            tboxPath += "/owlql";
        }
        final String tbox = loadFile(tboxPath + "/" + config.getDataset() + ".dtg");
        program += tbox + "\n";
    } else {
        program += "/// EMPTY ///\n";
    }

    // Add the SBox
    program += "/// SBox ///\n";
    String sboxPath = config.getTestHomePath() + "/" + config.getDataset() + "/sbox";
    if (config.getExpressiveness().compareTo("RDFS") == 0) {
        sboxPath += "/rdfs";
    }
    if (config.getExpressiveness().compareTo("OWL-QL") == 0) {
        sboxPath += "/owlql";
    }
    final String sbox = loadFile(sboxPath + "/" + config.getDataset() + ".dtg");
    program += sbox + "\n\n";

    LOGGER.debug(program);

    // Get the parser
    final Parser parser = new Parser();

    // Parse the program
    try {
        parser.parse(program);
    } catch (final ParserException e) {
        e.printStackTrace();
    }

    // Get the TGDs from the set of rules
    final List<IRule> tgds = RewritingUtils.getTGDs(parser.getRules(), parser.getQueries());

    // Get the query bodies
    final List<IRule> bodies = new ArrayList<IRule>(parser.getRules());
    final List<IRule> datalogQueries = RewritingUtils.getQueries(bodies, parser.getQueries());

    // Get the constraints from the set of rules
    final Set<IRule> constraints = RewritingUtils.getConstraints(parser.getRules(), parser.getQueries());

    // Get the SBox rules from the set of rules
    final List<IRule> storageRules = RewritingUtils.getSBoxRules(parser.getRules(), parser.getQueries());

    // Check that the TBox is FO-reducible
    IRuleSafetyProcessor ruleProc = new LinearReducibleRuleSafetyProcessor();
    try {
        ruleProc.process(tgds);
    } catch (final RuleUnsafeException e) {
        e.printStackTrace();
    }

    // Check that the SBox rules are Safe Datalog
    ruleProc = new StandardRuleSafetyProcessor();
    try {
        ruleProc.process(storageRules);
    } catch (final RuleUnsafeException e) {
        e.printStackTrace();
    }

    // Connect to the storage
    StorageManager.getInstance();
    try {
        StorageManager.connect(config.getDBVendor(), config.getDBProtocol(), config.getDBHost(),
                config.getDBPort(), config.getDBName(), config.getSchemaName(), config.getDBUsername(),
                config.getDBPassword());
    } catch (final SQLException e) {
        e.printStackTrace();
    }

    // Evaluate the queries
    final List<IRISTestCase> output = new LinkedList<IRISTestCase>();
    for (final IQuery q : parser.getQueries()) {
        // Generate a new test-case
        final IRISTestCase currentTest = new IRISTestCase();
        int nTask = -10;

        // Get the Factories
        final IRelationFactory rf = new RelationFactory();

        // Get the Rewriter Engine
        final ParallelRewriter rewriter = new ParallelRewriter(DecompositionStrategy.DECOMPOSE,
                RewritingLanguage.UCQ, SubCheckStrategy.TAIL, NCCheck.TAIL);

        // Get and log the rule corresponding to the query
        final IRule ruleQuery = getRuleQuery(q, datalogQueries);
        currentTest.setQuery(ruleQuery);
        final Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils
                .computePositionDependencyGraph(tgds);
        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);

        // Compute and log the FO-Rewriting
        LOGGER.info("Computing TBox Rewriting");
        float duration = -System.nanoTime();
        final Set<IRule> rewriting = rewriter.getRewriting(ruleQuery, tgds, constraints, deps, exprs);
        duration = ((duration + System.nanoTime()) / 1000000);
        currentTest.getTasks().add(new Task(nTask++, "TBox Rewriting", duration, 0, 0, "ms", rewriting.toString()));
        LOGGER.info("done.");
        int count = 0;
        for (final IRule r : rewriting) {
            LOGGER.debug("(Qr" + ++count + ")" + r);
        }

        // Produce the rewriting according to the Nyaya Data Model
        final IQueryRewriter ndmRewriter = new NDMRewriter(storageRules);

        // Create a buffer for the output
        final IRelation outRelation = rf.createRelation();

        // Get the SBox rewriting
        try {
            LOGGER.info("Computing SBox Rewriting");
            final Set<IRule> sboxRewriting = new LinkedHashSet<IRule>();
            duration = -System.nanoTime();
            for (final IRule pr : rewriting) {
                sboxRewriting.addAll(ndmRewriter.getRewriting(pr));
            }
            duration = ((duration + System.nanoTime()) / 1000000);
            currentTest.getTasks().add(new Task(nTask++, "SBox Rewriting", duration, 0, 0, "ms", sboxRewriting.toString()));
            LOGGER.info("done.");
            count = 0;
            for (final IRule n : sboxRewriting) {
                LOGGER.debug("(Qn" + ++count + ")" + n);
            }

            // Produce the SQL rewriting for each query in the program
            final SQLRewriter sqlRewriter = new SQLRewriter(sboxRewriting);

            // Get the SQL rewriting as Union of Conjunctive Queries (UCQ)
            LOGGER.info("Computing SQL Rewriting");
            duration = -System.nanoTime();
            final List<String> ucqSQLRewriting = new LinkedList<String>();
            ucqSQLRewriting.add(sqlRewriter.getUCQSQLRewriting("", 10000, 0));
            duration = ((duration + System.nanoTime()) / 1000000);
            currentTest.getTasks().add(new Task(nTask++, "SQL Rewriting", duration, 0, 0, "ms", ucqSQLRewriting.toString()));
            LOGGER.info("done.");
            count = 0;
            for (final String s : ucqSQLRewriting) {
                LOGGER.debug("(Qs" + ++count + ") " + s);
            }

            // Execute the UCQ
            LOGGER.info("Executing SQL");
            // float ansConstructOverall = 0;

            // The synchronized structure to store the output tuples
            final Set<ITuple> result = Collections.synchronizedSet(new HashSet<ITuple>());

            /*
             * Prepare a set of runnable objects representing each partial rewriting to be executed in parallel
             */
            final List<RunnableQuery> rql = new LinkedList<RunnableQuery>();
            for (final String cq : ucqSQLRewriting) {
                // Construct a Runnable Query
                rql.add(new RunnableQuery(cq, result, currentTest.getTasks()));
            }

            // Get an executor that allows a number of parallel threads equal to the number of available processors
            // ExecutorService queryExecutor =
            //         Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 5);
            final ExecutorService queryExecutor = Executors.newSingleThreadScheduledExecutor();

            // Execute all the partial rewritings in parallel
            float ucqExecOverall = -System.nanoTime();
            for (final RunnableQuery rq : rql) {
                queryExecutor.execute(rq);
            }
            queryExecutor.shutdown();
            if (queryExecutor.awaitTermination(1, TimeUnit.DAYS)) {
                LOGGER.info("done.");
            } else
                throw new InterruptedException("Timeout Occured");
            ucqExecOverall = ((ucqExecOverall + System.nanoTime()) / 1000000);
            StorageManager.disconnect();

            // begin addition: derive min/max/average execution times from the per-query tasks
            float minTime = System.nanoTime();
            float maxTime = 0;
            float avgTime = 0;
            int n = 0;
            for (final Task t : currentTest.getTasks()) {
                if (t.getName().contains("Execution")) {
                    avgTime += (t.getFinalTime() - t.getInitTime()) / 1000000;
                    n++;
                    if (t.getFinalTime() > maxTime) {
                        maxTime = t.getFinalTime();
                    }
                    if (t.getInitTime() < minTime) {
                        minTime = t.getInitTime();
                    }
                }
            }
            ucqExecOverall = (maxTime - minTime) / 1000000;
            // end addition
            currentTest.getTasks().add(new Task(nTask++, "UCQ Overall Execution Time", ucqExecOverall, 0, 0, "ms"));
            // begin addition
            avgTime = (avgTime / n);
            System.out.println(n);
            currentTest.getTasks().add(new Task(nTask++, "UCQ Average Execution Time", avgTime, 0, 0, "ms"));
            Collections.sort(currentTest.getTasks());
            // end addition
            for (final ITuple t : result) {
                outRelation.add(t);
            }
        } catch (final SQLException e) {
            e.printStackTrace();
        } catch (final EvaluationException e) {
            e.printStackTrace();
        } catch (final InterruptedException e) {
            e.printStackTrace();
        }
        currentTest.setAnswer(outRelation);
        output.add(currentTest);
    }
    return (output);
}