List of usage examples for java.util.concurrent ExecutorService isTerminated
boolean isTerminated();
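isTerminated() returns true only after shutdown() or shutdownNow() has been called and every submitted task has finished; it never returns true for a pool that has not been shut down. Before the longer examples below, a minimal, self-contained sketch of that pattern (the class name and task bodies are illustrative placeholders):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class IsTerminatedExample {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 8; i++) {
            final int taskId = i;
            // Placeholder work; the examples below submit domain-specific Runnables.
            executor.execute(() -> System.out.println("task " + taskId + " done"));
        }
        executor.shutdown(); // stop accepting new tasks; already-queued tasks still run
        // isTerminated() becomes true only after shutdown() AND all tasks have completed.
        while (!executor.isTerminated()) {
            // Wait with a bounded timeout instead of spinning on isTerminated().
            executor.awaitTermination(100, TimeUnit.MILLISECONDS);
        }
        System.out.println("isTerminated() == " + executor.isTerminated());
    }
}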
From source file:de.uni_rostock.goodod.evaluator.OntologyTest.java
public void executeTest() throws Throwable {
    ExecutorService executor = Executors.newFixedThreadPool(threadCount);
    Set<URI> fromOntologies = new HashSet<URI>(25);
    Set<URI> toOntologies = new HashSet<URI>(25);
    Set<? extends OWLOntologyIRIMapper> bioTopLiteMapper = null;
    if (null != bioTopLiteURI) {
        bioTopLiteMapper = Collections.singleton(new SimpleIRIMapper(
                IRI.create("http://purl.org/biotop/biotoplite.owl"), IRI.create(bioTopLiteURI)));
    }
    OntologyCache cache = OntologyCache.setupSharedCache(bioTopLiteMapper, getIgnoredImports(), threadCount);
    NormalizerChainFactory chain = new NormalizerChainFactory(); /* new NormalizerChainFactory(importer, intersector, namer, decomposer, subsumer); */
    cache.setNormalizerFactory(chain);
    fromOntologies.addAll(groupAOntologies);
    if (globalConfig.getBoolean("one-way", false)) {
        /*
         * If one-way comparisons are requested, we only compare group A to
         * group B (and model).
         */
        toOntologies.addAll(groupBOntologies);
    } else {
        /*
         * By default, we do cross-comparisons between the groups, so we
         * create a global set for both. For simplicity, this just means
         * adding the second set to fromOntologies and aliasing it as
         * toOntologies.
         */
        fromOntologies.addAll(groupBOntologies);
        toOntologies = fromOntologies;
    }
    if (null != modelOntology) {
        toOntologies.add(modelOntology);
    }
    logger.info("Running comparisons for test '" + getTestName() + "'.");
    for (URI u1 : fromOntologies) {
        for (URI u2 : toOntologies) {
            if (u1.equals(u2)) {
                continue;
            }
            /*
             * Working with the ontologies is resource intensive. We want
             * to handle more than one at a time, especially on multicore
             * machines, but neither starving ourselves of I/O nor
             * generating massive cache or memory churn is very smart.
             */
            int waitCount = 0;
            while (inProgressCount.get() > threadCount) {
                if (0 == ++waitCount % 8) {
                    /*
                     * Tight loop a few times, then yield in order to let
                     * the other threads finish.
                     */
                    Thread.yield();
                }
            }
            comparisonStarted();
            try {
                OntologyPair p = new OntologyPair(cache, u1, u2);
                executor.execute(new ComparisonRunner(u1, u2, p));
            } catch (Throwable e) {
                logger.warn("Could not compare " + u1.toString() + " and " + u2.toString() + ".", e);
                Set<URI> values = failedComparisons.get(u1);
                if (null != values) {
                    values.add(u2);
                } else {
                    values = new HashSet<URI>();
                    values.add(u2);
                    failedComparisons.put(u1, values); // keyed by u1, matching the lookup above
                }
            }
        }
    }
    executor.shutdown();
    while (false == executor.isTerminated()) {
        // wait until we're done.
    }
    logger.info("Comparisons on '" + getTestName() + "' completed.");
    if (logger.isDebugEnabled()) {
        writeNormalizedOntologiesTo(fromOntologies, cache, new File(System.getProperty("java.io.tmpdir")));
    }
    cache.teardown();
    cache = null;
}
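Note on the shutdown loop above: spinning on isTerminated() keeps a CPU core busy until the pool drains. One possible drop-in alternative (a sketch only; variable names follow the example above, java.util.concurrent.TimeUnit is assumed to be imported, and the InterruptedException thrown by awaitTermination is already covered by the method's throws Throwable):

executor.shutdown();
// Block until all comparison tasks finish instead of polling isTerminated().
if (!executor.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS)) {
    logger.warn("Comparison tasks did not finish before the timeout expired.");
}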
From source file:structuredPredictionNLG.SFX.java
/**
 * Infers the feature/cost vectors for the content and word actions.
 * @return The feature/cost vectors for the content and word actions
 */
public Object[] inferFeatureAndCostVectors() {
    ConcurrentHashMap<DatasetInstance, HashMap<String, ArrayList<Instance>>> contentTrainingData = new ConcurrentHashMap<>();
    ConcurrentHashMap<DatasetInstance, HashMap<String, HashMap<String, ArrayList<Instance>>>> wordTrainingData = new ConcurrentHashMap<>();
    if (!getAvailableWordActions().isEmpty() && !getPredicates().isEmpty()) {
        // Initialize collections
        getTrainingData().stream().map((di) -> {
            contentTrainingData.put(di, new HashMap<String, ArrayList<Instance>>());
            return di;
        }).map((di) -> {
            wordTrainingData.put(di, new HashMap<String, HashMap<String, ArrayList<Instance>>>());
            return di;
        }).forEachOrdered((di) -> {
            getPredicates().stream().map((predicate) -> {
                contentTrainingData.get(di).put(predicate, new ArrayList<Instance>());
                return predicate;
            }).map((predicate) -> {
                wordTrainingData.get(di).put(predicate, new HashMap<String, ArrayList<Instance>>());
                return predicate;
            }).forEachOrdered((predicate) -> {
                getAttributes().get(predicate).stream()
                        .filter((attribute) -> (!wordTrainingData.get(di).get(predicate).containsKey(attribute)))
                        .forEachOrdered((attribute) -> {
                            wordTrainingData.get(di).get(predicate).put(attribute, new ArrayList<Instance>());
                        });
            });
        });

        // Infer the vectors in parallel processes to save time
        ExecutorService executor = Executors.newFixedThreadPool(THREAD_COUNT);
        getTrainingData().forEach((di) -> {
            executor.execute(new InferSFXVectorsThread(di, this, contentTrainingData, wordTrainingData));
        });
        executor.shutdown();
        while (!executor.isTerminated()) {
            // Busy-wait until all inference tasks have finished.
        }
    }
    Object[] results = new Object[2];
    results[0] = contentTrainingData;
    results[1] = wordTrainingData;
    return results;
}
From source file:watch.oms.omswatch.actioncenter.helpers.WatchTransDBParser.java
/**
 * Parses the service response and stores it into the respective DB tables.
 *
 * @param pStringReader
 */
private void readJsonStream(Reader pStringReader) {
    JsonReader reader = null;
    List<ContentValues> rows = null;
    String tableName = null;
    String colName = null;
    ExecutorService executor = Executors.newFixedThreadPool(10);
    double latestModifiedTimeStamp = 0.0f;
    final String VISITED_DATE = "visiteddate";
    final String MESSAGE = "message";
    final String ADDITION_MESSAGE = "additionMessage";
    final String VISITED_DATE_MAPPER = "visiteddatemapper";
    List<String> tableNames = new ArrayList<String>();
    final String DB_PROCESS_DURATION = "dbprocessduration";
    final String SERVER_PROCESS_DURATION = "serverprocessduration";
    try {
        Log.d(TAG, "@@@@@@@@@@ Trans DB Tables Start @@@@@@@@@@");
        reader = new JsonReader(pStringReader);
        reader.setLenient(true);
        reader.beginObject();
        // Iterate through each table's data
        while (reader.hasNext()) {
            colName = reader.nextName();
            if (colName.equals(VISITED_DATE)) {
                latestModifiedTimeStamp = reader.nextDouble();
                // Update Trans Table
                /*servermapperhelper.updateModifiedTimeStampForTransTable(
                        ALL_TABLES, latestModifiedTimeStamp);*/
                if (Integer.parseInt(OMSApplication.getInstance().getAppId()) == 10) {
                    servermapperhelper.updateModifiedTimeStampForVisitedDateMapper(
                            OMSApplication.getInstance().getEditTextHiddenVal(), latestModifiedTimeStamp);
                }
                continue;
            } else if (colName.equals(MESSAGE)) {
                Log.e(TAG, "Trans DB gave error response - message - " + reader.nextString());
                continue;
            } else if (colName.equals(ADDITION_MESSAGE)) {
                Log.e(TAG, "Trans DB gave error response - additionMessage - " + reader.nextString());
                continue;
            } else if (VISITED_DATE_MAPPER.equalsIgnoreCase(colName)) {
                Log.d(TAG, "Skipping internal Table " + VISITED_DATE_MAPPER + " lookup");
                reader.skipValue();
                continue;
            }
            // Fetch the dbprocess duration and serverprocess duration
            else if (DB_PROCESS_DURATION.equalsIgnoreCase(colName)) {
                String dbDuration = reader.nextString();
                OMSApplication.getInstance().setDatabaseProcessDuration(dbDuration);
                /*Log.i(TAG, "DB Process Duration" + dbDuration);*/
                continue;
            } else if (SERVER_PROCESS_DURATION.equalsIgnoreCase(colName)) {
                String serverProcessDuration = reader.nextString();
                OMSApplication.getInstance().setServerProcessDuration(serverProcessDuration);
                /*Log.i(TAG, "server process duration " + serverProcessDuration);*/
                continue;
            }
            Log.d(TAG, "ColName::::" + colName);
            // Get Table Name
            tableName = servermapperhelper.getClientTableName(colName);
            if (tableName == null) {
                Log.e(TAG, "Table Name was not found in ServerMapperHelper - " + colName);
                // Tables created only on the server sometimes don't have an
                // entry in ServerMapper, so allow those tables here.
                tableNames.add(colName);
            } else {
                tableNames.add(tableName);
            }
            rows = readAllRowDataForTable(reader, tableName);
            // Update DB only if we have a valid table name
            if (tableName != null) {
                Runnable worker = new DbWorkerThread(colName, rows);
                executor.execute(worker);
            }
        }
        reader.endObject();
        Log.d(TAG, "Waiting for DB Worker Threads to Complete");
        // Request shutdown and wait until the DB updates are complete before
        // invoking the timestamp update, to avoid DB locks.
        executor.shutdown();
        while (!executor.isTerminated()) {
        }
        Log.d(TAG, "DB Worker Threads Completed");
        // Update Modified Time Stamp for all Trans tables
        executor = Executors.newFixedThreadPool(1);
        Runnable worker = new DbWorkerThreadToUpdateTimeStamp(tableNames, latestModifiedTimeStamp);
        executor.execute(worker);
        // Request shutdown and wait until the timestamp update is complete.
        Log.d(TAG, "Waiting for DB Timestamp Update Worker Thread to Complete");
        executor.shutdown();
        while (!executor.isTerminated()) {
        }
        Log.d(TAG, "DB Timestamp Update Worker Thread Completed");
        Log.d(TAG, "@@@@@@@@@@ Trans DB Tables End @@@@@@@@@@");
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        executor.shutdown();
        while (!executor.isTerminated()) {
        }
        try {
            if (reader != null) {
                reader.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
From source file:org.dllearner.scripts.evaluation.EnrichmentEvaluationMultithreaded.java
private void evaluateClasses(final SparqlEndpointKS ks) throws IllegalArgumentException, SecurityException,
        InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException,
        ComponentInitException, InterruptedException {
    Set<NamedClass> classes = new SPARQLTasks(ks.getEndpoint()).getAllClasses();
    logger.info("Evaluating " + classes.size() + " classes...");
    for (final Class<? extends LearningAlgorithm> algorithmClass : classAlgorithms) {
        // CELOE is applied sequentially; all other algorithms run in a thread pool.
        ExecutorService threadPool = null;
        if (algorithmClass != CELOE.class) {
            threadPool = Executors.newFixedThreadPool(maxNrOfThreads);
        }
        int classesCnt = 0;
        Thread.sleep(5000);
        Set<OWLAxiom> axioms = new HashSet<OWLAxiom>();
        algorithm2Ontology.put(algorithmClass, axioms);
        for (final NamedClass cls : classes) {
            try {
                String algName = "";
                if (algorithmClass == CELOE.class) {
                    algName = CELOE.class.getAnnotation(ComponentAnn.class).name();
                } else {
                    LearningAlgorithm learner = algorithmClass.getConstructor(SparqlEndpointKS.class).newInstance(ks);
                    algName = AnnComponentManager.getName(learner);
                }
                List<EvaluatedAxiom> learnedAxioms = new ArrayList<EvaluatedAxiom>();
                boolean emptyEntity = sparqlReasoner.getPopularity(cls) == 0;
                if (emptyEntity) {
                    logger.warn("Empty entity: " + cls);
                    writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "EMPTY_ENTITY", 0, 0, false);
                } else {
                    long startTime = System.currentTimeMillis();
                    boolean timeout = false;
                    if (algorithmClass == CELOE.class) {
                        logger.info("Applying " + algName + " on " + cls + " ... ");
                        learnedAxioms = applyCELOE(ks, cls, false);
                        long runTime = System.currentTimeMillis() - startTime;
                        if (timeout && learnedAxioms.isEmpty()) {
                            writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "TIMEOUT", 0, runTime, false);
                        } else if (learnedAxioms == null || learnedAxioms.isEmpty()) {
                            writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "NULL", 0, runTime, false);
                        } else {
                            for (EvaluatedAxiom learnedAxiom : learnedAxioms) {
                                double score = learnedAxiom.getScore().getAccuracy();
                                if (Double.isNaN(score)) {
                                    score = -1;
                                }
                                writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes).toString(), algName,
                                        learnedAxiom.getAxiom().toManchesterSyntaxString(baseURI, prefixes), score,
                                        runTime, isEntailed(learnedAxiom));
                            }
                        }
                    } else {
                        threadPool.execute(new Runnable() {
                            @Override
                            public void run() {
                                String algName = "";
                                try {
                                    LearningAlgorithm learner = algorithmClass.getConstructor(SparqlEndpointKS.class).newInstance(ks);
                                    algName = AnnComponentManager.getName(learner);
                                    ((AbstractAxiomLearningAlgorithm) learner).setReasoner(sparqlReasoner);
                                    ConfigHelper.configure(learner, "classToDescribe", cls.toString());
                                    ConfigHelper.configure(learner, "maxExecutionTimeInSeconds", maxExecutionTimeInSeconds);
                                    learner.init();
                                    applyLearningAlgorithm((AxiomLearningAlgorithm) learner, cls);
                                } catch (Exception e) {
                                    logger.error("Error occurred for class " + cls.getName() + " with algorithm " + algName, e);
                                }
                            }
                        });
                    }
                }
                classesCnt++;
                if (maxClasses != 0 && classesCnt == maxClasses) {
                    break;
                }
            } catch (Exception e) {
                logger.error("Error occurred for class " + cls.getName(), e);
            }
        }
        if (algorithmClass != CELOE.class) {
            threadPool.shutdown();
            while (!threadPool.isTerminated()) {
                // Busy-wait until all learning tasks have finished.
            }
        }
    }
}
From source file:mamo.vanillaVotifier.VotifierServer.java
public synchronized void start() throws IOException {
    if (isRunning()) {
        throw new IllegalStateException("Server is already running!");
    }
    notifyListeners(new ServerStartingEvent());
    serverSocket = new ServerSocket();
    serverSocket.bind(votifier.getConfig().getInetSocketAddress());
    running = true;
    notifyListeners(new ServerStartedEvent());
    new Thread(new Runnable() {
        @Override
        public void run() {
            ExecutorService executorService = Executors.newSingleThreadExecutor();
            while (isRunning()) {
                try {
                    final Socket socket = serverSocket.accept();
                    executorService.execute(new Runnable() {
                        @Override
                        public void run() {
                            try {
                                notifyListeners(new ConnectionEstablishedEvent(socket));
                                socket.setSoTimeout(SocketOptions.SO_TIMEOUT); // SocketException: handled by try/catch.
                                BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(socket.getOutputStream()));
                                writer.write("VOTIFIER 2.9\n");
                                writer.flush();
                                BufferedInputStream in = new BufferedInputStream(socket.getInputStream()); // IOException: handled by try/catch.
                                byte[] request = new byte[((RSAPublicKey) votifier.getConfig().getKeyPair().getPublic()).getModulus().bitLength() / Byte.SIZE];
                                in.read(request); // IOException: handled by try/catch.
                                notifyListeners(new EncryptedInputReceivedEvent(socket, new String(request)));
                                request = RsaUtils.getDecryptCipher(votifier.getConfig().getKeyPair().getPrivate()).doFinal(request); // IllegalBlockSizeException: can't happen.
                                String requestString = new String(request);
                                notifyListeners(new DecryptedInputReceivedEvent(socket, requestString));
                                String[] requestArray = requestString.split("\n");
                                if ((requestArray.length == 5 || requestArray.length == 6) && requestArray[0].equals("VOTE")) {
                                    notifyListeners(new VoteEventVotifier(socket,
                                            new Vote(requestArray[1], requestArray[2], requestArray[3], requestArray[4])));
                                    for (VoteAction voteAction : votifier.getConfig().getVoteActions()) {
                                        String[] params = new String[4];
                                        try {
                                            for (int i = 0; i < params.length; i++) {
                                                params[i] = SubstitutionUtils.applyRegexReplacements(requestArray[i + 1], voteAction.getRegexReplacements());
                                            }
                                        } catch (PatternSyntaxException e) {
                                            notifyListeners(new RegularExpressionPatternErrorException(e));
                                            params = new String[] { requestArray[1], requestArray[2], requestArray[3], requestArray[4] };
                                        }
                                        if (voteAction.getCommandSender() instanceof RconCommandSender) {
                                            RconCommandSender commandSender = (RconCommandSender) voteAction.getCommandSender();
                                            StrSubstitutor substitutor = SubstitutionUtils.buildStrSubstitutor(
                                                    new SimpleEntry<String, Object>("service-name", params[0]),
                                                    new SimpleEntry<String, Object>("user-name", params[1]),
                                                    new SimpleEntry<String, Object>("address", params[2]),
                                                    new SimpleEntry<String, Object>("timestamp", params[3]));
                                            for (String command : voteAction.getCommands()) {
                                                String theCommand = substitutor.replace(command);
                                                notifyListeners(new SendingRconCommandEvent(commandSender.getRconConnection(), theCommand));
                                                try {
                                                    notifyListeners(new RconCommandResponseEvent(commandSender.getRconConnection(),
                                                            commandSender.sendCommand(theCommand).getPayload()));
                                                } catch (Exception e) {
                                                    notifyListeners(new RconExceptionEvent(commandSender.getRconConnection(), e));
                                                }
                                            }
                                        }
                                        if (voteAction.getCommandSender() instanceof ShellCommandSender) {
                                            ShellCommandSender commandSender = (ShellCommandSender) voteAction.getCommandSender();
                                            HashMap<String, String> environment = new HashMap<String, String>();
                                            environment.put("voteServiceName", params[0]);
                                            environment.put("voteUserName", params[1]);
                                            environment.put("voteAddress", params[2]);
                                            environment.put("voteTimestamp", params[3]);
                                            for (String command : voteAction.getCommands()) {
                                                notifyListeners(new SendingShellCommandEvent(command));
                                                try {
                                                    commandSender.sendCommand(command, environment);
                                                    notifyListeners(new ShellCommandSentEvent());
                                                } catch (Exception e) {
                                                    notifyListeners(new ShellCommandExceptionEvent(e));
                                                }
                                            }
                                        }
                                    }
                                } else {
                                    notifyListeners(new InvalidRequestEvent(socket, requestString));
                                }
                            } catch (SocketTimeoutException e) {
                                notifyListeners(new ReadTimedOutExceptionEvent(socket, e));
                            } catch (BadPaddingException e) {
                                notifyListeners(new DecryptInputExceptionEvent(socket, e));
                            } catch (Exception e) {
                                notifyListeners(new CommunicationExceptionEvent(socket, e));
                            }
                            try {
                                socket.close();
                                notifyListeners(new ConnectionClosedEvent(socket));
                            } catch (Exception e) {
                                // IOException: catching just in case. Continue even if the socket doesn't close.
                                notifyListeners(new ConnectionCloseExceptionEvent(socket, e));
                            }
                        }
                    });
                } catch (Exception e) {
                    if (running) { // Show errors only while running, to hide the error raised while stopping.
                        notifyListeners(new ConnectionEstablishExceptionEvent(e));
                    }
                }
            }
            executorService.shutdown();
            if (!executorService.isTerminated()) {
                notifyListeners(new ServerAwaitingTaskCompletionEvent());
                try {
                    executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
                } catch (Exception e) {
                    // InterruptedException: can't happen.
                }
            }
            notifyListeners(new ServerStoppedEvent());
        }
    }).start();
}