List of usage examples for java.util.concurrent ConcurrentMap keySet
Set<K> keySet();
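Before the project snippets, a minimal sketch of the basic pattern (the class name and map contents here are illustrative only, not taken from the examples below): keySet() returns a Set view backed by the map, and with a ConcurrentHashMap the view's iterator is weakly consistent, so other threads may update the map during iteration without a ConcurrentModificationException.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class KeySetExample {
    public static void main(String[] args) {
        ConcurrentMap<String, Integer> counts = new ConcurrentHashMap<>();
        counts.put("alpha", 1);
        counts.put("beta", 2);

        // Iterate over the live key view; each value is looked up separately,
        // which is the pattern most of the snippets below use.
        for (String key : counts.keySet()) {
            System.out.println(key + " -> " + counts.get(key));
        }

        // The view is backed by the map: removing a key here removes the entry.
        counts.keySet().remove("alpha");
    }
}

Where both keys and values are needed, entrySet() avoids the extra get() per key, but the keySet() form shown above is the one the following examples use.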
From source file:org.opendaylight.ovsdb.plugin.impl.ConfigurationServiceImpl.java
private String getTableNameForRowUuid(Node node, String databaseName, UUID rowUuid) {
    ConcurrentMap<String, ConcurrentMap<String, Row>> cache =
            ovsdbInventoryService.getCache(node, databaseName);
    if (cache == null) {
        return null;
    }
    for (String tableName : cache.keySet()) {
        ConcurrentMap<String, Row> rows = cache.get(tableName);
        if (rows.get(rowUuid.toString()) != null) {
            return tableName;
        }
    }
    return null;
}
From source file:org.opendaylight.ovsdb.plugin.impl.ConfigurationServiceImpl.java
@Override
public List<String> getTables(Node node, String databaseName) throws OvsdbPluginException {
    ConcurrentMap<String, ConcurrentMap<String, Row>> cache =
            ovsdbInventoryService.getCache(node, databaseName);
    if (cache == null) {
        return null;
    }
    return new ArrayList<String>(cache.keySet());
}
From source file:org.opendaylight.ovsdb.plugin.impl.ConfigurationServiceImpl.java
@Override
public ConcurrentMap<UUID, Row<GenericTableSchema>> getRows(Node node, String databaseName, String tableName)
        throws OvsdbPluginException {
    ConcurrentMap<String, Row> ovsTable = ovsdbInventoryService.getTableCache(node, databaseName, tableName);
    if (ovsTable == null) {
        return null;
    }
    ConcurrentMap<UUID, Row<GenericTableSchema>> tableDB = Maps.newConcurrentMap();
    for (String uuidStr : ovsTable.keySet()) {
        tableDB.put(new UUID(uuidStr), ovsTable.get(uuidStr));
    }
    return tableDB;
}
From source file:com.github.podd.example.ExamplePoddClient.java
/**
 * Gets a material URI matching the given pot and genotype URIs, creating a new entry if
 * necessary and giving it a temporary URI.
 *
 * @param materialUriMap
 * @param nextProjectID
 * @param nextPotUri
 * @return
 */
private URI getMaterialUri(final ConcurrentMap<URI, ConcurrentMap<URI, Model>> materialUriMap,
        final URI nextGenotypeUri, final InferredOWLOntologyID nextProjectID, final URI nextPotUri,
        final String potNumber, final String lineNumber, final String control) {
    URI nextMaterialURI = null;
    if (materialUriMap.containsKey(nextPotUri)) {
        final ConcurrentMap<URI, Model> nextPotMaterialMap = materialUriMap.get(nextPotUri);
        for (final URI existingMaterialURI : nextPotMaterialMap.keySet()) {
            final Model nextModel = nextPotMaterialMap.get(existingMaterialURI);
            if (nextModel.contains(existingMaterialURI, PODD.PODD_SCIENCE_REFERS_TO_GENOTYPE, nextGenotypeUri)) {
                nextMaterialURI = existingMaterialURI;
            } else {
                this.log.debug("Did not find any materials with the given genotype in this pot: {} {}",
                        nextPotUri, nextGenotypeUri);
            }
        }
    }
    // If no material was found, then create a new description and assign it a temporary URI
    if (nextMaterialURI == null) {
        this.log.debug(
                "Could not find an existing material for description provided, assigning a temporary URI: {} {} {}",
                nextProjectID, nextPotUri, nextGenotypeUri);
        nextMaterialURI = RestletPoddClientImpl.vf
                .createURI(RestletPoddClientImpl.TEMP_UUID_PREFIX + "material:" + UUID.randomUUID().toString());
        final Model newModel = new LinkedHashModel();
        newModel.add(nextPotUri, PODD.PODD_SCIENCE_HAS_MATERIAL, nextMaterialURI);
        newModel.add(nextMaterialURI, RDF.TYPE, PODD.PODD_SCIENCE_MATERIAL);
        newModel.add(nextMaterialURI, RDFS.LABEL, RestletPoddClientImpl.vf
                .createLiteral("Material for pot " + potNumber + " containing line " + lineNumber));
        newModel.add(nextMaterialURI, PODD.PODD_SCIENCE_REFERS_TO_GENOTYPE, nextGenotypeUri);
        if (control.equalsIgnoreCase("Yes")) {
            newModel.add(nextMaterialURI, PODD.PODD_SCIENCE_HAS_CONTROL, PODD.PODD_SCIENCE_HAS_CONTROL_YES);
        } else if (control.equalsIgnoreCase("No")) {
            newModel.add(nextMaterialURI, PODD.PODD_SCIENCE_HAS_CONTROL, PODD.PODD_SCIENCE_HAS_CONTROL_NO);
        } else {
            this.log.warn("Did not recognise control label: {} (should be Yes or No)", control);
            newModel.add(nextMaterialURI, PODD.PODD_SCIENCE_HAS_CONTROL, PODD.PODD_SCIENCE_HAS_CONTROL_UNKNOWN);
        }
        ConcurrentMap<URI, Model> nextGenotypeUriMap = new ConcurrentHashMap<>();
        final ConcurrentMap<URI, Model> putIfAbsent =
                materialUriMap.putIfAbsent(nextPotUri, nextGenotypeUriMap);
        if (putIfAbsent != null) {
            nextGenotypeUriMap = putIfAbsent;
        }
        final Model putIfAbsent2 = nextGenotypeUriMap.putIfAbsent(nextMaterialURI, newModel);
        if (putIfAbsent2 != null) {
            this.log.error("ERROR: Generated two temporary Material URIs that were identical! : {} {}",
                    nextPotUri, nextMaterialURI);
        }
    }
    return nextMaterialURI;
}
From source file:com.github.podd.example.ExamplePoddClient.java
/**
 * Gets a genotype URI matching the given genus, species, and plantName (line) from the given
 * cache, creating a new entry if necessary and giving it a temporary URI.
 *
 * @param genotypeUriMap
 * @param genus
 * @param species
 * @param plantName
 * @param control
 * @param nextProjectID
 * @param nextProjectUri
 * @return
 */
private URI getGenotypeUri(final ConcurrentMap<URI, ConcurrentMap<URI, Model>> genotypeUriMap,
        final String genus, final String species, final String plantName, final String plantLineNumber,
        final String control, final InferredOWLOntologyID nextProjectID, final URI nextProjectUri) {
    URI nextGenotypeURI = null;
    if (genotypeUriMap.containsKey(nextProjectUri)) {
        final ConcurrentMap<URI, Model> nextProjectGenotypeMap = genotypeUriMap.get(nextProjectUri);
        for (final URI existingGenotypeURI : nextProjectGenotypeMap.keySet()) {
            final Model nextModel = nextProjectGenotypeMap.get(existingGenotypeURI);
            if (nextModel.contains(existingGenotypeURI, PODD.PODD_SCIENCE_HAS_GENUS,
                    RestletPoddClientImpl.vf.createLiteral(genus))) {
                if (nextModel.contains(existingGenotypeURI, PODD.PODD_SCIENCE_HAS_SPECIES,
                        RestletPoddClientImpl.vf.createLiteral(species))) {
                    if (nextModel.contains(existingGenotypeURI, PODD.PODD_SCIENCE_HAS_LINE,
                            RestletPoddClientImpl.vf.createLiteral(plantName))) {
                        nextGenotypeURI = existingGenotypeURI;
                        break;
                    } else {
                        this.log.debug(
                                "Did not find any genotypes with the given genus and species and line in this project: {} {} {} {}",
                                nextProjectUri, genus, species, plantName);
                    }
                } else {
                    this.log.debug(
                            "Did not find any genotypes with the given genus and species in this project: {} {} {}",
                            nextProjectUri, genus, species);
                }
            } else {
                this.log.debug("Did not find any genotypes with the given genus in this project: {} {}",
                        nextProjectUri, genus);
            }
        }
    }
    // If no genotype was found, then create a new description and assign it a temporary URI
    if (nextGenotypeURI == null) {
        this.log.debug(
                "Could not find an existing genotype for description provided, assigning a temporary URI: {} {} {} {}",
                nextProjectID, genus, species, plantName);
        nextGenotypeURI = RestletPoddClientImpl.vf.createURI(RestletPoddClientImpl.TEMP_UUID_PREFIX
                + "genotype:" + plantLineNumber + ":" + UUID.randomUUID().toString());
        final Model newModel = new LinkedHashModel();
        newModel.add(nextProjectUri, PODD.PODD_SCIENCE_HAS_GENOTYPE, nextGenotypeURI);
        newModel.add(nextGenotypeURI, RDF.TYPE, PODD.PODD_SCIENCE_GENOTYPE);
        newModel.add(nextGenotypeURI, RDFS.LABEL,
                RestletPoddClientImpl.vf.createLiteral(genus + " " + species + " (" + plantName + ")"));
        newModel.add(nextGenotypeURI, RDFS.COMMENT,
                RestletPoddClientImpl.vf.createLiteral("Plant line in : " + genus + " " + species + " named, "
                        + plantName + " : labelled as number " + plantLineNumber));
        newModel.add(nextGenotypeURI, PODD.PODD_SCIENCE_HAS_GENUS,
                RestletPoddClientImpl.vf.createLiteral(genus));
        newModel.add(nextGenotypeURI, PODD.PODD_SCIENCE_HAS_SPECIES,
                RestletPoddClientImpl.vf.createLiteral(species));
        newModel.add(nextGenotypeURI, PODD.PODD_SCIENCE_HAS_LINE,
                RestletPoddClientImpl.vf.createLiteral(plantName));
        newModel.add(nextGenotypeURI, PODD.PODD_SCIENCE_HAS_LINE_NUMBER,
                RestletPoddClientImpl.vf.createLiteral(plantLineNumber));
        ConcurrentMap<URI, Model> nextGenotypeUriMap = new ConcurrentHashMap<>();
        final ConcurrentMap<URI, Model> putIfAbsent =
                genotypeUriMap.putIfAbsent(nextProjectUri, nextGenotypeUriMap);
        if (putIfAbsent != null) {
            nextGenotypeUriMap = putIfAbsent;
        }
        final Model putIfAbsent2 = nextGenotypeUriMap.putIfAbsent(nextGenotypeURI, newModel);
        if (putIfAbsent2 != null) {
            this.log.error("ERROR: Generated two temporary Genotype URIs that were identical! : {} {}",
                    nextProjectUri, nextGenotypeURI);
        }
    }
    return nextGenotypeURI;
}
From source file:com.github.podd.example.ExamplePoddClient.java
private void populateGenotypeUriMap(
        final ConcurrentMap<String, ConcurrentMap<URI, InferredOWLOntologyID>> projectUriMap,
        final ConcurrentMap<URI, ConcurrentMap<URI, Model>> genotypeUriMap) throws PoddClientException {
    for (final String nextProjectName : projectUriMap.keySet()) {
        final ConcurrentMap<URI, InferredOWLOntologyID> nextProjectNameMapping = projectUriMap
                .get(nextProjectName);
        for (final URI projectUri : nextProjectNameMapping.keySet()) {
            final InferredOWLOntologyID artifactId = nextProjectNameMapping.get(projectUri);
            final Model nextSparqlResults = this.doSPARQL(
                    String.format(ExampleSpreadsheetConstants.TEMPLATE_SPARQL_BY_TYPE_ALL_PROPERTIES,
                            RenderUtils.getSPARQLQueryString(PODD.PODD_SCIENCE_GENOTYPE)),
                    Arrays.asList(artifactId));
            if (nextSparqlResults.isEmpty()) {
                this.log.debug("Could not find any existing genotypes for project: {} {}", nextProjectName,
                        projectUri);
            }
            for (final Resource nextGenotype : nextSparqlResults
                    .filter(null, RDF.TYPE, PODD.PODD_SCIENCE_GENOTYPE).subjects()) {
                if (!(nextGenotype instanceof URI)) {
                    this.log.error("Found genotype that was not assigned a URI: {} artifact={}", nextGenotype,
                            artifactId);
                } else {
                    ConcurrentMap<URI, Model> nextGenotypeMap = new ConcurrentHashMap<>();
                    // putIfAbsent keeps any per-project map that already exists instead of replacing it
                    final ConcurrentMap<URI, Model> putIfAbsent = genotypeUriMap.putIfAbsent(projectUri,
                            nextGenotypeMap);
                    if (putIfAbsent != null) {
                        nextGenotypeMap = putIfAbsent;
                    }
                    final Model putIfAbsent2 = nextGenotypeMap.putIfAbsent((URI) nextGenotype,
                            nextSparqlResults);
                    if (putIfAbsent2 != null) {
                        this.log.info(
                                "Found existing description for genotype URI within the same project: {} {}",
                                projectUri, nextGenotype);
                    }
                }
            }
        }
    }
}
From source file:com.github.podd.example.ExamplePoddClient.java
public ConcurrentMap<InferredOWLOntologyID, InferredOWLOntologyID> uploadArtifacts(
        final ConcurrentMap<InferredOWLOntologyID, Model> uploadQueue) throws PoddClientException {
    final ConcurrentMap<InferredOWLOntologyID, InferredOWLOntologyID> resultMap = new ConcurrentHashMap<>();
    for (final InferredOWLOntologyID nextUpload : uploadQueue.keySet()) {
        try {
            final StringWriter writer = new StringWriter(4096);
            Rio.write(uploadQueue.get(nextUpload), writer, RDFFormat.RDFJSON);
            final InferredOWLOntologyID newID = this.appendArtifact(nextUpload,
                    new ByteArrayInputStream(writer.toString().getBytes(Charset.forName("UTF-8"))),
                    RDFFormat.RDFJSON);
            if (newID == null) {
                this.log.error("Did not find a valid result from append artifact: {}", nextUpload);
            } else if (nextUpload.equals(newID)) {
                this.log.error("Result from append artifact was not changed, as expected. {} {}", nextUpload,
                        newID);
            } else {
                resultMap.putIfAbsent(nextUpload, newID);
            }
        } catch (final RDFHandlerException e) {
            this.log.error("Found exception generating upload body: ", e);
        }
    }
    return resultMap;
}
From source file:com.github.podd.example.ExamplePoddClient.java
private void populateExperimentUriMap(
        final ConcurrentMap<String, ConcurrentMap<URI, InferredOWLOntologyID>> projectUriMap,
        final ConcurrentMap<String, ConcurrentMap<URI, URI>> experimentUriMap) throws PoddClientException {
    for (final String nextProjectName : projectUriMap.keySet()) {
        final ConcurrentMap<URI, InferredOWLOntologyID> nextProjectNameMapping = projectUriMap
                .get(nextProjectName);
        for (final URI projectUri : nextProjectNameMapping.keySet()) {
            final InferredOWLOntologyID artifactId = nextProjectNameMapping.get(projectUri);
            final Model nextSparqlResults = this.doSPARQL(
                    String.format(ExampleSpreadsheetConstants.TEMPLATE_SPARQL_BY_TYPE,
                            RenderUtils.getSPARQLQueryString(PODD.PODD_SCIENCE_EXPERIMENT)),
                    Arrays.asList(artifactId));
            if (nextSparqlResults.isEmpty()) {
                this.log.info("Could not find any existing experiments for project: {} {}", nextProjectName,
                        projectUri);
            }
            for (final Resource nextExperiment : nextSparqlResults
                    .filter(null, RDF.TYPE, PODD.PODD_SCIENCE_EXPERIMENT).subjects()) {
                if (!(nextExperiment instanceof URI)) {
                    this.log.error("Found experiment that was not assigned a URI: {} artifact={}",
                            nextExperiment, artifactId);
                } else {
                    final Model label = nextSparqlResults.filter(nextExperiment, RDFS.LABEL, null);
                    // DebugUtils.printContents(label);
                    if (label.isEmpty()) {
                        this.log.error("Experiment did not have a label: {} {}", artifactId, nextExperiment);
                    } else {
                        for (final Value nextLabel : label.objects()) {
                            if (!(nextLabel instanceof Literal)) {
                                this.log.error("Project had a non-literal label: {} {} {}", artifactId,
                                        nextExperiment, nextLabel);
                            } else {
                                String nextLabelString = nextLabel.stringValue();
                                // take off any descriptions and leave the project number behind
                                nextLabelString = nextLabelString.split(" ")[0];
                                final Matcher matcher = ExampleSpreadsheetConstants.REGEX_EXPERIMENT
                                        .matcher(nextLabelString);
                                if (!matcher.matches()) {
                                    this.log.error(
                                            "Found experiment label that did not start with expected format: {}",
                                            nextLabel);
                                } else {
                                    this.log.debug(
                                            "Found experiment label with the expected format: '{}' original=<{}>",
                                            nextLabelString, nextLabel);
                                    final int nextProjectYear = Integer.parseInt(matcher.group(1));
                                    final int nextProjectNumber = Integer.parseInt(matcher.group(2));
                                    final int nextExperimentNumber = Integer.parseInt(matcher.group(3));
                                    nextLabelString = String.format(
                                            ExampleSpreadsheetConstants.TEMPLATE_EXPERIMENT, nextProjectYear,
                                            nextProjectNumber, nextExperimentNumber);
                                    this.log.debug("Reformatted experiment label to: '{}' original=<{}>",
                                            nextLabelString, nextLabel);
                                    ConcurrentMap<URI, URI> labelMap = new ConcurrentHashMap<>();
                                    final ConcurrentMap<URI, URI> putIfAbsent = experimentUriMap
                                            .putIfAbsent(nextLabelString, labelMap);
                                    if (putIfAbsent != null) {
                                        this.log.error(
                                                "Found duplicate experiment name, inconsistent results may follow: {} {} {}",
                                                artifactId, nextExperiment, nextLabel);
                                        // Overwrite our reference with the one that already existed
                                        labelMap = putIfAbsent;
                                    }
                                    final URI existingProject = labelMap.putIfAbsent((URI) nextExperiment,
                                            projectUri);
                                    // Check for the case where project name maps to different artifacts
                                    if (existingProject != null && !existingProject.equals(projectUri)) {
                                        this.log.error(
                                                "Found duplicate experiment name across different projects, inconsistent results may follow: {} {} {} {}",
                                                artifactId, existingProject, projectUri, nextLabel);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
From source file:org.opendaylight.ovsdb.plugin.ConfigurationService.java
@Override
public List<String> getTables(Node node) {
    ConcurrentMap<String, ConcurrentMap<String, Table<?>>> cache = inventoryServiceInternal.getCache(node);
    if (cache == null) {
        return null;
    }
    return new ArrayList<String>(cache.keySet());
}
From source file:org.dkpro.lab.engine.impl.MultiThreadBatchTaskEngine.java
@Override
protected void executeConfiguration(BatchTask aConfiguration, TaskContext aContext, Map<String, Object> aConfig,
        Set<String> aExecutedSubtasks) throws ExecutionException, LifeCycleException {
    if (log.isTraceEnabled()) {
        // Show all subtasks executed so far
        for (String est : aExecutedSubtasks) {
            log.trace("-- Already executed: " + est);
        }
    }

    // Set up initial scope used by sub-batch-tasks using the inherited scope. The scope is
    // extended as the subtasks of this batch are executed with the present configuration.
    // FIXME: That means that sub-batch-tasks in two different configurations cannot see
    // each other. Is that intended? Mind that the "executedSubtasks" set is intentionally
    // maintained *across* configurations, so maybe the scope should also be maintained
    // *across* configurations? - REC 2014-06-15
    Set<String> scope = new HashSet<>();
    if (aConfiguration.getScope() != null) {
        scope.addAll(aConfiguration.getScope());
    }

    // Configure subtasks
    for (Task task : aConfiguration.getTasks()) {
        // Now the setup is complete
        aContext.getLifeCycleManager().configure(aContext, task, aConfig);
    }

    Queue<Task> queue = new LinkedList<>(aConfiguration.getTasks());

    // keeps track of the execution threads;
    // TODO MW: do we really need this or can we work with the futures list only?
    Map<Task, ExecutionThread> threads = new HashMap<>();

    // keeps track of submitted Futures and their associated tasks
    Map<Future<?>, Task> futures = new HashMap<Future<?>, Task>();

    // will be instantiated with all exceptions from current loop
    ConcurrentMap<Task, Throwable> exceptionsFromLastLoop = null;
    ConcurrentMap<Task, Throwable> exceptionsFromCurrentLoop = new ConcurrentHashMap<>();

    int outerLoopCounter = 0;

    // main loop
    do {
        outerLoopCounter++;

        threads.clear();
        futures.clear();
        ExecutorService executor = Executors.newFixedThreadPool(maxThreads);

        // set the exceptions from the last loop
        exceptionsFromLastLoop = new ConcurrentHashMap<>(exceptionsFromCurrentLoop);

        // Fix MW: Clear exceptionsFromCurrentLoop; otherwise the loop will run at most twice.
        exceptionsFromCurrentLoop.clear();

        // process all tasks from the queue
        while (!queue.isEmpty()) {
            Task task = queue.poll();

            TaskContextMetadata execution = getExistingExecution(aConfiguration, aContext, task, aConfig,
                    aExecutedSubtasks);

            // Check if a subtask execution compatible with the present configuration
            // does already exist ...
            if (execution == null) {
                // ... otherwise execute it with the present configuration
                log.info("Executing task [" + task.getType() + "]");

                // set scope here so that the inherited scopes are considered
                if (task instanceof BatchTask) {
                    ((BatchTask) task).setScope(scope);
                }

                ExecutionThread thread = new ExecutionThread(aContext, task, aConfig, aExecutedSubtasks);
                threads.put(task, thread);

                futures.put(executor.submit(thread), task);
            } else {
                log.debug("Using existing execution [" + execution.getId() + "]");

                // Record new/existing execution
                aExecutedSubtasks.add(execution.getId());
                scope.add(execution.getId());
            }
        }

        // try and get results from all futures to check for failed executions
        for (Map.Entry<Future<?>, Task> entry : futures.entrySet()) {
            try {
                entry.getKey().get();
            } catch (java.util.concurrent.ExecutionException ex) {
                Task task = entry.getValue();
                // TODO MW: add a retry-counter here to prevent endless loops?
                log.info("Task exec failed for [" + task.getType() + "]");
                // record the failed task, so that it can be re-added to the queue
                exceptionsFromCurrentLoop.put(task, ex);
            } catch (InterruptedException ex) {
                // thread interrupted, exit
                throw new RuntimeException(ex);
            }
        }

        log.debug("Calling shutdown");
        executor.shutdown();
        log.debug("All threads finished");

        // collect the results
        for (Map.Entry<Task, ExecutionThread> entry : threads.entrySet()) {
            Task task = entry.getKey();
            ExecutionThread thread = entry.getValue();
            TaskContextMetadata execution = thread.getTaskContextMetadata();

            // probably failed
            if (execution == null) {
                Throwable exception = exceptionsFromCurrentLoop.get(task);
                if (!(exception instanceof UnresolvedImportException)
                        && !(exception instanceof java.util.concurrent.ExecutionException)) {
                    throw new RuntimeException(exception);
                }
                exceptionsFromCurrentLoop.put(task, exception);

                // re-add to the queue
                queue.add(task);
            } else {
                // Record new/existing execution
                aExecutedSubtasks.add(execution.getId());
                scope.add(execution.getId());
            }
        }
    }
    // end of do-loop; finish if the same tasks failed again
    while (!exceptionsFromCurrentLoop.keySet().equals(exceptionsFromLastLoop.keySet()));

    if (!exceptionsFromCurrentLoop.isEmpty()) {
        // collect all details
        StringBuilder details = new StringBuilder();
        for (Throwable throwable : exceptionsFromCurrentLoop.values()) {
            details.append("\n -");
            details.append(throwable.getMessage());
        }

        // we re-throw the first exception
        Throwable next = exceptionsFromCurrentLoop.values().iterator().next();
        if (next instanceof RuntimeException) {
            throw (RuntimeException) next;
        }

        // otherwise wrap it
        throw new RuntimeException(details.toString(), next);
    }
    log.info("MultiThreadBatchTask completed successfully. Total number of outer loop runs: "
            + outerLoopCounter);
}