List of usage examples for com.google.common.collect Multimap putAll
boolean putAll(@Nullable K key, Iterable<? extends V> values);
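Before the project examples below, a minimal self-contained sketch (class and variable names are illustrative, not taken from any of the projects listed) showing what this overload does: it appends every element of the iterable under a single key and returns true only if the multimap changed as a result.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Arrays;
import java.util.Collections;

public class MultimapPutAllDemo {
    public static void main(String[] args) {
        // ArrayListMultimap keeps duplicate key-value pairs
        Multimap<String, Integer> listBacked = ArrayListMultimap.create();
        System.out.println(listBacked.putAll("a", Arrays.asList(1, 2, 2)));  // true; "a" -> [1, 2, 2]
        System.out.println(listBacked.putAll("a", Collections.emptyList())); // false; nothing changed

        // HashMultimap collapses duplicate pairs, so only genuinely new values count as a change
        Multimap<String, Integer> setBacked = HashMultimap.create();
        setBacked.putAll("a", Arrays.asList(1, 2));
        System.out.println(setBacked.putAll("a", Arrays.asList(2, 3)));      // true; only 3 was new
        System.out.println(setBacked.get("a"));                              // [1, 2, 3] in some order
    }
}

Note that putAll never stores a key with an empty collection: if values is empty, the call is a no-op and returns false.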
From source file:uk.ac.open.kmi.iserve.discovery.disco.impl.GenericLogicDiscoverer.java
/**
 * Generic implementation for finding all the Services or Operations that have SOME of the given types
 * as inputs or outputs.
 *
 * @param entityType   the MSM URI of the type of entity we are looking for. Only supports Service and Operation.
 * @param relationship the MSM URI of the relationship we are looking for. Only supports hasInput and hasOutput.
 * @param types        the input/output types (modelReferences, that is) we are looking for
 * @return a Map mapping operation/service URIs to MatchResults.
 */
private Map<URI, MatchResult> findSome(URI entityType, URI relationship, Set<URI> types) {

    // Ensure that we have been given correct parameters
    if (types == null || types.isEmpty()
            || (!entityType.toASCIIString().equals(MSM.Service.getURI())
                    && !entityType.toASCIIString().equals(MSM.Operation.getURI()))
            || (!relationship.toASCIIString().equals(MSM.hasInput.getURI())
                    && !relationship.toASCIIString().equals(MSM.hasOutput.getURI())
                    && !relationship.toASCIIString().equals(SAWSDL.modelReference.getURI()))) {
        return ImmutableMap.of();
    }

    // Expand the input types to get all that match enough to be consumed
    // TODO: The leastOfType should be configurable
    Table<URI, URI, MatchResult> expandedTypes;
    if (relationship.toASCIIString().equals(SAWSDL.modelReference.getURI())) {
        expandedTypes = HashBasedTable.create();
        // TODO: fix this properly
        for (URI type : types) {
            expandedTypes.putAll(this.conceptMatcher.listMatchesAtMostOfType(ImmutableSet.of(type),
                    LogicConceptMatchType.Subsume));
            expandedTypes.putAll(this.conceptMatcher.listMatchesOfType(ImmutableSet.of(type),
                    LogicConceptMatchType.Exact));
        }
    } else {
        expandedTypes = this.conceptMatcher.listMatchesAtLeastOfType(types, LogicConceptMatchType.Plugin);
    }

    // Track all the results in a multimap to push the details up the stack
    Multimap<URI, MatchResult> result = ArrayListMultimap.create();

    // Find all the entities with modelReferences to the expanded types.
    // The column view is the one with all the possible matches since a class will always match itself.
    Map<URI, Map<URI, MatchResult>> columnMap = expandedTypes.columnMap();
    for (URI type : columnMap.keySet()) {
        Set<URI> entities = ImmutableSet.of();
        if (relationship.toASCIIString().equals(SAWSDL.modelReference.getURI())) {
            entities = listEntitesWithModelReference(entityType, type);
        } else if (relationship.toASCIIString().equals(MSM.hasInput.getURI())
                || relationship.toASCIIString().equals(MSM.hasOutput.getURI())) {
            entities = listEntitiesWithType(entityType, relationship, type);
        }

        for (URI entity : entities) {
            result.putAll(entity, columnMap.get(type).values());
        }
    }

    // Merge the results into a single map using Union
    return Maps.transformValues(result.asMap(), MatchResultsMerger.UNION);
}
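The putAll call above accumulates match results per entity URI, and the final line reduces each per-key collection through the asMap() view. A stripped-down sketch of that accumulate-then-merge pattern (String/Integer stand-ins for the iServe types, which are not part of this snippet):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;

import java.util.Arrays;
import java.util.Collections;
import java.util.Map;

public class AccumulateThenMergeSketch {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.putAll("svcA", Arrays.asList(1, 3));  // values under "svcA" accumulate
        scores.putAll("svcA", Arrays.asList(2));     // "svcA" -> [1, 3, 2]
        scores.putAll("svcB", Arrays.asList(5));

        // asMap() exposes one Collection<Integer> per key, which a Function can then reduce,
        // analogous to how MatchResultsMerger.UNION reduces Collection<MatchResult> above
        Map<String, Integer> best = Maps.transformValues(scores.asMap(), values -> Collections.max(values));
        System.out.println(best); // {svcA=3, svcB=5}
    }
}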
From source file:com.palantir.atlasdb.keyvalue.partition.map.DynamicPartitionMapImpl.java
/**
 * Copies rows within the specified range from all the tables.
 *
 * @param destKvs     the destination key-value service
 * @param rangeToCopy the range of rows to copy
 */
private void copyData(KeyValueService destKvs, RangeRequest rangeToCopy) {
    ImmutableList<PartitionMapService> mapServices = ImmutableList
            .<PartitionMapService>of(InMemoryPartitionMapService.create(this));
    PartitionedKeyValueService pkvs = PartitionedKeyValueService.create(quorumParameters, mapServices);

    for (String tableName : pkvs.getAllTableNames()) {
        // TODO: getRangeOfTimestamps?
        try (ClosableIterator<RowResult<Set<Value>>> allRows = pkvs.getRangeWithHistory(tableName,
                rangeToCopy, Long.MAX_VALUE)) {
            while (allRows.hasNext()) {
                RowResult<Set<Value>> row = allRows.next();
                for (Entry<Cell, Set<Value>> cell : row.getCells()) {

                    Multimap<Cell, Value> rowMap = HashMultimap.create();
                    rowMap.putAll(cell.getKey(), cell.getValue());

                    Multimap<Cell, Long> rowTsMap = HashMultimap.create();
                    for (Entry<Cell, Value> entry : rowMap.entries()) {
                        rowTsMap.put(entry.getKey(), entry.getValue().getTimestamp());
                    }

                    destKvs.putWithTimestamps(tableName, rowMap);
                }
            }
        }
    }
}
From source file:fr.ens.biologie.genomique.eoulsan.core.workflow.CommandWorkflow.java
/**
 * Configure the steps of the Workflow.
 *
 * @throws EoulsanException if an error occurs while creating the step
 */
private void configureSteps() throws EoulsanException {

    // Configure all the steps
    for (CommandStep step : this.steps) {
        step.configure();
    }

    Multimap<CommandStep, Requirement> requirements = ArrayListMultimap.create();

    // Get the requirements of all steps
    for (CommandStep step : this.steps) {
        Set<Requirement> stepRequirements = step.getModule().getRequirements();
        if (stepRequirements != null && !stepRequirements.isEmpty()) {
            requirements.putAll(step, stepRequirements);
        }
    }

    int installerCount = 0;

    for (Map.Entry<CommandStep, Requirement> e : requirements.entries()) {
        final String stepId = e.getKey().getId();
        final Requirement r = e.getValue();

        if (r.isAvailable()) {
            getLogger().fine("Requirement found for step \"" + stepId + "\": " + r);
            continue;
        }

        getLogger().fine("Requirement not found for step \"" + stepId + "\": " + r);

        if (!r.isInstallable()) {
            if (r.isOptional()) {
                continue;
            } else {
                throw new EoulsanException("Requirement for step \"" + e.getKey().getId()
                        + "\" is not available: " + r.getName());
            }
        }

        installerCount++;

        // Create an installer step
        final CommandStep step = new CommandStep(this, r.getName() + "install" + installerCount,
                RequirementInstallerModule.MODULE_NAME, Globals.APP_VERSION.toString(), r.getParameters(),
                false, false, -1, -1, "");

        // Configure the installer step
        step.configure();

        // Add the new step to the workflow
        addStep(indexOfStep(getFirstStep()), step);
    }
}
From source file:org.openscience.cdk.app.DepictController.java
private void abbreviate(IReaction rxn, String mode, String annotate) {
    Multimap<IAtomContainer, Sgroup> sgroupmap = ArrayListMultimap.create();
    switch (mode.toLowerCase()) {
    case "true":
    case "on":
    case "yes":
        for (IAtomContainer mol : rxn.getReactants().atomContainers()) {
            contractHydrates(mol);
            Set<IAtom> atoms = new HashSet<>();
            List<Sgroup> newSgroups = new ArrayList<>();
            for (Sgroup sgroup : abbreviations.generate(mol)) {
                if (add(atoms, sgroup.getAtoms()))
                    newSgroups.add(sgroup);
            }
            sgroupmap.putAll(mol, newSgroups);
        }
        for (IAtomContainer mol : rxn.getProducts().atomContainers()) {
            contractHydrates(mol);
            Set<IAtom> atoms = new HashSet<>();
            List<Sgroup> newSgroups = new ArrayList<>();
            for (Sgroup sgroup : abbreviations.generate(mol)) {
                if (add(atoms, sgroup.getAtoms()))
                    newSgroups.add(sgroup);
            }
            sgroupmap.putAll(mol, newSgroups);
        }
        for (IAtomContainer mol : rxn.getAgents().atomContainers()) {
            contractHydrates(mol);
            reagents.apply(mol);
            abbreviations.apply(mol);
        }
        break;
    case "groups":
        for (IAtomContainer mol : rxn.getAgents().atomContainers()) {
            contractHydrates(mol);
            abbreviations.apply(mol);
        }
        break;
    case "reagents":
        for (IAtomContainer mol : rxn.getAgents().atomContainers()) {
            contractHydrates(mol);
            reagents.apply(mol);
        }
        break;
    }

    Set<String> include = new HashSet<>();
    for (Map.Entry<IAtomContainer, Sgroup> e : sgroupmap.entries()) {
        final IAtomContainer mol = e.getKey();
        final Sgroup abbrv = e.getValue();
        int numAtoms = mol.getAtomCount();
        if (abbrv.getBonds().isEmpty()) {
            include.add(abbrv.getSubscript());
        } else {
            int numAbbr = abbrv.getAtoms().size();
            double f = numAbbr / (double) numAtoms;
            if (numAtoms - numAbbr > 1 && f <= 0.4) {
                include.add(abbrv.getSubscript());
            }
        }
    }

    for (Map.Entry<IAtomContainer, Collection<Sgroup>> e : sgroupmap.asMap().entrySet()) {
        final IAtomContainer mol = e.getKey();
        List<Sgroup> sgroups = mol.getProperty(CDKConstants.CTAB_SGROUPS);
        if (sgroups == null)
            sgroups = new ArrayList<>();
        else
            sgroups = new ArrayList<>(sgroups);
        mol.setProperty(CDKConstants.CTAB_SGROUPS, sgroups);
        for (Sgroup abbrv : e.getValue()) {
            if (include.contains(abbrv.getSubscript()))
                sgroups.add(abbrv);
        }
    }
}
From source file:com.giaybac.traprange.extractor.PDFTableExtractor.java
public List<Table> extract() {
    List<Table> retVal = new ArrayList<>();
    Multimap<Integer, Range<Integer>> pageIdNLineRangesMap = LinkedListMultimap.create();
    Multimap<Integer, TextPosition> pageIdNTextsMap = LinkedListMultimap.create();
    try {
        this.document = PDDocument.load(inputStream);
        for (int pageId = 0; pageId < document.getNumberOfPages(); pageId++) {
            boolean b = !exceptedPages.contains(pageId)
                    && (extractedPages.isEmpty() || extractedPages.contains(pageId));
            if (b) {
                PDPage pdPage = (PDPage) document.getDocumentCatalog().getAllPages().get(pageId);
                List<TextPosition> texts = extractTextPositions(pdPage); // sorted by .getY() ASC
                // extract line ranges
                List<Range<Integer>> lineRanges = getLineRanges(pageId, texts);
                // extract column ranges
                List<TextPosition> textsByLineRanges = getTextsByLineRanges(lineRanges, texts);

                pageIdNLineRangesMap.putAll(pageId, lineRanges);
                pageIdNTextsMap.putAll(pageId, textsByLineRanges);
            }
        }
        // Calculate columnRanges
        List<Range<Integer>> columnRanges = getColumnRanges(pageIdNTextsMap.values());
        for (int pageId : pageIdNTextsMap.keySet()) {
            Table table = buildTable(pageId, (List) pageIdNTextsMap.get(pageId),
                    (List) pageIdNLineRangesMap.get(pageId), columnRanges);
            retVal.add(table);
            // debug
            logger.debug("Found " + table.getRows().size() + " row(s) and " + columnRanges.size()
                    + " column(s) of a table in page " + pageId);
        }
    } catch (IOException ex) {
        throw new RuntimeException("Parse pdf file fail", ex);
    }
    // return
    return retVal;
}
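Note that this extractor (and the variant from the same project below) builds its multimaps with LinkedListMultimap.create() rather than ArrayListMultimap: LinkedListMultimap iterates keySet() in the order keys were first inserted and values() in global insertion order, so the later loop over pageIdNTextsMap.keySet() visits pages in the order they were processed. A minimal illustration with hypothetical values:

import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;

import java.util.Arrays;

public class LinkedListMultimapOrderSketch {
    public static void main(String[] args) {
        Multimap<Integer, String> pages = LinkedListMultimap.create();
        pages.putAll(2, Arrays.asList("line-a", "line-b")); // suppose page 2 was processed first
        pages.putAll(0, Arrays.asList("line-c"));

        System.out.println(pages.keySet()); // [2, 0] -- key insertion order, not sorted
        System.out.println(pages.values()); // [line-a, line-b, line-c] -- global insertion order
    }
}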
From source file:com.giaybac.traprange.PDFTableExtractor.java
public List<Table> extract() {
    List<Table> retVal = new ArrayList<>();
    Multimap<Integer, Range<Integer>> pageIdNLineRangesMap = LinkedListMultimap.create();
    Multimap<Integer, TextPosition> pageIdNTextsMap = LinkedListMultimap.create();
    try {
        this.document = this.password != null ? PDDocument.load(inputStream, this.password)
                : PDDocument.load(inputStream);
        for (int pageId = 0; pageId < document.getNumberOfPages(); pageId++) {
            boolean b = !exceptedPages.contains(pageId)
                    && (extractedPages.isEmpty() || extractedPages.contains(pageId));
            if (b) {
                List<TextPosition> texts = extractTextPositions(pageId); // sorted by .getY() ASC
                // extract line ranges
                List<Range<Integer>> lineRanges = getLineRanges(pageId, texts);
                // extract column ranges
                List<TextPosition> textsByLineRanges = getTextsByLineRanges(lineRanges, texts);

                pageIdNLineRangesMap.putAll(pageId, lineRanges);
                pageIdNTextsMap.putAll(pageId, textsByLineRanges);
            }
        }
        // Calculate columnRanges
        List<Range<Integer>> columnRanges = getColumnRanges(pageIdNTextsMap.values());
        for (int pageId : pageIdNTextsMap.keySet()) {
            Table table = buildTable(pageId, (List) pageIdNTextsMap.get(pageId),
                    (List) pageIdNLineRangesMap.get(pageId), columnRanges);
            retVal.add(table);
            // debug
            logger.debug("Found " + table.getRows().size() + " row(s) and " + columnRanges.size()
                    + " column(s) of a table in page " + pageId);
        }
    } catch (IOException ex) {
        throw new RuntimeException("Parse pdf file fail", ex);
    } finally {
        if (this.document != null) {
            try {
                this.document.close();
            } catch (IOException ex) {
                logger.error(null, ex);
            }
        }
    }
    // return
    return retVal;
}
From source file:org.eclipse.xtext.util.formallang.PdaUtil.java
public <S, P, T, D extends Pda<S, P>> D expand(Pda<S, P> pda, Function<S, Pda<S, P>> expand,
        Function<S, T> tokens, PdaFactory<D, S, P, T> fact) {
    D result = fact.create(tokens.apply(pda.getStart()), tokens.apply(pda.getStop()));
    Identity<S> identity = new Identity<S>();
    Map<S, S> idstates = Maps.newIdentityHashMap();
    Multimap<S, S> followers = LinkedHashMultimap.create();
    for (S s_old : nfaUtil.collect(pda)) {
        S s_new = idstates.get(s_old);
        if (s_new == null) {
            Pda<S, P> sub = expand.apply(s_old);
            if (sub != null) {
                S s_start = identity.get(fact.createPush(result, tokens.apply(s_old)));
                S s_stop = identity.get(fact.createPop(result, tokens.apply(s_old)));
                idstates.put(s_old, s_start);
                idstates.put(sub.getStart(), s_start);
                idstates.put(sub.getStop(), s_stop);
                followers.putAll(s_start, sub.getFollowers(sub.getStart()));
                followers.putAll(s_stop, pda.getFollowers(s_old));
                for (S f_old : nfaUtil.collect(sub))
                    if (f_old != sub.getStart() && f_old != sub.getStop()) {
                        S f_new = idstates.get(f_old);
                        if (f_new == null) {
                            idstates.put(f_old, f_new = clone(f_old, sub, result, tokens, fact, identity));
                            followers.putAll(f_new, pda.getFollowers(f_old));
                        }
                    }
            } else {
                idstates.put(s_old, s_new = clone(s_old, pda, result, tokens, fact, identity));
                followers.putAll(s_new, pda.getFollowers(s_old));
            }
        }
    }
    for (Map.Entry<S, Collection<S>> entry : followers.asMap().entrySet()) {
        Set<S> f = Sets.newLinkedHashSet();
        for (S s : entry.getValue())
            f.add(idstates.get(s));
        fact.setFollowers(result, entry.getKey(), f);
    }
    return result;
}
From source file:gg.pistol.sweeper.core.Analyzer.java
/**
 * Select all the duplicates from the targets based on a criteria function.
 * If the function returns the same value for two input targets then those targets are considered duplicates
 * (in the context of the criteria function).
 *
 * @return a multimap with function values as keys and the targets that are considered duplicates as values
 *         for the key
 */
private <T> Multimap<T, TargetImpl> filterDuplicates(Collection<TargetImpl> targets,
        Function<TargetImpl, T> indexFunction) throws SweeperAbortException {
    // Dumping all the targets into the multimap (Multimaps.index() doesn't work because it does not support
    // skipping null function values and also because of checking the abort flag).
    Multimap<T, TargetImpl> map = ArrayListMultimap.create();
    for (TargetImpl target : targets) {
        T key = indexFunction.apply(target);
        if (key != null) { // ignore null values
            map.put(key, target);
        }
        checkAbortFlag();
    }

    // Filtering the targets (Multimaps.filterKeys() and/or Multimaps.filterValues() don't work because of
    // checking the abort flag).
    Multimap<T, TargetImpl> ret = ArrayListMultimap.create();
    for (T key : map.keySet()) {
        checkAbortFlag();
        Collection<TargetImpl> collection = map.get(key);

        // Ignore all the targets that are not duplicates.
        if (collection.size() == 1) {
            continue;
        }

        // Ignore all the targets that are a single child of a directory. In this case the directory will
        // represent the child's content.
        Collection<TargetImpl> values = new ArrayList<TargetImpl>();
        for (TargetImpl target : collection) {
            if (target.getParent() == null || target.getParent().getChildren().size() > 1) {
                values.add(target);
            }
        }
        if (values.size() > 1) {
            ret.putAll(key, values);
        }
    }
    return ret;
}
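Reduced to its core, the method is a group-by followed by discarding singleton groups. A compact sketch of that putAll-based filtering, with plain strings standing in for TargetImpl (no abort-flag or parent-directory handling):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public class DuplicateFilterSketch {
    public static void main(String[] args) {
        List<String> targets = Arrays.asList("apple", "avocado", "banana", "blueberry", "cherry");

        // Group every target by a criteria function (here: its first character)
        Multimap<Character, String> byKey = ArrayListMultimap.create();
        for (String t : targets) {
            byKey.put(t.charAt(0), t);
        }

        // Copy over only the groups that actually contain duplicates
        Multimap<Character, String> duplicates = ArrayListMultimap.create();
        for (Character key : byKey.keySet()) {
            Collection<String> group = byKey.get(key);
            if (group.size() > 1) {
                duplicates.putAll(key, group);
            }
        }
        System.out.println(duplicates); // {a=[apple, avocado], b=[banana, blueberry]}
    }
}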
From source file:edu.buaa.satla.analysis.core.predicate.BlockFormulaSlicer.java
/**
 * This function handles all outgoing edges of the current state.
 * Their important vars are joined and returned.
 */
private Collection<String> handleEdgesForState(ARGState current, Map<ARGState, Collection<String>> s2v,
        Multimap<ARGState, ARGState> s2s, Set<ARGState> block) {

    final List<ARGState> usedChildren = from(current.getChildren()).filter(in(block)).toList();

    assert usedChildren.size() > 0 : "no child for " + current.getStateId();

    // if we have an assumption, and the branches are completely unimportant,
    // the assumption itself is unimportant, so we can ignore it
    if (isAssumptionWithSameImpChild(usedChildren, current, s2s)) {

        final ARGState child1 = usedChildren.get(0);
        s2s.putAll(current, s2s.get(child1));

        final Collection<String> iVars = new LinkedHashSet<>();

        // vars from latest important child,
        // we have to copy them, there could be another parent somewhere else
        iVars.addAll(s2v.get(child1));

        return iVars;

    } else {
        // there can be several children --> collect their vars and join them
        // normally there is 1 or 2 children.
        Collection<String> iVars = null;
        for (ARGState child : usedChildren) {

            final Collection<String> oldVars = s2v.get(child);
            final Collection<String> newVars;

            // if there is only one parent for the child, we re-use oldVars.
            // TODO better solution: if (allParentsExceptThisDone(child)) {
            if (child.getParents().size() == 1) {
                newVars = oldVars;
            } else {
                // copy oldVars, they will be used later for a second parent
                newVars = new LinkedHashSet<>();
                newVars.addAll(oldVars);
            }

            // do the hard work
            final CFAEdge edge = current.getEdgeToChild(child);
            final boolean isImportant = handleEdge(edge, newVars);

            assert !importantEdges.containsEntry(current, child);

            if (isImportant) {
                importantEdges.put(current, child);
                s2s.put(current, current);
            } else {
                s2s.putAll(current, s2s.get(child));
            }

            if (iVars == null) {
                iVars = newVars;
            } else {
                iVars.addAll(newVars);
            }
        }

        Preconditions.checkNotNull(iVars);
        return iVars;
    }
}
From source file:co.cask.cdap.internal.app.runtime.adapter.PluginRepository.java
/**
 * Gathers plugin class information by parsing an external configuration file.
 *
 * @return {@code true} if there is an external configuration file, {@code false} otherwise.
 */
private boolean configureByFile(PluginFile pluginFile,
        Multimap<PluginInfo, PluginClass> templatePlugins) throws IOException {
    String pluginFileName = pluginFile.getFile().getName();
    String configFileName = pluginFileName.substring(0, pluginFileName.length() - ".jar".length()) + ".json";

    File configFile = new File(pluginFile.getFile().getParentFile(), configFileName);
    if (!configFile.isFile()) {
        return false;
    }

    // The config file is a json array of PluginClass objects (except the PluginClass.configFieldName)
    try (Reader reader = Files.newReader(configFile, Charsets.UTF_8)) {
        List<PluginClass> pluginClasses = GSON.fromJson(reader, CONFIG_OBJECT_TYPE);

        // Put it one by one so that we can log duplicate plugin classes
        for (PluginClass pluginClass : pluginClasses) {
            if (!templatePlugins.put(pluginFile.getPluginInfo(), pluginClass)) {
                LOG.warn("Plugin already exists in {}. Ignore plugin class {}", pluginFile.getPluginInfo(),
                        pluginClass);
            }
        }

        templatePlugins.putAll(pluginFile.getPluginInfo(), pluginClasses);
    }

    return true;
}