List of usage examples for com.google.common.collect Multimap keySet
Set<K> keySet();
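For reference, keySet() returns a view collection of the distinct keys in the multimap: a key that maps to several values appears only once, and the view writes through, so removing a key drops all of its entries. A minimal standalone sketch (names and values are illustrative, not taken from the sources below):

import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;

import java.util.Set;

public class MultimapKeySetDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = LinkedHashMultimap.create();
        scores.put("alice", 90);
        scores.put("alice", 85); // same key, second value
        scores.put("bob", 70);

        Set<String> keys = scores.keySet();
        System.out.println(keys);        // [alice, bob] -- each key appears once
        System.out.println(keys.size()); // 2, although the multimap holds 3 entries

        // the key set is a live view: removing a key removes all of its entries
        keys.remove("alice");
        System.out.println(scores.size()); // 1
    }
}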
From source file:org.hudsonci.maven.plugin.dependencymonitor.internal.ProjectArtifactCacheImpl.java
private Collection<AbstractProject> projectsContaining(final Multimap<AbstractProject, MavenCoordinatesDTO> source,
        final MavenCoordinatesDTO artifact) {
    assert source != null;
    assert artifact != null;
    Set<AbstractProject> projects = Sets.newHashSet();
    for (AbstractProject project : source.keySet()) {
        if (source.containsEntry(project, artifact)) {
            projects.add(project);
        }
    }
    return projects;
}
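The loop above probes containsEntry() once per key; the same inverse lookup can be done in a single pass over entries(). A hedged generic sketch — the helper name keysContaining and the sample data are ours, not the project's:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;

import java.util.Map;
import java.util.Set;

public class MultimapInverseLookup {
    // collect every key that maps to the given value, scanning each entry once
    static <K, V> Set<K> keysContaining(Multimap<K, V> source, V value) {
        Set<K> keys = Sets.newHashSet();
        for (Map.Entry<K, V> entry : source.entries()) {
            if (value.equals(entry.getValue())) {
                keys.add(entry.getKey());
            }
        }
        return keys;
    }

    public static void main(String[] args) {
        Multimap<String, String> projects = ArrayListMultimap.create();
        projects.put("project-a", "artifact-1");
        projects.put("project-b", "artifact-1");
        projects.put("project-c", "artifact-2");
        System.out.println(keysContaining(projects, "artifact-1")); // project-a, project-b
    }
}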
From source file:org.nuxeo.ecm.platform.groups.audit.service.acl.AclExcelLayoutBuilder.java
protected void renderAcl(Multimap<String, Pair<String, Boolean>> userAcls, CellStyle style) throws ClientException {
    for (String user : userAcls.keySet()) {
        int column = layout.getUserColumn(user);
        String info = formatAcl(userAcls.get(user));
        excel.setCell(treeLineCursor, column, info, style);
    }
}
From source file:edu.harvard.med.screensaver.service.cherrypicks.CherryPickRequestAllocator.java
/**
 * @return the set of <i>unfulfillable</i> cherry picks
 */
@Transactional
public Set<LabCherryPick> allocate(CherryPickRequest cherryPickRequestIn) throws DataAccessException {
    // TODO: handle concurrency; perform appropriate locking to prevent race conditions
    // (overdrawing a well) among multiple allocate() calls
    CherryPickRequest cherryPickRequest = _dao.reloadEntity(cherryPickRequestIn, false,
            CherryPickRequest.labCherryPicks.to(LabCherryPick.sourceWell));
    validateAllocationBusinessRules(cherryPickRequest);
    Set<LabCherryPick> unfulfillableLabCherryPicks = new HashSet<LabCherryPick>();
    Multimap<Integer, LabCherryPick> labCherryPicksBySourcePlate = getLabCherryPicksBySourcePlate(cherryPickRequest);
    for (Integer plateNumber : labCherryPicksBySourcePlate.keySet()) {
        log.debug("allocating " + cherryPickRequest + " lab cherry picks from plate " + plateNumber);
        unfulfillableLabCherryPicks.addAll(allocate(labCherryPicksBySourcePlate.get(plateNumber)));
    }
    return unfulfillableLabCherryPicks;
}
From source file:eu.mondo.driver.fourstore.FourStoreGraphDriverReadWrite.java
@Override
public void insertEdgesWithVertex(final Multimap<String, String> edges, final String edgeType,
        final String targetVertexType) throws IOException {
    if (edges.isEmpty()) {
        return;
    }

    final ArrayList<String> sourceVertices = new ArrayList<>(edges.keySet());
    final List<List<String>> sourceVerticesPartitions = Lists.partition(sourceVertices, PARTITION_SIZE);
    for (final List<String> sourceVerticesPartition : sourceVerticesPartitions) {
        final Multimap<String, String> edgePartition = ArrayListMultimap.create();
        for (final String sourceVertexURI : sourceVerticesPartition) {
            final Collection<String> targetVertexURIs = edges.get(sourceVertexURI);
            edgePartition.putAll(sourceVertexURI, targetVertexURIs);
        }
        insertEdgesWithVertexPartition(edgePartition, edgeType, targetVertexType);
    }
}
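Note why the key set is copied into an ArrayList first: Lists.partition requires a List, and the snapshot decouples the batching from the live keySet() view. A minimal sketch of the same batching pattern (data and PARTITION_SIZE are illustrative):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;

import java.util.ArrayList;
import java.util.List;

public class KeyBatchingDemo {
    private static final int PARTITION_SIZE = 2;

    public static void main(String[] args) {
        Multimap<String, String> edges = ArrayListMultimap.create();
        edges.put("a", "b");
        edges.put("a", "c"); // "a" still contributes a single key
        edges.put("d", "e");
        edges.put("f", "g");
        edges.put("h", "i");

        // snapshot the key view, then slice it into fixed-size batches
        List<String> sources = new ArrayList<>(edges.keySet());
        for (List<String> batch : Lists.partition(sources, PARTITION_SIZE)) {
            System.out.println("batch: " + batch); // [a, d], then [f, h]
        }
    }
}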
From source file:org.sonarlint.intellij.issue.IssueProcessor.java
public void process(final SonarlintAnalyzer.SonarLintJob job, final Collection<Issue> issues) {
    Multimap<PsiFile, IssueStore.StoredIssue> map;
    final VirtualFile moduleBaseDir = SonarLintUtils.getModuleRoot(job.module());
    long start = System.currentTimeMillis();
    AccessToken token = ReadAction.start();
    try {
        Collection<PsiFile> psiFiles = getPsi(job.files());
        clearFiles(psiFiles);
        map = transformIssues(moduleBaseDir, issues);

        for (PsiFile file : map.keySet()) {
            store.store(file, map.get(file));
        }

        // restart analyzer for all files analyzed (even the ones without issues)
        // so that our external annotator is called
        for (PsiFile f : psiFiles) {
            codeAnalyzer.restart(f);
        }
    } finally {
        token.finish();
    }

    console.debug("Stored matched issues in " + (System.currentTimeMillis() - start) + " ms");

    String end;
    if (issues.size() == 1) {
        end = " issue";
    } else {
        end = " issues";
    }
    console.info("Found " + issues.size() + end);
}
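When every key's value collection is consumed, as in the store loop above, asMap() yields the same key/collection pairs without a per-key get() lookup. A small self-contained sketch (file names and messages are made up):

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;
import java.util.Map;

public class AsMapDemo {
    public static void main(String[] args) {
        Multimap<String, String> issuesByFile = HashMultimap.create();
        issuesByFile.put("Foo.java", "unused import");
        issuesByFile.put("Foo.java", "magic number");
        issuesByFile.put("Bar.java", "long method");

        // equivalent to iterating keySet() and calling get(key) for each one,
        // but each key arrives together with its value collection
        for (Map.Entry<String, Collection<String>> e : issuesByFile.asMap().entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue());
        }
    }
}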
From source file:org.robotframework.ide.eclipse.main.plugin.project.build.libs.LibrariesBuilder.java
public void forceLibrariesRebuild(final Multimap<IProject, LibrarySpecification> groupedSpecifications,
        final SubMonitor monitor) {
    monitor.subTask("generating libdocs");
    final Multimap<IProject, GeneratorWithSource> groupedGenerators = LinkedHashMultimap.create();
    for (final IProject project : groupedSpecifications.keySet()) {
        for (final LibrarySpecification specification : groupedSpecifications.get(project)) {
            final GeneratorWithSource generatorWithSource = new GeneratorWithSource(
                    specification.getSourceFile(), provideGenerator(specification));
            groupedGenerators.put(project, generatorWithSource);
        }
    }

    monitor.setWorkRemaining(groupedGenerators.size());
    for (final IProject project : groupedGenerators.keySet()) {
        final RobotProject robotProject = RedPlugin.getModelManager().createProject(project);
        final RobotRuntimeEnvironment runtimeEnvironment = robotProject.getRuntimeEnvironment();
        for (final GeneratorWithSource generatorWithSource : groupedGenerators.get(project)) {
            monitor.subTask(generatorWithSource.generator.getMessage());
            try {
                if (project.exists()) {
                    generatorWithSource.generator.generateLibdocForcibly(runtimeEnvironment,
                            new RedEclipseProjectConfig(robotProject.getRobotProjectConfig())
                                    .createEnvironmentSearchPaths(project));
                }
            } catch (final RobotEnvironmentException e) {
                final IPath libspecFileLocation = generatorWithSource.sourceLibdocFile.getLocation();
                if (libspecFileLocation != null) {
                    libspecFileLocation.toFile().delete();
                }
                throw e;
            }
            monitor.worked(1);
        }
    }
    monitor.done();
}
From source file:org.dragoneronca.nlp.wol.disambiguation.SenseSolver.java
private void disambiguateSense(LightSense sense) {
    Set<LightSemanticEdge> outEdges = sense.getOutEdges();

    // group semantic edges by referred term
    Multimap<Integer, LightSemanticEdge> wordMap = LinkedHashMultimap.create();
    for (LightSemanticEdge lightSemanticEdge : outEdges) {
        wordMap.put(lightSemanticEdge.getTargetWordHash(), lightSemanticEdge);
    }

    // disambiguate each term
    for (int wordHash : wordMap.keySet()) {
        processedTerms++;
        if (disambiguateSenseTerm(sense, wordHash, wordMap.get(wordHash))) {
            convergedTerms++;
        }
    }

    // update weights in the light wol graph
    ArrayList<LightSemanticEdge> semanticEdgeList = new ArrayList<>(outEdges);
    Collections.sort(semanticEdgeList, new Comparator<LightSemanticEdge>() {
        @Override
        public int compare(LightSemanticEdge lightSemanticEdge, LightSemanticEdge lightSemanticEdge2) {
            return Integer.compare(lightSemanticEdge.getId(), lightSemanticEdge2.getId());
        }
    });

    EdgeList outEdgesOf = sense.getLightWolGraph().getOutEdgesOf(sense.getId());
    for (int i = 0; i < outEdges.size(); i++) {
        outEdgesOf.setWeight(i, semanticEdgeList.get(i).getWeight());
    }
}
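The grouping loop above is the usual idiom when the key is computed from each element; when the key function stands alone, Multimaps.index builds the equivalent multimap in one call. A sketch with illustrative data:

import com.google.common.base.Function;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.Multimaps;

import java.util.Arrays;
import java.util.List;

public class GroupByKeyDemo {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("ant", "bee", "apple", "bear");

        // index each word by its first character; keys keep encounter order
        ImmutableListMultimap<Character, String> byInitial = Multimaps.index(words,
                new Function<String, Character>() {
                    @Override
                    public Character apply(String word) {
                        return word.charAt(0);
                    }
                });

        for (char initial : byInitial.keySet()) {
            System.out.println(initial + " -> " + byInitial.get(initial)); // a -> [ant, apple], b -> [bee, bear]
        }
    }
}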
From source file:org.eclipse.xtext.ui.refactoring.impl.AbstractReferenceUpdater.java
protected void createReferenceUpdatesForCluster(ElementRenameArguments elementRenameArguments,
        Multimap<URI, IReferenceDescription> resource2references, ResourceSet resourceSet,
        IRefactoringUpdateAcceptor updateAcceptor, StatusWrapper status, IProgressMonitor monitor) {
    SubMonitor progress = SubMonitor.convert(monitor, 100);
    List<URI> unloadableResources = loadReferringResources(resourceSet, resource2references.keySet(), status,
            progress.newChild(10));
    if (progress.isCanceled()) {
        throw new OperationCanceledException();
    }
    for (URI unloadableResource : unloadableResources)
        resource2references.removeAll(unloadableResource);
    List<IReferenceDescription> unresolvableReferences = resolveReferenceProxies(resourceSet,
            resource2references.values(), status, progress.newChild(70));
    if (progress.isCanceled()) {
        throw new OperationCanceledException();
    }
    for (IReferenceDescription unresolvableReference : unresolvableReferences) {
        URI unresolvableReferringResource = unresolvableReference.getSourceEObjectUri().trimFragment();
        resource2references.remove(unresolvableReferringResource, unresolvableReference);
    }
    elementRenameArguments.getRenameStrategy().applyDeclarationChange(elementRenameArguments.getNewName(),
            resourceSet);
    if (progress.isCanceled()) {
        throw new OperationCanceledException();
    }
    createReferenceUpdates(elementRenameArguments, resource2references, resourceSet, updateAcceptor,
            progress.newChild(20));
    if (progress.isCanceled()) {
        throw new OperationCanceledException();
    }
    elementRenameArguments.getRenameStrategy().revertDeclarationChange(resourceSet);
}
From source file:org.opentestsystem.authoring.testauth.publish.SharedPublisherHelper.java
private List<TestComputationRule> buildTestComputationRulesForLeafNodeRules(
        final List<ScoringRule> scoringRuleList, final List<BlueprintElement> blueprintElementList) {
    final List<TestComputationRule> testComputationRuleList = Lists.newArrayList();
    if (Iterables.any(scoringRuleList, LEAF_NODE_TYPE_FILTER)) {
        // winnow blueprintElement list down to parented children only
        Iterables.removeIf(blueprintElementList, PARENT_KEY_FILTER);

        // build multimap of blueprint elements keyed by parent key
        final Multimap<String, BlueprintElement> blueprintElementParentKeyMultimap = Multimaps
                .index(blueprintElementList, BP_PARENT_KEY_TRANSFORMER);

        // filter blueprint elements that are parents out of the parented children
        Iterables.removeIf(blueprintElementList,
                NON_LEAF_NODE_FILTER.getInstance(blueprintElementParentKeyMultimap.keySet()));

        // construct each leaf node into a separate scoring rule
        final List<ScoringRule> leafNodeScoringRules = Lists
                .newArrayList(Iterables.filter(scoringRuleList, LEAF_NODE_TYPE_FILTER));
        for (final ScoringRule scoringRule : leafNodeScoringRules) {
            final int i = 1;
            // transform every leaf-node blueprint element into a scoring rule mimicking this scoringRule
            testComputationRuleList.addAll(Lists.transform(blueprintElementList,
                    LEAF_NODE_LEVEL_SCORING_RULE_TRANSFORMER.getInstance(scoringRule, i)));
        }
    }
    return testComputationRuleList;
}
From source file:com.streamsets.pipeline.stage.destination.datalake.DataLakeTarget.java
@Override
public void write(Batch batch) throws StageException {
    Multimap<String, Record> partitions = ELUtils.partitionBatchByExpression(dirPathTemplateEval,
            dirPathTemplateVars, conf.dirPathTemplate, timeDriverEval, timeDriverVars, conf.timeDriver,
            calendar, batch);

    OutputStream stream = null;
    Record record = null;
    DataGenerator generator = null;

    for (String key : partitions.keySet()) {
        Iterator<Record> iterator = partitions.get(key).iterator();
        // for uniqueness of the file name
        filePath = key + conf.uniquePrefix + "-" + UUID.randomUUID();
        try {
            if (!client.checkExists(filePath)) {
                stream = client.createFile(filePath, IfExists.FAIL);
            } else {
                stream = client.getAppendStream(filePath);
            }
            while (iterator.hasNext()) {
                record = iterator.next();
                baos.reset();
                generator = conf.dataFormatConfig.getDataGeneratorFactory().getGenerator(baos);
                generator.write(record);
                generator.close();
                stream.write(baos.toByteArray());
            }
        } catch (IOException ex) {
            if (record == null) {
                // possible permission error on the directory or connection issues; throw a stage exception
                LOG.error(Errors.ADLS_02.getMessage(), ex.toString(), ex);
                throw new StageException(Errors.ADLS_02, ex, ex);
            } else {
                LOG.error(Errors.ADLS_03.getMessage(), ex.toString(), ex);
                errorRecordHandler.onError(new OnRecordErrorException(record, Errors.ADLS_03, ex.toString(), ex));
            }
        } finally {
            try {
                if (generator != null) {
                    generator.close();
                }
                if (stream != null) {
                    stream.close();
                }
            } catch (IOException ex2) {
                // no-op
                LOG.error("fail to close stream or generator: {}. reason: {}", ex2.toString(), ex2);
            }
        }
    }
}