Usage examples for `com.google.common.collect.Multimap#keySet`:
Set<K> keySet();
From source file:io.github.swagger2markup.internal.document.PathsDocument.java
/** * Builds the paths section. Groups the paths either as-is, by tags or using regex. * * @param paths the Swagger paths/*from w w w.ja v a2 s . c om*/ */ private void buildsPathsSection(MarkupDocBuilder markupDocBuilder, Map<String, Path> paths) { List<PathOperation> pathOperations = PathUtils.toPathOperationsList(paths, getBasePath(), config.getOperationOrdering()); if (CollectionUtils.isNotEmpty(pathOperations)) { if (config.getPathsGroupedBy() == GroupBy.AS_IS) { pathOperations.forEach(operation -> buildOperation(markupDocBuilder, operation, config)); } else if (config.getPathsGroupedBy() == GroupBy.TAGS) { Validate.notEmpty(context.getSwagger().getTags(), "Tags must not be empty, when operations are grouped by tags"); // Group operations by tag Multimap<String, PathOperation> operationsGroupedByTag = TagUtils .groupOperationsByTag(pathOperations, config.getOperationOrdering()); Map<String, Tag> tagsMap = TagUtils.toSortedMap(context.getSwagger().getTags(), config.getTagOrdering()); tagsMap.forEach((String tagName, Tag tag) -> { markupDocBuilder.sectionTitleWithAnchorLevel2(WordUtils.capitalize(tagName), tagName + "_resource"); String description = tag.getDescription(); if (StringUtils.isNotBlank(description)) { markupDocBuilder.paragraph(description); } operationsGroupedByTag.get(tagName) .forEach(operation -> buildOperation(markupDocBuilder, operation, config)); }); } else if (config.getPathsGroupedBy() == GroupBy.REGEX) { Validate.notNull(config.getHeaderPattern(), "Header regex pattern must not be empty when operations are grouped using regex"); Pattern headerPattern = config.getHeaderPattern(); Multimap<String, PathOperation> operationsGroupedByRegex = RegexUtils .groupOperationsByRegex(pathOperations, headerPattern); Set<String> keys = operationsGroupedByRegex.keySet(); String[] sortedHeaders = RegexUtils.toSortedArray(keys); for (String header : sortedHeaders) { markupDocBuilder.sectionTitleWithAnchorLevel2(WordUtils.capitalize(header), header + 
"_resource"); operationsGroupedByRegex.get(header) .forEach(operation -> buildOperation(markupDocBuilder, operation, config)); } } } }
From source file:com.tomtom.camera.api.v2.VideoCapabilitiesV2.java
/**
 * Builds the list of {@link VideoModeSetting}s for the given video mode and stores it
 * in {@code mVideoModeSettingsMap}.
 * <p>
 * The set of resolutions is taken from the keys of {@code framerates}; one setting is
 * produced per resolution, in sorted resolution order. Framerates and intervals are
 * sorted per resolution; slow-motion rates and fields of view are passed through as-is.
 *
 * @param mode            the video mode the settings belong to
 * @param framerates      supported framerates per resolution (keys define the resolutions)
 * @param intervals       supported intervals per resolution
 * @param slowMotionRates supported slow-motion rates per resolution
 * @param fieldOfViews    supported fields of view per resolution
 */
private void createVideoModeSettings(Video.Mode mode, Multimap<Resolution, Framerate> framerates,
        Multimap<Resolution, Integer> intervals, Multimap<Resolution, Integer> slowMotionRates,
        Multimap<Resolution, String> fieldOfViews) {
    ArrayList<VideoModeSetting> settings = new ArrayList<>();
    ArrayList<Resolution> sortedResolutions = new ArrayList<>(framerates.keySet());
    Collections.sort(sortedResolutions);
    for (Resolution resolution : sortedResolutions) {
        ArrayList<Framerate> sortedFramerates = new ArrayList<>(framerates.get(resolution));
        Collections.sort(sortedFramerates);
        ArrayList<Integer> sortedIntervals = new ArrayList<Integer>(intervals.get(resolution));
        Collections.sort(sortedIntervals);
        // Slow-motion rates and fields of view are intentionally left unsorted.
        settings.add(new VideoModeSetting(resolution, sortedFramerates, sortedIntervals,
                new ArrayList<Integer>(slowMotionRates.get(resolution)),
                new ArrayList<String>(fieldOfViews.get(resolution))));
    }
    mVideoModeSettingsMap.put(mode, settings);
}
From source file:org.eclipse.incquery.viewmodel.traceablilty.generic.GenericReferencedPQuery.java
/**
 * Creates a referenced query wrapping {@code baseQuery}, extending the base query's
 * parameter list with the keys of {@code traceSources}.
 *
 * @param baseQuery      the query being referenced
 * @param traceSources   maps each additional (trace) parameter to its source parameters;
 *                       its key set is appended to the base query's parameters
 * @param traceIds       trace identifier for each parameter
 * @param traceabilityId identifier of this traceability configuration
 * @throws QueryInitializationException if query initialization fails
 */
public GenericReferencedPQuery(PQuery baseQuery, Multimap<PParameter, PParameter> traceSources,
        Map<PParameter, String> traceIds, String traceabilityId) throws QueryInitializationException {
    this.baseQuery = baseQuery;
    // Base parameters first, then one extra parameter per trace-source key.
    this.parameters = Lists.newArrayList(baseQuery.getParameters());
    this.parameters.addAll(traceSources.keySet());
    this.traceSources = traceSources;
    this.traceIds = traceIds;
    this.traceabilityId = traceabilityId;
    ensureInitialized();
}
From source file:io.github.infolis.algorithm.PatternApplier.java
List<String> getContextsForPatterns(Collection<InfolisPattern> patterns) { // for all patterns, retrieve documents in which they occur (using lucene) Multimap<String, InfolisPattern> filenamesForPatterns = getFilenamesForPatterns(patterns); List<String> textualReferences = new ArrayList<>(); // open each file once and search for all regex for which a corresponding (but more general) // lucene pattern has been found in it for (String fileUri : filenamesForPatterns.keySet()) { Collection<InfolisPattern> patternList = filenamesForPatterns.get(fileUri); List<String> patternURIs = getPatternUris(patternList); Execution regexExec = new Execution(); regexExec.getInputFiles().add(fileUri); regexExec.setPatterns(patternURIs); regexExec.setTags(getExecution().getTags()); regexExec.setUpperCaseConstraint(getExecution().isUpperCaseConstraint()); regexExec.setAlgorithm(RegexSearcher.class); regexExec.instantiateAlgorithm(this).run(); getExecution().setTextualReferences(regexExec.getTextualReferences()); textualReferences.addAll(regexExec.getTextualReferences()); }//from ww w .j a v a 2s. c o m return textualReferences; }
From source file:it.units.malelab.ege.cfggp.operator.StandardTreeCrossover.java
/**
 * Standard subtree crossover: copies both parents, picks a non-terminal symbol present
 * in both trees, and swaps the children of one randomly chosen subtree rooted at that
 * symbol in each copy, subject to the {@code maxDepth} limit.
 * Returns {@code null} when no common non-terminal exists or no depth-respecting swap
 * could be found.
 */
@Override
public List<Node<T>> apply(List<Node<T>> parents, Random random) {
    // Deep-copy the parents so the originals are not mutated by the swap.
    Node<T> child1 = new Node<>(parents.get(0));
    Node<T> child2 = new Node<>(parents.get(1));
    child1.propagateParentship();
    child2.propagateParentship();
    // Build maps of leaf-subtrees: non-terminal symbol -> subtrees rooted at it.
    Multimap<T, Node<T>> child1subtrees = ArrayListMultimap.create();
    Multimap<T, Node<T>> child2subtrees = ArrayListMultimap.create();
    populateMultimap(child1, child1subtrees);
    populateMultimap(child2, child2subtrees);
    // Build the set of non-terminals common to both children.
    List<T> nonTerminals = new ArrayList<>();
    nonTerminals.addAll(child1subtrees.keySet());
    nonTerminals.retainAll(child2subtrees.keySet());
    if (nonTerminals.isEmpty()) {
        return null; // no common crossover point exists
    }
    Collections.shuffle(nonTerminals, random);
    // Iterate (just once, if successful) on non-terminals, trying randomized subtree
    // pairs until one swap satisfies the depth constraint on both children.
    boolean done = false;
    for (T chosenNonTerminal : nonTerminals) {
        List<Node<T>> subtrees1 = new ArrayList<>(child1subtrees.get(chosenNonTerminal));
        List<Node<T>> subtrees2 = new ArrayList<>(child2subtrees.get(chosenNonTerminal));
        Collections.shuffle(subtrees1, random);
        Collections.shuffle(subtrees2, random);
        for (Node<T> subtree1 : subtrees1) {
            for (Node<T> subtree2 : subtrees2) {
                // Each swapped-in subtree must still fit under maxDepth at its new position.
                if ((subtree1.getAncestors().size() + subtree2.depth() <= maxDepth)
                        && (subtree2.getAncestors().size() + subtree1.depth() <= maxDepth)) {
                    // Swap the children of the two subtree roots in place.
                    List<Node<T>> swappingChildren = new ArrayList<>(subtree1.getChildren());
                    subtree1.getChildren().clear();
                    subtree1.getChildren().addAll(subtree2.getChildren());
                    subtree2.getChildren().clear();
                    subtree2.getChildren().addAll(swappingChildren);
                    done = true;
                    break;
                }
            }
            if (done) {
                break;
            }
        }
        if (done) {
            break;
        }
    }
    if (!done) {
        return null; // no legal swap found
    }
    List<Node<T>> children = new ArrayList<>(2);
    children.add(child1);
    children.add(child2);
    return children;
}
From source file:com.b2international.snowowl.snomed.datastore.internal.id.reservations.UniqueInStoreReservation.java
/**
 * Returns the subset of the given identifiers for which a component with the same id
 * already exists in the store.
 *
 * @param identifiers the candidate identifiers to check
 * @return the identifiers that matched an existing component
 * @throws NotImplementedException for component categories without a search request
 */
@Override
public Set<SnomedIdentifier> intersection(Set<SnomedIdentifier> identifiers) {
    // Bucket incoming identifiers by component category so each category can be
    // checked with its dedicated search request type.
    final Multimap<ComponentCategory, String> identifiersByCategory = HashMultimap.create();
    identifiers.forEach(identifier ->
            identifiersByCategory.put(identifier.getComponentCategory(), identifier.toString()));
    final ImmutableSet.Builder<SnomedIdentifier> matches = ImmutableSet.builder();
    for (ComponentCategory category : identifiersByCategory.keySet()) {
        final Collection<String> identifiersToCheck = identifiersByCategory.get(category);
        final SnomedSearchRequestBuilder<?, ? extends PageableCollectionResource<?>> searchRequest;
        switch (category) {
        case CONCEPT:
            searchRequest = SnomedRequests.prepareSearchConcept();
            break;
        case DESCRIPTION:
            searchRequest = SnomedRequests.prepareSearchDescription();
            break;
        case RELATIONSHIP:
            searchRequest = SnomedRequests.prepareSearchRelationship();
            break;
        default:
            throw new NotImplementedException("Cannot check whether components of type '%s' are unique.",
                    category);
        }
        // Every id that comes back from the search already exists, hence it is part of
        // the intersection.
        final PageableCollectionResource<?> results = searchRequest.all().filterByIds(identifiersToCheck)
                .setFields(SnomedComponentDocument.Fields.ID)
                .build(SnomedDatastoreActivator.REPOSITORY_UUID, BranchPathUtils.createMainPath().getPath())
                .execute(bus.get()).getSync();
        results.stream().filter(SnomedComponent.class::isInstance).map(SnomedComponent.class::cast)
                .map(SnomedComponent::getId).map(SnomedIdentifiers::create).forEach(matches::add);
    }
    return matches.build();
}
From source file:org.apache.brooklyn.entity.group.DynamicMultiGroupImpl.java
@Override public void distributeEntities() { synchronized (memberChangeMutex) { Function<Entity, String> bucketFunction = getConfig(BUCKET_FUNCTION); EntitySpec<? extends BasicGroup> bucketSpec = getConfig(BUCKET_SPEC); if (bucketFunction == null || bucketSpec == null) return; Map<String, BasicGroup> buckets = MutableMap.copyOf(getAttribute(BUCKETS)); // Bucketize the members where the function gives a non-null bucket Multimap<String, Entity> entityMapping = Multimaps.index( Iterables.filter(getMembers(), Predicates.compose(Predicates.notNull(), bucketFunction)), bucketFunction);/* www. j a v a 2 s .c om*/ // Now fill the buckets for (String name : entityMapping.keySet()) { BasicGroup bucket = buckets.get(name); if (bucket == null) { bucket = addChild(EntitySpec.create(bucketSpec).displayName(name)); buckets.put(name, bucket); } bucket.setMembers(entityMapping.get(name)); } // Remove any now-empty buckets Set<String> empty = ImmutableSet.copyOf(Sets.difference(buckets.keySet(), entityMapping.keySet())); for (String name : empty) { Group removed = buckets.remove(name); removeChild(removed); Entities.unmanage(removed); } // Save the bucket mappings sensors().set(BUCKETS, ImmutableMap.copyOf(buckets)); } }
From source file:com.opengamma.bbg.referencedata.MockReferenceDataProvider.java
@Override protected ReferenceDataProviderGetResult doBulkGet(ReferenceDataProviderGetRequest request) { if (_expectedFields.size() > 0) { for (String field : _expectedFields) { assertTrue(request.getFields().contains(field)); }/* w w w .j ava 2 s . c o m*/ } ReferenceDataProviderGetResult result = new ReferenceDataProviderGetResult(); for (String identifier : request.getIdentifiers()) { if (_mockDataMap.containsKey(identifier)) { // known security ReferenceData refData = new ReferenceData(identifier); MutableFudgeMsg msg = OpenGammaFudgeContext.getInstance().newMessage(); Multimap<String, String> fieldMap = _mockDataMap.get(identifier); if (fieldMap != null) { // security actually has data for (String field : request.getFields()) { Collection<String> values = fieldMap.get(field); assertTrue("Field not found: " + field + " in " + fieldMap.keySet(), values.size() > 0); assertNotNull(values); for (String value : values) { if (value != null) { if (value.contains("=")) { MutableFudgeMsg submsg = OpenGammaFudgeContext.getInstance().newMessage(); submsg.add(StringUtils.substringBefore(value, "="), StringUtils.substringAfter(value, "=")); msg.add(field, submsg); } else { msg.add(field, value); } } } } } refData.setFieldValues(msg); result.addReferenceData(refData); } else { // security wasn't marked as known fail("Security not found: " + identifier + " in " + _mockDataMap.keySet()); } } return result; }
From source file:brooklyn.entity.group.DynamicMultiGroupImpl.java
@Override public void distributeEntities() { synchronized (memberChangeMutex) { Function<Entity, String> bucketFunction = getConfig(BUCKET_FUNCTION); EntitySpec<? extends BasicGroup> bucketSpec = getConfig(BUCKET_SPEC); if (bucketFunction == null || bucketSpec == null) return; Map<String, BasicGroup> buckets = MutableMap.copyOf(getAttribute(BUCKETS)); // Bucketize the members where the function gives a non-null bucket Multimap<String, Entity> entityMapping = Multimaps.index( Iterables.filter(getMembers(), Predicates.compose(Predicates.notNull(), bucketFunction)), bucketFunction);//from www.j a v a 2 s .c om // Now fill the buckets for (String name : entityMapping.keySet()) { BasicGroup bucket = buckets.get(name); if (bucket == null) { bucket = addChild(EntitySpec.create(bucketSpec).displayName(name)); Entities.manage(bucket); buckets.put(name, bucket); } bucket.setMembers(entityMapping.get(name)); } // Remove any now-empty buckets Set<String> empty = ImmutableSet.copyOf(Sets.difference(buckets.keySet(), entityMapping.keySet())); for (String name : empty) { Group removed = buckets.remove(name); removeChild(removed); Entities.unmanage(removed); } // Save the bucket mappings setAttribute(BUCKETS, ImmutableMap.copyOf(buckets)); } }
From source file:org.sonar.plugins.csharp.CSharpSensor.java
void analyze(boolean includeRules, SensorContext context) { if (includeRules) { LOG.warn("***********************************************************************************"); LOG.warn("* Use MSBuild 14 to get the best analysis results *"); LOG.warn("* The use of MSBuild 12 or the sonar-scanner to analyze C# projects is DEPRECATED *"); LOG.warn("***********************************************************************************"); Multimap<String, RuleKey> activeRoslynRulesByPartialRepoKey = RoslynProfileExporter .activeRoslynRulesByPartialRepoKey( context.activeRules().findAll().stream().map(ActiveRule::ruleKey).collect(toList())); if (activeRoslynRulesByPartialRepoKey.keySet().size() > 1) { throw new IllegalArgumentException( "Custom and 3rd party Roslyn analyzers are only by MSBuild 14. Either use MSBuild 14, or disable the custom/3rd party Roslyn analyzers in your quality profile."); }//from www. j ava 2 s . com } String analysisSettings = AnalysisInputXml.generate(true, settings.getBoolean("sonar.cs.ignoreHeaderComments"), includeRules, context, CSharpSonarRulesDefinition.REPOSITORY_KEY, CSharpPlugin.LANGUAGE_KEY, context.fileSystem().encoding().name()); Path analysisInput = toolInput(context.fileSystem()); Path analysisOutput = protobufReportPathForMSBuild12(context); try { Files.write(analysisInput, analysisSettings.getBytes(StandardCharsets.UTF_8)); } catch (IOException e) { throw new IllegalStateException(e); } File executableFile = extractor.executableFile(CSharpPlugin.LANGUAGE_KEY); Command command = Command.create(executableFile.getAbsolutePath()) .addArgument(analysisInput.toAbsolutePath().toString()) .addArgument(analysisOutput.toAbsolutePath().toString()).addArgument(CSharpPlugin.LANGUAGE_KEY); int exitCode = CommandExecutor.create().execute(command, new LogInfoStreamConsumer(), new LogErrorStreamConsumer(), Integer.MAX_VALUE); if (exitCode != 0) { throw new IllegalStateException("The .NET analyzer failed with exit code: " + exitCode + " - Verify 
that the .NET Framework version 4.5.2 at least is installed."); } }