List of usage examples for java.util.stream.Collectors.toMap
public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends U> valueMapper)
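Before the examples from real projects below, here is a minimal, self-contained sketch of what this two-argument overload does (the word list and class name are illustrative only): keyMapper derives each entry's key and valueMapper derives its value. Note that this overload has no merge function and throws an IllegalStateException if two stream elements map to the same key.

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ToMapBasicExample {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("stream", "collector", "map");

        // keyMapper: the word itself; valueMapper: its length.
        Map<String, Integer> lengthByWord = words.stream()
                .collect(Collectors.toMap(Function.identity(), String::length));

        System.out.println(lengthByWord); // e.g. {map=3, stream=6, collector=9} (iteration order not guaranteed)
    }
}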
From source file:fi.helsinki.opintoni.service.EventService.java
private Map<String, CoursePageCourseImplementation> getCoursePages(List<OodiEvent> oodiEvents, List<String> courseIds) {
    return Stream.concat(getOodiEventCourseIds(oodiEvents), courseIds.stream())
        .distinct()
        .collect(Collectors.toMap(realisationId -> realisationId, coursePageClient::getCoursePage));
}
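The distinct() call above matters: the two-argument toMap throws IllegalStateException on duplicate keys. A small sketch (with made-up ids) showing the collision and how de-duplicating the stream first avoids it:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class DuplicateKeyExample {
    public static void main(String[] args) {
        List<String> ids = Arrays.asList("A1", "B2", "A1");

        // Without distinct() this would throw: IllegalStateException: Duplicate key A1
        // Map<String, Integer> broken = ids.stream()
        //         .collect(Collectors.toMap(Function.identity(), String::length));

        // De-duplicating the keys first, as getCoursePages does, avoids the collision.
        Map<String, Integer> ok = ids.stream()
                .distinct()
                .collect(Collectors.toMap(Function.identity(), String::length));

        System.out.println(ok); // {A1=2, B2=2}
    }
}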
From source file:com.marand.thinkmed.medications.business.MedicationsFinderImpl.java
@Override
public List<TreeNodeData> findSimilarMedications(final long medicationId, @Nonnull final List<Long> routeIds, @Nonnull final DateTime when) {
    Preconditions.checkNotNull(routeIds, "routeIds must not be null");
    Preconditions.checkNotNull(when, "when must not be null");
    final Set<Long> similarMedicationsIds = medicationsDao.findSimilarMedicationsIds(medicationId, routeIds, when);
    final Map<Long, MedicationHolderDto> similarMedicationsMap = similarMedicationsIds.stream()
        .map(i -> medicationsValueHolder.getValue().get(i))
        .filter(m -> m != null)
        .collect(Collectors.toMap(MedicationHolderDto::getId, m -> m));
    return buildMedicationsTree(similarMedicationsMap);
}
From source file:com.vsct.dt.hesperides.templating.platform.PropertiesData.java
public MustacheScope toMustacheScope(Set<KeyValueValorisationData> instanceValorisations, Set<KeyValueValorisationData> platformValorisations, Boolean buildingFile) {
    if (instanceValorisations == null) {
        instanceValorisations = new HashSet<>();
    }
    if (platformValorisations == null) {
        platformValorisations = new HashSet<>();
    }
    HashSet<ValorisationData> valorisations = new HashSet<>();
    valorisations.addAll(keyValueProperties);
    valorisations.addAll(iterableProperties);
    if (platformValorisations != null) {
        /* addAll doesn't replace existing values, but we want it to, so iterate */
        for (KeyValueValorisationData v : platformValorisations) {
            // Remove the local valorisation if it exists (i.e. has the same name, even if the value is different)
            for (Iterator<ValorisationData> it = valorisations.iterator(); it.hasNext();) {
                ValorisationData existingValorisation = it.next();
                if (existingValorisation.getName().equals(v.getName())) {
                    it.remove();
                }
            }
            valorisations.add(v);
        }
    }
    /* Prepare what will be injected in the values */
    Map<String, String> injectableKeyValueValorisations = keyValueProperties.stream()
        .collect(Collectors.toMap(ValorisationData::getName, KeyValueValorisationData::getValue));
    Map<String, String> injectablePlatformValorisations = platformValorisations.stream()
        .collect(Collectors.toMap(ValorisationData::getName, KeyValueValorisationData::getValue));
    /* Override local valorisations with platform valorisations */
    injectableKeyValueValorisations.putAll(injectablePlatformValorisations);
    Map<String, String> injectableInstanceProperties = instanceValorisations.stream()
        .collect(Collectors.toMap(ValorisationData::getName, KeyValueValorisationData::getValue));
    injectableKeyValueValorisations.replaceAll((key, value) -> value.replace("$", "\\$"));
    injectableInstanceProperties.replaceAll((key, value) -> value.replace("$", "\\$"));
    InjectableMustacheScope injectable = MustacheScope.from(valorisations)
        /* First re-inject keyValueValorisations, so they can refer to themselves */
        .inject(injectableKeyValueValorisations)
        /* Do it a second time in case global properties were referring to themselves */
        .inject(injectableKeyValueValorisations)
        /* Finally inject instance valorisations */
        .inject(injectableInstanceProperties)
        /* Do it a third time in case instance properties were referring to global properties */
        .inject(injectableKeyValueValorisations);
    MustacheScope mustacheScope = injectable.create();
    if (mustacheScope.getMissingKeyValueProperties().size() > 0 && buildingFile) {
        Map<String, String> missing_valuation = new HashMap<>();
        Set<KeyValuePropertyModel> missing = mustacheScope.getMissingKeyValueProperties();
        missing.stream().forEach(prop -> {
            missing_valuation.put(prop.getName(), "");
        });
        mustacheScope = injectable.inject(missing_valuation).create();
    }
    return mustacheScope;
}
From source file:org.obiba.mica.search.CoverageQueryExecutor.java
/**
 * Extract hits from aggregations and merge them into the taxonomies descriptions.
 */
private Iterable<MicaSearch.TaxonomyCoverageDto> getCoverages(List<MicaSearch.AggregationResultDto> aggregations) {
    Map<String, Map<String, MicaSearch.TermsAggregationResultDto>> aggTermsTitlesMap = aggregations.stream()
        .collect(Collectors.toMap(MicaSearch.AggregationResultDto::getAggregation,
            a -> a.getExtension(MicaSearch.TermsAggregationResultDto.terms).stream()
                .collect(Collectors.toMap(MicaSearch.TermsAggregationResultDto::getKey, t -> t))));
    Map<String, List<BucketResult>> bucketResultsByTaxonomy = extractBucketResults(aggregations).stream()
        .collect(Collectors.groupingBy(BucketResult::getTaxonomy));
    Map<String, Map<String, Integer>> aggsMap = Maps.newHashMap();
    aggregations.forEach(agg -> {
        String name = agg.getAggregation();
        List<MicaSearch.TermsAggregationResultDto> results = agg.getExtension(MicaSearch.TermsAggregationResultDto.terms);
        if (results != null && !results.isEmpty() && isAttributeField(name)) {
            String key = name.replaceAll("^attributes-", "").replaceAll("-und$", "");
            if (!aggsMap.containsKey(key)) aggsMap.put(key, Maps.newHashMap());
            results.forEach(res -> aggsMap.get(key).put(res.getKey(), res.getCount()));
        }
    });
    List<MicaSearch.TaxonomyCoverageDto> coverages = Lists.newArrayList();
    getTaxonomies().stream().filter(taxonomy -> applyFilter(taxonomy))
        .forEach(taxonomy -> addTaxonomyCoverage(coverages, taxonomy, aggsMap,
            bucketResultsByTaxonomy.get(taxonomy.getName()), aggTermsTitlesMap));
    return coverages;
}
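The outer collector above keys by aggregation name and, for the value, runs a second toMap over that aggregation's term results. A stripped-down sketch of the same nested toMap shape, using a hypothetical Agg class instead of the protobuf DTOs:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class NestedToMapExample {

    // Hypothetical stand-in for an aggregation result holding named term buckets.
    static class Agg {
        final String name;
        final List<String> terms;
        Agg(String name, List<String> terms) { this.name = name; this.terms = terms; }
    }

    public static void main(String[] args) {
        List<Agg> aggs = Arrays.asList(
                new Agg("country", Arrays.asList("FI", "FR")),
                new Agg("language", Arrays.asList("fi", "fr", "sv")));

        // Outer toMap keys by aggregation name; its value mapper runs an inner
        // toMap over that aggregation's terms (here term -> term length).
        Map<String, Map<String, Integer>> termsByAgg = aggs.stream()
                .collect(Collectors.toMap(
                        agg -> agg.name,
                        agg -> agg.terms.stream()
                                .collect(Collectors.toMap(t -> t, String::length))));

        System.out.println(termsByAgg.get("country")); // {FI=2, FR=2} (ordering not guaranteed)
    }
}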
From source file:com.netflix.spinnaker.fiat.permissions.DefaultPermissionsResolver.java
@Override
@SuppressWarnings("unchecked")
public Map<String, UserPermission> resolve(@NonNull Collection<ExternalUser> users) {
    val userToRoles = getAndMergeUserRoles(users);
    return userToRoles.entrySet().stream().map(entry -> {
        String username = entry.getKey();
        Set<Role> userRoles = new HashSet<>(entry.getValue());
        return new UserPermission().setId(username).setRoles(userRoles).addResources(getResources(userRoles));
    }).collect(Collectors.toMap(UserPermission::getId, Function.identity()));
}
From source file:mtsar.processors.meta.ZenCrowd.java
private Map<Integer, Task> getTaskMap() {
    return taskDAO.listForStage(stage.getId()).stream()
        .collect(Collectors.toMap(Task::getId, Function.identity()));
}
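Keying a list of entities by their id, with Function.identity() as the value mapper, is probably the most common toMap idiom and appears in several other examples on this page. A standalone sketch with a hypothetical Task class:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class IndexByIdExample {

    // Hypothetical stand-in for the Task entity used above.
    static class Task {
        private final int id;
        private final String title;
        Task(int id, String title) { this.id = id; this.title = title; }
        int getId() { return id; }
        String getTitle() { return title; }
    }

    public static void main(String[] args) {
        List<Task> tasks = Arrays.asList(new Task(1, "annotate"), new Task(2, "review"));

        // Index the tasks by id: key = Task::getId, value = the task itself.
        Map<Integer, Task> byId = tasks.stream()
                .collect(Collectors.toMap(Task::getId, Function.identity()));

        System.out.println(byId.get(2).getTitle()); // review
    }
}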
From source file:io.wcm.caconfig.extensions.references.impl.ConfigurationReferenceProvider.java
@SuppressWarnings("null")
@Override
public List<com.day.cq.wcm.api.reference.Reference> findReferences(Resource resource) {
    if (!enabled) {
        return Collections.emptyList();
    }
    PageManager pageManager = resource.getResourceResolver().adaptTo(PageManager.class);
    Page contextPage = pageManager.getContainingPage(resource);
    if (contextPage == null) {
        return Collections.emptyList();
    }
    Map<String, ConfigurationMetadata> configurationMetadatas = new TreeMap<>(configurationManager.getConfigurationNames().stream()
        .collect(Collectors.toMap(configName -> configName, configName -> configurationManager.getConfigurationMetadata(configName))));
    List<com.day.cq.wcm.api.reference.Reference> references = new ArrayList<>();
    Set<String> configurationBuckets = new LinkedHashSet<>(configurationResourceResolverConfig.configBucketNames());
    for (String configurationName : configurationMetadatas.keySet()) {
        Iterator<Resource> configurationInheritanceChain = configurationResourceResolvingStrategy
            .getResourceInheritanceChain(resource, configurationBuckets, configurationName);
        Map<String, Page> referencePages = new LinkedHashMap<>();
        while (configurationInheritanceChain != null && configurationInheritanceChain.hasNext()) {
            Resource configurationResource = configurationInheritanceChain.next();
            // Get the page for the configuration resource - and all children (e.g. for config collections).
            // Collect in a map to eliminate duplicate pages.
            Page configPage = pageManager.getContainingPage(configurationResource);
            if (configPage != null) {
                referencePages.put(configPage.getPath(), configPage);
                Iterator<Page> deepChildren = configPage.listChildren(new PageFilter(false, true), true);
                while (deepChildren.hasNext()) {
                    Page configChildPage = deepChildren.next();
                    referencePages.put(configChildPage.getPath(), configChildPage);
                }
            }
        }
        // Generate references for each page (but not if the context page itself is included as well).
        referencePages.values().stream()
            .filter(item -> !StringUtils.equals(contextPage.getPath(), item.getPath()))
            .forEach(item -> references.add(toReference(resource, item, configurationMetadatas, configurationBuckets)));
    }
    log.debug("Found {} references for resource {}", references.size(), resource.getPath());
    return references;
}
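The example above gets sorted keys by copying the toMap result into a TreeMap. An alternative (not what this source file does) is the four-argument toMap overload, which takes a merge function and a map supplier:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.stream.Collectors;

public class SortedToMapExample {
    public static void main(String[] args) {
        List<String> configNames = Arrays.asList("site", "cache", "analytics");

        // Four-argument overload: the last argument supplies the target map implementation.
        // (a, b) -> a is the merge function, unused here because the names are unique.
        Map<String, Integer> byName = configNames.stream()
                .collect(Collectors.toMap(Function.identity(), String::length, (a, b) -> a, TreeMap::new));

        System.out.println(byName); // {analytics=9, cache=5, site=4} - sorted by key
    }
}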
From source file:com.yodle.vantage.functional.config.VantageFunctionalTest.java
protected Version validateVersion(Version expected, Version actual, boolean fetchActual) {
    assertEquals(expected.getComponent(), actual.getComponent());
    assertEquals(expected.getVersion(), actual.getVersion());
    compareResolvedDependencies(Sets.newHashSet(expected.getRequestedDependencies()), Sets.newHashSet(actual.getRequestedDependencies()));
    compareResolvedDependencies(expected.getResolvedDependencies(), actual.getResolvedDependencies());
    if (fetchActual) {
        expected.getResolvedDependencies().stream().forEach(d -> validateDependency(d, expected));
    } else {
        // validateDependency validates the dependency's dependents field, but we don't have that since we can't fetch
        // the dependency directly and that field isn't set on the nested dependency version
        Map<Version, Dependency> actualDependenciesByVersion = actual.getResolvedDependencies().stream()
            .collect(Collectors.toMap(Dependency::getVersion, d -> d));
        expected.getResolvedDependencies().stream()
            .forEach(d -> validateVersion(d.getVersion(), actualDependenciesByVersion.get(d.getVersion()).getVersion()));
    }
    return actual;
}
From source file:delfos.dataset.generated.modifieddatasets.pseudouser.PseudoUserRatingsDataset.java
@Override
public Map<Integer, RatingType> getUserRatingsRated(Integer idUser) throws UserNotFound {
    boolean containsKey = pseudoUsersById.containsKey(idUser);
    if (containsKey) {
        User pseudoUser = pseudoUsersById.get(idUser);
        Map<Integer, RatingType> pseudoUserRatings = pseudoUsersRatings.get(pseudoUser).entrySet().stream()
            .collect(Collectors.toMap(entry -> entry.getKey().getId(), entry -> entry.getValue()));
        return pseudoUserRatings;
    } else {
        return originalDatasetLoader.getRatingsDataset().getUserRatingsRated(idUser);
    }
}
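The snippet above re-keys an existing map by streaming its entry set and deriving a new key from each entry. A minimal sketch of the same re-keying idea with made-up data:

import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class RekeyMapExample {
    public static void main(String[] args) {
        // Original map keyed by a composite name; we want it keyed by the id-like prefix instead.
        Map<String, Double> ratingsByUserName = new HashMap<>();
        ratingsByUserName.put("user-42:alice", 4.5);
        ratingsByUserName.put("user-7:bob", 3.0);

        // Stream the entries and derive a new key from each one; values are kept as-is.
        Map<String, Double> ratingsByUserId = ratingsByUserName.entrySet().stream()
                .collect(Collectors.toMap(
                        entry -> entry.getKey().split(":")[0],
                        Map.Entry::getValue));

        System.out.println(ratingsByUserId); // {user-42=4.5, user-7=3.0} (ordering not guaranteed)
    }
}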
From source file:org.ligoj.app.plugin.prov.azure.in.ProvAzurePriceImportResource.java
/**
 * Install or update prices.
 *
 * @throws IOException When prices cannot be remotely read.
 */
public void install() throws IOException {
    final UpdateContext context = new UpdateContext();

    // Node is already persisted, install VM prices
    final Node node = nodeRepository.findOneExpected(ProvAzurePluginResource.KEY);
    context.setNode(node);
    nextStep(node, "initialize", 1);

    // The previously installed location cache. Key is the location AWS name
    context.setRegions(locationRepository.findAllBy(BY_NODE, node.getId()).stream()
        .collect(Collectors.toMap(INamableBean::getName, Function.identity())));

    // Proceed to the install
    installStoragePrices(context);
    installComputePrices(context);
    nextStep(node, "finalize", 0);
}