List of usage examples for java.util.stream Collectors toMap
public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends U> valueMapper)
From source file: it.reply.orchestrator.service.CmdbServiceImpl.java
/**
 * Enriches the given {@link CloudProvider} with data retrieved from the CMDB:
 * provider data, per-service data, one-provider storage services and, for each
 * compute service, the list of available images.
 *
 * <p>NOTE(review): this method mutates {@code cp} in place and also returns it;
 * callers may rely on either convention.
 *
 * @param cp the provider to fill; presumably its id is already set — verify against callers.
 * @return the same {@code cp} instance, enriched.
 */
@Override
public CloudProvider fillCloudProviderInfo(CloudProvider cp) {
    // Get provider's data
    cp.setCmdbProviderData(getProviderById(cp.getId()));
    // The provider name is taken from the CMDB record id, not from a display name.
    cp.setName(cp.getCmdbProviderData().getId());
    // Index every CMDB service of this provider by service id for O(1) lookup below.
    Map<String, CloudService> allServices = getServicesByProvider(cp.getId()).stream()
            .collect(Collectors.toMap(CloudService::getId, Function.identity()));
    // Get provider's services' data: refresh each pre-registered service entry,
    // preferring the bulk listing and falling back to a per-id CMDB fetch.
    for (Map.Entry<String, CloudService> serviceEntry : cp.getCmdbProviderServices().entrySet()) {
        if (allServices.containsKey(serviceEntry.getKey())) {
            serviceEntry.setValue(allServices.get(serviceEntry.getKey()));
        } else {
            // Not present in the provider listing; fetch it individually.
            serviceEntry.setValue(getServiceById(serviceEntry.getKey()));
        }
    }
    // One-provider storage services are always attached, even if they were not
    // among the pre-registered service ids.
    for (CloudService service : allServices.values()) {
        if (service.isOneProviderStorageService()) {
            cp.getCmdbProviderServices().put(service.getId(), service);
        }
    }
    // FIXME Get other data (i.e. OneData, Images mapping, etc)
    // Get images for provider (requires to know the compute service)
    // FIXME: What if there are multiple compute service for a provider (remember that those are
    // SLAM given)?
    // NOTE(review): "getCmbdProviderServicesByType" is the project API's spelling (Cmbd vs Cmdb).
    List<CloudService> imageServices = cp.getCmbdProviderServicesByType(Type.COMPUTE);
    for (CloudService imageService : imageServices) {
        if (imageService != null) {
            LOG.debug("Retrieving image list for service <{}> of provider <{}>", imageService.getId(),
                    cp.getId());
            cp.addCmdbCloudServiceImages(imageService.getId(),
                    getImagesByService(imageService.getId()).stream()
                            .map(e -> e.getData()).collect(Collectors.toList()));
        } else {
            LOG.debug("No image service to retrieve image list from for provider <{}>", cp.getId());
        }
    }
    return cp;
}
From source file: org.ligoj.app.plugin.prov.azure.in.ProvAzurePriceImportResource.java
/**
 * Install storage prices from the JSON catalog provided by the Azure pricing API.
 *
 * @param context The update context.
 * @throws IOException When the remote price catalog cannot be retrieved or parsed.
 */
private void installStoragePrices(final UpdateContext context) throws IOException {
    final Node node = context.getNode();
    log.info("Azure managed-disk prices...");
    nextStep(node, "managed-disk-initialize", 1);
    // The previously installed storage types cache. Key is the storage type name
    context.setStorageTypes(stRepository.findAllBy(BY_NODE, node.getId()).stream()
            .collect(Collectors.toMap(INamableBean::getName, Function.identity())));
    // Cache the previously installed prices, grouped by type then by location.
    context.setPreviousStorages(new HashMap<>());
    spRepository.findAllBy("type.node.id", node.getId()).forEach(p -> context.getPreviousStorages()
            .computeIfAbsent(p.getType(), t -> new HashMap<>()).put(p.getLocation(), p));
    // Fetch the remote prices stream
    nextStep(node, "managed-disk-retrieve-catalog", 1);
    try (CurlProcessor curl = new CurlProcessor()) {
        // Fall back to an empty JSON object when the endpoint returns nothing.
        final String rawJson = StringUtils.defaultString(curl.get(getManagedDiskApi()), "{}");
        final ManagedDisks prices = objectMapper.readValue(rawJson, ManagedDisks.class);
        // Add region as needed
        nextStep(node, "managed-disk-update-catalog", 1);
        prices.getRegions().stream().filter(this::isEnabledRegion).forEach(r -> installRegion(context, r));
        // Update or install storage price. "transactions" is a pseudo-offer holding
        // transaction prices, not a disk offer, so it is extracted then excluded.
        final Map<String, ManagedDisk> offers = prices.getOffers();
        context.setTransactions(offers.getOrDefault("transactions", new ManagedDisk()).getPrices());
        offers.entrySet().stream().filter(p -> !"transactions".equals(p.getKey()))
                .forEach(o -> installStoragePrice(context, o));
    }
}
From source file: com.liferay.apio.architect.impl.jaxrs.json.reader.MultipartBodyMessageBodyReader.java
/**
 * Flattens a map of index-keyed value maps into a map of lists, ordered by key.
 *
 * <p>Bug fix: the original sorted the entry stream with {@code comparingByKey()}
 * but collected into {@code Collectors.toMap}'s default {@code HashMap}, which
 * discards encounter order. Collecting into a {@link LinkedHashMap} preserves
 * the sorted key order in the returned map.
 *
 * @param indexedValueLists map of key to (index to value); each inner map's
 *                          values are flattened into a list.
 * @return a map from each key to the list of its values, keys in sorted order.
 */
private <T> Map<String, List<T>> _flattenMap(Map<String, Map<Integer, T>> indexedValueLists) {
    Set<Entry<String, Map<Integer, T>>> entries = indexedValueLists.entrySet();

    Stream<Entry<String, Map<Integer, T>>> stream = entries.stream();

    return stream.sorted(comparingByKey()).collect(Collectors.toMap(Entry::getKey, v -> {
        Map<Integer, T> map = v.getValue();

        return new ArrayList<>(map.values());
    // Keys come from a map's entrySet, so duplicates are impossible; the merge
    // function only exists to reach the map-factory overload.
    }, (first, second) -> first, LinkedHashMap::new));
}
From source file: io.syndesis.model.WithConfigurationProperties.java
/**
 * Filters the properties that the {@link Connector} considers sensitive / secret.
 *
 * @param properties The specified configuration.
 * @param valueConverter A {@link Function} applied to each {@link Entry} of the configuration
 *                       to produce the stored value.
 * @return A map with just the sensitive data.
 */
default Map<String, String> filterSecrets(Map<String, String> properties,
        Function<Entry<String, String>, String> valueConverter) {
    // Keep only the secret entries, then map each entry's key to the converted value.
    return properties.entrySet()
            .stream()
            .filter(isSecret())
            .collect(Collectors.toMap(Entry::getKey, valueConverter));
}
From source file: net.krotscheck.stk.filterColumn.FilterColumnBolt.java
/** * Whenever the provided streams are changed, this method is invoked to * trigger the component to recalculate the emitted streams. * * @param providedStreams The number of streams provided to this component. * @return A set of emitted streams.//from w w w . j a va 2 s .c o m */ @Override protected Collection<Stream> calculateEmittedStreams(final Collection<Stream> providedStreams) { List<Stream> emitted = new ArrayList<>(); for (Stream provided : providedStreams) { Stream.Builder streamBuilder = new Stream.Builder(provided.getStreamId()); // Filter out whatever we don't have. Map<String, Type> filtered = provided.getSchema().entrySet().stream() .filter(p -> requestedColumns.contains(p.getKey())) .collect(Collectors.toMap(Entry::getKey, Entry::getValue)); // Insert everything we don't have... for (String key : requestedColumns) { if (!filtered.containsKey(key)) { filtered.put(key, Type.STRING); } } streamBuilder.addSchemaFields(filtered); emitted.add(streamBuilder.build()); } return Collections.unmodifiableCollection(emitted); }
From source file: io.syndesis.model.WithProperties.java
/**
 * Filters the properties that the {@link Connector} considers sensitive / secret.
 *
 * @param properties The specified configuration.
 * @param valueConverter A {@link Function} applied to each {@link Map.Entry} of the
 *                       configuration to produce the stored value.
 * @return A map with just the sensitive data.
 */
default Map<String, String> filterSecrets(Map<String, String> properties,
        Function<Map.Entry<String, String>, String> valueConverter) {
    // Keep only the secret entries, then map each entry's key to the converted value.
    return properties.entrySet()
            .stream()
            .filter(isSecret())
            .collect(Collectors.toMap(Map.Entry::getKey, valueConverter));
}
From source file: org.openlmis.fulfillment.web.util.BasicOrderDtoBuilder.java
/**
 * Looks up the processing periods referenced by the given orders.
 *
 * @param orders orders whose processing-period ids are collected.
 * @return a map from period id to the corresponding {@code ProcessingPeriodDto}.
 */
private Map<UUID, ProcessingPeriodDto> getPeriods(List<Order> orders) {
    // Gather the distinct period ids, fetch them in one bulk call, and index by id.
    return periodReferenceDataService
            .findByIds(orders.stream()
                    .map(Order::getProcessingPeriodId)
                    .collect(Collectors.toSet()))
            .stream()
            .collect(Collectors.toMap(BaseDto::getId, Function.identity()));
}
From source file: de.whs.poodle.repositories.StatisticsRepository.java
/**
 * Loads the statistics of one student for every exercise on a worksheet,
 * indexed by the exercise root id.
 *
 * @param studentId the student whose statistics are loaded.
 * @param worksheetId the worksheet whose exercises are considered.
 * @return a map from exercise root id to the student's {@code Statistic}.
 */
public Map<Integer, Statistic> getExerciseToStatisticMapForWorksheet(int studentId, int worksheetId) {
    // Native query: walk worksheet -> chapters -> exercises, then join the
    // per-student statistics view on the exercise root id.
    String sql = "SELECT s.* FROM worksheet ws " + "JOIN chapter c ON c.worksheet_id = ws.id "
            + "JOIN chapter_to_exercise ce ON ce.chapter_id = c.id "
            + "JOIN exercise e ON ce.exercise_id = e.id "
            + "JOIN v_statistic s ON s.exercise_root_id = e.root_id "
            + "WHERE ws.id = :worksheetId AND s.student_id = :studentId";

    // getResultList on a native query is untyped; the mapping class makes this safe.
    @SuppressWarnings("unchecked")
    List<Statistic> results = em.createNativeQuery(sql, Statistic.class)
            .setParameter("studentId", studentId)
            .setParameter("worksheetId", worksheetId)
            .getResultList();

    return results.stream().collect(Collectors.toMap(Statistic::getExerciseRootId, Function.identity()));
}
From source file: eu.fthevenet.binjr.sources.jrds.adapters.JrdsDataAdapter.java
/**
 * Builds the tree of time-series bindings by fetching and parsing the JRDS
 * JSON tree for the current tab/filter, then attaching each tree or filter
 * item under a synthetic root node.
 *
 * @return the root {@link TreeItem} of the binding tree.
 * @throws DataAdapterException when the JSON response cannot be parsed.
 * @throws SourceCommunicationException when the request URI cannot be built.
 */
@Override
public TreeItem<TimeSeriesBinding<Double>> getBindingTree() throws DataAdapterException {
    Gson gson = new Gson();
    try {
        JsonJrdsTree t = gson.fromJson(getJsonTree(treeViewTab.getCommand(), treeViewTab.getArgument(), filter),
                JsonJrdsTree.class);
        // Index all items by id so attachNode can resolve children without rescanning.
        Map<String, JsonJrdsItem> m = Arrays.stream(t.items).collect(Collectors.toMap(o -> o.id, (o -> o)));
        // Synthetic root representing the data source itself.
        TreeItem<TimeSeriesBinding<Double>> tree = new TreeItem<>(
                bindingFactory.of("", getSourceName(), "/", this));
        // Only top-level "tree" and "filter" items become branches; the rest are
        // reached through the id index while attaching.
        for (JsonJrdsItem branch : Arrays.stream(t.items).filter(
                jsonJrdsItem -> JRDS_TREE.equals(jsonJrdsItem.type) || JRDS_FILTER.equals(jsonJrdsItem.type))
                .collect(Collectors.toList())) {
            attachNode(tree, branch.id, m);
        }
        return tree;
    } catch (JsonParseException e) {
        throw new DataAdapterException(
                "An error occurred while parsing the json response to getBindingTree request", e);
    } catch (URISyntaxException e) {
        throw new SourceCommunicationException("Error building URI for request", e);
    }
}
From source file: com.fns.grivet.repo.JdbcEntityRepository.java
@Override public void deleteAll() { String entitySql = "DELETE FROM entity"; log.trace(String.format("JdbcEntityRepository.delete[sql=%s]", entitySql)); jdbcTemplate.execute(entitySql);//from www .ja v a 2s.c o m Collection<String> eavSql = Stream.of(AttributeType.values()).collect( Collectors.toMap(k -> k.getType(), v -> String.format("DELETE FROM entityav_%s", v.getType()))) .values(); for (String sql : eavSql) { log.trace(String.format("JdbcEntityRepository.delete[sql=%s]", sql)); jdbcTemplate.execute(sql); } }