List of usage examples for java.util.stream Collectors toMap
public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends U> valueMapper)
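For orientation, here is a minimal, self-contained sketch of this two-argument overload; the class name and values below are illustrative and not taken from any of the source files listed on this page. Note that this overload throws an IllegalStateException if two stream elements map to the same key.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ToMapBasicExample {
    public static void main(String[] args) {
        List<String> words = List.of("alpha", "beta", "gamma");

        // keyMapper: the word itself; valueMapper: its length
        Map<String, Integer> lengthByWord = words.stream()
                .collect(Collectors.toMap(word -> word, String::length));

        System.out.println(lengthByWord); // e.g. {alpha=5, beta=4, gamma=5}
    }
}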
From source file:controllers.VersionSpecificationController.java
private Map<String, String> selectLastQueryParams(final Map<String, String[]> queryString) {
    return queryString.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> {
        final int lastIndex = entry.getValue().length - 1;
        return entry.getValue()[lastIndex];
    }));
}
From source file:com.epam.catgenome.util.Utils.java
public static Map<String, Chromosome> makeChromosomeMap(Reference reference) {
    return reference.getChromosomes().stream()
            .collect(Collectors.toMap(BaseEntity::getName, chromosome -> chromosome));
}
From source file:org.ligoj.app.plugin.prov.aws.in.ProvAwsPriceImportResource.java
/**
 * Install S3 AWS prices from the CSV file. Note that only the first 50TB storage tier is considered.
 *
 * @param context
 *            The update context.
 */
private void installS3Prices(final UpdateContext context) throws IOException, URISyntaxException {
    log.info("AWS S3 prices ...");
    importCatalogResource.nextStep(context.getNode().getId(), t -> t.setPhase("s3"));

    // Track the created instances to cache partial costs
    final Map<String, ProvStoragePrice> previous = spRepository
            .findAllBy("type.node.id", context.getNode().getId()).stream()
            .filter(p -> p.getType().getName().startsWith("s3") || "glacier".equals(p.getType().getName()))
            .collect(Collectors.toMap(p2 -> p2.getLocation().getName() + p2.getType().getName(),
                    Function.identity()));
    context.setPreviousStorage(previous);
    context.setStorageTypes(previous.values().stream().map(ProvStoragePrice::getType).distinct()
            .collect(Collectors.toMap(ProvStorageType::getName, Function.identity())));
    context.setStorageTypesMerged(new HashMap<>());
    int priceCounter = 0;

    // Get the remote prices stream
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
            new URI(configuration.get(CONF_URL_S3_PRICES, S3_PRICES)).toURL().openStream()))) {
        // Pipe to the CSV reader
        final CsvForBeanS3 csvReader = new CsvForBeanS3(reader);

        // Build the AWS storage prices from the CSV
        AwsS3Price csv = null;
        do {
            // Read the next one
            csv = csvReader.read();
            if (csv == null) {
                // EOF
                break;
            }
            final ProvLocation location = getRegionByHumanName(context, csv.getLocation());
            if (location != null) {
                // Supported location
                instalS3Price(context, csv, location);
                priceCounter++;
            }
        } while (true);
    } finally {
        // Report
        log.info("AWS S3 finished : {} prices", priceCounter);
        nextStep(context, null, 1);
    }
}
From source file:io.pravega.client.stream.mock.MockController.java
@Override
public CompletableFuture<Map<Segment, Long>> getSegmentsAtTime(Stream stream, long timestamp) {
    return CompletableFuture
            .completedFuture(getSegmentsForStream(stream).stream().collect(Collectors.toMap(s -> s, s -> 0L)));
}
From source file:alfio.manager.system.ConfigurationManager.java
private static Map<ConfigurationKeys.SettingCategory, List<Configuration>> removeAlfioPISettingsIfNeeded(
        boolean offlineCheckInEnabled, Map<ConfigurationKeys.SettingCategory, List<Configuration>> settings) {
    if (offlineCheckInEnabled) {
        return settings;
    }
    return settings.entrySet().stream().filter(e -> e.getKey() != ConfigurationKeys.SettingCategory.ALFIO_PI)
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
From source file:fr.paris.lutece.portal.web.xsl.XslExportJspBeanTest.java
public void testGetConfirmRemoveXslExport() throws AccessDeniedException {
    MockHttpServletRequest request = new MockHttpServletRequest();
    AdminUser user = new AdminUser();
    user.setRoles(
            AdminRoleHome.findAll().stream().collect(Collectors.toMap(AdminRole::getKey, Function.identity())));
    Utils.registerAdminUserWithRigth(request, user, XslExportJspBean.RIGHT_MANAGE_XSL_EXPORT);
    request.setParameter("id_xsl_export", Integer.toString(_xslExport.getIdXslExport()));
    _instance.init(request, XslExportJspBean.RIGHT_MANAGE_XSL_EXPORT);

    _instance.getConfirmRemoveXslExport(request);

    AdminMessage message = AdminMessageService.getMessage(request);
    assertNotNull(message);
    assertTrue(message.getRequestParameters().containsKey(SecurityTokenService.PARAMETER_TOKEN));
}
From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_Buckets.java
/**
 * Gets a list of (key, modified) pairs from v1 and a matching list of (key, modified) pairs from V2 (i.e. _id minus ';')
 *
 * @param bucket_mgmt
 * @param source_db
 * @return tuple of id-vs-(date-or-null-if-not-approved) for v1, id-vs-date for v2
 */
protected static CompletableFuture<Tuple2<Map<String, String>, Map<String, Date>>> compareSourcesToBuckets_get(
        final IManagementCrudService<DataBucketBean> bucket_mgmt, final ICrudService<JsonNode> source_db) {
    // (could make this more efficient by having a regular "did something happen" query with a slower "get everything and resync")
    // (don't forget to add "modified" to the compound index though)
    CompletableFuture<Cursor<JsonNode>> f_v1_sources = source_db.getObjectsBySpec(
            CrudUtils.allOf().when("extractType", "V2DataBucket"),
            Arrays.asList("key", "modified", "isApproved"), true);

    return f_v1_sources.<Map<String, String>>thenApply(v1_sources -> {
        return StreamSupport.stream(v1_sources.spliterator(), false)
                .collect(Collectors.toMap(j -> safeJsonGet("key", j).asText(),
                        j -> safeJsonGet("isApproved", j).asBoolean() ? safeJsonGet("modified", j).asText()
                                : ""));
    }).<Tuple2<Map<String, String>, Map<String, Date>>>thenCompose(v1_key_datestr_map -> {
        final SingleQueryComponent<DataBucketBean> bucket_query = CrudUtils.allOf(DataBucketBean.class)
                .rangeIn(DataBucketBean::_id, "aleph...bucket.", true, "aleph...bucket/", true);

        return bucket_mgmt.getObjectsBySpec(bucket_query, Arrays.asList(JsonUtils._ID, "modified"), true)
                .<Tuple2<Map<String, String>, Map<String, Date>>>thenApply(c -> {
                    final Map<String, Date> v2_key_date_map = StreamSupport.stream(c.spliterator(), false)
                            .collect(Collectors.toMap(
                                    b -> getV1SourceKeyFromBucketId(b._id()), // (convert to v1 source key format)
                                    b -> b.modified()));
                    return Tuples._2T(v1_key_datestr_map, v2_key_date_map);
                });
    });
}
From source file:com.codelanx.codelanxlib.command.CommandNode.java
/**
 * Returns all {@link CommandNode} objects held by this node that are
 * aliases of other {@link CommandNode CommandNodes}
 *
 * @since 0.1.0
 * @version 0.1.0
 *
 * @return Any aliased {@link CommandNode} objects
 */
public final Map<String, CommandNode<? extends Plugin>> getAliases() {
    return this.subcommands.entrySet().stream()
            .filter(c -> this != c.getValue().getParent())
            .collect(Collectors.toMap(c -> this.getUsage() + " " + c.getKey(), c -> c.getValue()));
}
From source file:delfos.rs.trustbased.WeightedGraph.java
protected final Map<Integer, Node> makeNodesByIndex(Map<Node, Integer> nodesIndex) {
    return nodesIndex.entrySet().parallelStream()
            .collect(Collectors.toMap(entry -> entry.getValue(), entry -> entry.getKey()));
}
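A brief usage note on the inversion above: the two-argument toMap fails with an IllegalStateException when two entries produce the same key, so inverting a map this way assumes the index values are unique. A minimal standalone sketch under that assumption (the String keys and literal map below are illustrative, not from the delfos source):

import java.util.Map;
import java.util.stream.Collectors;

public class InvertMapExample {
    public static void main(String[] args) {
        // Stand-in for nodesIndex: every value (index) is unique, so the inversion is safe
        Map<String, Integer> nodesIndex = Map.of("a", 0, "b", 1, "c", 2);

        Map<Integer, String> nodesByIndex = nodesIndex.entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getValue, Map.Entry::getKey));

        System.out.println(nodesByIndex); // e.g. {0=a, 1=b, 2=c}
    }
}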
From source file:com.thinkbiganalytics.nifi.feedmgr.TemplateCreationHelper.java
private void mergeControllerServices(TemplateInstance templateInstance) {

    final Map<String, ControllerServiceDTO> map = new HashMap<String, ControllerServiceDTO>();
    final Map<String, List<ControllerServiceDTO>> serviceNameMap = new HashMap<>();

    // first use the snapshotted services as a baseline
    for (ControllerServiceDTO serviceDTO : snapshotControllerServices) {
        map.put(serviceDTO.getId(), serviceDTO);
        if (!serviceNameMap.containsKey(serviceDTO.getName())) {
            serviceNameMap.put(serviceDTO.getName(), new ArrayList<ControllerServiceDTO>());
        }
        serviceNameMap.get(serviceDTO.getName()).add(serviceDTO);
    }

    java.util.function.Predicate<ControllerServiceDTO> matchingServiceFilter = (cs) -> map.containsKey(cs.getId())
            || serviceNameMap.containsKey(cs.getName());

    List<ControllerServiceDTO> matchingControllerServices = newlyCreatedControllerServices.stream()
            .filter(matchingServiceFilter).collect(Collectors.toList());

    List<ControllerServiceDTO> unmatchedServices = newlyCreatedControllerServices.stream()
            .filter(matchingServiceFilter.negate()).collect(Collectors.toList());

    // if the service has additional propertyDescriptors that identify other services we need to fetch the service by its id.
    if (unmatchedServices != null && !unmatchedServices.isEmpty()) {
        Map<String, ControllerServiceDTO> updatedServices = unmatchedServices.stream().map(serviceToAdd -> {
            // if the service has additional propertyDescriptors that identify other services we need to fetch the service by its id
            if (serviceToAdd.getDescriptors() != null && serviceToAdd.getDescriptors().values().stream()
                    .anyMatch(propertyDescriptorDTO -> StringUtils
                            .isNotBlank(propertyDescriptorDTO.getIdentifiesControllerService()))) {
                try {
                    Optional<ControllerServiceDTO> cs = restClient.getNiFiRestClient().controllerServices()
                            .findById(serviceToAdd.getId());
                    if (cs.isPresent()) {
                        return cs.get();
                    } else {
                        return serviceToAdd;
                    }
                } catch (Exception e) {
                    return serviceToAdd;
                }
            } else {
                return serviceToAdd;
            }
        }).collect(Collectors.toMap(service -> service.getId(), service -> service));
        map.putAll(updatedServices);

        // update the core item
        newlyCreatedControllerServices = newlyCreatedControllerServices.stream().map(controllerServiceDTO -> {
            if (map.containsKey(controllerServiceDTO.getId())) {
                return updatedServices.get(controllerServiceDTO.getId());
            } else {
                return controllerServiceDTO;
            }
        }).collect(Collectors.toSet());
    }

    // if they match existing services, then delete the new ones
    if (matchingControllerServices != null && !matchingControllerServices.isEmpty()) {
        for (ControllerServiceDTO serviceToDelete : matchingControllerServices) {
            try {
                if (templateInstance != null) {
                    templateInstance.addDeletedServiceMapping(serviceToDelete.getId(),
                            serviceNameMap.get(serviceToDelete.getName()));
                }
                restClient.deleteControllerService(serviceToDelete.getId());
            } catch (NifiClientRuntimeException e) {
                log.error("Exception while attempting to mergeControllerServices. Unable to delete Service {}. {}",
                        serviceToDelete.getId(), e.getMessage());
            }
        }
    }

    mergedControllerServices = map;

    // validate
    // Create a map of the Controller Service name to the list of matching services
    this.serviceNameMap = mergedControllerServices.values().stream()
            .collect(Collectors.groupingBy(cs -> cs.getName()));

    this.enabledServiceNameMap = mergedControllerServices.values().stream()
            .filter(cs -> NifiProcessUtil.SERVICE_STATE.ENABLED.name().equalsIgnoreCase(cs.getState()))
            .collect(Collectors.groupingBy(cs -> cs.getName()));
}