Example usage for java.util.stream Collectors toMap

List of usage examples for java.util.stream Collectors toMap

Introduction

On this page you can find usage examples for java.util.stream Collectors toMap.

Prototype

public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper,
        Function<? super T, ? extends U> valueMapper) 

Document

Returns a Collector that accumulates elements into a Map whose keys and values are the result of applying the provided mapping functions to the input elements.
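
If the mapped keys contain duplicates (according to Object.equals), this two-argument overload throws an IllegalStateException when the collection operation is performed. A minimal, self-contained sketch of the basic usage, together with the three-argument overload whose merge function resolves key collisions (the word list is illustrative only):

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ToMapBasics {
    public static void main(String[] args) {
        List<String> words = List.of("alpha", "beta", "gamma", "gnu");

        // Key = the word itself, value = its length.
        Map<String, Integer> lengths = words.stream()
                .collect(Collectors.toMap(Function.identity(), String::length));
        System.out.println(lengths); // {gnu=3, alpha=5, beta=4, gamma=5} (order not guaranteed)

        // "gamma" and "gnu" share the key 'g'; the two-argument overload would
        // throw IllegalStateException here, so a merge function picks a winner.
        Map<Character, String> byInitial = words.stream()
                .collect(Collectors.toMap(w -> w.charAt(0), Function.identity(),
                        (first, second) -> first));
        System.out.println(byInitial); // {a=alpha, b=beta, g=gamma}
    }
}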

Usage

From source file:com.hurence.logisland.processor.elasticsearch.EnrichRecordsElasticsearch.java

/**
 * Processes the given records, enriching them with documents fetched from Elasticsearch.
 *
 * @param context the process context
 * @param records the records to process
 * @return the input records, enriched where a matching document was found
 */
@Override
public Collection<Record> process(final ProcessContext context, final Collection<Record> records) {
    if (records.isEmpty()) {
        return Collections.emptyList();
    }

    List<Triple<Record, String, IncludeFields>> recordsToEnrich = new ArrayList<>();
    MultiGetQueryRecordBuilder mgqrBuilder = new MultiGetQueryRecordBuilder();

    mgqrBuilder.excludeFields(excludesArray);

    for (Record record : records) {

        String recordKeyName = evaluatePropAsString(record, context, RECORD_KEY_FIELD);
        String indexName = evaluatePropAsString(record, context, ES_INDEX_FIELD);
        String typeName = evaluatePropAsString(record, context, ES_TYPE_FIELD);
        String includesFieldName = evaluatePropAsString(record, context, ES_INCLUDES_FIELD);

        if (recordKeyName != null && indexName != null && typeName != null) {
            try {
                // Includes :
                String[] includesArray = null;
                if ((includesFieldName != null) && (!includesFieldName.isEmpty())) {
                    includesArray = includesFieldName.split("\\s*,\\s*");
                }
                IncludeFields includeFields = new IncludeFields(includesArray);
                mgqrBuilder.add(indexName, typeName, includeFields.getAttrsToIncludeArray(), recordKeyName);
                recordsToEnrich.add(new ImmutableTriple<>(record,
                        asUniqueKey(indexName, typeName, recordKeyName), includeFields));
            } catch (Throwable t) {
                record.setStringField(FieldDictionary.RECORD_ERRORS, "Can not request ElasticSearch with "
                        + indexName + " " + typeName + " " + recordKeyName);
                getLogger().error(
                        "Can not request ElasticSearch with index: {}, type: {}, recordKey: {}, record id is :\n{}",
                        new Object[] { indexName, typeName, recordKeyName, record.getId() }, t);
            }
        } else {
            getLogger().warn(
                    "Can not request ElasticSearch with "
                            + "index: {}, type: {}, recordKey: {}, record id is :\n{}",
                    new Object[] { indexName, typeName, recordKeyName, record.getId() });
        }
    }

    List<MultiGetResponseRecord> multiGetResponseRecords = null;
    try {
        List<MultiGetQueryRecord> mgqrs = mgqrBuilder.build();
        if (mgqrs.isEmpty())
            return records;
        multiGetResponseRecords = elasticsearchClientService.multiGet(mgqrs);
    } catch (InvalidMultiGetQueryRecordException e) {
        getLogger().error("error while multiGet elasticsearch", e);
    }

    if (multiGetResponseRecords == null || multiGetResponseRecords.isEmpty()) {
        return records;
    }

    // Index the documents returned by ES in a Map keyed by their unique index/type/record key
    Map<String, MultiGetResponseRecord> responses = multiGetResponseRecords.stream()
            .collect(Collectors.toMap(EnrichRecordsElasticsearch::asUniqueKey, Function.identity()));

    recordsToEnrich.forEach(triple -> {
        Record outputRecord = triple.getLeft();
        String key = triple.getMiddle();
        IncludeFields includeFields = triple.getRight();

        MultiGetResponseRecord responseRecord = responses.get(key);
        if ((responseRecord != null) && (responseRecord.getRetrievedFields() != null)) {
            // Retrieve the fields from responseRecord that matches the ones in the recordToEnrich.
            responseRecord.getRetrievedFields().forEach((fieldName, v) -> {
                if (includeFields.includes(fieldName)) {
                    // Now check if there is an attribute mapping rule to apply
                    if (includeFields.hasMappingFor(fieldName)) {
                        String mappedAttributeName = includeFields.getAttributeToMap(fieldName);
                        // Replace the attribute name
                        outputRecord.setStringField(mappedAttributeName, v);
                    } else {
                        outputRecord.setStringField(fieldName, v);
                    }
                }
            });
        }
    });

    return records;
}
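
The collect call above shows the most common toMap idiom on this page: index a collection by a derived key, with Function.identity() as the value mapper, then use the map for constant-time lookups. A distilled, self-contained sketch of the same pattern (the Doc record and its uniqueKey method are hypothetical stand-ins for MultiGetResponseRecord and asUniqueKey):

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class IndexByKey {
    // Hypothetical stand-in for an Elasticsearch response record.
    record Doc(String index, String type, String id) {
        String uniqueKey() { return index + "/" + type + "/" + id; }
    }

    public static void main(String[] args) {
        List<Doc> docs = List.of(new Doc("logs", "event", "1"),
                new Doc("logs", "event", "2"));

        // Same shape as the responses map above: a key derived from the
        // element, the element itself as the value.
        Map<String, Doc> byKey = docs.stream()
                .collect(Collectors.toMap(Doc::uniqueKey, Function.identity()));

        System.out.println(byKey.get("logs/event/2")); // Doc[index=logs, type=event, id=2]
    }
}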

From source file:org.ligoj.app.plugin.prov.aws.in.ProvAwsPriceImportResource.java

/**
 * Install storage prices from the JSON file provided by AWS.
 *
 * @param context
 *            The update context.
 */
private void installStoragePrices(final UpdateContext context) throws IOException {
    // The previously installed storage types cache. Key is the storage name
    final Node node = context.getNode();
    context.setStorageTypes(installStorageTypes(context));

    // Install EBS prices
    installPrices(context, "ebs", configuration.get(CONF_URL_EBS_PRICES, EBS_PRICES), EbsPrices.class,
            (r, region) -> {
                // Get previous prices for this location
                final Map<Integer, ProvStoragePrice> previous = spRepository
                        .findAll(node.getId(), region.getName()).stream()
                        .collect(Collectors.toMap(p -> p.getType().getId(), Function.identity()));
                return (int) r.getTypes().stream().filter(t -> containsKey(context, t)).filter(
                        t -> t.getValues().stream().filter(j -> !"perPIOPSreq".equals(j.getRate())).anyMatch(
                                j -> install(j, context.getStorageTypes().get(t.getName()), region, previous)))
                        .count();
            });
}

From source file:com.netflix.spinnaker.front50.model.S3Support.java

/**
 * Fetch any previously cached applications that have been updated since last retrieved.
 *
 * @param existingItems Previously cached applications
 * @return Refreshed applications
 */
protected Set<T> fetchAllItems(Set<T> existingItems) {
    if (existingItems == null) {
        existingItems = new HashSet<>();
    }

    Long refreshTime = System.currentTimeMillis();

    ObjectListing bucketListing = amazonS3
            .listObjects(new ListObjectsRequest(bucket, rootFolder, null, null, 10000));
    List<S3ObjectSummary> summaries = bucketListing.getObjectSummaries();

    while (bucketListing.isTruncated()) {
        bucketListing = amazonS3.listNextBatchOfObjects(bucketListing);
        summaries.addAll(bucketListing.getObjectSummaries());
    }

    Map<String, S3ObjectSummary> summariesByName = summaries.stream().filter(this::filterS3ObjectSummary)
            .collect(Collectors.toMap(S3ObjectSummary::getKey, Function.identity()));

    Map<String, T> existingItemsByName = existingItems.stream()
            .filter(a -> summariesByName.containsKey(buildS3Key(a)))
            .collect(Collectors.toMap(Timestamped::getId, Function.identity()));

    summaries = summariesByName.values().stream().filter(s3ObjectSummary -> {
        String itemName = extractItemName(s3ObjectSummary);
        T existingItem = existingItemsByName.get(itemName);

        return existingItem == null || existingItem.getLastModified() == null
                || s3ObjectSummary.getLastModified().after(new Date(existingItem.getLastModified()));
    }).collect(Collectors.toList());

    Observable.from(summaries).buffer(10).flatMap(ids -> Observable.from(ids).flatMap(s3ObjectSummary -> {
        try {
            return Observable
                    .just(amazonS3.getObject(s3ObjectSummary.getBucketName(), s3ObjectSummary.getKey()));
        } catch (AmazonS3Exception e) {
            if (e.getStatusCode() == 404) {
                // an item has been removed between the time that object summaries were fetched and now
                existingItemsByName.remove(extractItemName(s3ObjectSummary));
                return Observable.empty();
            }

            throw e;
        }
    }).subscribeOn(scheduler)).map(s3Object -> {
        try {
            T item = deserialize(s3Object);
            item.setLastModified(s3Object.getObjectMetadata().getLastModified().getTime());
            return item;
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
    }).subscribeOn(scheduler).toList().toBlocking().single().forEach(item -> {
        existingItemsByName.put(item.getId().toLowerCase(), item);
    });

    existingItems = existingItemsByName.values().stream().collect(Collectors.toSet());
    this.lastRefreshedTime = refreshTime;
    return existingItems;
}
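
Here toMap is used twice to build two lookup tables that are then diffed by key. A distilled sketch of that diff-by-key pattern, under the assumption that items carry an id and a last-modified timestamp (the Item record is hypothetical):

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class DiffByKey {
    // Hypothetical stand-in for a Timestamped item.
    record Item(String id, long lastModified) {}

    public static void main(String[] args) {
        List<Item> remote = List.of(new Item("a", 200), new Item("b", 100));
        List<Item> cached = List.of(new Item("a", 100), new Item("b", 100));

        Map<String, Item> cachedById = cached.stream()
                .collect(Collectors.toMap(Item::id, Function.identity()));

        // Keep only remote items that are new or newer than the cached copy.
        List<Item> changed = remote.stream()
                .filter(r -> {
                    Item c = cachedById.get(r.id());
                    return c == null || r.lastModified() > c.lastModified();
                })
                .collect(Collectors.toList());

        System.out.println(changed); // [Item[id=a, lastModified=200]]
    }
}

As a side note, the final values()-to-Set collect in the original could equally be written as new HashSet<>(existingItemsByName.values()).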

From source file:com.uber.hoodie.io.strategy.TestHoodieCompactionStrategy.java

private List<HoodieCompactionOperation> createCompactionOperations(HoodieWriteConfig config,
        Map<Long, List<Long>> sizesMap) {
    Map<Long, String> keyToPartitionMap = sizesMap.entrySet().stream()
            .map(e -> Pair.of(e.getKey(), partitionPaths[new Random().nextInt(partitionPaths.length - 1)]))
            .collect(Collectors.toMap(Pair::getKey, Pair::getValue));
    return createCompactionOperations(config, sizesMap, keyToPartitionMap);
}
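
Here toMap rebuilds a Map from a stream of Pair elements via Pair::getKey and Pair::getValue. Two observations: the two-argument overload throws if two pairs share a key, and nextInt(partitionPaths.length - 1) can never select the last partition path, since the bound is exclusive (nextInt(partitionPaths.length) would cover the whole array). A self-contained sketch of the pair-to-map step, with AbstractMap.SimpleEntry standing in for the example's Pair type:

import java.util.AbstractMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class PairsToMap {
    public static void main(String[] args) {
        // SimpleEntry stands in for the third-party Pair type used above.
        List<Map.Entry<Long, String>> pairs = List.of(
                new AbstractMap.SimpleEntry<>(1L, "partition-0"),
                new AbstractMap.SimpleEntry<>(2L, "partition-1"));

        Map<Long, String> byKey = pairs.stream()
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

        System.out.println(byKey); // {1=partition-0, 2=partition-1}
    }
}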

From source file:ch.algotrader.service.tt.TTFixReferenceDataServiceImpl.java

private void retrieveOptions(final OptionFamily securityFamily, final List<TTSecurityDefVO> securityDefs) {

    // get all current options
    List<Option> allOptions = this.optionDao.findBySecurityFamily(securityFamily.getId());
    Map<String, Option> mapByTtid = allOptions.stream().filter(e -> e.getTtid() != null)
            .collect(Collectors.toMap(e -> e.getTtid(), e -> e));
    Map<String, Option> mapBySymbol = allOptions.stream().collect(Collectors.toMap(e -> e.getSymbol(), e -> e));
    for (TTSecurityDefVO securityDef : securityDefs) {

        String type = securityDef.getType();
        if (!type.equalsIgnoreCase("OPT")) {
            throw new ServiceException("Unexpected security definition type for option: " + type);
        }
        String id = securityDef.getId();
        if (!mapByTtid.containsKey(id)) {
            OptionType optionType = securityDef.getOptionType();
            BigDecimal strike = securityDef.getStrikePrice() != null
                    ? PriceUtil.normalizePrice(securityFamily, Broker.TT.name(), securityDef.getStrikePrice())
                    : null;
            LocalDate expiryDate = securityDef.getExpiryDate() != null ? securityDef.getExpiryDate()
                    : securityDef.getMaturityDate();
            String symbol = OptionSymbol.getSymbol(securityFamily, expiryDate, optionType, strike,
                    this.commonConfig.getOptionSymbolPattern());

            if (!mapBySymbol.containsKey(symbol)) {
                String isin = securityFamily.getIsinRoot() != null
                        ? OptionSymbol.getIsin(securityFamily, expiryDate, optionType, strike)
                        : null;
                String ric = securityFamily.getRicRoot() != null
                        ? OptionSymbol.getRic(securityFamily, expiryDate, optionType, strike)
                        : null;
                String desc = securityDef.getDescription();

                Option option = Option.Factory.newInstance();
                option.setDescription(desc);
                option.setSymbol(symbol);
                option.setIsin(isin);
                option.setRic(ric);
                option.setTtid(id);
                option.setOptionType(optionType);
                option.setStrike(strike);
                option.setExpiration(DateTimeLegacy.toLocalDate(expiryDate));
                option.setSecurityFamily(securityFamily);
                option.setUnderlying(securityFamily.getUnderlying());

                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Creating option based on TT definition: {} {} {} {}", securityDef.getSymbol(),
                            securityDef.getOptionType(), securityDef.getMaturityDate(),
                            securityDef.getStrikePrice());
                }
                this.optionDao.save(option);
            } else {
                Option option = mapBySymbol.get(symbol);
                option.setTtid(id);

                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Updating option based on TT definition: {}", securityDef.getSymbol());
                }
            }
        }
    }
}
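
Both maps above use the two-argument overload, so two options sharing a ttid or a symbol would make the collector throw an IllegalStateException at collection time. If duplicates are possible, the three-argument overload takes a merge function; a sketch under that assumption (the Option record here is a hypothetical minimal type, not the AlgoTrader entity):

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class IndexBySymbol {
    // Hypothetical minimal stand-in for the Option entity.
    record Option(String symbol, String ttid) {}

    public static void main(String[] args) {
        List<Option> options = List.of(new Option("OPT-1", "a"),
                new Option("OPT-1", "b"), // duplicate symbol
                new Option("OPT-2", "c"));

        // The merge function keeps the first occurrence instead of throwing.
        Map<String, Option> bySymbol = options.stream()
                .collect(Collectors.toMap(Option::symbol, Function.identity(),
                        (first, second) -> first));

        System.out.println(bySymbol.keySet()); // [OPT-1, OPT-2] (order not guaranteed)
    }
}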

From source file:io.pravega.controller.server.ControllerService.java

public CompletableFuture<Map<SegmentRange, List<Integer>>> getSegmentsImmediatelyFollowing(SegmentId segment) {
    Preconditions.checkNotNull(segment, "segment");
    OperationContext context = streamStore.createContext(segment.getStreamInfo().getScope(),
            segment.getStreamInfo().getStream());
    return streamStore
            .getSuccessors(segment.getStreamInfo().getScope(), segment.getStreamInfo().getStream(),
                    segment.getSegmentNumber(), context, executor)
            .thenComposeAsync(successors -> FutureHelpers.keysAllOfWithResults(successors.entrySet().stream()
                    .collect(Collectors.toMap(entry -> streamStore
                            .getSegment(segment.getStreamInfo().getScope(), segment.getStreamInfo().getStream(),
                                    entry.getKey(), context, executor)
                            .thenApply(seg -> ModelHelper.createSegmentRange(segment.getStreamInfo().getScope(),
                                    segment.getStreamInfo().getStream(), seg.getNumber(), seg.getKeyStart(),
                                    seg.getKeyEnd())),
                            Map.Entry::getValue))),
                    executor);
}
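
An unusual twist here: the map's keys are themselves CompletableFutures, which FutureHelpers.keysAllOfWithResults then resolves. A generic sketch of what such a helper might look like; the method below is an assumption about its semantics, not Pravega's actual implementation:

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

public class ResolveFutureKeys {
    // Assumed semantics: wait for every future key, then rebuild the map
    // with the resolved keys and the original values.
    static <K, V> CompletableFuture<Map<K, V>> keysAllOf(Map<CompletableFuture<K>, V> m) {
        return CompletableFuture
                .allOf(m.keySet().toArray(new CompletableFuture[0]))
                .thenApply(ignored -> m.entrySet().stream()
                        .collect(Collectors.toMap(e -> e.getKey().join(), Map.Entry::getValue)));
    }

    public static void main(String[] args) {
        Map<CompletableFuture<String>, Integer> m =
                Map.of(CompletableFuture.completedFuture("segment-0"), 1);
        System.out.println(keysAllOf(m).join()); // {segment-0=1}
    }
}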

From source file:com.ge.predix.acs.service.policy.evaluation.PolicyEvaluationServiceImpl.java

LinkedHashSet<PolicySet> filterPolicySetsByPriority(final String subjectIdentifier, final String uri,
        final List<PolicySet> allPolicySets, final LinkedHashSet<String> policySetsEvaluationOrder)
        throws IllegalArgumentException {

    if (policySetsEvaluationOrder.isEmpty()) {
        if (allPolicySets.size() > 1) {
            LOGGER.error(String.format(
                    "Found more than one policy set during policy evaluation and "
                            + "no evaluation order is provided. subjectIdentifier='%s', resourceURI='%s'",
                    subjectIdentifier, uri));
            throw new IllegalArgumentException("More than one policy set exists for this zone. "
                    + "Please provide an ordered list of policy set names to consider for this evaluation and "
                    + "resubmit the request.");
        } else {
            return allPolicySets.stream().collect(Collectors.toCollection(LinkedHashSet::new));
        }
    }

    Map<String, PolicySet> allPolicySetsMap = allPolicySets.stream()
            .collect(Collectors.toMap(PolicySet::getName, Function.identity()));
    LinkedHashSet<PolicySet> filteredPolicySets = new LinkedHashSet<>();
    for (String policySetId : policySetsEvaluationOrder) {
        PolicySet policySet = allPolicySetsMap.get(policySetId);
        if (policySet == null) {
            LOGGER.error("No existing policy set matches policy set in the evaluation order of the request. "
                    + "Subject: " + subjectIdentifier + ", Resource: " + uri);
            throw new IllegalArgumentException(
                    "No existing policy set matches policy set in the evaluation order of the request. "
                            + "Please review the policy evaluation order and resubmit the request.");
        } else {
            filteredPolicySets.add(policySet);
        }
    }
    return filteredPolicySets;
}
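
The two- and three-argument toMap overloads make no guarantee about the type or iteration order of the returned Map, which is why the code above restores the caller's ordering by walking policySetsEvaluationOrder explicitly. When encounter order itself should be preserved, the four-argument overload accepts a map supplier; a brief self-contained sketch (the PolicySet record is a stand-in for the real class):

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class OrderedToMap {
    // Hypothetical stand-in for the PolicySet class.
    record PolicySet(String name) {
        String getName() { return name; }
    }

    public static void main(String[] args) {
        List<PolicySet> sets = List.of(new PolicySet("first"), new PolicySet("second"));

        // The four-argument overload lets the caller pick the map implementation;
        // LinkedHashMap preserves the stream's encounter order.
        Map<String, PolicySet> ordered = sets.stream()
                .collect(Collectors.toMap(PolicySet::getName, Function.identity(),
                        (a, b) -> a, LinkedHashMap::new));

        System.out.println(ordered.keySet()); // [first, second]
    }
}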

From source file:com.github.aptd.simulation.datamodel.CXMLReader.java

/**
 * Creates the agent structure.
 *
 * @param p_ai AI component
 * @return unmodifiable agent map
 */
private static Map<String, String> agents(final Iagents p_ai) {
    return Collections.<String, String>unmodifiableMap(p_ai.getAgents().getInstance().getAgent()
            .parallelStream().collect(Collectors.toMap(Iagent::getId, i -> i.getConfiguration().getAsl())));
}
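
Collectors.toMap also works on a parallel stream, as here, but it accumulates into per-thread maps that are merged when the stream completes. When that merge is the bottleneck, the JDK offers Collectors.toConcurrentMap, whose threads insert into a single shared ConcurrentMap; a minimal sketch:

import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ConcurrentCollect {
    public static void main(String[] args) {
        List<String> ids = List.of("agent-1", "agent-2", "agent-3");

        // toConcurrentMap is a CONCURRENT collector: parallel threads write
        // into one shared ConcurrentMap instead of merging partial maps.
        ConcurrentMap<String, Integer> lengths = ids.parallelStream()
                .collect(Collectors.toConcurrentMap(Function.identity(), String::length));

        System.out.println(lengths.size()); // 3
    }
}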

From source file:com.esri.geoportal.harvester.api.defs.EntityDefinition.java

private Map<String, String> getCleanProperties(Map<String, String> props) {
    return props.entrySet().stream().filter(e -> !StringUtils.isBlank(e.getValue()))
            .collect(Collectors.toMap(Map.Entry<String, String>::getKey, Map.Entry<String, String>::getValue));
}
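
The explicit type witnesses on the method references are needed only when inference fails; plain Map.Entry::getKey and Map.Entry::getValue would normally compile here as well. Filtering a map through its entry stream and collecting back with toMap is a common idiom; a small self-contained sketch (String.isBlank stands in for StringUtils.isBlank):

import java.util.Map;
import java.util.stream.Collectors;

public class FilterMapValues {
    public static void main(String[] args) {
        Map<String, String> props = Map.of("host", "example.org", "comment", " ");

        // Drop entries whose value is blank, keep the rest.
        Map<String, String> clean = props.entrySet().stream()
                .filter(e -> !e.getValue().isBlank())
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

        System.out.println(clean); // {host=example.org}
    }
}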

From source file:alfio.repository.TicketFieldRepository.java

default Map<String, String> findAllValuesForTicketId(int ticketId) {
    return findNameAndValue(ticketId).stream().filter(t -> t.getName() != null && t.getValue() != null)
            .collect(Collectors.toMap(FieldNameAndValue::getName, FieldNameAndValue::getValue));
}
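
The null filter before the collect is not just defensive style: Collectors.toMap throws a NullPointerException if the value mapper produces a null value. A sketch of the failure mode and the filter that prevents it (the Field record approximates the example's FieldNameAndValue pairs):

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class NullSafeToMap {
    // Approximation of the example's FieldNameAndValue pairs.
    record Field(String name, String value) {}

    public static void main(String[] args) {
        List<Field> fields = List.of(new Field("email", "a@b.c"), new Field("phone", null));

        // Without the filter, the null value would make toMap throw
        // a NullPointerException during collection.
        Map<String, String> values = fields.stream()
                .filter(f -> f.name() != null && f.value() != null)
                .collect(Collectors.toMap(Field::name, Field::value));

        System.out.println(values); // {email=a@b.c}
    }
}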