Example usage for java.util.stream Collectors toMap

List of usage examples for java.util.stream Collectors toMap

Introduction

On this page you can find example usages of java.util.stream Collectors.toMap.

Prototype

public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper,
        Function<? super T, ? extends U> valueMapper) 

Source Link

Document

Returns a Collector that accumulates elements into a Map whose keys and values are the result of applying the provided mapping functions to the input elements.

Usage

From source file:chatbot.Chatbot.java

/** ************************************************************************************************
 * inverse document frequency = log of number of documents divided by
 * number of documents in which a term appears.
 * Note that if the query is included as index -1 then it will
 * get processed too. Put the results into
 * HashMap<String,Float> idf/*w w w .  j  a  v  a 2 s  . c  o m*/
 */
private void calcIDF(int docCount) {

    idf.putAll(docfreq.keySet().stream().collect(
            Collectors.toMap((t) -> t, t -> ((float) Math.log10((float) docCount / (float) docfreq.get(t))))));
}

From source file:delfos.dataset.generated.modifieddatasets.pseudouser.PseudoUserRatingsDataset.java

/**
 * Returns all ratings for the given item, merging the wrapped dataset's
 * ratings with those contributed by pseudo users; pseudo-user ratings
 * take precedence on key collisions.
 *
 * @param idItem item whose ratings are requested
 * @return map of user id to rating for the item, sorted by user id
 * @throws ItemNotFound if the wrapped dataset does not know the item
 */
@Override
public Map<Integer, RatingType> getItemRatingsRated(Integer idItem) throws ItemNotFound {

    // Ratings coming from the wrapped (original) dataset.
    Map<Integer, RatingType> itemRatingsRated = originalDatasetLoader.getRatingsDataset()
            .getItemRatingsRated(idItem);

    // FIX: compare boxed Integers with equals(), not ==. Reference equality
    // only happens to work for values inside the Integer cache (-128..127).
    Collection<RatingType> ratedByPseudoUsers = pseudoUsersRatings.values().parallelStream()
            .flatMap(pseudoUserRatings -> pseudoUserRatings.values().stream())
            .filter(rating -> idItem.equals(rating.getIdItem())).collect(Collectors.toList());

    // FIX: supply a merge function. Every rating kept by the filter above has
    // the same item id, so the no-merge toMap() overload threw
    // IllegalStateException as soon as two pseudo users rated this item.
    // Keeping the last entry matches the putAll() overwrite semantics below.
    Map<Integer, RatingType> itemsRatingsRated_byPseudoUsers = ratedByPseudoUsers.stream()
            .collect(Collectors.toMap(rating -> rating.getIdItem(), rating -> rating,
                    (first, second) -> second));

    Map<Integer, RatingType> ret = new TreeMap<>();

    ret.putAll(itemRatingsRated);
    ret.putAll(itemsRatingsRated_byPseudoUsers);

    return ret;
}

From source file:alfio.repository.TicketFieldRepository.java

/**
 * Persists the submitted additional-field values for a ticket: existing
 * values are updated (or deleted when the new value is blank); values for
 * fields configured on the event but not yet stored are inserted.
 */
default void updateOrInsert(Map<String, List<String>> values, int ticketId, int eventId) {
    Map<String, TicketFieldValue> existingByName = findAllByTicketIdGroupedByName(ticketId);
    values = Optional.ofNullable(values).orElseGet(Collections::emptyMap);
    // Field name -> configuration id, for the fields declared on this event.
    Map<String, Integer> configuredFieldIds = findAdditionalFieldsForEvent(eventId).stream()
            .collect(Collectors.toMap(TicketFieldConfiguration::getName, TicketFieldConfiguration::getId));

    values.forEach((fieldName, submitted) -> {
        // Single answer -> raw value; several answers -> JSON array; all blank -> empty string.
        final String serialized;
        if (submitted.size() == 1) {
            serialized = submitted.get(0);
        } else if (submitted.stream().anyMatch(StringUtils::isNotBlank)) {
            serialized = Json.toJson(submitted);
        } else {
            serialized = "";
        }

        boolean hasContent = StringUtils.isNotBlank(serialized);
        if (existingByName.containsKey(fieldName)) {
            TicketFieldValue existing = existingByName.get(fieldName);
            if (hasContent) {
                updateValue(existing.getTicketId(), existing.getTicketFieldConfigurationId(), serialized);
            } else {
                // Blank submission clears the previously stored value.
                deleteValue(existing.getTicketId(), existing.getTicketFieldConfigurationId());
            }
        } else if (configuredFieldIds.containsKey(fieldName) && hasContent) {
            insertValue(ticketId, configuredFieldIds.get(fieldName), serialized);
        }
    });
}

From source file:eu.itesla_project.modules.validation.OverloadValidationTool.java

/**
 * Writes two CSV reports under outputDir: comparison.csv — per base case,
 * the load-flow and offline-rule overload status for every contingency —
 * and synthesis.csv — per contingency, how many base cases fall into each
 * (load flow, offline rule) agreement category.
 */
private static void writeCsv(Set<String> contingencyIds,
        Map<String, Map<String, OverloadStatus>> statusPerContingencyPerCase, Path outputDir)
        throws IOException {
    writeComparisonCsv(contingencyIds, statusPerContingencyPerCase, outputDir);

    // Category labels are "<load flow status>_<offline rule status>".
    List<String> categories = Arrays.asList("OK_OK", "NOK_NOK", "OK_NOK", "NOK_OK");
    Map<String, Map<String, AtomicInteger>> synthesisPerContingency = countCategories(contingencyIds,
            statusPerContingencyPerCase, categories);
    writeSynthesisCsv(categories, synthesisPerContingency, outputDir);
}

/** Writes comparison.csv: one row per base case, two status columns per contingency. */
private static void writeComparisonCsv(Set<String> contingencyIds,
        Map<String, Map<String, OverloadStatus>> statusPerContingencyPerCase, Path outputDir)
        throws IOException {
    try (BufferedWriter writer = Files.newBufferedWriter(outputDir.resolve("comparison.csv"),
            StandardCharsets.UTF_8)) {
        writer.write("base case");
        for (String contingencyId : contingencyIds) {
            writer.write(CSV_SEPARATOR);
            writer.write(contingencyId + " load flow");
            writer.write(CSV_SEPARATOR);
            writer.write(contingencyId + " offline rule");
        }
        writer.newLine();

        for (Map.Entry<String, Map<String, OverloadStatus>> e : statusPerContingencyPerCase.entrySet()) {
            String baseCaseName = e.getKey();
            Map<String, OverloadStatus> statusPerContingency = e.getValue();
            writer.write(baseCaseName);
            for (String contingencyId : contingencyIds) {
                OverloadStatus overloadStatus = statusPerContingency.get(contingencyId);
                writer.write(CSV_SEPARATOR);
                writer.write(Boolean.toString(overloadStatus.isLfOk()));
                writer.write(CSV_SEPARATOR);
                writer.write(Boolean.toString(overloadStatus.isOfflineRuleOk()));
            }
            writer.newLine();
        }
    }
}

/** Tallies, per contingency, the number of base cases falling into each category. */
private static Map<String, Map<String, AtomicInteger>> countCategories(Set<String> contingencyIds,
        Map<String, Map<String, OverloadStatus>> statusPerContingencyPerCase, List<String> categories) {
    Map<String, Map<String, AtomicInteger>> synthesisPerContingency = new HashMap<>();
    for (String contingencyId : contingencyIds) {
        // Pre-seed every category with a zero counter so get(...) below never misses.
        synthesisPerContingency.put(contingencyId,
                categories.stream().collect(Collectors.toMap(Function.identity(), c -> new AtomicInteger())));
    }
    for (Map<String, OverloadStatus> statusPerContingency : statusPerContingencyPerCase.values()) {
        for (String contingencyId : contingencyIds) {
            OverloadStatus overloadStatus = statusPerContingency.get(contingencyId);
            synthesisPerContingency.get(contingencyId).get(
                    okToString(overloadStatus.isLfOk()) + "_" + okToString(overloadStatus.isOfflineRuleOk()))
                    .incrementAndGet();
        }
    }
    return synthesisPerContingency;
}

/** Writes synthesis.csv: one row per contingency, one count column per category. */
private static void writeSynthesisCsv(List<String> categories,
        Map<String, Map<String, AtomicInteger>> synthesisPerContingency, Path outputDir) throws IOException {
    try (BufferedWriter writer = Files.newBufferedWriter(outputDir.resolve("synthesis.csv"),
            StandardCharsets.UTF_8)) {
        writer.write("contingency");
        for (String c : categories) {
            writer.write(CSV_SEPARATOR);
            writer.write(c);
        }
        writer.newLine();
        for (Map.Entry<String, Map<String, AtomicInteger>> e : synthesisPerContingency.entrySet()) {
            String contingencyId = e.getKey();
            Map<String, AtomicInteger> count = e.getValue();
            writer.write(contingencyId);
            for (String c : categories) {
                writer.write(CSV_SEPARATOR);
                writer.write(Integer.toString(count.get(c).get()));
            }
            writer.newLine();
        }
    }
}

From source file:com.arpnetworking.metrics.mad.parsers.CollectdJsonToRecordParser.java

/**
 * Parses a collectd POST body.
 *
 * Request headers whose lower-cased name starts with TAG_PREFIX become
 * annotations/dimensions on every record; the collectd host is added under
 * Key.HOST_DIMENSION_KEY. Samples that map to the same metric name are
 * merged via {@code mergeMetrics}.
 *
 * @param request an HTTP request
 * @return A list of {@link DefaultRecord.Builder}
 * @throws ParsingException if the body is not parsable as collectd formatted json data
 */
public List<Record> parse(final HttpRequest request) throws ParsingException {
    // Tags shared by every record of this request, extracted from headers.
    final Map<String, String> metricTags = Maps.newHashMap();
    for (final Map.Entry<String, String> header : request.getHeaders().entries()) {
        if (header.getKey().toLowerCase(Locale.ENGLISH).startsWith(TAG_PREFIX)) {
            metricTags.put(header.getKey().toLowerCase(Locale.ENGLISH).substring(TAG_PREFIX.length()),
                    header.getValue());
        }
    }
    try {
        final List<CollectdRecord> records = OBJECT_MAPPER.readValue(request.getBody(), COLLECTD_RECORD_LIST);
        final List<Record> parsedRecords = Lists.newArrayList();
        for (final CollectdRecord record : records) {
            final Multimap<String, Metric> metrics = HashMultimap.create();

            // NOTE: metricTags is shared across iterations; each record
            // overwrites the host entry just before the map is snapshotted
            // by ImmutableMap.copyOf below, so the order of these statements matters.
            metricTags.put(Key.HOST_DIMENSION_KEY, record.getHost());
            final DefaultRecord.Builder builder = new DefaultRecord.Builder()
                    .setId(UUID.randomUUID().toString()).setTime(record.getTime())
                    .setAnnotations(ImmutableMap.copyOf(metricTags))
                    .setDimensions(ImmutableMap.copyOf(metricTags));

            final String plugin = record.getPlugin();
            final String pluginInstance = record.getPluginInstance();
            final String type = record.getType();
            final String typeInstance = record.getTypeInstance();

            for (final CollectdRecord.Sample sample : record.getSamples()) {
                if (sample.getValue() == null) {
                    // Skip samples that carry no value.
                    continue;
                }
                final String metricName = computeMetricName(plugin, pluginInstance, type, typeInstance,
                        sample.getDsName());
                final MetricType metricType = mapDsType(sample.getDsType());
                final Metric metric = new DefaultMetric.Builder().setType(metricType)
                        .setValues(Collections
                                .singletonList(new Quantity.Builder().setValue(sample.getValue()).build()))
                        .build();
                metrics.put(metricName, metric);
            }
            // Collapse samples that mapped to the same metric name into one Metric.
            final Map<String, Metric> collectedMetrics = metrics.asMap().entrySet().stream()
                    .collect(Collectors.toMap(Map.Entry::getKey, CollectdJsonToRecordParser::mergeMetrics));
            builder.setMetrics(ImmutableMap.copyOf(collectedMetrics));
            parsedRecords.add(builder.build());
        }
        return parsedRecords;
    } catch (final IOException | ConstraintsViolatedException ex) {
        throw new ParsingException("Error parsing collectd json", request.getBody(), ex);
    }
}

From source file:com.fns.grivet.repo.JdbcEntityRepository.java

/**
 * Deletes the entity row with the given eid plus its attribute values from
 * every per-type entityav_* table.
 *
 * @param eid id of the entity to delete
 */
@Override
public void delete(Long eid) {
    String entitySql = "DELETE FROM entity WHERE eid = ?";
    log.trace(String.format("JdbcEntityRepository.delete[sql=%s]", entitySql));
    jdbcTemplate.update(entitySql, new Object[] { eid });

    // One DELETE statement per attribute type. The previous version built an
    // intermediate Map via Collectors.toMap only to call values() on it —
    // map straight to the SQL strings instead.
    Collection<String> eavSql = Stream.of(AttributeType.values())
            .map(type -> String.format("DELETE FROM entityav_%s WHERE eid = ?", type.getType()))
            .collect(Collectors.toList());
    for (String sql : eavSql) {
        log.trace(String.format("JdbcEntityRepository.delete[sql=%s]", sql));
        jdbcTemplate.update(sql, new Object[] { eid });
    }
}

From source file:com.hurence.logisland.plugin.PluginManager.java

/**
 * Groups the registered plugins by module.
 *
 * From the plugin registry, builds a map keyed by the ModuleInfo of each
 * artifact to the sorted list of registry keys registered for that artifact.
 */
private static Map<ModuleInfo, List<String>> findPluginMeta() {
    // Pair each registry entry's key (presumably the plugin/component name —
    // confirm against PluginLoader) with its loader's ModuleInfo, group the
    // pairs by artifact, then keep one ModuleInfo per group as the map key.
    // findFirst().get() is safe here: groupingBy never produces an empty list.
    return PluginLoader.getRegistry().entrySet().stream()
            .map(e -> new Tuple<>(((PluginClassLoader) e.getValue()).getModuleInfo(), e.getKey()))
            .collect(Collectors.groupingBy(t -> t.getKey().getArtifact())).entrySet().stream()
            .collect(Collectors.toMap(e -> e.getValue().stream().findFirst().get().getKey(),
                    e -> e.getValue().stream().map(Tuple::getValue).sorted().collect(Collectors.toList())));

}

From source file:edu.zipcloud.cloudstreetmarket.core.services.StockProductServiceOfflineImpl.java

/**
 * Refreshes the given stock products from Yahoo quotes, skipping products
 * whose lastUpdate is considered recent by DateUtil.isRecent(lastUpdate, 1).
 *
 * Side effect: the recently-updated entries are REMOVED from the
 * askedContent set passed in by the caller.
 */
private void updateStocksAndQuotesFromYahoo(Set<StockProduct> askedContent) {
    if (askedContent.isEmpty()) {
        return;
    }

    // Products refreshed recently enough to be skipped this round.
    Set<StockProduct> recentlyUpdated = askedContent.stream()
            .filter(t -> t.getLastUpdate() != null && DateUtil.isRecent(t.getLastUpdate(), 1))
            .collect(Collectors.toSet());

    if (askedContent.size() != recentlyUpdated.size()) {

        String guid = AuthenticationUtil.getPrincipal().getUsername();

        String token = usersConnectionRepository.getRegisteredSocialUser(guid).getAccessToken();
        ConnectionRepository connectionRepository = usersConnectionRepository.createConnectionRepository(guid);
        Connection<Yahoo2> connection = connectionRepository.getPrimaryConnection(Yahoo2.class);

        if (connection != null) {
            // NOTE(review): mutates the caller's collection.
            askedContent.removeAll(recentlyUpdated);

            // Ticker (product id) -> product, for the products still needing a refresh.
            Map<String, StockProduct> updatableTickers = askedContent.stream()
                    .collect(Collectors.toMap(StockProduct::getId, Function.identity()));

            List<YahooQuote> yahooQuotes = connection.getApi().financialOperations()
                    .getYahooQuotes(new ArrayList<String>(updatableTickers.keySet()), token);

            // Quotes with a known exchange are synced back onto their products.
            Set<StockProduct> updatableProducts = yahooQuotes.stream()
                    .filter(yq -> StringUtils.isNotBlank(yq.getExchange()))
                    .filter(yq -> updatableTickers.get(yq.getId()) != null).map(yq -> {
                        StockQuote sq = new StockQuote(yq, updatableTickers.get((yq.getId())));
                        return syncProduct(updatableTickers.get((yq.getId())), sq);
                    }).collect(Collectors.toSet());

            if (!updatableProducts.isEmpty()) {
                stockProductRepository.save(updatableProducts);
            }

            //This job below should decrease with the time
            // NOTE(review): unlike the updatable branch above, this pipeline
            // does not filter out ids missing from updatableTickers, so
            // updatableTickers.get(yq.getId()) may be null here — confirm
            // StockQuote/syncProduct tolerate a null product.
            Set<StockProduct> removableProducts = yahooQuotes.stream()
                    .filter(yq -> StringUtils.isBlank(yq.getExchange())).map(yq -> {
                        StockQuote sq = new StockQuote(yq, updatableTickers.get((yq.getId())));
                        return syncProduct(updatableTickers.get((yq.getId())), sq);
                    }).collect(Collectors.toSet());

            if (!removableProducts.isEmpty()) {
                stockProductRepository.delete(removableProducts);
            }
        }
    }
}

From source file:org.openlmis.fulfillment.web.util.BasicOrderDtoBuilder.java

/**
 * Resolves the programs referenced by the given orders, keyed by program id.
 */
private Map<UUID, ProgramDto> getPrograms(List<Order> orders) {
    // Distinct program ids referenced by the orders.
    Set<UUID> ids = orders.stream().map(Order::getProgramId).collect(Collectors.toSet());
    // Look the programs up once and index them by id.
    return programReferenceDataService.findByIds(ids).stream()
            .collect(Collectors.toMap(ProgramDto::getId, dto -> dto));
}

From source file:com.marand.thinkmed.medications.connector.impl.rest.RestMedicationsConnector.java

/**
 * Builds a map of patient summaries keyed by patient id.
 *
 * Exactly one of the two id collections is used: when patientIds is
 * non-null the summaries for those patients are fetched (empty input
 * short-circuits to an empty map); otherwise careProviderIds must be
 * non-null and the summaries of those care providers' patients are fetched.
 *
 * @param careProviderIds care provider ids, may be null if patientIds is given
 * @param patientIds patient ids, may be null if careProviderIds is given
 * @return summaries keyed by patient id
 * @throws IllegalArgumentException if both collections are null
 */
@Override
public Map<String, PatientDisplayWithLocationDto> getPatientDisplayWithLocationMap(
        final Collection<String> careProviderIds, final Collection<String> patientIds) {
    final String patientsListJson;
    if (patientIds != null) {
        if (patientIds.isEmpty()) {
            return Collections.emptyMap();
        }
        // String.join replaces the stream + Collectors.joining round trip.
        patientsListJson = restClient.getPatientsSummariesList(String.join(",", patientIds));
    } else {
        Preconditions.checkArgument(careProviderIds != null, "Both patientIds and careProviderId are null");
        patientsListJson = restClient.getCareProvidersPatientsSummariesList(String.join(",", careProviderIds));
    }
    final List<PatientDisplayWithLocationDto> patientsList = Arrays
            .asList(JsonUtil.fromJson(patientsListJson, PatientDisplayWithLocationDto[].class));

    return patientsList.stream()
            .collect(Collectors.toMap(p -> p.getPatientDisplayDto().getId(), Function.identity()));
}