Example usage for java.util.stream Collectors toMap

List of usage examples for java.util.stream Collectors toMap

Introduction

On this page you can find example usages of java.util.stream.Collectors.toMap.

Prototype

public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper,
        Function<? super T, ? extends U> valueMapper) 

Source Link

Document

Returns a Collector that accumulates elements into a Map whose keys and values are the result of applying the provided mapping functions to the input elements.

Usage

From source file:com.nike.cerberus.service.CloudFormationService.java

/**
 * Returns the parameters of the named stack.
 *
 * @param stackId Stack name or unique stack ID.
 * @return Map of parameter key to parameter value; empty if no stack matched.
 */
public Map<String, String> getStackParameters(final String stackId) {
    final DescribeStacksRequest request = new DescribeStacksRequest().withStackName(stackId);
    final DescribeStacksResult result = cloudFormationClient.describeStacks(request);
    final Map<String, String> parameters = Maps.newHashMap();

    // DescribeStacks with a stack name returns at most one stack; take the first.
    if (!result.getStacks().isEmpty()) {
        parameters.putAll(result.getStacks().get(0).getParameters().stream()
                .collect(Collectors.toMap(Parameter::getParameterKey, Parameter::getParameterValue)));
    }

    return parameters;
}

From source file:org.ligoj.app.plugin.prov.aws.in.ProvAwsPriceImportResource.java

/**
 * Installs or updates all AWS prices for this provider node.
 *
 * @throws IOException
 *             When CSV or XML files cannot be read.
 * @throws URISyntaxException
 *             When CSV or XML files cannot be read.
 */
public void install() throws IOException, URISyntaxException {
    final UpdateContext ctx = new UpdateContext();

    // The node is already persisted; attach it to the update context.
    final Node node = nodeRepository.findOneExpected(ProvAwsPluginResource.KEY);
    ctx.setNode(node);

    // Cache the previously installed (and still enabled) locations, keyed by AWS region name.
    ctx.setRegions(locationRepository.findAllBy(BY_NODE, node.getId()).stream()
            .filter(this::isEnabledRegion)
            .collect(Collectors.toMap(INamableBean::getName, Function.identity())));
    nextStep(node, null, 0);

    // Import the base catalogs first.
    installStoragePrices(ctx);
    installComputePrices(ctx);

    // S3 and EFS depend on the region mapping established by the EC2 import above.
    installS3Prices(ctx);
    installEfsPrices(ctx);
}

From source file:com.thinkbiganalytics.nifi.provenance.repo.ConfigurationProperties.java

/**
 * Reloads the configuration and reports which properties changed.
 *
 * @return map of property name to its {@link PropertyChange}, containing only
 *         the properties whose value actually differs across the reload
 */
public Map<String, PropertyChange> refresh() {
    Map<String, PropertyChange> changes = new HashMap<>();
    // Capture the values before the reload...
    populateChanges(changes, true);
    load();
    // ...then after, so each PropertyChange holds both old and new values.
    populateChanges(changes, false);
    return changes.values().stream()
            .filter(PropertyChange::changed)
            .collect(Collectors.toMap(PropertyChange::getPropertyName, Function.identity()));
}

From source file:com.nirmata.workflow.details.TestJsonSerializer.java

/**
 * Builds a small random map of 1 to 3 entries for serialization round-trip tests.
 *
 * <p>{@code distinct()} is required before collecting: {@code Stream.generate}
 * may repeat a value, and a repeated key would make {@link Collectors#toMap}
 * throw {@code IllegalStateException}.
 *
 * @return map whose keys are {@code n*4} and values {@code n*2}, as strings
 */
private Map<String, String> randomMap() {
    return Stream.generate(random::nextInt)
            .distinct() // guard against duplicate toMap keys
            .limit(random.nextInt(3) + 1)
            .collect(Collectors.toMap(n -> Integer.toString(n * 4), n -> Integer.toString(n * 2)));
}

From source file:com.spankingrpgs.model.GameState.java

/**
 * Performs a deep copy of the passed-in GameState.
 *
 * <p>The player character, non-null characters, party rosters and previous
 * villains are deep-copied so the new state can mutate independently. The
 * remaining collections are copied into new containers but share their
 * elements with the source, and scalar fields are copied directly.
 *
 * @param copy  The state to perform a deep copy of
 */
private GameState(GameState copy) {
    this.playerCharacter = copy.playerCharacter.copy();
    // Deep-copy each non-null character, preserving insertion order.
    characters = new LinkedHashMap<>(
            copy.characters.entrySet().stream().filter(entry -> entry.getValue() != null)
                    .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().copy())));
    // New containers, shared elements.
    keywords = new HashSet<>(copy.keywords);
    items = new HashMap<>(copy.items);
    events = new HashMap<>(copy.events);
    // Copy each party roster list so membership changes don't leak across states.
    party = new HashMap<>(copy.party.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey,
            rangeCharacters -> rangeCharacters.getValue().stream().collect(Collectors.toList()))));
    attritionRate = copy.attritionRate;
    artificialIntelligenceLevel = copy.artificialIntelligenceLevel;
    playerSpankable = copy.playerSpankable;
    numTimesLost = copy.numTimesLost;
    skills = new HashMap<>(copy.skills);
    equipment = new HashMap<>(copy.equipment);
    episodeNumber = copy.episodeNumber;
    dayNumber = copy.dayNumber;
    activityLength = copy.activityLength;
    gameTime = copy.gameTime;
    // Re-point previous villains at the freshly copied character instances so the
    // new state does not reference characters owned by the source.
    // NOTE(review): assumes every villain's name is present in 'characters';
    // a missing name maps to null here — confirm.
    this.previousVillains = copy.previousVillains.entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, entry -> characters.get(entry.getValue().getName())));
    this.spankerGender = copy.spankerGender;
    this.spankeeGender = copy.spankeeGender;
}

From source file:com.serphacker.serposcope.db.google.GoogleSerpRescanDB.java

/**
 * Recomputes Google ranks and (optionally) per-run target summaries for a run.
 *
 * <p>For each target/search pair, the run preceding {@code specificRunId} seeds
 * the previous-rank and best-rank baselines, then every stored SERP for the run
 * is replayed to rebuild the rank rows. Rank rows are buffered and flushed to
 * the database in batches of 2000 to limit memory and round trips.
 *
 * @param specificRunId the run to rescan; its previous run provides baselines
 * @param targets       targets whose ranks are recomputed
 * @param searches      searches replayed for each target
 * @param updateSummary whether to recompute and persist target summaries
 */
public void rescan(Integer specificRunId, Collection<GoogleTarget> targets, Collection<GoogleSearch> searches,
        boolean updateSummary) {
    LOG.debug("SERP rescan (bulk) : starting");
    long _start = System.currentTimeMillis();
    Map<Integer, Integer> searchCountByGroup = searchDB.countByGroup();
    Run specPrevRun = null;
    Map<Integer, GoogleTargetSummary> specPrevRunSummaryByTarget = new HashMap<>();

    // Baseline: summaries of the run preceding the rescanned run, keyed by target id.
    if (specificRunId != null) {
        specPrevRun = runDB.findPrevious(specificRunId);
        if (specPrevRun != null) {
            specPrevRunSummaryByTarget = targetSummaryDB.list(specPrevRun.getId()).stream()
                    .collect(Collectors.toMap(GoogleTargetSummary::getTargetId, Function.identity()));
        }
    }

    // Batch buffer for rank rows; flushed every 2000 entries and once at the end.
    List<GoogleRank> ranks = new ArrayList<>();
    for (GoogleTarget target : targets) {

        Map<Integer, GoogleTargetSummary> summaryByRunId = new HashMap<>();
        GoogleTargetSummary specificPreviousSummary = specPrevRunSummaryByTarget.get(target.getId());
        if (specificPreviousSummary != null) {
            summaryByRunId.put(specPrevRun.getId(), specificPreviousSummary);
        }

        for (GoogleSearch search : searches) {
            // Mutable holders so the SERP-streaming lambda below can carry
            // state from one replayed result to the next.
            final MutableInt previousRunId = new MutableInt(0);
            final MutableInt previousRank = new MutableInt(GoogleRank.UNRANKED);
            GoogleBest searchBest = new GoogleBest(target.getGroupId(), target.getId(), search.getId(),
                    GoogleRank.UNRANKED, null, null);

            // Seed previous rank and best-so-far from the previous run, if any.
            if (specPrevRun != null) {
                previousRunId.setValue(specPrevRun.getId());
                previousRank.setValue(
                        rankDB.get(specPrevRun.getId(), target.getGroupId(), target.getId(), search.getId()));
                GoogleBest specificBest = rankDB.getBest(target.getGroupId(), target.getId(), search.getId());
                if (specificBest != null) {
                    searchBest = specificBest;
                }
            }
            final GoogleBest best = searchBest;

            serpDB.stream(specificRunId, specificRunId, search.getId(), (GoogleSerp res) -> {

                // First entry whose URL matches the target wins; ranks are 1-based.
                int rank = GoogleRank.UNRANKED;
                String rankedUrl = null;
                for (int i = 0; i < res.getEntries().size(); i++) {
                    if (target.match(res.getEntries().get(i).getUrl())) {
                        rankedUrl = res.getEntries().get(i).getUrl();
                        rank = i + 1;
                        break;
                    }
                }

                // only update last run
                GoogleRank gRank = new GoogleRank(res.getRunId(), target.getGroupId(), target.getId(),
                        search.getId(), rank, previousRank.shortValue(), rankedUrl);
                ranks.add(gRank);
                if (ranks.size() > 2000) {
                    rankDB.insert(ranks);
                    ranks.clear();
                }

                if (updateSummary) {
                    // Lazily create the summary for this run on first sight.
                    GoogleTargetSummary summary = summaryByRunId.get(res.getRunId());
                    if (summary == null) {
                        summaryByRunId.put(res.getRunId(),
                                summary = new GoogleTargetSummary(target.getGroupId(), target.getId(),
                                        res.getRunId(), 0));
                    }
                    summary.addRankCandidat(gRank);
                }

                // Track the best (lowest) rank; ties refresh the run day.
                if (rank != GoogleRank.UNRANKED && rank <= best.getRank()) {
                    best.setRank((short) rank);
                    best.setUrl(rankedUrl);
                    best.setRunDay(res.getRunDay());
                }

                previousRunId.setValue(res.getRunId());
                previousRank.setValue(rank);
            });

            if (best.getRank() != GoogleRank.UNRANKED) {
                rankDB.insertBest(best);
            }
        }

        // fill previous summary score
        if (updateSummary) {
            // Walk summaries in run-id order so each inherits the previous score.
            TreeMap<Integer, GoogleTargetSummary> summaries = new TreeMap<>(summaryByRunId);

            GoogleTargetSummary previousSummary = null;
            for (Map.Entry<Integer, GoogleTargetSummary> entry : summaries.entrySet()) {
                GoogleTargetSummary summary = entry.getValue();
                summary.computeScoreBP(searchCountByGroup.getOrDefault(summary.getGroupId(), 0));
                if (previousSummary != null) {
                    summary.setPreviousScoreBP(previousSummary.getScoreBP());
                }
                previousSummary = summary;
            }

            // The previous run's summary was only a baseline; don't re-insert it.
            if (specPrevRun != null) {
                summaries.remove(specPrevRun.getId());
            }

            if (!summaries.isEmpty()) {
                targetSummaryDB.insert(summaries.values());
            }
        }
    }

    // Flush any remaining buffered rank rows.
    if (!ranks.isEmpty()) {
        rankDB.insert(ranks);
        ranks.clear();
    }

    LOG.debug("SERP rescan : done, duration = {}",
            DurationFormatUtils.formatDurationHMS(System.currentTimeMillis() - _start));
}

From source file:io.syndesis.dao.DeploymentDescriptorTest.java

@Test
public void thereShouldBeNoDuplicateNames() {
    // Count how often each connector action name occurs across the deployment.
    final Map<String, Long> occurrences = StreamSupport.stream(deployment.spliterator(), true)
            .filter(node -> "connector".equals(node.get("kind").asText()))
            .flatMap(connector -> StreamSupport.stream(connector.get("data").get("actions").spliterator(),
                    true))
            .map(action -> action.get("name").asText())
            .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));

    // Keep only names that were seen more than once.
    final Map<String, Long> duplicates = occurrences.entrySet().stream()
            .filter(entry -> entry.getValue() > 1)
            .collect(Collectors.toMap(Entry::getKey, Entry::getValue));

    assertThat(duplicates).as("Expected unique action names").isEmpty();
}

From source file:org.wallride.repository.PageRepositoryImpl.java

/**
 * Builds a Hibernate Search full-text query for pages matching the request.
 *
 * <p>All criteria are AND-ed together; list-valued criteria (category ids/codes,
 * tag ids/names, custom-field values) are OR-ed within themselves. Drafts are
 * always excluded, and results are sorted by the "sortLft" index.
 *
 * @param request  search criteria (keyword, language, status, categories, tags,
 *                 custom fields, author)
 * @param pageable optional paging; when null, no offset/limit is applied
 * @param criteria additional criteria attached to the resulting query
 * @return the assembled full-text query, sorted and optionally paged
 */
private FullTextQuery buildFullTextQuery(PageSearchRequest request, Pageable pageable, Criteria criteria) {
    FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(entityManager);
    QueryBuilder qb = fullTextEntityManager.getSearchFactory().buildQueryBuilder().forEntity(Page.class).get();

    @SuppressWarnings("rawtypes")
    BooleanJunction<BooleanJunction> junction = qb.bool();
    junction.must(qb.all().createQuery());

    // Exclude drafts: match only pages whose "drafted" field holds the null marker.
    junction.must(qb.keyword().onField("drafted").ignoreAnalyzer().matching("_null_").createQuery());

    if (StringUtils.hasText(request.getKeyword())) {
        Analyzer analyzer = fullTextEntityManager.getSearchFactory().getAnalyzer("synonyms");
        String[] fields = new String[] { "title", "body", "tags.name", };
        MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, analyzer);
        parser.setDefaultOperator(QueryParser.Operator.AND);
        Query query = null;
        // Parse the keyword as-is; on a syntax error, retry with all Lucene
        // special characters escaped before giving up.
        try {
            query = parser.parse(request.getKeyword());
        } catch (ParseException e1) {
            try {
                query = parser.parse(QueryParser.escape(request.getKeyword()));
            } catch (ParseException e2) {
                throw new RuntimeException(e2);
            }
        }
        junction.must(query);
    }
    if (StringUtils.hasText(request.getLanguage())) {
        junction.must(qb.keyword().onField("language").matching(request.getLanguage()).createQuery());
    }

    if (request.getStatus() != null) {
        junction.must(qb.keyword().onField("status").matching(request.getStatus()).createQuery());
    }

    // Category ids: OR within the list, AND-ed against the rest.
    if (!CollectionUtils.isEmpty(request.getCategoryIds())) {
        BooleanJunction<BooleanJunction> subJunction = qb.bool();
        for (long categoryId : request.getCategoryIds()) {
            subJunction.should(qb.keyword().onField("categories.id").matching(categoryId).createQuery());
        }
        junction.must(subJunction.createQuery());
    }
    if (!CollectionUtils.isEmpty(request.getCategoryCodes())) {
        BooleanJunction<BooleanJunction> subJunction = qb.bool();
        for (String categoryCode : request.getCategoryCodes()) {
            subJunction.should(qb.keyword().onField("categories.code").matching(categoryCode).createQuery());
        }
        junction.must(subJunction.createQuery());
    }

    // Tag ids/names follow the same OR-within, AND-across pattern.
    if (!CollectionUtils.isEmpty(request.getTagIds())) {
        BooleanJunction<BooleanJunction> subJunction = qb.bool();
        for (long tagId : request.getTagIds()) {
            subJunction.should(qb.keyword().onField("tags.id").matching(tagId).createQuery());
        }
        junction.must(subJunction.createQuery());
    }
    if (!CollectionUtils.isEmpty(request.getTagNames())) {
        BooleanJunction<BooleanJunction> subJunction = qb.bool();
        for (String tagName : request.getTagNames()) {
            subJunction.should(qb.phrase().onField("tags.name").sentence(tagName).createQuery());
        }
        junction.must(subJunction.createQuery());
    }

    if (!CollectionUtils.isEmpty(request.getCustomFields())) {
        // Resolve the requested custom-field codes for the request language.
        javax.persistence.Query query = entityManager.createQuery(
                "from CustomField where language = :language and code in (:codes)", CustomField.class);
        query.setParameter("language", request.getLanguage()).setParameter("codes",
                request.getCustomFields().keySet());
        List<CustomField> customFields = query.getResultList();

        if (!CollectionUtils.isEmpty(customFields)) {
            Map<String, CustomField> customFieldMap = customFields.stream()
                    .collect(Collectors.toMap(CustomField::getCode, Function.identity()));

            BooleanJunction<BooleanJunction> subJunction = qb.bool();
            for (String key : request.getCustomFields().keySet()) {
                List<Object> values = (List<Object>) request.getCustomFields().get(key);
                // NOTE(review): a requested key with no matching CustomField row
                // leaves target == null and NPEs on getFieldType() below —
                // confirm callers only pass known codes for the language.
                CustomField target = customFieldMap.get(key);
                BooleanJunction<BooleanJunction> customFieldJunction = qb.bool();
                switch (target.getFieldType()) {
                case TEXT:
                case TEXTAREA:
                case HTML:
                    // Free-text field types: analyzed keyword match per value.
                    for (Object value : values) {
                        customFieldJunction.must(qb.keyword().onField("customFieldValues." + key)
                                .ignoreFieldBridge().matching(value.toString()).createQuery());
                    }
                    break;
                default:
                    // Other field types: exact phrase match per value.
                    for (Object value : values) {
                        customFieldJunction.must(qb.phrase().onField("customFieldValues." + key)
                                .ignoreFieldBridge().sentence(value.toString()).createQuery());
                    }
                }
                subJunction.must(customFieldJunction.createQuery());
            }
            junction.must(subJunction.createQuery());
        }
    }

    if (request.getAuthorId() != null) {
        junction.must(qb.keyword().onField("author.id").matching(request.getAuthorId()).createQuery());
    }

    Query searchQuery = junction.createQuery();

    // Sort by the "sortLft" integer field so results follow the page tree order.
    Sort sort = new Sort(new SortField("sortLft", SortField.Type.INT));

    FullTextQuery persistenceQuery = fullTextEntityManager.createFullTextQuery(searchQuery, Page.class)
            .setCriteriaQuery(criteria).setSort(sort);
    if (pageable != null) {
        persistenceQuery.setFirstResult(pageable.getOffset());
        persistenceQuery.setMaxResults(pageable.getPageSize());
    }
    return persistenceQuery;
}

From source file:com.netflix.imfutility.itunes.audio.ChannelsMapper.java

/**
 * Gets a map of languages and their channels from the descriptor which can be
 * associated with alternative audios.
 *
 * <p>Only stereo soundfield groups that have a language, and whose language
 * differs (by default region) from {@code mainLang}, are included.
 *
 * <p>NOTE(review): this {@code Collectors.toMap} has no merge function, so two
 * stereo groups sharing the same language would throw IllegalStateException —
 * confirm whether a valid descriptor can contain duplicate languages.
 *
 * @param mainLang language of main audio (excluded from scan)
 * @return map from language to that language's stereo channel list
 */
public Map<String, List<Pair<SequenceUUID, Integer>>> guessAlternatives(String mainLang) {
    List<SoundfieldGroupInfo> stereo = findInputForChannelGroup(STEREO_LAYOUT);

    return stereo.stream().filter(g -> getLanguage(g) != null)
            .filter(g -> !LocaleHelper.equalsByDefaultRegion(mainLang, getLanguage(g)))
            .collect(Collectors.toMap(this::getLanguage, g -> getChannelsByLayout(STEREO_LAYOUT, g)));
}

From source file:mtsar.resources.WorkerResource.java

/**
 * Records a batch of answers submitted by a worker.
 *
 * <p>Nested form parameters of the form {@code answers[taskId]} are unpacked
 * into one {@link Answer} per task, each validated against its task; if any
 * answer violates a constraint the whole batch is rejected, otherwise all
 * answers are inserted together.
 *
 * @param validator     bean validator applied to each built answer
 * @param uriInfo       request URI context (injected)
 * @param id            identifier of the worker submitting the answers
 * @param type          answer type; defaults to the DAO's default answer type
 * @param tags          tags attached to every answer in the batch
 * @param datetimeParam optional answer timestamp; "now" is used when absent
 * @param params        raw form parameters carrying the nested answers
 * @return HTTP 200 whose entity is the list of inserted answers
 */
@PATCH
@Path("{worker}/answers")
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
public Response postAnswers(@Context Validator validator, @Context UriInfo uriInfo,
        @PathParam("worker") Integer id,
        @FormParam("type") @DefaultValue(AnswerDAO.ANSWER_TYPE_DEFAULT) String type,
        @FormParam("tags") List<String> tags, @FormParam("datetime") String datetimeParam,
        MultivaluedMap<String, String> params) {
    // NOTE(review): Timestamp.valueOf throws IllegalArgumentException on a
    // malformed datetime parameter — confirm an exception mapper turns that
    // into a client error upstream.
    final Timestamp datetime = (datetimeParam == null) ? DateTimeUtils.now() : Timestamp.valueOf(datetimeParam);
    final Worker worker = fetchWorker(id);
    final Map<String, List<String>> nested = ParamsUtils.nested(params, "answers");

    // Build and validate one answer per task, keeping violations alongside each answer.
    final Map<Answer, Set<ConstraintViolation<Object>>> answers = nested.entrySet().stream().map(entry -> {
        final Integer taskId = Integer.valueOf(entry.getKey());
        final Task task = fetchTask(taskId);

        final Answer answer = new Answer.Builder().setStage(stage.getId()).addAllTags(tags).setType(type)
                .setTaskId(task.getId()).setWorkerId(worker.getId()).addAllAnswers(entry.getValue())
                .setDateTime(datetime).build();

        final Set<ConstraintViolation<Object>> violations = ParamsUtils.validate(validator,
                new TaskAnswerValidation.Builder().setTask(task).setAnswer(answer).build(),
                new AnswerValidation.Builder().setAnswer(answer).setAnswerDAO(answerDAO).build());

        return Pair.of(answer, violations);
    }).collect(Collectors.toMap(Pair::getLeft, Pair::getRight));

    // All-or-nothing: any violation aborts the whole batch before insertion.
    final Set<ConstraintViolation<Object>> violations = answers.values().stream().flatMap(Set::stream)
            .collect(Collectors.toSet());
    if (!violations.isEmpty())
        throw new ConstraintViolationException(violations);

    final List<Answer> inserted = AnswerDAO.insert(answerDAO, answers.keySet());
    return Response.ok(inserted).build();
}