Example usage for java.util.stream Collectors toMap

List of usage examples for java.util.stream Collectors toMap

Introduction

On this page you can find usage examples for java.util.stream.Collectors.toMap.

Prototype

public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper,
        Function<? super T, ? extends U> valueMapper) 

Document

Returns a Collector that accumulates elements into a Map whose keys and values are the result of applying the provided mapping functions to the input elements.
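
For example, here is a minimal, self-contained sketch of the two-argument overload (the word list and names are illustrative only, not taken from the projects below):

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ToMapExample {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("apple", "banana", "cherry");

        // Key mapper: the word itself; value mapper: its length.
        Map<String, Integer> lengthByWord = words.stream()
                .collect(Collectors.toMap(word -> word, String::length));

        System.out.println(lengthByWord); // e.g. {banana=6, apple=5, cherry=6}
    }
}

Note that this two-argument overload throws an IllegalStateException if two elements map to the same key, and a NullPointerException if a mapped value is null. When duplicate keys are possible, the three-argument overload Collectors.toMap(keyMapper, valueMapper, mergeFunction) accepts a merge function such as (a, b) -> b to resolve them.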

Usage

From source file:it.greenvulcano.gvesb.debug.DebuggerServlet.java
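In this servlet handler, toMap collects the recognized debug request parameters into a Map<DebugKey, String>: the key mapper is Function.identity() on the parsed DebugKey, and the value mapper looks up the matching parameter value from the request.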

/**
 * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest,
 *      javax.servlet.http.HttpServletResponse)
 */
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    try {

        StringBuffer sb = new StringBuffer();
        dump(request, sb);
        LOG.debug(sb.toString());

        DebugCommand debugOperation = Optional.ofNullable(request.getParameter("debugOperation"))
                .map(DebugCommand::valueOf).orElseThrow(IllegalArgumentException::new);

        PrintWriter writer = response.getWriter();

        Map<DebugKey, String> params = request.getParameterMap().keySet().stream().filter(KEYS::contains)
                .map(DebugKey::valueOf)
                .collect(Collectors.toMap(Function.identity(), k -> request.getParameter(k.name())));

        DebuggerObject dObj = gvDebugger.processCommand(debugOperation, params);

        if (dObj == null) {
            dObj = DebuggerObject.FAIL_DEBUGGER_OBJECT;
        }
        String debugOperationResponse = dObj.toXML();

        LOG.debug("Debug operation response: " + debugOperationResponse);
        writer.println(debugOperationResponse);
    } catch (IllegalArgumentException e) {
        LOG.error("Fail to process debug operation: missing or invalid value for parameter debugOperation");
        response.getWriter().println("Missing or invalid value for parameter debugOperation");
    } catch (Exception e) {
        LOG.error("Fail to process debug operation", e);
        throw new ServletException(e);
    }
}

From source file:com.adeptj.modules.data.jpa.core.JpaProperties.java
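Here toMap turns extra JPA properties supplied as key=value strings into map entries: each row is split on the separator, malformed rows are filtered out, and the two array elements become the key and the value.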

public static Map<String, Object> from(EntityManagerFactoryConfig config) {
    Map<String, Object> jpaProperties = new HashMap<>();
    jpaProperties.put(DDL_GENERATION, config.ddlGeneration());
    jpaProperties.put(DDL_GENERATION_MODE, config.ddlGenerationOutputMode());
    // DEPLOY_ON_STARTUP must be a string value
    jpaProperties.put(DEPLOY_ON_STARTUP, Boolean.toString(config.deployOnStartup()));
    jpaProperties.put(LOGGING_LEVEL, config.loggingLevel());
    jpaProperties.put(TRANSACTION_TYPE, config.persistenceUnitTransactionType());
    jpaProperties.put(ECLIPSELINK_PERSISTENCE_XML, config.persistenceXmlLocation());
    jpaProperties.put(SHARED_CACHE_MODE, config.sharedCacheMode());
    jpaProperties.put(VALIDATION_MODE, config.validationMode());
    jpaProperties.put(PERSISTENCE_PROVIDER, config.persistenceProviderClassName());
    if (config.useExceptionHandler()) {
        jpaProperties.put(EXCEPTION_HANDLER_CLASS, JpaExceptionHandler.class.getName());
    }
    // Extra properties are in [key=value] format.
    jpaProperties.putAll(Stream.of(config.jpaProperties()).filter(StringUtils::isNotEmpty)
            .map(row -> row.split(EQ)).filter(mapping -> ArrayUtils.getLength(mapping) == 2)
            .collect(Collectors.toMap(elem -> elem[0], elem -> elem[1])));
    return jpaProperties;
}

From source file:com.blackducksoftware.integration.hub.detect.detector.pip.PipenvGraphParser.java
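This parser indexes the lines of pip freeze output by lower-cased package name, mapping each name to the whole split line for later dependency lookups.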

public PipParseResult parse(final String projectName, final String projectVersionName,
        final List<String> pipFreezeOutput, final List<String> pipenvGraphOutput, final String sourcePath) {
    final MutableMapDependencyGraph dependencyGraph = new MutableMapDependencyGraph();
    final Stack<Dependency> dependencyStack = new Stack<>();

    final Map<String, String[]> pipFreezeMap = pipFreezeOutput.stream()
            .map(line -> line.split(TOP_LEVEL_SEPARATOR)).filter(splitLine -> splitLine.length == 2)
            .collect(Collectors.toMap(splitLine -> splitLine[0].trim().toLowerCase(), splitLine -> splitLine));

    int lastLevel = -1;
    for (final String line : pipenvGraphOutput) {
        final int currentLevel = getLevel(line);
        final Optional<Dependency> parsedDependency = getDependencyFromLine(pipFreezeMap, line);

        if (!parsedDependency.isPresent()) {
            continue;
        }

        final Dependency dependency = parsedDependency.get();

        if (currentLevel == lastLevel) {
            dependencyStack.pop();
        } else {
            for (; lastLevel >= currentLevel; lastLevel--) {
                dependencyStack.pop();
            }
        }

        if (dependencyStack.size() > 0) {
            dependencyGraph.addChildWithParent(dependency, dependencyStack.peek());
        } else {
            dependencyGraph.addChildrenToRoot(dependency);
        }

        lastLevel = currentLevel;
        dependencyStack.push(dependency);
    }

    if (!dependencyGraph.getRootDependencyExternalIds().isEmpty()) {
        final ExternalId projectExternalId = externalIdFactory.createNameVersionExternalId(Forge.PYPI,
                projectName, projectVersionName);
        final DetectCodeLocation codeLocation = new DetectCodeLocation.Builder(DetectCodeLocationType.PIP,
                sourcePath, projectExternalId, dependencyGraph).build();
        return new PipParseResult(projectName, projectVersionName, codeLocation);
    } else {
        return null;
    }
}

From source file:de.hybris.platform.chinesepspwechatpay.controllers.misc.ChineseWeChatPayResponseController.java
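The notification handler below flattens a parsed XML document into a Map<String, String>, using each node's name as the key and the text of its first child as the value.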

@RequestMapping(value = "/paymentresponse/notify", method = RequestMethod.POST)
public void handlePaymentResponse(final HttpServletRequest request, final HttpServletResponse response)
        throws CMSItemNotFoundException, IOException {
    final String requestBody = getPostRequestBody(request);
    if (requestBody.isEmpty()) {
        LOG.error("Notify body is empty");
    } else {
        final Map<String, String> unifyResponseMap;
        try {
            final Node notifyXml = new XmlParser().parseText(requestBody);
            unifyResponseMap = (Map<String, String>) notifyXml.children().stream().collect(
                    Collectors.toMap(k -> ((Node) k).name(), k -> ((Node) k).children().get(0).toString()));
            final SignVerificationProcessor signVerificationProcessor = new SignVerificationProcessor(
                    weChatPayConfiguration, unifyResponseMap);
            if (!signVerificationProcessor.process().booleanValue()) {
                LOG.warn("Invalid notify from WeChatPay");
                response.setContentType("text/xml");
                response.getWriter().write(XSSEncoder.encodeXML("FAIL"));
                // Stop here: an invalid notification should not be handled as a successful payment.
                return;
            }
            final WeChatRawDirectPayNotification weChatPayNotification = new WeChatRawDirectPayNotification();
            final Map<String, String> camelCaseMap = WeChatPayUtils.convertKey2CamelCase(unifyResponseMap);
            BeanUtils.populate(weChatPayNotification, camelCaseMap);
            weChatPayNotificationService.handleWeChatPayPaymentResponse(weChatPayNotification);
            response.setContentType("text/xml");
            response.getWriter().write(XSSEncoder.encodeXML("SUCCESS"));
        } catch (IOException | SAXException | ParserConfigurationException | IllegalAccessException
                | InvocationTargetException e) {
            LOG.error("Problem in handling WeChatPay's notify message", e);
        }

    }
}

From source file:delfos.rs.collaborativefiltering.als.ALSRecommender.java
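This ALS recommender uses toMap repeatedly to build per-user and per-item feature vectors: first with random initial values, then with the optimized vectors computed in each training iteration.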

@Override
public MatrixFactorizationModel buildRecommendationModel(DatasetLoader<? extends Rating> datasetLoader)
        throws CannotLoadRatingsDataset, CannotLoadContentDataset, CannotLoadUsersDataset {

    int numIter = 1;
    int dimension = 5;
    long seed = getSeedValue();

    final double lambda = 0.1;

    Bias bias = new Bias(datasetLoader);

    Map<User, List<Double>> randomUserVectors = datasetLoader.getUsersDataset().parallelStream()
            .collect(Collectors.toMap(user -> user, user -> {
                Random random = new Random(seed + user.getId());
                List<Double> vector = random.doubles(-10, 10).limit(dimension).boxed()
                        .collect(Collectors.toList());
                return vector;
            }));

    Map<Item, List<Double>> randomItemVectors = datasetLoader.getContentDataset().parallelStream()
            .collect(Collectors.toMap(item -> item, item -> {
                Random random = new Random(seed + item.getId());
                List<Double> vector = random.doubles(-10, 10).limit(dimension).boxed()
                        .collect(Collectors.toList());
                return vector;
            }));

    MatrixFactorizationModel model = new MatrixFactorizationModel(randomUserVectors, randomItemVectors, bias);

    for (int iterationIndex = 0; iterationIndex < numIter; iterationIndex++) {

        final int iteration = iterationIndex;
        final MatrixFactorizationModel initialModel = model;

        double error = getModelError(bias, datasetLoader, initialModel);

        System.out.println("Error in iteration " + iterationIndex + " is " + error);

        ProgressChangedController userProgress = new ProgressChangedController(
                getAlias() + " for dataset " + datasetLoader.getAlias() + " userOptimization iteration "
                        + iteration,
                datasetLoader.getUsersDataset().size(), this::fireBuildingProgressChangedEvent);

        Map<User, List<Double>> trainedUserVectors = datasetLoader.getUsersDataset().parallelStream()
                .collect(Collectors.toMap(user -> user, (User user) -> {
                    Map<Integer, ? extends Rating> userRatings = datasetLoader.getRatingsDataset()
                            .getUserRatingsRated(user.getId());

                    ObjectiveFunction objectiveFunction = new ObjectiveFunction((double[] pu) -> {
                        List<Double> userVector = Arrays.stream(pu).boxed().collect(Collectors.toList());
                        double predictionError = userRatings.values().parallelStream()
                                .map(bias.getBiasApplier()).map(rating -> {
                                    List<Double> itemVector = initialModel.getItemFeatures(rating.getItem());
                                    double prediction = IntStream.range(0, userVector.size())
                                            .mapToDouble(index -> userVector.get(index) * itemVector.get(index))
                                            .sum();

                                    double value = rating.getRatingValue().doubleValue();

                                    double errorThisRating = prediction - value;

                                    return errorThisRating;
                                }).map(value -> Math.pow(value, 2)).mapToDouble(value -> value).sum();

                        double penalty = Arrays.stream(pu).map(value -> Math.pow(value, 2)).sum();
                        double objectiveFunctionValue = predictionError + lambda * penalty;
                        return objectiveFunctionValue;
                    });

                    SimplexOptimizer simplexOptimizer = new SimplexOptimizer(0, 0);

                    double[] initialGuess = new Random(seed + user.getId()).doubles(-10, 10).limit(dimension)
                            .toArray();

                    List<Double> initialGuessList = Arrays.stream(initialGuess).boxed()
                            .collect(Collectors.toList());

                    double initialGuessPenalty = objectiveFunction.getObjectiveFunction().value(initialGuess);

                    try {
                        PointValuePair optimize = simplexOptimizer.optimize(
                                new MultiDirectionalSimplex(dimension), new InitialGuess(initialGuess),
                                objectiveFunction, GoalType.MINIMIZE, MAX_EVAL, MAX_ITER);
                        double optimizedPenalty = optimize.getValue();
                        userProgress.setTaskFinished();

                        List<Double> optimizedUserVector = Arrays.stream(optimize.getPoint()).boxed()
                                .collect(Collectors.toList());
                        return optimizedUserVector;
                    } catch (Exception ex) {
                        System.out.println("Vector cannot be optimized for user " + user + " (numRatings="
                                + userRatings.size() + ")");
                        return initialModel.getUserFeatures(user);
                    }
                }));

        ProgressChangedController itemProgress = new ProgressChangedController(
                getAlias() + " for dataset " + datasetLoader.getAlias() + " item optimization iteration "
                        + iteration,
                datasetLoader.getContentDataset().size(), this::fireBuildingProgressChangedEvent);

        Map<Item, List<Double>> trainedItemVectors = datasetLoader.getContentDataset().parallelStream()
                .collect(Collectors.toMap(item -> item, item -> {
                    Map<Integer, ? extends Rating> itemRatings = datasetLoader.getRatingsDataset()
                            .getItemRatingsRated(item.getId());

                    ObjectiveFunction objectiveFunction = new ObjectiveFunction((double[] pu) -> {
                        List<Double> itemVector = Arrays.stream(pu).boxed().collect(Collectors.toList());
                        double predictionError = itemRatings.values().parallelStream()
                                .map(bias.getBiasApplier()).map(rating -> {
                                    List<Double> userVector = initialModel.getUserFeatures(rating.getUser());
                                    double prediction = IntStream.range(0, userVector.size())
                                            .mapToDouble(index -> userVector.get(index) * itemVector.get(index))
                                            .sum();

                                    double value = rating.getRatingValue().doubleValue();

                                    double errorThisRating = prediction - value;

                                    return errorThisRating;
                                }).map(value -> Math.pow(value, 2)).mapToDouble(value -> value).sum();

                        double penalty = Arrays.stream(pu).map(value -> Math.pow(value, 2)).sum();
                        double objectiveFunctionValue = predictionError + lambda * penalty;
                        return objectiveFunctionValue;
                    });

                    SimplexOptimizer simplexOptimizer = new SimplexOptimizer(0, 0);

                    double[] initialGuess = new Random(seed + item.getId()).doubles(-10, 10).limit(dimension)
                            .toArray();

                    List<Double> initialGuessList = Arrays.stream(initialGuess).boxed()
                            .collect(Collectors.toList());

                    double initialGuessPenalty = objectiveFunction.getObjectiveFunction().value(initialGuess);

                    try {
                        PointValuePair optimize = simplexOptimizer.optimize(
                                new MultiDirectionalSimplex(dimension), new InitialGuess(initialGuess),
                                objectiveFunction, GoalType.MINIMIZE, MAX_EVAL, MAX_ITER);
                        double optimizedPenalty = optimize.getValue();
                        itemProgress.setTaskFinished();

                        List<Double> optimizedVector = Arrays.stream(optimize.getPoint()).boxed()
                                .collect(Collectors.toList());

                        return optimizedVector;
                    } catch (Exception ex) {
                        System.out.println("Vector cannot be optimized " + item
                                + " cannot be optimized (numRatings=" + itemRatings.size() + ")");
                        return initialModel.getItemFeatures(item);
                    }
                }));

        model = new MatrixFactorizationModel(trainedUserVectors, trainedItemVectors, bias);

    }
    return model;

}

From source file:com.uber.hoodie.io.HoodieCleanHelper.java
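In this constructor, the entries of an existing map are re-mapped through intermediate Pairs and collected back into a Map of file IDs to their pending compaction operations.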

public HoodieCleanHelper(HoodieTable<T> hoodieTable, HoodieWriteConfig config) {
    this.hoodieTable = hoodieTable;
    this.fileSystemView = hoodieTable.getCompletedFileSystemView();
    this.commitTimeline = hoodieTable.getCompletedCommitTimeline();
    this.config = config;
    this.fileIdToPendingCompactionOperations = ((HoodieTableFileSystemView) hoodieTable.getRTFileSystemView())
            .getFileIdToPendingCompaction().entrySet().stream()
            .map(entry -> Pair.of(entry.getKey(), entry.getValue().getValue()))
            .collect(Collectors.toMap(Pair::getKey, Pair::getValue));
}

From source file:se.uu.it.cs.recsys.ruleminer.impl.FPGrowthImpl.java
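In this FP-Growth implementation, toMap rebuilds a frequent-pattern map by extending each existing pattern (the key) with the current suffix pattern while preserving its support count (the value).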

private Map<Set<Integer>, Integer> miningWithFPGrowth(FPTree fpTree, Set<Integer> suffixPattern) {

    Map<Set<Integer>, Integer> frequentPatternFromSinglePrefixPath = new HashMap<>();
    FPTree branchingTree = fpTree;

    if (fpTree.hasSinglePrefixPath()) {
        List<Item> singlePrefixPath = fpTree.getSinglePrefixPathInTopDownOrder();
        //            LOGGER.debug("Single prefix path: {}", singlePrefixPath);

        Map<Set<Integer>, Integer> frequentPatternWithinSinglePrefixPath = getFrequentPatternFromSinglePrefixPath(
                singlePrefixPath);

        frequentPatternFromSinglePrefixPath = frequentPatternWithinSinglePrefixPath.entrySet().stream()
                .collect(Collectors.toMap(entry -> {
                    Set<Integer> existingPattern = new HashSet<>(entry.getKey());
                    existingPattern.addAll(suffixPattern);
                    return existingPattern;
                }, entry -> entry.getValue()));

        branchingTree = fpTree.getBranchingTree();

        if (branchingTree == null) {
            return frequentPatternFromSinglePrefixPath;
        }
    }

    Map<Set<Integer>, Integer> frequentPatternFromBranchingTree = new HashMap<>();

    List<HeaderTableItem> headerList = branchingTree.getHeaderTable();
    ListIterator<HeaderTableItem> itr = headerList.listIterator(headerList.size());

    while (itr.hasPrevious()) {
        HeaderTableItem visitingItem = itr.previous();

        Set<Integer> newPattern = new HashSet<>(suffixPattern);
        newPattern.add(visitingItem.getItem().getId());

        frequentPatternFromBranchingTree.put(newPattern, visitingItem.getItem().getCount());
        //            LOGGER.debug("Adding new pattern: {}, count: {}", newPattern, visitingItem.getItem().getCount());

        Map<List<Integer>, Integer> patternBase = FPTreeUtil.getPatternBase(visitingItem);
        //            LOGGER.debug("Pattern base for item {} is: {}", visitingItem.getItem(), patternBase);

        FPTree conditionalTree = FPTreeBuilder.buildConditionalFPTree(patternBase, this.minSupport);

        if (conditionalTree != null && !conditionalTree.getRoot().getChildren().isEmpty()) {
            frequentPatternFromBranchingTree.putAll(miningWithFPGrowth(conditionalTree, newPattern));
        }
    }

    return consolidatePatterns(frequentPatternFromSinglePrefixPath, frequentPatternFromBranchingTree);
}

From source file:net.hamnaberg.json.Property.java
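This accessor streams the fields of a JSON object node and collects them into a map keyed by field name, converting each JSON value into a Value on the way; the result is wrapped as unmodifiable.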

public Map<String, Value> getObject() {
    return unmodifiableMap(Optional.ofNullable(delegate.get("object")).filter(JsonNode::isObject)
            .map(object -> stream(spliteratorUnknownSize(object.fields(), Spliterator.ORDERED), false).collect(
                    Collectors.toMap(Map.Entry::getKey, entry -> ValueFactory.createValue(entry.getValue()))))
            .orElse(Collections.<String, Value>emptyMap()));
}

From source file:com.thinkbiganalytics.ingest.TableRegisterSupport.java
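The helper below first builds a lookup of feed column specs by name using toMap(ColumnSpec::getName, Function.identity()), then copies the matching data types onto the target column specs.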

/**
 * Copies the column specs and resets their data types to match those of the feed column specs.
 *
 * @param feedColumnSpecs the column specs of the feed table supplying the source data types
 * @param columnSpecs     the column specs to copy and adjust
 * @return copies of columnSpecs with data types replaced by those of the matching feed columns
 */
protected ColumnSpec[] adjustInvalidColumnSpec(ColumnSpec[] feedColumnSpecs, ColumnSpec[] columnSpecs) {
    //find the source data types from the _feed table that match these columns and replace the data types
    Map<String, ColumnSpec> feedColumnSpecMap = Arrays.asList(feedColumnSpecs).stream()
            .collect(Collectors.toMap(ColumnSpec::getName, Function.identity()));
    List<ColumnSpec> invalidColumnSpecs = Arrays.asList(columnSpecs).stream().map(c -> {
        ColumnSpec copy = new ColumnSpec(c);
        if (StringUtils.isNotBlank(copy.getOtherColumnName())
                && feedColumnSpecMap.containsKey(copy.getOtherColumnName())) {
            ColumnSpec feedSpec = feedColumnSpecMap.get(copy.getOtherColumnName());
            copy.setDataType(feedSpec.getDataType());
        }
        return copy;
    }).collect(Collectors.toList());
    return invalidColumnSpecs.toArray(new ColumnSpec[invalidColumnSpecs.size()]);
}

From source file:org.ow2.proactive.connector.iaas.cache.InstanceCache.java
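A straightforward shallow copy of a map via streams: Map.Entry::getKey and Map.Entry::getValue carry each entry over unchanged, and duplicate keys cannot occur because the source is itself a map.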

private Map<String, Set<Instance>> cloneCreatedInstances() {
    return createdInstances.entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}