Example usage for java.util.stream Collectors toMap

List of usage examples for java.util.stream Collectors toMap

Introduction

On this page you can find example usages of java.util.stream Collectors toMap.

Prototype

public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper,
        Function<? super T, ? extends U> valueMapper) 

Document

Returns a Collector that accumulates elements into a Map whose keys and values are the result of applying the provided mapping functions to the input elements.
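
Before the real-world examples below, here is a minimal, self-contained sketch of this two-argument overload; the class and variable names are illustrative only. Note that this overload throws an IllegalStateException if two elements map to the same key, which is why the examples that follow either guarantee unique keys or switch to a grouping collector.

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ToMapSketch {
    public static void main(String[] args) {
        // Hypothetical input; each word is a unique key.
        List<String> words = Arrays.asList("stream", "collector", "map");

        // Key each word by itself and map it to its length.
        // With this two-argument overload, duplicate keys raise an IllegalStateException.
        Map<String, Integer> lengthByWord = words.stream()
                .collect(Collectors.toMap(Function.identity(), String::length));

        lengthByWord.forEach((word, length) -> System.out.println(word + " -> " + length));
    }
}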

Usage

From source file:org.apache.ambari.view.web.service.PackageScannerServiceImpl.java

private void updateApplication(Application application, Registry registry) {
    Optional<Package> packageOptional = packageRepository.findByName(application.getName());

    Package pack = packageOptional.orElseGet(() -> {
        Package pac = new Package();
        pac.setName(application.getName());
        pac.setRegistry(registry);
        packageRepository.save(pac);
        return pac;
    });

    Map<String, Application.Version> versionMap = application.getVersions().stream()
            .collect(Collectors.toMap(Application.Version::getVersion, Function.identity()));

    Set<String> versionsFromRepo = pack.getVersions().stream().map(PackageVersion::getVersion)
            .collect(Collectors.toSet());
    versionMap.keySet().forEach(x -> {
        if (!versionsFromRepo.contains(x)) {
            PackageVersion packageVersion = new PackageVersion();
            Application.Version version = versionMap.get(x);
            packageVersion.setVersion(version.getVersion());
            packageVersion.setDeploymentDefinition(version.getApplicationConfig());
            packageVersion.setViewPackage(pack);
            pack.getVersions().add(packageVersion);
            versionRepository.save(packageVersion);
        }
    });

}

From source file:io.pravega.client.stream.mock.MockStreamManager.java

public Position getInitialPosition(String stream) {
    return new PositionImpl(controller.getSegmentsForStream(new StreamImpl(scope, stream)).stream()
            .collect(Collectors.toMap(segment -> segment, segment -> 0L)));
}

From source file:com.ikanow.aleph2.logging.utils.LoggingUtils.java

/**
 * Creates a map of subsystem -> logging level for quick lookups. Grabs the overrides from
 * bucket.management_schema().logging_schema().log_level_overrides()
 * 
 * Returns an empty map if none exist there
 * 
 * @param bucket
 * @param default_system_level 
 * @return
 */
public static ImmutableMap<String, Level> getBucketLoggingThresholds(final DataBucketBean bucket,
        final Level default_system_level) {
    //if overrides are set, create a map with them and the default
    if (bucket.management_schema() != null && bucket.management_schema().logging_schema() != null) {
        return new ImmutableMap.Builder<String, Level>()
                .put(DEFAULT_LEVEL_KEY,
                        Optional.ofNullable(bucket.management_schema().logging_schema().log_level())
                                .map(l -> Level.valueOf(l)).orElse(default_system_level))
                .putAll(Optional.ofNullable(bucket.management_schema().logging_schema().log_level_overrides())
                        .orElse(new HashMap<String, String>()).entrySet().stream()
                        .collect(Collectors.toMap(e -> e.getKey(), e -> Level.valueOf(e.getValue())))) //convert String Level to log4j.Level
                .build();
    } else {
        //otherwise just return an empty map
        return new ImmutableMap.Builder<String, Level>().build();
    }
}

From source file:cat.calidos.morfeu.webapp.GenericHttpServlet.java

protected Map<String, String> normaliseParams(Map<String, String[]> parameterMap) {
    return parameterMap.entrySet().stream().collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue()[0]));
}

From source file:fr.paris.lutece.portal.web.xsl.XslExportJspBeanTest.java

public void testDoCreateXslExport() throws AccessDeniedException, IOException {
    MockHttpServletRequest request = new MockHttpServletRequest();
    AdminUser user = new AdminUser();
    user.setRoles(
            AdminRoleHome.findAll().stream().collect(Collectors.toMap(AdminRole::getKey, Function.identity())));
    Utils.registerAdminUserWithRigth(request, user, XslExportJspBean.RIGHT_MANAGE_XSL_EXPORT);
    String randomName = getRandomName();
    Map<String, String[]> parameters = new HashMap<>();
    parameters.put("title", new String[] { randomName });
    parameters.put("description", new String[] { randomName });
    parameters.put("extension", new String[] { randomName });
    parameters.put(SecurityTokenService.PARAMETER_TOKEN, new String[] {
            SecurityTokenService.getInstance().getToken(request, "admin/xsl/create_xsl_export.html") });
    Map<String, List<FileItem>> multipartFiles = new HashMap<>();
    List<FileItem> fileItems = new ArrayList<>();
    FileItem item = new DiskFileItemFactory().createItem("id_file", "", false, "xsl");
    item.getOutputStream().write("<?xml version='1.0'?><a/>".getBytes());
    fileItems.add(item);
    multipartFiles.put("id_file", fileItems);

    _instance.init(request, XslExportJspBean.RIGHT_MANAGE_XSL_EXPORT);
    try {
        _instance.doCreateXslExport(new MultipartHttpServletRequest(request, multipartFiles, parameters));
        assertTrue(XslExportHome.getList().stream().anyMatch(e -> randomName.equals(e.getTitle())
                && randomName.equals(e.getDescription()) && randomName.equals(e.getExtension())));
    } finally {
        XslExportHome.getList().stream().filter(e -> randomName.equals(e.getTitle()))
                .forEach(e -> XslExportHome.remove(e.getIdXslExport()));
    }
}

From source file:com.firewalld.sentimentanalysis.IDSentiWordNet.java

private Map<String, Double> createDictionary() throws IOException {
    Map<String, Double> dict = IOUtils
            .readLines(getClass().getClassLoader().getResourceAsStream(firewallConf.get(SWN_FILE)))
            .parallelStream()
            /* If it's a comment, skip this line */
            .filter(line -> !line.trim().startsWith("#")).flatMap(line -> {
                String[] data = line.split("\t");
                String wordTypeMarker = data[0];

                // Example line:
                // POS ID PosS NegS SynsetTerm#sensenumber Desc
                // a 00009618 0.5 0.25 spartan#4 austere#3 ascetical#2 ascetic#2 practicing great self-denial;...etc
                // Is it a valid line? Otherwise, throw an exception.
                if (data.length != 6) {
                    throw new IllegalArgumentException(
                            String.format("Incorrect tabulation format in file, line: %s", line));
                }

                // Calculate synset score as score = PosS - NegS
                Double synsetScore = Double.parseDouble(data[2]) - Double.parseDouble(data[3]);

                // Get all Synset terms
                String[] synTermsSplit = data[4].split(" ");

                // Go through all terms of current synset.
                Stream<Tuple2<String, Tuple2<Double, Double>>> synSets = Arrays.asList(synTermsSplit)
                        .parallelStream().map(synTermSplit -> {
                            // Get synterm and synterm rank
                            String[] synTermAndRank = synTermSplit.split("#");
                            String synTerm = synTermAndRank[0] + "#" + wordTypeMarker;

                            double synTermRank = Double.parseDouble(synTermAndRank[1]);
                            // What we get here is a (term, (rank, score))
                            return new Tuple2<>(synTerm, new Tuple2<>(synTermRank, synsetScore));
                        });

                return synSets;
            })
            // What we get here is a map of the type:
            // term -> {score of synset#1, score of synset#2...}
            .collect(Collectors.groupingBy(synSet -> synSet._1,
                    Collectors.mapping(synSet -> synSet._2, Collectors.toList())))
            .entrySet().parallelStream().map(synSet -> {
                String word = synSet.getKey();
                List<Tuple2<Double, Double>> synSetScoreList = synSet.getValue();

                // Calculate weighted average. Weigh the synsets according to
                // their rank.
                // Score= 1/2*first + 1/3*second + 1/4*third ..... etc.
                // Sum = 1/1 + 1/2 + 1/3 ...
                Tuple2<Double, Double> scoreSum = synSetScoreList.parallelStream()
                        .reduce(new Tuple2<>(0.0, 0.0), (s1, s2) -> new Tuple2<>(
                                ((s1._1 == 0.0) ? 0.0 : s1._2 / s1._1) + ((s2._1 == 0.0) ? 0.0 : s2._2 / s2._1),
                                ((s1._1 == 0.0) ? 0.0 : 1 / s1._1) + ((s2._1 == 0.0) ? 0.0 : 1 / s2._1)));

                double score = scoreSum._1 / scoreSum._2;

                return new Tuple2<>(word, score);
            }).collect(Collectors.toMap(synSet -> synSet._1, synSet -> synSet._2));

    return dict;
}

From source file:org.openlmis.fulfillment.web.util.BasicOrderDtoBuilder.java

private Map<UUID, FacilityDto> getFacilities(List<Order> orders) {
    Set<UUID> facilityIds = new HashSet<>();
    for (Order order : orders) {
        facilityIds.add(order.getFacilityId());
        facilityIds.add(order.getSupplyingFacilityId());
        facilityIds.add(order.getReceivingFacilityId());
        facilityIds.add(order.getRequestingFacilityId());
    }
    return facilityReferenceDataService.findByIds(facilityIds).stream()
            .collect(Collectors.toMap(BaseDto::getId, Function.identity()));
}

From source file:com.diversityarrays.kdxplore.heatmap.HeatMapModelData.java

public HeatMapModelData(CurationContext ctx, ValueRetriever<?> xvr, ValueRetriever<?> yvr,
        TraitInstanceValueRetriever<?> tivr) {
    context = ctx;
    xValueRetriever = xvr;
    yValueRetriever = yvr;
    traitInstanceValueRetriever = tivr;

    zTraitInstance = traitInstanceValueRetriever.getTraitInstance();

    String traitInstanceName = context.makeTraitInstanceName(zTraitInstance);

    columnRowLabelOffset = new Point(xValueRetriever.getAxisZeroValue(), yValueRetriever.getAxisZeroValue());

    Class<?> valueClass = traitInstanceValueRetriever.getComparableValueClass();

    Dimension size = new Dimension(xValueRetriever.getAxisValueCount(), yValueRetriever.getAxisValueCount());

    model = new DefaultHeatMapModel<PlotOrSpecimen>(size, traitInstanceName,
            traitInstanceValueRetriever.getDisplayName(), yValueRetriever.getDisplayName(),
            xValueRetriever.getDisplayName());

    if (Double.class == valueClass) {

        DoubleGenerator generator = new DoubleGenerator();

        ValueInfo<Double> valueInfo = generator.generate(context, xValueRetriever, yValueRetriever,
                traitInstanceValueRetriever, plotPointsByMark, columnRowLabelOffset);

        Map<Point, List<PlotOrSpecimen>> psListByPoint = valueInfo.plotSpecimensByPoint;

        Function<Point, PlotOrSpecimen> psArrayForPoint = new Function<Point, PlotOrSpecimen>() {
            @Override
            public PlotOrSpecimen apply(Point pt) {
                List<PlotOrSpecimen> list = psListByPoint.get(pt);
                return Check.isEmpty(list) ? null : list.get(0);
                //                    return list.toArray(new PlotOrSpecimen[list.size()]);
            }
        };

        Map<Point, PlotOrSpecimen> contentByPoint = psListByPoint.keySet().stream()
                .collect(Collectors.toMap(Function.identity(), psArrayForPoint));

        Map<PlotOrSpecimen, Double> valueByContent = new HashMap<>();
        for (Point pt : contentByPoint.keySet()) {
            PlotOrSpecimen pos = contentByPoint.get(pt);
            Double value = valueInfo.valuesByPoint.get(pt);
            if (value != null) {
                valueByContent.put(pos, value);
            }
        }

        Function<Double, String> formatter = new Function<Double, String>() {
            Map<Double, String> displayByValue = valueInfo.displayByValue;

            @Override
            public String apply(Double t) {
                return displayByValue.get(t);
            }
        };
        valueModel = new DoubleValueModel<>(valueByContent, formatter);

        model.setCellContent(contentByPoint);

        missingOrBad = valueInfo.missingOrBad;
        suppressed = valueInfo.suppressed;
    } else if (Integer.class == valueClass || traitInstanceValueRetriever.supportsGetAxisValue()) {

        IntegerGenerator generator = new IntegerGenerator();

        ValueInfo<Integer> valueInfo = generator.generate(context, xValueRetriever, yValueRetriever,
                traitInstanceValueRetriever, plotPointsByMark, columnRowLabelOffset);

        Map<Point, List<PlotOrSpecimen>> psListByPoint = valueInfo.plotSpecimensByPoint;

        Function<Point, PlotOrSpecimen> psArrayForPoint = new Function<Point, PlotOrSpecimen>() {
            @Override
            public PlotOrSpecimen apply(Point pt) {
                List<PlotOrSpecimen> list = psListByPoint.get(pt);
                return Check.isEmpty(list) ? null : list.get(0);
                //                    return list.toArray(new PlotOrSpecimen[list.size()]);
            }
        };
        // FIXME for Specimen, we DO want the multiple results
        Map<Point, PlotOrSpecimen> contentByPoint = psListByPoint.keySet().stream()
                .collect(Collectors.toMap(Function.identity(), psArrayForPoint));

        Map<PlotOrSpecimen, Integer> valueByContent = new HashMap<>();
        for (Point pt : contentByPoint.keySet()) {
            PlotOrSpecimen pos = contentByPoint.get(pt);
            Integer value = valueInfo.valuesByPoint.get(pt);
            if (value != null) {
                valueByContent.put(pos, value);
            }
        }

        model.setCellContent(contentByPoint);

        Function<Integer, String> formatter = new Function<Integer, String>() {
            Map<Integer, String> displayByValue = valueInfo.displayByValue;

            @Override
            public String apply(Integer t) {
                return displayByValue.get(t);
            }
        };
        valueModel = new IntegerValueModel<>(valueByContent, formatter);

        missingOrBad = valueInfo.missingOrBad;
        suppressed = valueInfo.suppressed;
    } else {
        throw new RuntimeException(
                "Unsupported Value Retriever: " + traitInstanceValueRetriever.getDisplayName()); //$NON-NLS-1$
    }

}

From source file:io.magentys.maven.DonutMojo.java

private scala.collection.mutable.Map<String, String> customAttributes() {
    return JavaConverters
            .mapAsScalaMapConverter(
                    customAttributes.stream().collect(Collectors.toMap(c -> c.getName(), c -> c.getValue())))
            .asScala();
}

From source file:com.epam.reportportal.gateway.CompositeInfoEndpoint.java

@RequestMapping(value = "/composite/health", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
@ResponseBody
public Map<String, ?> composeHealth() {
    return eurekaClient.getApplications().getRegisteredApplications().stream()
            .flatMap(app -> app.getInstances().stream())
            .collect(Collectors.toMap(InstanceInfo::getAppName, InstanceInfo::getStatus));

}
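
Worth noting for the example above: the two-argument toMap used there will throw an IllegalStateException if two registered instances share the same application name. A hedged, self-contained sketch of the three-argument overload, Collectors.toMap(keyMapper, valueMapper, mergeFunction), is shown below with made-up data; the merge function resolves duplicate keys, here by keeping the first status seen.

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ToMapMergeSketch {
    public static void main(String[] args) {
        // Hypothetical (appName, status) pairs with a duplicate key for "gateway".
        List<String[]> instances = Arrays.asList(
                new String[] { "gateway", "UP" },
                new String[] { "gateway", "DOWN" },
                new String[] { "registry", "UP" });

        // The third argument is a merge function applied when two values map to the same key.
        Map<String, String> statusByApp = instances.stream()
                .collect(Collectors.toMap(pair -> pair[0], pair -> pair[1], (first, second) -> first));

        System.out.println(statusByApp); // e.g. {gateway=UP, registry=UP}
    }
}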