Example usage for java.util Map forEach

List of usage examples for java.util Map forEach

Introduction

On this page you can find example usages of java.util Map forEach.

Prototype

default void forEach(BiConsumer<? super K, ? super V> action) 

Document

Performs the given action for each entry in this map until all entries have been processed or the action throws an exception.
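
Before the real-world usages below, here is a minimal, self-contained sketch (not taken from the projects in the Usage section; the map contents are invented for illustration) showing the basic BiConsumer call and the abort-on-exception behaviour described above:

import java.util.LinkedHashMap;
import java.util.Map;

public class MapForEachDemo {
    public static void main(String[] args) {
        Map<String, Integer> ports = new LinkedHashMap<>();
        ports.put("http", 80);
        ports.put("https", 443);

        // The BiConsumer receives each key and value in turn.
        ports.forEach((name, port) -> System.out.println(name + " -> " + port));

        // If the action throws, iteration stops and the exception propagates to the caller.
        try {
            ports.forEach((name, port) -> {
                if (port == 443)
                    throw new IllegalStateException("stopping at " + name);
                System.out.println("visited " + name);
            });
        } catch (IllegalStateException e) {
            System.out.println("forEach aborted: " + e.getMessage());
        }
    }
}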

Usage

From source file:org.talend.dataprep.api.dataset.row.DataSetRow.java

/**
 * Constructor with values.
 *
 * @param rowMetadata the row metadata.
 * @param values the row values.
 */
public DataSetRow(RowMetadata rowMetadata, Map<String, ?> values) {
    this(rowMetadata);
    values.forEach((k, v) -> set(k, String.valueOf(v)));
}

From source file:org.egov.collection.integration.pgi.SbimopsAdaptor.java

private Map<String, String> prepareResponseMap(CloseableHttpResponse response) {
    try {

        InputStreamReader inputStreamReader = new InputStreamReader(response.getEntity().getContent());
        BufferedReader reader = new BufferedReader(inputStreamReader);

        ObjectMapper objectMapper = new ObjectMapper();
        Map<String, Map<String, Object>> responseMap = null;

        try {
            responseMap = objectMapper.readValue(reader.readLine(),
                    new TypeReference<Map<String, Map<String, Object>>>() {
                    });
        } finally {
            reader.close();
            inputStreamReader.close();
        }
        if (responseMap == null || responseMap.isEmpty()) {
            LOGGER.info("Sbimops reconciliation response is null or empty");
            throw new ApplicationRuntimeException("SBIMOPS reconciliation response is null or empty");
        } else {
            if (LOGGER.isInfoEnabled())
                LOGGER.info("Sbimops reconciliation response : " + responseMap);
            Map<String, Object> responseParameterMap = (Map<String, Object>) responseMap.get(SBIMOPS_RECORDSET)
                    .get(SBIMOPS_ROW);
            final Map<String, String> responseSbimopsMap = new LinkedHashMap<>();
            responseParameterMap.forEach((key, value) -> responseSbimopsMap.put(key, value.toString()));
            return responseSbimopsMap;
        }
    } catch (IOException e) {
        LOGGER.error("SBIMOPS reconciliation, error while reading the response content");
        throw new ApplicationRuntimeException(
                " SBIMOPS reconciliation, error while reading the response content", e);
    }
}
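
Aside, not part of the original project: the reader handling above could also be written with try-with-resources, which closes both streams even when readValue throws. A minimal sketch under that assumption, reusing the objectMapper and responseMap declared above:

try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(response.getEntity().getContent()))) {
    responseMap = objectMapper.readValue(reader.readLine(),
            new TypeReference<Map<String, Map<String, Object>>>() {
            });
}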

From source file:com.qwazr.server.configuration.ServerConfiguration.java

protected ServerConfiguration(final Map<?, ?>... propertiesMaps) throws IOException {

    // Merge the maps.
    properties = new HashMap<>();
    if (propertiesMaps != null) {
        for (Map<?, ?> props : propertiesMaps)
            if (props != null)
                props.forEach((key, value) -> {
                    if (key != null && value != null)
                        properties.put(key.toString(), value.toString());
                });
    }

    //Set the data directory
    dataDirectory = getDataDirectory(getStringProperty(QWAZR_DATA, null));
    if (dataDirectory == null)
        throw new IOException("The data directory has not been set.");
    if (!Files.exists(dataDirectory))
        throw new IOException("The data directory does not exists: " + dataDirectory.toAbsolutePath());
    if (!Files.isDirectory(dataDirectory))
        throw new IOException("The data directory is not a directory: " + dataDirectory.toAbsolutePath());

    //Set the temp directory
    tempDirectory = getTempDirectory(dataDirectory, getStringProperty(QWAZR_TEMP, null));
    if (!Files.exists(tempDirectory))
        Files.createDirectories(tempDirectory);
    if (!Files.exists(tempDirectory))
        throw new IOException("The temp directory does not exists: " + tempDirectory.toAbsolutePath());
    if (!Files.isDirectory(tempDirectory))
        throw new IOException("The temp directory is not a directory: " + tempDirectory.toAbsolutePath());

    //Set the configuration directories
    etcDirectories = getEtcDirectories(getStringProperty(QWAZR_ETC_DIR, null));
    etcFileFilter = buildEtcFileFilter(getStringProperty(QWAZR_ETC, null));

    //Set the listen address
    listenAddress = findListenAddress(getStringProperty(LISTEN_ADDR, null));

    //Set the public address
    publicAddress = findPublicAddress(getStringProperty(PUBLIC_ADDR, null), this.listenAddress);

    //Set the connectors
    webAppConnector = new WebConnector(publicAddress, getIntegerProperty(WEBAPP_PORT, null), 9090,
            getStringProperty(WEBAPP_AUTHENTICATION, null), getStringProperty(WEBAPP_REALM, null));
    webServiceConnector = new WebConnector(publicAddress, getIntegerProperty(WEBSERVICE_PORT, null), 9091,
            getStringProperty(WEBSERVICE_AUTHENTICATION, null), getStringProperty(WEBSERVICE_REALM, null));
    multicastConnector = new WebConnector(getStringProperty(MULTICAST_ADDR, null),
            getIntegerProperty(MULTICAST_PORT, null), 9091, null, null);

    // Collect the master address.
    final LinkedHashSet<String> set = new LinkedHashSet<>();
    try {
        findMatchingAddress(getStringProperty(QWAZR_MASTERS, null), set);
    } catch (SocketException e) {
        LOGGER.warning("Failed in extracting IP information. No master server is configured.");
    }
    this.masters = set.isEmpty() ? null : Collections.unmodifiableSet(set);

    this.groups = buildSet(getStringProperty(QWAZR_GROUPS, null), ",; \t", true);
}

From source file:org.codice.ddf.libs.klv.KlvDecoderTest.java

@Test
public void testKLVSet() throws Exception {
    byte[] klvBytes;

    try (final InputStream inputStream = getClass().getClassLoader().getResourceAsStream("testKLV.klv")) {
        klvBytes = IOUtils.toByteArray(inputStream);
    }

    final KlvContext klvContext = getKLVContext(DATA_ELEMENTS);

    final Map<String, KlvDataElement> decodedDataElements = new KlvDecoder(klvContext).decode(klvBytes)
            .getDataElements();

    assertThat(decodedDataElements.size(), is(1));
    assertThat(decodedDataElements, hasKey(UAS_DATALINK_LOCAL_SET_UNIVERSAL_KEY));

    final KlvContext localSet = ((KlvLocalSet) decodedDataElements.get(UAS_DATALINK_LOCAL_SET_UNIVERSAL_KEY))
            .getValue();

    final Map<String, KlvDataElement> localSetDataElements = localSet.getDataElements();

    assertThat(localSetDataElements.size(), is(DATA_ELEMENTS.size()));

    localSetDataElements.forEach((name, dataElement) -> {
        final Object expectedValue = EXPECTED_VALUES.get(name);
        assertThat(String.format("%s is not %s", name, expectedValue), dataElement.getValue(),
                is(expectedValue));
    });
}

From source file:com.streamsets.pipeline.stage.origin.httpserver.PushHttpReceiver.java

protected List<Record> parseRequestPayload(HttpServletRequest req, InputStream is) throws IOException {
    Map<String, String> customHeaderAttributes = getCustomHeaderAttributes(req);

    // parse request into records
    List<Record> records = new ArrayList<>();
    String requestId = System.currentTimeMillis() + "." + counter.getAndIncrement();
    try (DataParser parser = getParserFactory().getParser(requestId, is, "0")) {
        Record record = parser.parse();
        while (record != null) {
            // Copy to a local that stays effectively final so the lambda below can capture it.
            Record finalRecord = record;
            customHeaderAttributes.forEach((key, value) -> finalRecord.getHeader().setAttribute(key, value));
            records.add(finalRecord);
            record = parser.parse();
        }
    } catch (DataParserException ex) {
        throw new IOException(ex);
    }

    return records;
}

From source file:ijfx.core.batch.BatchService.java

public void extractOutput(BatchSingleInput input, Module module) {
    Map<String, Object> outputs = module.getOutputs();
    outputs.forEach((s, o) -> {
        logger.info(String.format("Trying to find output from %s = %s", s, o));
        if (Dataset.class.isAssignableFrom(o.getClass())) {
            logger.info("Extracting Dataset !");
            input.setDataset((Dataset) module.getOutput(s));
        } else if (ImageDisplay.class.isAssignableFrom(o.getClass())) {
            logger.info("Extracting ImageDisplay !");
            input.setDisplay((ImageDisplay) module.getOutput(s));
        } else if (o instanceof DatasetView) {
            logger.info("Extracting DatasetView !");
            input.setDatasetView((DatasetView) module.getOutput(s));
        }
    });

}

From source file:cc.arduino.contributions.packages.ContributionsIndexer.java

public Set<ContributedTool> getInstalledTools() {
    Set<ContributedTool> tools = new HashSet<>();
    if (index == null) {
        return tools;
    }
    for (ContributedPackage pack : index.getPackages()) {
        Collection<ContributedPlatform> platforms = pack.getPlatforms().stream() //
                .filter(p -> p.isInstalled()) //
                .collect(Collectors.toList());
        Map<String, List<ContributedPlatform>> platformsByName = platforms.stream()
                .collect(Collectors.groupingBy(ContributedPlatform::getName));

        platformsByName.forEach((platformName, platformsWithName) -> {
            if (platformsWithName.size() > 1) {
                platformsWithName = platformsWithName.stream() //
                        .filter(p -> !p.isBuiltIn()) //
                        .collect(Collectors.toList());
            }
            for (ContributedPlatform p : platformsWithName) {
                tools.addAll(p.getResolvedTools());
            }
        });
    }
    return tools;
}

From source file:org.apache.brooklyn.enricher.stock.aggregator.AggregationJob.java

protected void scanEntity(Entity entityToScan, Map<String, String> costPerMonth,
        List<Map<String, String>> haPrimaries, List<Map<String, String>> licences,
        Map<String, List<Map<String, Object>>> locations, List<Map<String, String>> policyHighlights) {

    updateFromConfig(entityToScan, DASHBOARD_HA_PRIMARIES, haPrimaries);
    updateFromConfig(entityToScan, DASHBOARD_LICENSES, licences);
    updateFromConfig(entityToScan, DASHBOARD_POLICY_HIGHLIGHTS, policyHighlights);

    //Cost per month
    Map<String, String> costPerMonthFromEntity = entityToScan.sensors().get(DASHBOARD_COST_PER_MONTH);
    if (costPerMonthFromEntity == null || costPerMonthFromEntity.isEmpty()) {
        costPerMonthFromEntity = entityToScan.config().get(DASHBOARD_COST_PER_MONTH);
    }

    if (costPerMonthFromEntity != null) {
        costPerMonth.putAll(costPerMonthFromEntity);
    }

    //Locations merge
    //We are merging a Map of Lists of Maps.
    //The outer map is location type, e.g. servers, to a list of data for that type.
    //The inner map should contain the data, e.g. name/icon/count.

    //Further complicating this is that the Map you get back from sensors/config doesn't conform to the generic
    //expectations, i.e. even if you type this as Map<String, List<Map<String, String>>> you will still get back an
    //integer from the inner map - hence all the weird casting below.
    Map<String, List<Map<String, Object>>> outerLocationMapFromEntity = entityToScan.sensors()
            .get(DASHBOARD_LOCATIONS);
    if (outerLocationMapFromEntity == null || outerLocationMapFromEntity.isEmpty()) {
        outerLocationMapFromEntity = entityToScan.config().get(DASHBOARD_LOCATIONS);
    }

    if (outerLocationMapFromEntity != null) {
        //loop through outer maps
        outerLocationMapFromEntity.forEach((outerLocationMapFromEntityKey, outerLocationMapFromEntityValue) -> {
            boolean found = false;
            for (Map.Entry<String, List<Map<String, Object>>> outerLocationMapFromMethodParam : locations
                    .entrySet()) {
                if (StringUtils.equals(outerLocationMapFromMethodParam.getKey(),
                        outerLocationMapFromEntityKey)) {
                    found = true;

                    //loop through list
                    Iterator<Map<String, Object>> listIteratorFromEntity = outerLocationMapFromEntityValue
                            .iterator();
                    while (listIteratorFromEntity.hasNext()) {
                        Map<String, Object> innerMapFromEntity = listIteratorFromEntity.next();
                        boolean foundInner = false;

                        //loop through inner map and merge
                        for (Map<String, Object> innerMapFromMethodParam : outerLocationMapFromMethodParam
                                .getValue()) {
                            if (StringUtils.equals((String) innerMapFromEntity.get(NAME_STRING),
                                    (String) innerMapFromMethodParam.get(NAME_STRING))) {

                                innerMapFromMethodParam.put(COUNT_STRING,
                                        (int) innerMapFromEntity.get(COUNT_STRING)
                                                + (int) innerMapFromMethodParam.get(COUNT_STRING));
                                foundInner = true;
                                break;
                            }
                        }

                        //If the entityToScan has a "name" not found in the method param then add it to the method param
                        if (!foundInner) {
                            outerLocationMapFromMethodParam.getValue().add(new HashMap<>(innerMapFromEntity));
                        }
                    }
                }

            }

            //If the entityToScan has an entry in the outer map that isn't in the method param, then add it
            if (!found) {
                final List<Map<String, Object>> clonedList = new ArrayList<>();
                outerLocationMapFromEntityValue
                        .forEach(mapToClone -> clonedList.add(new HashMap<>(mapToClone)));
                locations.put(outerLocationMapFromEntityKey, clonedList);
            }

        });
    }

    entityToScan.getChildren().forEach(childEntity -> scanEntity(childEntity, costPerMonth, haPrimaries,
            licences, locations, policyHighlights));
}
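
The location merge above is easier to follow in isolation. Below is a simplified, hypothetical sketch of the same idea; it is not the Brooklyn code: the merge helper, the literal "name"/"count" keys, and the use of computeIfAbsent in place of the explicit found flags are all illustrative:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;

public class LocationMergeDemo {

    // Merge "from" into "into": lists are matched by outer key, inner maps by their
    // "name" entry, and matching inner maps have their "count" values summed.
    static void merge(Map<String, List<Map<String, Object>>> into,
            Map<String, List<Map<String, Object>>> from) {
        from.forEach((type, entries) -> {
            List<Map<String, Object>> target = into.computeIfAbsent(type, t -> new ArrayList<>());
            for (Map<String, Object> entry : entries) {
                Optional<Map<String, Object>> match = target.stream()
                        .filter(m -> Objects.equals(m.get("name"), entry.get("name")))
                        .findFirst();
                if (match.isPresent()) {
                    Map<String, Object> existing = match.get();
                    existing.put("count", (int) existing.get("count") + (int) entry.get("count"));
                } else {
                    // Copy so later mutations of "from" do not leak into "into".
                    target.add(new HashMap<>(entry));
                }
            }
        });
    }
}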

From source file:org.apache.tinkerpop.gremlin.process.traversal.strategy.decoration.PartitionStrategy.java

@Override
public void apply(final Traversal.Admin<?, ?> traversal) {
    final Graph graph = traversal.getGraph().orElseThrow(
            () -> new IllegalStateException("PartitionStrategy does not work with anonymous Traversals"));
    final Graph.Features.VertexFeatures vertexFeatures = graph.features().vertex();
    final boolean supportsMetaProperties = vertexFeatures.supportsMetaProperties();
    if (includeMetaProperties && !supportsMetaProperties)
        throw new IllegalStateException(
                "PartitionStrategy is configured to include meta-properties but the Graph does not support them");

    // no need to add has after mutating steps because we want to make it so that the write partition can
    // be independent of the read partition. In other words, I don't need to be able to read from a partition
    // in order to write to it.
    final List<Step> stepsToInsertHasAfter = new ArrayList<>();
    stepsToInsertHasAfter.addAll(TraversalHelper.getStepsOfAssignableClass(GraphStep.class, traversal));
    stepsToInsertHasAfter.addAll(TraversalHelper.getStepsOfAssignableClass(VertexStep.class, traversal));
    stepsToInsertHasAfter
            .addAll(TraversalHelper.getStepsOfAssignableClass(EdgeOtherVertexStep.class, traversal));
    stepsToInsertHasAfter.addAll(TraversalHelper.getStepsOfAssignableClass(EdgeVertexStep.class, traversal));

    // all steps that return a vertex need to have has(partitionKey,within,partitionValues) injected after it
    stepsToInsertHasAfter
            .forEach(
                    step -> TraversalHelper
                            .insertAfterStep(
                                    new HasStep(traversal,
                                            new HasContainer(partitionKey,
                                                    P.within(new ArrayList<>(readPartitions)))),
                                    step, traversal));

    if (includeMetaProperties) {
        final List<PropertiesStep> propertiesSteps = TraversalHelper
                .getStepsOfAssignableClass(PropertiesStep.class, traversal);
        propertiesSteps.forEach(step -> {
            // check length first because keyExists will return true otherwise
            if (step.getPropertyKeys().length > 0
                    && ElementHelper.keyExists(partitionKey, step.getPropertyKeys()))
                throw new IllegalStateException("Cannot explicitly request the partitionKey in the traversal");

            if (step.getReturnType() == PropertyType.PROPERTY) {
                // check the following step to see if it is a has(partitionKey, *) - if so then this strategy was
                // already applied down below via g.V().values() which injects a properties() step
                final Step next = step.getNextStep();
                if (!(next instanceof HasStep) || !((HasContainer) ((HasStep) next).getHasContainers().get(0))
                        .getKey().equals(partitionKey)) {
                    // use choose() to determine if the properties() step is called on a Vertex to get a VertexProperty
                    // if not, pass it through.
                    final Traversal choose = __
                            .choose(__.filter(new TypeChecker<>(VertexProperty.class)),
                                    __.has(partitionKey, P.within(new ArrayList<>(readPartitions))), __.__())
                            .filter(new PartitionKeyHider());
                    TraversalHelper.insertTraversal(step, choose.asAdmin(), traversal);
                }
            } else if (step.getReturnType() == PropertyType.VALUE) {
                // use choose() to determine if the values() step is called on a Vertex to get a VertexProperty
                // if not, pass it through otherwise explode g.V().values() to g.V().properties().has().value()
                final Traversal choose = __.choose(__.filter(new TypeChecker<>(Vertex.class)),
                        __.properties(step.getPropertyKeys())
                                .has(partitionKey, P.within(new ArrayList<>(readPartitions)))
                                .filter(new PartitionKeyHider()).value(),
                        __.__().filter(new PartitionKeyHider()));
                TraversalHelper.insertTraversal(step, choose.asAdmin(), traversal);
                traversal.removeStep(step);
            } else {
                throw new IllegalStateException(String.format("%s is not accounting for a particular %s %s",
                        PartitionStrategy.class.getSimpleName(), PropertyType.class.toString(),
                        step.getReturnType()));
            }
        });

        final List<PropertyMapStep> propertyMapSteps = TraversalHelper
                .getStepsOfAssignableClass(PropertyMapStep.class, traversal);
        propertyMapSteps.forEach(step -> {
            // check length first because keyExists will return true otherwise
            if (step.getPropertyKeys().length > 0
                    && ElementHelper.keyExists(partitionKey, step.getPropertyKeys()))
                throw new IllegalStateException("Cannot explicitly request the partitionKey in the traversal");

            if (step.getReturnType() == PropertyType.PROPERTY) {
                // via map() filter out properties that aren't in the partition if it is a PropertyVertex,
                // otherwise just let them pass through
                TraversalHelper.insertAfterStep(new LambdaMapStep<>(traversal, new MapPropertiesFilter()), step,
                        traversal);
            } else if (step.getReturnType() == PropertyType.VALUE) {
                // as this is a value map, replace that step with propertiesMap() that returns PropertyType.VALUE.
                // from there, add the filter as shown above and then unwrap the properties as they would have
                // been done under valueMap()
                final PropertyMapStep propertyMapStep = new PropertyMapStep(traversal, step.isIncludeTokens(),
                        PropertyType.PROPERTY, step.getPropertyKeys());
                TraversalHelper.replaceStep(step, propertyMapStep, traversal);

                final LambdaMapStep mapPropertiesFilterStep = new LambdaMapStep<>(traversal,
                        new MapPropertiesFilter());
                TraversalHelper.insertAfterStep(mapPropertiesFilterStep, propertyMapStep, traversal);
                TraversalHelper.insertAfterStep(new LambdaMapStep<>(traversal, new MapPropertiesConverter()),
                        mapPropertiesFilterStep, traversal);
            } else {
                throw new IllegalStateException(String.format("%s is not accounting for a particular %s %s",
                        PartitionStrategy.class.getSimpleName(), PropertyType.class.toString(),
                        step.getReturnType()));
            }
        });
    }

    final List<Step> stepsToInsertPropertyMutations = traversal.getSteps().stream()
            .filter(step -> step instanceof AddEdgeStep || step instanceof AddVertexStep
                    || step instanceof AddVertexStartStep
                    || (includeMetaProperties && step instanceof AddPropertyStep))
            .collect(Collectors.toList());

    stepsToInsertPropertyMutations.forEach(step -> {
        // note that with AddPropertyStep we just add the partition key/value regardless of whether this
        // ends up being a Vertex or not.  AddPropertyStep currently chooses to simply not bother
        // to use the additional "property mutations" if the Element being mutated is an Edge or
        // VertexProperty
        ((Mutating) step).addPropertyMutations(partitionKey, writePartition);

        if (includeMetaProperties) {
            // GraphTraversal folds g.addV().property('k','v') to just AddVertexStep/AddVertexStartStep so this
            // has to be exploded back to g.addV().property(cardinality, 'k','v','partition','A')
            if (step instanceof AddVertexStartStep || step instanceof AddVertexStep) {
                final Parameters parameters = ((Parameterizing) step).getParameters();
                final Map<Object, List<Object>> params = parameters.getRaw();
                params.forEach((k, v) -> {
                    final List<Step> addPropertyStepsToAppend = new ArrayList<>(v.size());
                    final VertexProperty.Cardinality cardinality = vertexFeatures.getCardinality((String) k);
                    v.forEach(o -> {
                        final AddPropertyStep addPropertyStep = new AddPropertyStep(traversal, cardinality, k,
                                o);
                        addPropertyStep.addPropertyMutations(partitionKey, writePartition);
                        addPropertyStepsToAppend.add(addPropertyStep);

                        // need to remove the parameter from the AddVertex/StartStep because it's now being added
                        // via the AddPropertyStep
                        parameters.remove(k);
                    });

                    Collections.reverse(addPropertyStepsToAppend);
                    addPropertyStepsToAppend.forEach(s -> TraversalHelper.insertAfterStep(s, step, traversal));
                });
            }
        }
    });
}

From source file:com.qwazr.utils.server.InFileSessionPersistenceManager.java

@Override
public void persistSessions(String deploymentName, Map<String, PersistentSession> sessionData) {
    if (sessionData == null)
        return;
    final File deploymentDir = new File(sessionDir, deploymentName);
    if (!deploymentDir.exists())
        deploymentDir.mkdir();
    if (!deploymentDir.exists() && !deploymentDir.isDirectory()) {
        if (logger.isErrorEnabled())
            logger.error("Cannot create the session directory " + deploymentDir + ": persistence aborted.");
        return;
    }
    sessionData.forEach(
            (sessionId, persistentSession) -> writeSession(deploymentDir, sessionId, persistentSession));
}