Example usage for java.util Map forEach

List of usage examples for java.util Map forEach

Introduction

This page collects usage examples for java.util.Map.forEach, drawn from open-source projects.

Prototype

default void forEach(BiConsumer<? super K, ? super V> action) 

Document

Performs the given action for each entry in this map until all entries have been processed or the action throws an exception. Exceptions thrown by the action are relayed to the caller.
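
As a quick, self-contained illustration (not taken from any of the projects below), a minimal sketch of the method on a plain HashMap:

import java.util.HashMap;
import java.util.Map;

public class MapForEachDemo {
    public static void main(String[] args) {
        Map<String, Integer> scores = new HashMap<>();
        scores.put("alice", 3);
        scores.put("bob", 5);
        // The BiConsumer receives each key and its value in turn.
        scores.forEach((name, score) -> System.out.println(name + " -> " + score));
    }
}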

Usage

From source file:org.apache.samza.execution.JobNodeConfigurationGenerator.java

static Config mergeConfig(Map<String, String> originalConfig, Map<String, String> generatedConfig) {
    validateJobConfigs(originalConfig, generatedConfig);
    Map<String, String> mergedConfig = new HashMap<>(generatedConfig);

    originalConfig.forEach((k, v) -> {
        if (generatedConfig.containsKey(k) && !Objects.equals(generatedConfig.get(k), v)) {
            LOG.info("Replacing generated config for key: {} value: {} with original config value: {}", k,
                    generatedConfig.get(k), v);
        }
        mergedConfig.put(k, v);
    });

    return Util.rewriteConfig(new MapConfig(mergedConfig));
}
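
A minimal sketch (plain HashMaps, outside of Samza) of the merge semantics above, where original values win over generated ones:

Map<String, String> generated = new HashMap<>();
generated.put("job.name", "generated-name");
Map<String, String> original = new HashMap<>();
original.put("job.name", "original-name");

Map<String, String> merged = new HashMap<>(generated);
original.forEach(merged::put); // equivalent to merged.putAll(original), minus the logging
System.out.println(merged.get("job.name")); // prints "original-name"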

From source file:org.openecomp.sdc.be.dao.cassandra.schema.SdcSchemaBuilder.java

public static boolean deleteSchema() {
    Cluster cluster = null;
    Session session = null;
    try {
        log.info("delete Data from Cassandra.");
        cluster = createCluster();
        if (cluster == null) {
            return false;
        }
        session = cluster.connect();
        List<KeyspaceMetadata> keyspacesMetadataFromCassandra = cluster.getMetadata().getKeyspaces();
        if (keyspacesMetadataFromCassandra == null) {
            log.debug("failed to retrieve the list of keyspaces from Cassandra");
            return false;
        }
        log.debug("retrieved Cassandra metadata.");
        Map<String, Map<String, List<String>>> cassandraMetadata = parseKeyspaceMetadata(
                keyspacesMetadataFromCassandra);
        cassandraMetadata.forEach((k, v) -> {
            if (AuditingTypesConstants.TITAN_KEYSPACE.equals(k)) {
                // left empty in the original source
                // session.execute("")
            } else if (AuditingTypesConstants.ARTIFACT_KEYSPACE.equals(k)) {
                // left empty in the original source
            } else if (AuditingTypesConstants.AUDIT_KEYSPACE.equals(k)) {
                // left empty in the original source
            }
        });

        System.out.println(cassandraMetadata);
        return true;
    } catch (Exception e) {
        log.info("deleteSchema failed with exception.", e);
    } finally {
        if (session != null) {
            session.close();
        }
        if (cluster != null) {
            cluster.close();
        }

    }

    return false;
}

From source file:com.cenrise.test.azkaban.PropsUtils.java

/**
 * @return the difference between oldProps and newProps.
 */
public static String getPropertyDiff(Props oldProps, Props newProps) {

    final StringBuilder builder = new StringBuilder();

    // Neither props object may be null during the comparison below.
    if (oldProps == null) {
        oldProps = new Props();
    }

    if (newProps == null) {
        newProps = new Props();
    }

    final MapDifference<String, String> md = Maps.difference(toStringMap(oldProps, false),
            toStringMap(newProps, false));

    final Map<String, String> newlyCreatedProperty = md.entriesOnlyOnRight();
    if (!newlyCreatedProperty.isEmpty()) {
        builder.append("Newly created Properties: ");
        newlyCreatedProperty.forEach((k, v) -> {
            builder.append("[ " + k + ", " + v + "], ");
        });
        builder.append("\n");
    }

    final Map<String, String> deletedProperty = md.entriesOnlyOnLeft();
    if (!deletedProperty.isEmpty()) {
        builder.append("Deleted Properties: ");
        deletedProperty.forEach((k, v) -> {
            builder.append("[ " + k + ", " + v + "], ");
        });
        builder.append("\n");
    }

    final Map<String, MapDifference.ValueDifference<String>> diffProperties = md.entriesDiffering();
    if (!diffProperties.isEmpty()) {
        builder.append("Modified Properties: ");
        diffProperties.forEach((k, v) -> {
            builder.append("[ " + k + ", " + v.leftValue() + "-->" + v.rightValue() + "], ");
        });
    }
    return builder.toString();
}
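
The example builds on Guava's Maps.difference. A self-contained sketch of the same three-way comparison with plain string maps (Java 9+ for Map.of):

import com.google.common.collect.MapDifference;
import com.google.common.collect.Maps;
import java.util.Map;

Map<String, String> left = Map.of("a", "1", "b", "2");
Map<String, String> right = Map.of("b", "3", "c", "4");
MapDifference<String, String> md = Maps.difference(left, right);

md.entriesOnlyOnRight().forEach((k, v) -> System.out.println("created: " + k + "=" + v));  // created: c=4
md.entriesOnlyOnLeft().forEach((k, v) -> System.out.println("deleted: " + k + "=" + v));   // deleted: a=1
md.entriesDiffering().forEach(
        (k, v) -> System.out.println("modified: " + k + " " + v.leftValue() + "-->" + v.rightValue())); // modified: b 2-->3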

From source file:com.wrmsr.wava.basic.BasicLoopInfo.java

public static Map<Name, Name> getLoopParents(SetMultimap<Name, Name> loopContents) {
    Map<Name, Name> loopParents = new HashMap<>();
    Map<Name, Set<Name>> map = loopContents.keySet().stream()
            .collect(toHashMap(identity(), loop -> new HashSet<>()));
    for (Name cur : loopContents.keySet()) {
        map.get(cur).add(ENTRY_NAME);
        Set<Name> children = loopContents.get(cur);
        for (Name child : children) {
            if (!cur.equals(child) && loopContents.containsKey(child)) {
                map.get(child).add(cur);
            }
        }
    }
    Map<Name, Integer> loopDepths = map.entrySet().stream()
            .collect(toHashMap(entry -> entry.getKey(), entry -> entry.getValue().size()));
    loopDepths.put(ENTRY_NAME, 0);
    int maxDepth = loopDepths.values().stream().mapToInt(Integer::intValue).max().orElse(0);
    List<List<Name>> depthLoopsLists = IntStream.range(0, maxDepth + 1).boxed()
            .<List<Name>>map(i -> new ArrayList<>()).collect(toArrayList());
    loopDepths.forEach((loop, depth) -> depthLoopsLists.get(depth).add(loop));
    Set<Name> seen = new HashSet<>();
    for (int depth = 1; depth < depthLoopsLists.size(); ++depth) {
        for (Name loop : depthLoopsLists.get(depth)) {
            Name parent = getOnlyElement(Sets.difference(map.get(loop), seen));
            checkState(loopDepths.get(parent) == depth - 1);
            loopParents.put(loop, parent);
        }
        seen.addAll(depthLoopsLists.get(depth - 1));
    }
    checkState(loopContents.keySet().equals(loopParents.keySet()));
    return loopParents;
}

From source file:org.apache.hadoop.hive.metastore.tools.BenchmarkTool.java

static void saveData(Map<String, DescriptiveStatistics> result, String location, TimeUnit scale)
        throws IOException {
    Path dir = Paths.get(location);
    if (!dir.toFile().exists()) {
        LOG.debug("creating directory {}", location);
        Files.createDirectories(dir);
    } else if (!dir.toFile().isDirectory()) {
        LOG.error("{} should be a directory", location);
    }

    // Create a new file for each benchmark and dump raw data to it.
    result.forEach((name, data) -> saveDataFile(location, name, data, scale));
}

From source file:com.hurence.logisland.connect.opc.CommonUtils.java

/**
 * Maps an opc data to a kafka connect object.
 *
 * @param opcData         the read data
 * @param timestamp       the data timestamp
 * @param meta            the tag information
 * @param schema          the schema of the resulting struct
 * @param valueSchema     the schema and value of the data point being mapped
 * @param additionalProps additional properties to copy into the struct
 * @return the populated kafka connect Struct
 */
public static Struct mapToConnectObject(OpcData opcData, Instant timestamp, TagInfo meta, Schema schema,
        SchemaAndValue valueSchema, Map<String, Object> additionalProps) {
    Struct value = new Struct(schema).put(OpcRecordFields.SAMPLED_TIMESTAMP, timestamp.toEpochMilli())
            .put(OpcRecordFields.SOURCE_TIMESTAMP, opcData.getTimestamp().toEpochMilli())
            .put(OpcRecordFields.TAG_ID, opcData.getTag())
            .put(OpcRecordFields.QUALITY, opcData.getQuality().name())
            .put(OpcRecordFields.SAMPLING_RATE, meta.getSamplingInterval().toMillis());
    additionalProps.forEach(value::put);

    if (valueSchema.value() != null) {
        value = value.put(OpcRecordFields.VALUE, valueSchema.value());
    }
    if (opcData.getOperationStatus().getLevel().compareTo(OperationStatus.Level.INFO) > 0) {
        value.put(OpcRecordFields.ERROR_CODE, opcData.getOperationStatus().getCode());
        if (opcData.getOperationStatus().getMessageDetail().isPresent()) {
            value.put(OpcRecordFields.ERROR_REASON, opcData.getOperationStatus().getMessageDetail().get());
        }
    }
    return value;
}
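
Note the line additionalProps.forEach(value::put): any two-argument method whose parameters match the key and value types can stand in for the BiConsumer. A minimal sketch of the same idiom, copying one map into another:

Map<String, Object> extras = new HashMap<>();
extras.put("unit", "celsius");
Map<String, Object> target = new HashMap<>();
extras.forEach(target::put); // target::put serves as the BiConsumer<String, Object>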

From source file:org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil.java

private static Map<TableName, List<String>> copyTableCFsMap(Map<TableName, List<String>> preTableCfs) {
    Map<TableName, List<String>> newTableCfs = new HashMap<>();
    preTableCfs.forEach((table, cfs) -> newTableCfs.put(table, cfs != null ? Lists.newArrayList(cfs) : null));
    return newTableCfs;
}
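
The forEach-based copy also tolerates null values, which a stream with Collectors.toMap does not (toMap throws a NullPointerException on a null value). A minimal sketch of the difference, with hypothetical table names:

Map<String, List<String>> src = new HashMap<>();
src.put("table1", new ArrayList<>(Arrays.asList("cf1")));
src.put("table2", null);

Map<String, List<String>> copy = new HashMap<>();
src.forEach((k, v) -> copy.put(k, v != null ? new ArrayList<>(v) : null)); // fine
// src.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))
// would throw a NullPointerException because of the null value for "table2".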

From source file:com.linecorp.armeria.server.docs.ThriftDocString.java

private static void traverseChildren(ImmutableMap.Builder<String, String> docStrings, String prefix,
        String delimiter, Object node) {
    if (node instanceof Map) {
        @SuppressWarnings("unchecked")
        final Map<String, Object> map = (Map<String, Object>) node;
        final String name = (String) map.get("name");
        final String doc = (String) map.get("doc");
        String childPrefix;
        if (name != null) {
            childPrefix = (prefix != null ? prefix : "") + delimiter + name;
            if (doc != null) {
                docStrings.put(childPrefix, doc);
            }
        } else {
            childPrefix = prefix;
        }
        map.forEach((key, value) -> traverseChildren(docStrings, childPrefix, DELIM, value));
    } else if (node instanceof Iterable) {
        @SuppressWarnings("unchecked")
        final Iterable<Object> children = (Iterable<Object>) node;
        children.forEach(child -> traverseChildren(docStrings, prefix, DELIM, child));
    }
}
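
forEach composes naturally with recursion here: each nested map re-enters traverseChildren for its own children. A reduced sketch of the same walk, assuming a nested map with string leaves:

static void walk(String prefix, Object node) {
    if (node instanceof Map) {
        @SuppressWarnings("unchecked")
        Map<String, Object> map = (Map<String, Object>) node;
        map.forEach((key, value) -> walk(prefix + "/" + key, value));
    } else {
        System.out.println(prefix + " = " + node); // leaf value
    }
}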

From source file:org.codice.ddf.spatial.ogc.wps.process.endpoint.Validator.java

/**
 * @param inputDatas        the process inputs to validate
 * @param inputDescriptions the descriptions the inputs are validated against
 * @throws WpsException if an unexpected input is passed or an occurrence constraint is violated
 */
public static void validateProcessInputs(List<Data> inputDatas, List<DataDescription> inputDescriptions) {
    // bin the inputs by id
    Map<String, List<Data>> inputs = inputDatas.stream().collect(Collectors.groupingBy(Data::getId));

    // verify no unexpected inputs are being passed
    Map<String, DataDescription> inputDesc = inputDescriptions.stream()
            .collect(Collectors.toMap(DataDescription::getId, Function.identity()));
    inputs.keySet().forEach(key -> {
        if (!inputDesc.containsKey(key)) {
            throw new WpsException(
                    "One or more of the input identifiers passed does not match with any of the input identifiers of this process.",
                    "NoSuchInput", key);
        }
    });

    inputDesc.forEach((key, value) -> validateProcessInputsMinMaxOccurs(inputs.get(key), value));
}
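
This relies on the behavior noted in the prototype section: iteration stops as soon as the action throws, so the exception for the first unknown input propagates straight to the caller. A minimal sketch of the same pattern with a standard exception:

Map<String, String> inputs = Map.of("width", "10", "bogus", "x");
Set<String> known = Set.of("width", "height");
inputs.forEach((key, value) -> {
    if (!known.contains(key)) {
        // stops the iteration; remaining entries are not visited
        throw new IllegalArgumentException("No such input: " + key);
    }
});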

From source file:com.asakusafw.workflow.executor.TaskExecutors.java

private static String encodeBatchArguments(Map<String, String> arguments) {
    if (arguments.isEmpty()) {
        // NOTE: never empty for windows
        return ",";
    }
    StringBuilder buf = new StringBuilder();
    arguments.forEach((k, v) -> {
        if (buf.length() != 0) {
            buf.append(',');
        }
        escape(buf, k);
        buf.append('=');
        escape(buf, v);
    });
    return buf.toString();
}
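
For comparison, the same comma-joined encoding can be written with a stream and Collectors.joining instead of forEach plus a StringBuilder. A sketch that omits escaping (the escape method above is the project's own helper):

static String encodeSimple(Map<String, String> arguments) {
    if (arguments.isEmpty()) {
        return ",";
    }
    return arguments.entrySet().stream()
            .map(e -> e.getKey() + "=" + e.getValue()) // no escaping, unlike the original
            .collect(java.util.stream.Collectors.joining(","));
}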