Example usage for java.util Map putIfAbsent

List of usage examples for java.util Map putIfAbsent

Introduction

In this page you can find the example usage for java.util Map putIfAbsent.

Prototype

default V putIfAbsent(K key, V value) 

Source Link

Document

If the specified key is not already associated with a value (or is mapped to null), associates it with the given value and returns null; otherwise returns the current value.

Usage

From source file:org.apdplat.superword.tools.MySQLUtils.java

/**
 * Loads word pronunciations for the given dictionary from the word_pronunciation table
 * and groups the matching words by pronunciation.
 *
 * @param dictionary dictionary name used to filter rows
 * @param words      only rows whose word is contained in this set are kept
 * @return map from pronunciation to the set of words sharing it; empty when no
 *         database connection is available or the query fails
 */
public static Map<String, Set<String>> getAllWordPronunciation(String dictionary, Set<Word> words) {
    Map<String, Set<String>> map = new HashMap<>();
    String sql = "select word, pronunciation from word_pronunciation where dictionary=?";
    Connection con = getConnection();
    if (con == null) {
        // no connection available: return an empty result instead of failing
        return map;
    }
    PreparedStatement pst = null;
    ResultSet rs = null;
    try {
        pst = con.prepareStatement(sql);
        pst.setString(1, dictionary);
        rs = pst.executeQuery();
        while (rs.next()) {
            String word = rs.getString(1);
            String pronunciation = rs.getString(2);
            if (StringUtils.isNotBlank(word) && StringUtils.isNotBlank(pronunciation)
                    && words.contains(new Word(word, ""))) {
                for (String item : extractPurePronunciation(pronunciation)) {
                    // computeIfAbsent replaces the raw-typed
                    // putIfAbsent(item, new HashSet()) + get(item) pair
                    map.computeIfAbsent(item, k -> new HashSet<>()).add(word);
                }
            }
        }
    } catch (SQLException e) {
        // original message was mojibake ("??"); log something actionable instead
        LOG.error("Failed to query word pronunciations for dictionary " + dictionary, e);
    } finally {
        close(con, pst, rs);
    }
    return map;
}

From source file:org.openhab.binding.yamahareceiver.internal.protocol.xml.DeviceDescriptorXML.java

/**
 * Builds a lookup of feature descriptor elements keyed by the converted value of
 * their YNC_Tag attribute.
 *
 * @param descNode  descriptor node to scan; may be null, yielding an empty map
 * @param funcValue required value of the "Func" attribute on matching Menu elements
 * @param converter converts a YNC_Tag string to a key; a null result skips the element
 * @param factory   produces the map value from the key and the matching element
 * @return map of converted keys to factory-built values; first occurrence wins
 */
private <T, V> Map<T, V> buildFeatureLookup(Node descNode, String funcValue, Function<String, T> converter,
        BiFunction<T, Element, V> factory) {
    Map<T, V> groupedElements = new HashMap<>();

    if (descNode == null) {
        return groupedElements;
    }

    getChildElements(descNode)
            .filter(el -> "Menu".equals(el.getTagName()) && funcValue.equals(el.getAttribute("Func")))
            .forEach(el -> {
                String yncTag = el.getAttribute("YNC_Tag");
                if (!StringUtils.isNotEmpty(yncTag)) {
                    return; // element carries no YNC_Tag; nothing to index
                }

                T key = converter.apply(yncTag);
                if (key == null) {
                    return; // converter rejected this tag
                }

                // a YNC_Tag value might appear more than once (e.g. Zone B has Main_Zone tag);
                // keep only the first occurrence
                groupedElements.putIfAbsent(key, factory.apply(key, el));
            });

    return groupedElements;
}

From source file:com.kantenkugel.discordbot.jdocparser.JDocParser.java

/**
 * Extracts inherited methods from a Javadoc "Methods inherited from class ..."
 * summary section.
 *
 * @param summaryAnchor anchor element inside the method-summary section; may be null,
 *                      in which case an empty map is returned
 * @return map from lower-cased inherited method name to the lower-cased simple name
 *         of the parent it was inherited from (first-seen parent wins)
 * @throws RuntimeException if the HTML does not match the expected Javadoc structure
 */
private static Map<String, String> getInheritedMethods(Element summaryAnchor) {
    Map<String, String> inherited = new HashMap<>();
    if (summaryAnchor == null)
        return inherited;
    // move up to the container that holds the per-parent "inherited from" anchors
    summaryAnchor = summaryAnchor.parent();
    Elements inheritAnchors = summaryAnchor.select("a[name^=\"methods.inherited.from.class\"]");
    for (Element inheritAnchor : inheritAnchors) {
        // expected structure per anchor: exactly an <h3> header plus a <code> list of links
        if (inheritAnchor.siblingElements().size() != 2)
            throw new RuntimeException("Got unexpected html while parsing inherited methods from class "
                    + inheritAnchor.attr("name"));
        Element next = inheritAnchor.nextElementSibling();
        if (!next.tagName().equals("h3"))
            throw new RuntimeException("Got unexpected html while parsing inherited methods from class "
                    + inheritAnchor.attr("name"));
        // the last child of the <h3> should be the link naming the parent class;
        // if it is not an anchor, this section is skipped rather than failed
        Element sub = next.children().last();
        if (sub == null || !sub.tagName().equals("a"))
            continue;
        String parent = sub.text().toLowerCase();
        next = next.nextElementSibling();
        if (!next.tagName().equals("code"))
            throw new RuntimeException("Got unexpected html while parsing inherited methods from class "
                    + inheritAnchor.attr("name"));
        // each <a> inside the <code> block names one inherited method;
        // putIfAbsent keeps the first parent seen for a duplicated method name
        for (sub = next.children().first(); sub != null; sub = sub.nextElementSibling()) {
            if (sub.tagName().equals("a")) {
                inherited.putIfAbsent(sub.text().toLowerCase(), parent);
            }
        }
    }
    return inherited;
}

From source file:com.netflix.spinnaker.halyard.deploy.provider.v1.kubernetes.KubernetesProviderInterface.java

/**
 * Creates (or replaces) a Kubernetes secret in the given namespace whose entries are
 * the base64-encoded contents of the given files, keyed by file name.
 *
 * @param details    deployment details providing the account used to build the client
 * @param files      paths of files whose contents become the secret's data entries
 * @param secretName name of the secret to create or replace
 * @param namespace  namespace in which the secret lives (created if missing)
 * @throws HalException if any file cannot be read
 */
private void upsertSecret(AccountDeploymentDetails<KubernetesAccount> details, Set<String> files,
        String secretName, String namespace) {
    KubernetesClient client = getClient(details.getAccount());
    createNamespace(client, namespace);

    // delete any existing secret so the create below acts as a replace
    if (client.secrets().inNamespace(namespace).withName(secretName).get() != null) {
        client.secrets().inNamespace(namespace).withName(secretName).delete();
    }

    Map<String, String> secretContents = new HashMap<>();

    files.forEach(s -> {
        File f = new File(s);
        // try-with-resources: the original leaked the FileInputStream on every call
        try (FileInputStream in = new FileInputStream(f)) {
            String name = f.getName();
            String data = new String(Base64.getEncoder().encode(IOUtils.toString(in).getBytes()));
            secretContents.putIfAbsent(name, data);
        } catch (IOException e) {
            throw new HalException(
                    new ConfigProblemBuilder(Severity.ERROR, "Unable to read contents of \"" + s + "\": " + e)
                            .build());
        }
    });

    SecretBuilder secretBuilder = new SecretBuilder();
    secretBuilder = secretBuilder.withNewMetadata().withName(secretName).withNamespace(namespace).endMetadata()
            .withData(secretContents);

    log.info("Staging secret " + secretName + " in namespace " + namespace + " with contents " + files);

    client.secrets().inNamespace(namespace).create(secretBuilder.build());
}

From source file:org.onosproject.influxdbmetrics.DefaultInfluxDbMetricsRetriever.java

/**
 * Returns all metric names reported by InfluxDB, grouped by the node they are
 * bound to.
 *
 * @return map from node id to the set of metric names reported for that node
 */
protected Map<NodeId, Set<String>> allMetricNames() {
    Map<NodeId, Set<String>> metricNameMap = Maps.newHashMap();
    Query query = new Query("SHOW MEASUREMENTS", database);
    List<QueryResult.Result> results = influxDB.query(query).getResults();
    List<List<Object>> rawMetricNames = results.get(0).getSeries().get(0).getValues();

    rawMetricNames.forEach(rawMetricName -> {
        // strip once and reuse for both the node id and the metric name
        // (the original recomputed strip(rawMetricName.toString()) twice)
        String measurement = strip(rawMetricName.toString());
        String nodeIdStr = getNodeId(measurement);

        if (nodeIdStr != null) {
            NodeId nodeId = NodeId.nodeId(nodeIdStr);
            String metricName = getMetricName(measurement);

            // computeIfAbsent replaces the containsKey + putIfAbsent + get sequence;
            // as before, the set is created even when metricName turns out to be null
            Set<String> names = metricNameMap.computeIfAbsent(nodeId, id -> Sets.newHashSet());
            if (metricName != null) {
                names.add(metricName);
            }
        }
    });

    return metricNameMap;
}

From source file:org.apache.gobblin.service.modules.orchestration.Orchestrator.java

/**
 * Compiles a FlowSpec into a job-execution DAG and submits each job to its selected
 * executor, emitting timing events and success/failure meters along the way.
 *
 * @param spec the spec to orchestrate; must be a FlowSpec
 * @throws Exception         if waiting for catalog initialization is interrupted or
 *                           spec compilation fails
 * @throws RuntimeException  if the spec is not a FlowSpec
 */
public void orchestrate(Spec spec) throws Exception {
    // Add below waiting because TopologyCatalog and FlowCatalog service can be launched at the same time
    this.topologyCatalog.get().getInitComplete().await();

    long startTime = System.nanoTime();
    if (spec instanceof FlowSpec) {
        Map<String, String> flowMetadata = getFlowMetadata((FlowSpec) spec);
        // timing event is optional: only emitted when an event submitter is configured
        TimingEvent flowCompilationTimer = this.eventSubmitter.isPresent()
                ? this.eventSubmitter.get().getTimingEvent(TimingEvent.FlowTimings.FLOW_COMPILED)
                : null;
        Dag<JobExecutionPlan> jobExecutionPlanDag = specCompiler.compileFlow(spec);

        // an empty/null DAG means no executor could be selected for this flow
        if (jobExecutionPlanDag == null || jobExecutionPlanDag.isEmpty()) {
            Instrumented.markMeter(this.flowOrchestrationFailedMeter);
            _log.warn("Cannot determine an executor to run on for Spec: " + spec);
            return;
        }

        // propagate the flow execution id from the first compiled job into the flow metadata,
        // unless the metadata already carries one
        flowMetadata.putIfAbsent(TimingEvent.FlowEventConstants.FLOW_EXECUTION_ID_FIELD,
                jobExecutionPlanDag.getNodes().get(0).getValue().getJobSpec().getConfigAsProperties()
                        .getProperty(ConfigurationKeys.FLOW_EXECUTION_ID_KEY));

        if (flowCompilationTimer != null) {
            flowCompilationTimer.stop(flowMetadata);
        }

        // Schedule all compiled JobSpecs on their respective Executor
        for (Dag.DagNode<JobExecutionPlan> dagNode : jobExecutionPlanDag.getNodes()) {
            JobExecutionPlan jobExecutionPlan = dagNode.getValue();

            // Run this spec on selected executor
            SpecProducer producer = null;
            try {
                producer = jobExecutionPlan.getSpecExecutor().getProducer().get();
                Spec jobSpec = jobExecutionPlan.getJobSpec();

                if (!((JobSpec) jobSpec).getConfig().hasPath(ConfigurationKeys.FLOW_EXECUTION_ID_KEY)) {
                    _log.warn("JobSpec does not contain flowExecutionId.");
                }

                Map<String, String> jobMetadata = getJobMetadata(flowMetadata, jobExecutionPlan);
                _log.info(String.format("Going to orchestrate JobSpec: %s on Executor: %s", jobSpec, producer));

                TimingEvent jobOrchestrationTimer = this.eventSubmitter.isPresent()
                        ? this.eventSubmitter.get().getTimingEvent(TimingEvent.LauncherTimings.JOB_ORCHESTRATED)
                        : null;

                producer.addSpec(jobSpec);

                if (jobOrchestrationTimer != null) {
                    jobOrchestrationTimer.stop(jobMetadata);
                }
            } catch (Exception e) {
                // a failed job submission is logged but does not abort the remaining jobs
                _log.error("Cannot successfully setup spec: " + jobExecutionPlan.getJobSpec() + " on executor: "
                        + producer + " for flow: " + spec, e);
            }
        }
    } else {
        Instrumented.markMeter(this.flowOrchestrationFailedMeter);
        throw new RuntimeException("Spec not of type FlowSpec, cannot orchestrate: " + spec);
    }
    // NOTE(review): the success meter/timer fire even when individual job submissions
    // above failed — only compilation failure or a non-FlowSpec counts as flow failure
    Instrumented.markMeter(this.flowOrchestrationSuccessFulMeter);
    Instrumented.updateTimer(this.flowOrchestrationTimer, System.nanoTime() - startTime, TimeUnit.NANOSECONDS);
}

From source file:org.codice.ddf.registry.schemabindings.RegistryPackageWebConverter.java

/**
 * Converts a registry service into a map representation and appends it to the
 * service list inside the given registry-object map.
 *
 * @param service               service to convert; ignored when null
 * @param registryObjectListMap target map; a list is created under SERVICE_KEY on first use
 */
private static void putRegistryService(ServiceType service, Map<String, Object> registryObjectListMap) {
    if (service == null) {
        return;
    }
    Map<String, Object> serviceMap = new HashMap<>();

    putGeneralInfo(service, serviceMap);

    if (service.isSetServiceBinding()) {
        putServiceBindings(service.getServiceBinding(), serviceMap);
    }

    // only register the service when the conversion produced something
    if (!serviceMap.isEmpty()) {
        // computeIfAbsent both creates the list on first use and returns it,
        // replacing the putIfAbsent + get pair with its raw-typed List cast
        @SuppressWarnings("unchecked")
        List<Map<String, Object>> services = (List<Map<String, Object>>) registryObjectListMap
                .computeIfAbsent(SERVICE_KEY, key -> new ArrayList<Map<String, Object>>());
        services.add(serviceMap);
    }

}

From source file:fr.cph.stock.entities.Portfolio.java

/**
 * Groups the portfolio's equities by sector, sorted by sector name.
 *
 * <p>Funds are grouped under Constants.FUND and equities without a sector under
 * Constants.UNKNOWN.
 *
 * @return sorted map from sector name to the equities in that sector
 */
protected Map<String, List<Equity>> getSectorByCompanies() {
    final Map<String, List<Equity>> map = new TreeMap<>();
    for (final Equity equity : getEquities()) {
        // pick the bucket key, then group with a single computeIfAbsent —
        // this collapses the original's three copies of the
        // getOrDefault / add / putIfAbsent pattern
        final String key;
        if (equity.getCompany().getFund()) {
            key = Constants.FUND;
        } else {
            final String sector = equity.getCurrentSector();
            key = StringUtils.isEmpty(sector) ? Constants.UNKNOWN : sector;
        }
        map.computeIfAbsent(key, k -> new ArrayList<>()).add(equity);
    }
    return map;
}

From source file:io.klerch.alexa.state.handler.AWSS3StateHandler.java

/**
 * {@inheritDoc}/*from  w  w  w. j a  va2s  .co  m*/
 */
@Override
public Map<String, AlexaStateObject> readValues(final Map<String, AlexaScope> idsInScope)
        throws AlexaStateException {
    final Map<String, AlexaStateObject> stateObjectMap = new HashMap<>();
    // first read all the session-scoped items and put to result map
    stateObjectMap.putAll(super.readValues(idsInScope));

    idsInScope.forEach((id, scope) -> {
        if (scope.isIn(AlexaScope.USER, AlexaScope.APPLICATION)) {
            final String filePath = AlexaScope.USER.includes(scope) ? getUserScopedFilePath(id)
                    : getAppScopedFilePath(id);
            try {
                // get S3 file
                getS3FileContentsAsString(filePath)
                        // wrap its contents in state object
                        .map(fileContents -> new AlexaStateObject(id, fileContents, scope))
                        // add to result map
                        .ifPresent(stateObject -> stateObjectMap.putIfAbsent(id, stateObject));
            } catch (final AlexaStateException | AmazonS3Exception e) {
                // we are fine with an exception likely caused by file (state) not exists
                log.warn("Could not read from '" + filePath + "'.", e);
            }
        }
    });
    return stateObjectMap;
}

From source file:io.hops.hopsworks.common.util.HopsUtils.java

/**
 * Merge system and user defined configuration properties based on the replacement
 * policy of each property.
 *
 * @param hopsworksParams system/default properties
 * @param userParameters  user defined properties parsed by parseSparkProperties(String sparkProps)
 * @param isJob           when true, user properties with no system counterpart are also
 *                        copied verbatim into the result
 * @return a map with the replacement pattern and value for each property, plus a
 *         "spark_user_defined_properties" entry holding the unmatched user properties
 *         rendered as a JSON-style fragment
 */
public static Map<String, String> mergeHopsworksAndUserParams(Map<String, ConfigProperty> hopsworksParams,
        Map<String, String> userParameters, boolean isJob) {
    Map<String, String> finalParams = new HashMap<>();
    Set<String> notReplacedUserParams = new HashSet<>();

    // user values override matching system properties; unmatched ones are collected
    for (Map.Entry<String, String> userParam : userParameters.entrySet()) {
        if (hopsworksParams.containsKey(userParam.getKey())) {
            ConfigProperty prop = hopsworksParams.get(userParam.getKey());
            prop.replaceValue(userParam.getValue());
            finalParams.put(prop.getReplacementPattern(), prop.getValue());
        } else {
            notReplacedUserParams.add(userParam.getKey());
            if (isJob) {
                finalParams.put(userParam.getKey(), userParam.getValue());
            }
        }
    }

    String userParamsStr = "";
    if (!notReplacedUserParams.isEmpty()) {
        // render as: ,\n"key": "value",\n"key2": "value2"\n
        StringBuilder userParamsSb = new StringBuilder();
        userParamsSb.append(",\n");
        notReplacedUserParams.forEach(p -> userParamsSb.append("\"").append(p).append("\": ")
                .append("\"").append(userParameters.get(p)).append("\",\n"));

        // the builder always ends with ",\n" here; drop it and terminate with a single
        // newline. The original mixed trim() with the UNTRIMMED string's length in
        // substring(), which produced the right result only by accident.
        userParamsSb.setLength(userParamsSb.length() - 2);
        userParamsSb.append('\n');
        userParamsStr = userParamsSb.toString();
    }
    finalParams.put("spark_user_defined_properties", userParamsStr);

    // finally add every system property the user did not override
    for (ConfigProperty configProperty : hopsworksParams.values()) {
        finalParams.putIfAbsent(configProperty.getReplacementPattern(), configProperty.getValue());
    }

    return finalParams;
}