Example usage for java.util Map putAll

List of usage examples for java.util Map putAll

Introduction

On this page you can find example usage for java.util Map putAll.

Prototype

void putAll(Map<? extends K, ? extends V> m);

Document

Copies all of the mappings from the specified map to this map (optional operation).
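As a minimal illustration (not taken from the listings below, and assuming java.util.Map and java.util.HashMap are imported), putAll copies every entry of the argument map into the receiver, replacing the values of keys that already exist:

Map<String, Integer> target = new HashMap<>();
target.put("a", 1);
target.put("b", 2);

Map<String, Integer> source = new HashMap<>();
source.put("b", 20);
source.put("c", 3);

target.putAll(source);
// target is now {a=1, b=20, c=3}: "c" was added and the value for "b" was replaced.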

Usage

From source file:io.wcm.wcm.ui.granite.resource.GraniteUiSyntheticResource.java

/**
 * Wrap a real resource and create a synthetic resource out of it.
 * Merges the given properties with the existing properties of the resource.
 * @param resource Real resource
 * @param valueMap Properties to be merged with the real properties
 * @return Resource
 */
public static Resource wrapMerge(Resource resource, ValueMap valueMap) {
    Map<String, Object> mergedProperties = new HashMap<>();
    mergedProperties.putAll(resource.getValueMap());
    mergedProperties.putAll(valueMap);
    return wrap(resource, new ValueMapDecorator(mergedProperties), resource.getChildren());
}
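Because putAll replaces existing entries, the order of the two calls matters here: for any key present in both maps, the value from valueMap wins over the resource's own property.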

From source file:Main.java

private static Map<Long, String> getFilePathAndModyTime(File file) {
    Map<Long, String> map = new HashMap<Long, String>();
    if (file.isFile()) {
        map.put(file.lastModified(), file.getAbsolutePath());
    } else if (file.isDirectory()) {
        for (File f : file.listFiles()) {
            map.putAll(getFilePathAndModyTime(f));
        }
    }
    return map;
}

From source file:com.yahoo.storm.yarn.TestIntegration.java

@SuppressWarnings({ "rawtypes", "unchecked" })
private static int execute(List<String> cmd) throws InterruptedException, IOException {
    LOG.info(Joiner.on(" ").join(cmd));
    ProcessBuilder pb = new ProcessBuilder(cmd);
    Map env = pb.environment();
    env.putAll(System.getenv());
    env.put(Environment.PATH.name(),
            "bin:" + storm_home + File.separator + "bin:" + env.get(Environment.PATH.name()));
    String yarn_conf_dir = yarn_site_xml.getParent().toString();
    env.put("STORM_YARN_CONF_DIR", yarn_conf_dir);
    List<URL> logback_xmls = Utils.findResources("logback.xml");
    if (logback_xmls != null && logback_xmls.size() >= 1) {
        String logback_xml = logback_xmls.get(0).getFile();
        LOG.debug("logback_xml:" + yarn_conf_dir + File.separator + "logback.xml");
        FileUtils.copyFile(new File(logback_xml), new File(yarn_conf_dir + File.separator + "logback.xml"));
    }
    List<URL> log4j_properties = Utils.findResources("log4j.properties");
    if (log4j_properties != null && log4j_properties.size() >= 1) {
        String log4j_properties_file = log4j_properties.get(0).getFile();
        LOG.debug("log4j_properties_file:" + yarn_conf_dir + File.separator + "log4j.properties");
        FileUtils.copyFile(new File(log4j_properties_file),
                new File(yarn_conf_dir + File.separator + "log4j.properties"));
    }

    Process proc = pb.start();
    Util.redirectStreamAsync(proc.getInputStream(), System.out);
    Util.redirectStreamAsync(proc.getErrorStream(), System.err);
    int status = proc.waitFor();
    return status;
}
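Note that ProcessBuilder.environment() is already initialized with a copy of the current process environment, so env.putAll(System.getenv()) chiefly makes the inheritance explicit before the PATH and STORM_YARN_CONF_DIR entries are added on top.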

From source file:com.ephesoft.gxt.batchinstance.client.shared.BatchInstanceProgressConvertor.java

public static final WorkflowDetailDTO getWorkflowDetailDTO(final WorkflowDetail workflowDetail,
        final String batchInstanceIdentifier) {
    WorkflowDetailDTO workflowDetailDTO = new WorkflowDetailDTO();
    if (null != workflowDetail) {
        workflowDetailDTO.setCurrentExecutingModule(workflowDetail.getCurrentExecutingModule());
        workflowDetailDTO.setCurrentExecutingPlugin(workflowDetail.getCurrentExecutingPlugin());
        List<String> executedModules = workflowDetail.getExecutedModuleList();
        if (CollectionUtils.isNotEmpty(executedModules)) {
            List<String> executedModuleDTOList = new ArrayList<String>(executedModules.size());
            executedModuleDTOList.addAll(executedModules);
            workflowDetailDTO.setExecutedModuleList(executedModuleDTOList);
            LOGGER.debug("Executed modules are set for batch instance: ", batchInstanceIdentifier);
        }
        List<String> unexecutedModules = workflowDetail.getUnexecutedModuleList();
        if (CollectionUtils.isNotEmpty(unexecutedModules)) {
            List<String> unexecutedModuleDTOList = new ArrayList<String>(unexecutedModules.size());
            unexecutedModuleDTOList.addAll(unexecutedModules);
            workflowDetailDTO.setNonExecutedModuleList(unexecutedModuleDTOList);
            LOGGER.debug("Unexecuted modules are set for batch instance: ", batchInstanceIdentifier);
        }
        List<String> executedPlugins = workflowDetail.getExecutedPluginList();
        if (CollectionUtils.isNotEmpty(executedPlugins)) {
            List<String> executedPluginDTOList = new ArrayList<String>(executedPlugins.size());
            executedPluginDTOList.addAll(executedPlugins);
            workflowDetailDTO.setExecutedPluginList(executedPluginDTOList);
            LOGGER.debug("Executed plugins are set for batch instance: ", batchInstanceIdentifier);
        }
        List<String> unexecutedPlugins = workflowDetail.getUnexecutedPluginList();
        if (CollectionUtils.isNotEmpty(unexecutedPlugins)) {
            List<String> unexecutedPluginDTOList = new ArrayList<String>(unexecutedPlugins.size());
            unexecutedPluginDTOList.addAll(unexecutedPlugins);
            workflowDetailDTO.setNonExecutedPluginList(unexecutedPluginDTOList);
            LOGGER.debug("Unexecuted plugins are set for batch instance: ", batchInstanceIdentifier);
        }
        Map<String, List<String>> modulePluginMap = workflowDetail.getModulePluginMap();
        if (null != modulePluginMap) {
            Map<String, List<String>> modulePluginMapDTO = new HashMap<String, List<String>>(
                    modulePluginMap.size());
            modulePluginMapDTO.putAll(modulePluginMap);
            workflowDetailDTO.setModulePluginMap(modulePluginMapDTO);
            LOGGER.debug("Module-plugin map is set for batch instance: ", batchInstanceIdentifier);
        }
    }
    return workflowDetailDTO;
}

From source file:Main.java

public static <K, V> Map<K, V> merge(Map<K, V> c1, Map<K, V> c2) {
    if (c1 == null || c1.size() == 0) {
        return c2;
    }
    if (c2 == null || c2.size() == 0) {
        return c1;
    }
    Map<K, V> all = new HashMap<K, V>(c1.size() + c2.size());
    all.putAll(c1);
    all.putAll(c2);
    return all;
}
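A hypothetical call to the merge helper above (the map contents are chosen purely for illustration) shows the last-write-wins behaviour of the two putAll calls. Note also that when either argument is null or empty, the other map is returned directly rather than copied, so callers should not assume the result is always a fresh map.

Map<String, String> defaults = new HashMap<String, String>();
defaults.put("host", "localhost");
defaults.put("port", "8080");

Map<String, String> overrides = new HashMap<String, String>();
overrides.put("port", "9090");

Map<String, String> config = merge(defaults, overrides);
// config is {host=localhost, port=9090}; putAll(c2) overwrote the "port" entry from c1.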

From source file:com.google.mr4c.hadoop.HadoopUtils.java

/**
  * @param varMap apply environment variable values from this map
  * @param vars apply existing values of these environment variables
 */
public static void applyEnvironmentVariables(JobConf conf, Map<String, String> varMap, List<String> vars) {
    Map<String, String> allMap = new HashMap<String, String>(System.getenv());
    allMap.keySet().retainAll(vars); // only the env we wanted
    allMap.putAll(varMap);
    List<String> assigns = new ArrayList<String>();
    for (String var : allMap.keySet()) {
        String val = allMap.get(var);
        if (!StringUtils.isEmpty(val)) {
            assigns.add(var + "=" + val);
        }
    }
    String value = StringUtils.join(assigns, ", ");
    conf.set(JobConf.MAPRED_MAP_TASK_ENV, value);
    conf.set(JobConf.MAPRED_REDUCE_TASK_ENV, value);
}
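Here putAll lets the explicit values in varMap override anything inherited from System.getenv() once retainAll has trimmed the inherited map down to the requested variable names.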

From source file:Main.java

/** Creates a single Map with fields from the passed in Map and all nested Maps (for Map and Collection of Map entry values) */
@SuppressWarnings("unchecked")
public static Map flattenNestedMap(Map theMap) {
    if (theMap == null)
        return null;
    Map outMap = new LinkedHashMap();
    for (Object entryObj : theMap.entrySet()) {
        Map.Entry entry = (Map.Entry) entryObj;
        Object value = entry.getValue();
        if (value instanceof Map) {
            outMap.putAll(flattenNestedMap((Map) value));
        } else if (value instanceof Collection) {
            for (Object colValue : (Collection) value) {
                if (colValue instanceof Map)
                    outMap.putAll(flattenNestedMap((Map) colValue));
            }
        } else {
            outMap.put(entry.getKey(), entry.getValue());
        }
    }
    return outMap;
}
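Because putAll overwrites existing keys, an entry from a nested map can replace a top-level entry with the same key; when keys collide across nesting levels, the value encountered last wins.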

From source file:com.github.fengtan.sophie.beans.SolrUtils.java

/**
 * Get a list of declared fields from the remote Solr server.
 *
 * Dynamic fields are named according to how they are declared, so their
 * names may be globs (e.g. dynamic_*).
 * 
 * @return Map of fields keyed by field name.
 * @throws SophieException
 *             If the remote fields cannot be fetched.
 */
@SuppressWarnings("unchecked")
public static Map<String, FieldInfo> getRemoteSchemaFields() throws SophieException {
    // Send request.
    LukeRequest request = new LukeRequest();
    request.setShowSchema(true);
    LukeResponse response;
    try {
        response = request.process(Sophie.client);
    } catch (SolrServerException | IOException | SolrException e) {
        throw new SophieException("Unable to fetch list of Solr fields", e);
    }
    // Extract regular fields.
    Map<String, FieldInfo> fields = new HashMap<String, FieldInfo>();
    fields.putAll(response.getFieldInfo());
    // Extract dynamic fields.
    NamedList<Object> schema = (NamedList<Object>) response.getResponse().get("schema");
    if (schema != null) {
        NamedList<Object> dynamicFields = (NamedList<Object>) schema.get("dynamicFields");
        if (dynamicFields != null) {
            for (Map.Entry<String, Object> dynamicField : dynamicFields) {
                FieldInfo fieldInfo = new FieldInfo(dynamicField.getKey());
                fieldInfo.read((NamedList<Object>) dynamicField.getValue());
                fields.put(dynamicField.getKey(), fieldInfo);
            }
        }
    }
    return fields;
}

From source file:jena.RuleMap.java

/**
 * Load a set of rule definitions including processing of
 * comment lines and any initial prefix definition lines.
 * Also notes the prefix definitions for adding to a later inf model.
 */
public static List<Rule> loadRules(BufferedReader src, Map<String, String> prefixes) {
    Rule.Parser parser = Rule.rulesParserFromReader(src);
    List<Rule> rules = Rule.parseRules(parser);
    prefixes.putAll(parser.getPrefixMap());
    return rules;
}

From source file:net.sf.zekr.engine.search.SearchUtils.java

/**
 * Simplifies sura name text to be used in a suggestion list.
 *
 * @param text original text
 * @return simplified text
 */
public static String simplifySuranameText(String text) {
    ApplicationConfig conf = ApplicationConfig.getInstance();
    SearchInfo searchInfo = conf.getSearchInfo();
    Locale locale = QuranPropertiesUtils.getSuraNameModeLocale();
    String langCode = locale.getLanguage();
    text = text.toLowerCase(locale);
    Map<Pattern, String> rep = new LinkedHashMap<Pattern, String>(searchInfo.getDefaultReplacePattern());
    if (searchInfo.containsLanguageReplacePattern(langCode)) {
        rep.putAll(searchInfo.getReplacePattern(langCode));
    }
    Pattern punct = searchInfo.getPunctuation(langCode);
    if (punct != null) {
        rep.put(punct, "");
    }
    text = RegexUtils.replaceAll(rep, text);
    return text;
}