Example usage for java.util LinkedHashMap entrySet

Introduction

On this page you can find example usage for java.util LinkedHashMap entrySet.

Prototype

public Set<Map.Entry<K, V>> entrySet() 

Document

Returns a Set view of the mappings contained in this map.
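
Because LinkedHashMap preserves insertion order, the Set view returned by entrySet() iterates the entries in the order they were inserted. Below is a minimal standalone sketch; the class name and map contents are made up for illustration.

import java.util.LinkedHashMap;
import java.util.Map;

public class LinkedHashMapEntrySetDemo {
    public static void main(String[] args) {
        // LinkedHashMap keeps keys in insertion order, so entrySet() iterates in that order.
        LinkedHashMap<String, Integer> map = new LinkedHashMap<String, Integer>();
        map.put("first", 1);
        map.put("second", 2);
        map.put("third", 3);

        for (Map.Entry<String, Integer> entry : map.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
        // Prints:
        // first = 1
        // second = 2
        // third = 3
    }
}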

Usage

From source file:org.cloudifysource.dsl.internal.packaging.FileAppender.java

/**
 * Append all the files and copy the result file to dest file.
 * @param dest the destination file will contain all the appended files.
 * @param filesToAppend A map of files and comments to append.
 * @throws IOException if an I/O error occurs while appending the files or copying the result.
 */
public void appendAll(final File dest, final LinkedHashMap<File, String> filesToAppend) throws IOException {
    for (Entry<File, String> fileEntry : filesToAppend.entrySet()) {
        final File file = fileEntry.getKey();
        if (file != null && file.exists()) {
            append(fileEntry.getValue(), file);
        }
    }
    flush();
    FileUtils.copyFile(finalFile, dest);

    finalFile.delete();
}

From source file:com.ikanow.infinit.e.data_model.store.config.source.SourcePojo.java

private static LinkedHashMap<String, String> decodeKeysForDatabaseStorage(LinkedHashMap<String, String> in) {
    LinkedHashMap<String, String> transformed = new LinkedHashMap<String, String>();
    for (Map.Entry<String, String> entry : in.entrySet()) {
        transformed.put(entry.getKey().replace("%2e", "."), entry.getValue());
    }
    return transformed;
}

From source file:com.ikanow.infinit.e.data_model.store.config.source.SourcePojo.java

private static LinkedHashMap<String, String> encodeKeysForDatabaseStorage(LinkedHashMap<String, String> in) {
    LinkedHashMap<String, String> transformed = new LinkedHashMap<String, String>();
    for (Map.Entry<String, String> entry : in.entrySet()) {
        transformed.put(entry.getKey().replace(".", "%2e"), entry.getValue());
    }
    return transformed;
}

From source file:org.jboss.pressgang.ccms.contentspec.utils.CSTransformer.java

/**
 * Transform a Level CSNode entity object into a Level Object that can be added to a Content Specification.
 *
 * @param node                  The CSNode entity object to be transformed.
 * @param nodes                 A mapping of node entity ids to their transformed counterparts.
 * @param targetTopics          A mapping of target ids to SpecTopics.
 * @param relationshipFromNodes A list of CSNode entities that have relationships.
 * @param processes             A list used to collect transformed Process levels so that their relationships can be processed later.
 * @return The transformed level entity.
 */
protected static Level transformLevel(final CSNodeWrapper node, final Map<Integer, Node> nodes,
        final Map<String, SpecTopic> targetTopics, final List<CSNodeWrapper> relationshipFromNodes,
        List<Process> processes) {
    final Level level;
    if (node.getNodeType() == CommonConstants.CS_NODE_APPENDIX) {
        level = new Appendix(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_CHAPTER) {
        level = new Chapter(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_PART) {
        level = new Part(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_PROCESS) {
        level = new Process(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_SECTION) {
        level = new Section(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_PREFACE) {
        level = new Preface(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_INITIAL_CONTENT) {
        level = new InitialContent();
    } else {
        throw new IllegalArgumentException("The passed node is not a Level");
    }

    level.setConditionStatement(node.getCondition());
    level.setTargetId(node.getTargetId());
    level.setUniqueId(node.getId() == null ? null : node.getId().toString());

    // Set the fixed url properties
    applyFixedURLs(node, level);

    // Collect any relationships for processing after everything is transformed
    if (node.getRelatedToNodes() != null && node.getRelatedToNodes().getItems() != null
            && !node.getRelatedToNodes().getItems().isEmpty()) {
        relationshipFromNodes.add(node);
    }

    // Transform the info topic node if one exists for the level
    if (node.getInfoTopicNode() != null) {
        final InfoTopic infoTopic = transformInfoTopic(node, node.getInfoTopicNode());
        level.setInfoTopic(infoTopic);
    }

    // Add all the levels/topics
    if (node.getChildren() != null && node.getChildren().getItems() != null) {
        final List<CSNodeWrapper> childNodes = node.getChildren().getItems();
        final HashMap<CSNodeWrapper, Node> levelNodes = new HashMap<CSNodeWrapper, Node>();
        final HashMap<CSNodeWrapper, SpecTopic> initialContentNodes = new HashMap<CSNodeWrapper, SpecTopic>();
        for (final CSNodeWrapper childNode : childNodes) {
            if (childNode.getNodeType() == CommonConstants.CS_NODE_TOPIC) {
                final SpecTopic topic = transformSpecTopic(childNode, nodes, targetTopics,
                        relationshipFromNodes);
                levelNodes.put(childNode, topic);
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_COMMENT) {
                final Comment comment = transformComment(childNode);
                levelNodes.put(childNode, comment);
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_COMMON_CONTENT) {
                final CommonContent commonContent = transformCommonContent(childNode);
                levelNodes.put(childNode, commonContent);
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_INITIAL_CONTENT_TOPIC) {
                final SpecTopic initialContentTopic = transformSpecTopicWithoutTypeCheck(childNode, nodes,
                        targetTopics, relationshipFromNodes);
                if (level instanceof InitialContent) {
                    levelNodes.put(childNode, initialContentTopic);
                } else {
                    initialContentNodes.put(childNode, initialContentTopic);
                }
            } else {
                final Level childLevel = transformLevel(childNode, nodes, targetTopics, relationshipFromNodes,
                        processes);
                levelNodes.put(childNode, childLevel);
            }
        }

        // Sort the level nodes so that they are in the right order based on next/prev values.
        final LinkedHashMap<CSNodeWrapper, Node> sortedMap = CSNodeSorter.sortMap(levelNodes);

        // PressGang 1.4+ stores the initial content inside its own container, instead of on the level
        if (!(level instanceof InitialContent) && !initialContentNodes.isEmpty()) {
            final LinkedHashMap<CSNodeWrapper, SpecTopic> sortedInitialContentMap = CSNodeSorter
                    .sortMap(initialContentNodes);

            final InitialContent initialContent = new InitialContent();
            level.appendChild(initialContent);

            // Add the initial content topics to the level now that they are in the right order.
            final Iterator<Map.Entry<CSNodeWrapper, SpecTopic>> frontMatterIter = sortedInitialContentMap
                    .entrySet().iterator();
            while (frontMatterIter.hasNext()) {
                final Map.Entry<CSNodeWrapper, SpecTopic> entry = frontMatterIter.next();
                initialContent.appendSpecTopic(entry.getValue());
            }
        }

        // Add the child nodes to the level now that they are in the right order.
        final Iterator<Map.Entry<CSNodeWrapper, Node>> iter = sortedMap.entrySet().iterator();
        while (iter.hasNext()) {
            final Map.Entry<CSNodeWrapper, Node> entry = iter.next();

            level.appendChild(entry.getValue());
            // Add a new line to separate chapters/parts
            if (isNodeASeparatorLevel(entry.getValue()) && iter.hasNext()) {
                level.appendChild(new TextNode("\n"));
            }
        }
    }

    // Add the node to the list of processed nodes so that the relationships can be added once everything is processed
    nodes.put(node.getId(), level);

    // We need to keep track of processes to process their relationships
    if (level instanceof Process) {
        processes.add((Process) level);
    }

    return level;
}

From source file:com.opengamma.component.factory.engine.FunctionBlacklistComponentFactory.java

@Override
public void init(final ComponentRepository repo, final LinkedHashMap<String, String> configuration) {
    String classifier = "default";
    final Iterator<Map.Entry<String, String>> itr = configuration.entrySet().iterator();
    while (itr.hasNext()) {
        final Map.Entry<String, String> conf = itr.next();
        if ("classifier".equals(conf.getKey())) {
            classifier = conf.getValue();
        } else {
            try {
                if (conf.getValue().startsWith("::")) {
                    final Class<?> property = PropertyUtils.getPropertyType(_bean, conf.getKey());
                    final ComponentInfo info = repo.findInfo(property, conf.getValue().substring(2));
                    if (info != null) {
                        BeanUtils.setProperty(_bean, conf.getKey(), repo.getInstance(info));
                    } else {
                        BeanUtils.setProperty(_bean, conf.getKey(), conf.getValue());
                    }
                } else {
                    BeanUtils.setProperty(_bean, conf.getKey(), conf.getValue());
                }
            } catch (Exception e) {
                throw new OpenGammaRuntimeException("invalid property '" + conf.getKey() + "' on " + _bean, e);
            }
        }
        itr.remove();
    }
    final FunctionBlacklist blacklist = _bean.getObjectCreating();

    ComponentInfo infoRO = new ComponentInfo(FunctionBlacklist.class, classifier);
    infoRO.addAttribute(ComponentInfoAttributes.LEVEL, 1);
    infoRO.addAttribute(ComponentInfoAttributes.REMOTE_CLIENT_JAVA, RemoteFunctionBlacklist.class);
    repo.registerComponent(infoRO, blacklist);

    if (blacklist instanceof ManageableFunctionBlacklist) {
        ComponentInfo infoMng = new ComponentInfo(ManageableFunctionBlacklist.class, classifier);
        infoMng.addAttribute(ComponentInfoAttributes.LEVEL, 1);
        infoMng.addAttribute(ComponentInfoAttributes.REMOTE_CLIENT_JAVA,
                RemoteManageableFunctionBlacklist.class);
        repo.registerComponent(infoMng, blacklist);
    }
}

From source file:org.kitodo.sruimport.SRUImport.java

private String createQueryParameterString(LinkedHashMap<String, String> searchFields) {
    List<BasicNameValuePair> nameValuePairList = searchFields.entrySet().stream()
            .map(entry -> new BasicNameValuePair(entry.getKey(), entry.getValue()))
            .collect(Collectors.toList());
    return URLEncodedUtils.format(nameValuePairList, StandardCharsets.UTF_8);
}

From source file:org.apache.lens.cube.metadata.JAXBUtils.java

public static XPartition xpartitionFromPartition(String cubeTableName, Partition p, List<String> timePartCols)
        throws HiveException {
    XPartition xp = new XPartition();
    xp.setFactOrDimensionTableName(cubeTableName);
    xp.setPartitionParameters(new XProperties());
    xp.setSerdeParameters(new XProperties());
    xp.setName(p.getCompleteName());
    xp.setLocation(p.getLocation());
    xp.setInputFormat(p.getInputFormatClass().getCanonicalName());
    xp.setOutputFormat(p.getOutputFormatClass().getCanonicalName());
    xp.getPartitionParameters().getProperty().addAll(xPropertiesFromMap(p.getParameters()));
    String upParam = p.getParameters().get(MetastoreConstants.PARTITION_UPDATE_PERIOD);
    xp.setUpdatePeriod(XUpdatePeriod.valueOf(upParam));
    LinkedHashMap<String, String> partSpec = p.getSpec();
    xp.setFullPartitionSpec(new XPartSpec());
    for (Map.Entry<String, String> entry : partSpec.entrySet()) {
        XPartSpecElement e = new XPartSpecElement();
        e.setKey(entry.getKey());
        e.setValue(entry.getValue());
        xp.getFullPartitionSpec().getPartSpecElement().add(e);
    }
    try {
        xp.setTimePartitionSpec(new XTimePartSpec());
        xp.setNonTimePartitionSpec(new XPartSpec());
        for (Map.Entry<String, String> entry : partSpec.entrySet()) {
            if (timePartCols.contains(entry.getKey())) {
                XTimePartSpecElement timePartSpecElement = new XTimePartSpecElement();
                timePartSpecElement.setKey(entry.getKey());
                timePartSpecElement.setValue(getXMLGregorianCalendar(
                        UpdatePeriod.valueOf(xp.getUpdatePeriod().name()).parse(entry.getValue())));
                xp.getTimePartitionSpec().getPartSpecElement().add(timePartSpecElement);
            } else {
                XPartSpecElement partSpecElement = new XPartSpecElement();
                partSpecElement.setKey(entry.getKey());
                partSpecElement.setValue(entry.getValue());
                xp.getNonTimePartitionSpec().getPartSpecElement().add(partSpecElement);
            }
        }
    } catch (ParseException exc) {
        log.debug("can't form time part spec from " + partSpec, exc);
        xp.setTimePartitionSpec(null);
        xp.setNonTimePartitionSpec(null);
    }
    xp.setSerdeClassname(p.getTPartition().getSd().getSerdeInfo().getSerializationLib());
    xp.getSerdeParameters().getProperty()
            .addAll(xPropertiesFromMap(p.getTPartition().getSd().getSerdeInfo().getParameters()));
    return xp;
}

From source file:org.kitodo.sruimport.SRUImport.java

private String createSearchFieldString(LinkedHashMap<String, String> searchFields)
        throws UnsupportedEncodingException {
    List<String> searchOperands = searchFields.entrySet().stream()
            .map(entry -> entry.getKey() + equalsOperand + entry.getValue()).collect(Collectors.toList());
    return URLEncoder.encode(String.join(" AND ", searchOperands), StandardCharsets.UTF_8.displayName());
}

From source file:by.stub.yaml.YamlParser.java

@SuppressWarnings("unchecked")
protected void mapParentYamlNodeToPojo(final StubHttpLifecycle parentStub,
        final LinkedHashMap<String, LinkedHashMap> parentNode) throws Exception {
    for (final Map.Entry<String, LinkedHashMap> parent : parentNode.entrySet()) {

        final LinkedHashMap<String, Object> httpSettings = (LinkedHashMap<String, Object>) parent.getValue();

        if (parent.getKey().equals(NODE_REQUEST)) {
            mapHttpSettingsToPojo(parentStub.getRequest(), httpSettings);
            continue;
        }

        mapHttpSettingsToPojo(parentStub.getResponse(), httpSettings);
    }
}

From source file:com.xyz.system.service.impl.DefinitionSourceFactoryBean.java

/**
 * Converts the LinkedHashMap<String, String> of URL-to-attribute mappings obtained from
 * resourceDetailService into the LinkedHashMap<RequestKey, ConfigAttributeDefinition>
 * required by DefaultFilterInvocationDefinitionSource.
 */
protected LinkedHashMap<RequestKey, Collection<ConfigAttribute>> buildRequestMap() throws Exception {
    LinkedHashMap<String, String> srcMap = securityManager.getRequestMap();
    LinkedHashMap<RequestKey, Collection<ConfigAttribute>> distMap = new LinkedHashMap<RequestKey, Collection<ConfigAttribute>>();

    for (Map.Entry<String, String> entry : srcMap.entrySet()) {
        RequestKey key = new RequestKey(entry.getKey(), null);
        if (StringUtils.isNotBlank(entry.getValue())) {
            distMap.put(key, SecurityConfig.createListFromCommaDelimitedString(entry.getValue()));
        }
    }

    return distMap;
}