Example usage for java.util LinkedHashMap putAll

Introduction

On this page you can find example usages for java.util.LinkedHashMap.putAll.

Prototype

void putAll(Map<? extends K, ? extends V> m);

Document

Copies all of the mappings from the specified map to this map (optional operation).
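
As a quick, runnable illustration of this behavior (class name and map contents are invented for the example), the sketch below shows that putAll copies every mapping from the source map, and that a LinkedHashMap keeps keys in insertion order: a key that already exists has its value replaced but does not move, while new keys are appended at the end.

import java.util.LinkedHashMap;

public class PutAllDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> target = new LinkedHashMap<>();
        target.put("a", 1);
        target.put("b", 2);

        LinkedHashMap<String, Integer> source = new LinkedHashMap<>();
        source.put("b", 20); // existing key: value replaced, position kept
        source.put("c", 3);  // new key: appended at the end

        target.putAll(source); // copies both mappings from source
        System.out.println(target); // prints {a=1, b=20, c=3}
    }
}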

Usage

From source file:pt.lsts.neptus.util.logdownload.LogsDownloaderWorkerActions.java

/**
 * For the given server with ID serverKey, takes its {@link #getBaseLogListFrom(String)}
 * reply as toProcessLogList and fills serversLogPresenceList for each base log,
 * adding serverKey to the presence list for that base log.
 *
 * If finalLogList is not null, also adds the missing entries to it.
 *
 * @param serverKey
 * @param toProcessLogList
 * @param finalLogList
 * @param serversLogPresenceList
 */
private void fillServerPresenceList(String serverKey, LinkedHashMap<FTPFile, String> toProcessLogList,
        LinkedHashMap<FTPFile, String> finalLogList, LinkedHashMap<String, String> serversLogPresenceList) {

    if (toProcessLogList != null && !toProcessLogList.isEmpty()) {
        if (finalLogList == null || finalLogList.isEmpty()) {
            for (String partialUri : toProcessLogList.values()) {
                serversLogPresenceList.put(partialUri, serverKey);
            }
            if (finalLogList != null)
                finalLogList.putAll(toProcessLogList);
        } else {
            for (Map.Entry<FTPFile, String> entry : toProcessLogList.entrySet()) {
                String val = entry.getValue();
                if (finalLogList.containsValue(val)) {
                    serversLogPresenceList.put(val, serversLogPresenceList.get(val) + " " + serverKey);
                } else {
                    finalLogList.put(entry.getKey(), val);
                    serversLogPresenceList.put(val, serverKey);
                }
            }
        }
    }
}

From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java

/**
 * Test method for 'java.util.LinkedHashMap.size()'.
 */
public void testSize() {
    LinkedHashMap<String, String> hashMap = new LinkedHashMap<String, String>();
    checkEmptyLinkedHashMapAssumptions(hashMap);

    // Test size behavior on put
    assertEquals(hashMap.size(), SIZE_ZERO);
    hashMap.put(KEY_1, VALUE_1);
    assertEquals(hashMap.size(), SIZE_ONE);
    hashMap.put(KEY_2, VALUE_2);
    assertEquals(hashMap.size(), SIZE_TWO);
    hashMap.put(KEY_3, VALUE_3);
    assertEquals(hashMap.size(), SIZE_THREE);

    // Test size behavior on remove
    hashMap.remove(KEY_1);
    assertEquals(hashMap.size(), SIZE_TWO);
    hashMap.remove(KEY_2);
    assertEquals(hashMap.size(), SIZE_ONE);
    hashMap.remove(KEY_3);
    assertEquals(hashMap.size(), SIZE_ZERO);

    // Test size behavior on putAll
    hashMap.put(KEY_1, VALUE_1);
    hashMap.put(KEY_2, VALUE_2);
    hashMap.put(KEY_3, VALUE_3);
    LinkedHashMap<String, String> srcMap = cloneLinkedHashMap(hashMap);
    hashMap.putAll(srcMap);
    assertEquals(hashMap.size(), SIZE_THREE);

    // Test size behavior on clear
    hashMap.clear();
    assertEquals(hashMap.size(), SIZE_ZERO);
}

From source file:org.cloudifysource.dsl.internal.DSLReader.java

private Object readDslObject() throws DSLException {
    try {
        init();
    } catch (final IOException e) {
        throw new DSLException("Failed to initialize DSL Reader: " + e.getMessage(), e);
    }

    LinkedHashMap<Object, Object> properties = null;
    try {
        properties = createDSLProperties();
        createDSLOverrides(overridesFile, overridesScript, overrideProperties);
        overrideProperties(properties);
        addApplicationProperties(properties);
    } catch (final Exception e) {
        // catching exception here, as groovy config slurper may throw just
        // about anything
        String msg = null;
        if (propertiesFile != null) {
            msg = "Failed to load properties file " + this.propertiesFile.getName() + ": " + e.getMessage();
        } else {
            msg = "Failed to load properties file: " + e.getMessage();
        }
        throw new IllegalArgumentException(msg, e);
    }

    if (this.variables != null) {
        properties.putAll(this.variables);
    }
    ClusterInfo clusterInfoToUseInGsc = this.clusterInfo;
    if (clusterInfoToUseInGsc == null) {
        clusterInfoToUseInGsc = new ClusterInfo(null, 1, 0, 1, 0);
    }

    // create an uninitialized service context
    if (this.createServiceContext) {
        String canonicalPath = null;
        try {
            canonicalPath = workDir.getCanonicalPath();
        } catch (IOException e) {
            throw new DSLException("Failed to get canonical path of work directory: " + workDir
                    + ". Error was: " + e.getMessage(), e);
        }
        if (this.context == null) {
            if (isRunningInGSC) {
                this.context = new ServiceContextImpl(clusterInfoToUseInGsc, canonicalPath);
            } else {
                this.context = new ServiceContextImpl(new ClusterInfo(null, 1, 0, 1, 0), canonicalPath);
            }
        }

    }

    // create the groovy shell, loaded with our settings
    final GroovyShell gs = createGroovyShell(properties);
    final Object result = evaluateGroovyScript(gs);

    if (result == null) {
        throw new DSLException("The DSL evaluated to a null - check your syntax and try again");
    }

    if (this.createServiceContext) {
        if (!(result instanceof Service)) {
            throw new IllegalArgumentException(
                    "The DSL reader cannot create a service context to a DSL that does not evaluate to a Service. "
                            + "Set the 'createServiceContext' option to false if you do not need a service conext");
        }

        if (isRunningInGSC) {
            if (clusterInfoToUseInGsc.getName() == null) {
                clusterInfoToUseInGsc.setName(ServiceUtils.getAbsolutePUName(
                        CloudifyConstants.DEFAULT_APPLICATION_NAME, ((Service) result).getName()));
            }

            this.context.init((Service) result, admin, clusterInfoToUseInGsc);
        } else {

            this.context.initInIntegratedContainer((Service) result);
        }
    }

    this.dslClassLoader = gs.getClassLoader();
    return result;

}

From source file:org.openmicroscopy.shoola.agents.util.EditorUtil.java

/**
 * Transforms the light and its settings.
 *
 * @param data The data to transform.
 * @return See above.
 */
public static Map<String, Object> transformLightSourceAndSetting(ChannelAcquisitionData data) {
    LinkedHashMap<String, Object> details = new LinkedHashMap<String, Object>();
    Map<String, Object> m;

    if (data == null)
        m = transformLightSource(null);
    else
        m = transformLightSource(data.getLightSource());
    List<String> notSet = (List<String>) m.get(NOT_SET);
    m.remove(NOT_SET);
    details.putAll(m);
    details.put(ATTENUATION, Double.valueOf(0));
    if (data == null) {
        details.put(WAVELENGTH, Integer.valueOf(0));
        notSet.add(ATTENUATION);
        notSet.add(WAVELENGTH);
        details.put(NOT_SET, notSet);
        return details;
    }
    Double f = data.getLigthSettingsAttenuation();
    double v = 0;
    if (f == null)
        notSet.add(ATTENUATION);
    else
        v = f;
    details.put(ATTENUATION, v * PERCENT_FRACTION);
    Integer i = data.getLigthSettingsWavelength();
    if (details.containsKey(WAVELENGTH)) {

        if (i != null) { //override the value.
            details.put(WAVELENGTH, i);
        }
    } else {
        int vi = 0;
        if (i == null)
            notSet.add(WAVELENGTH);
        else
            vi = i;
        details.put(WAVELENGTH, vi);
    }
    details.put(NOT_SET, notSet);
    return details;
}

From source file:org.openmicroscopy.shoola.agents.util.EditorUtil.java

/**
 * Transforms the passed objective.
 *
 * @param data The value to convert.
 * @return See above.
 */
public static Map<String, Object> transformObjectiveAndSettings(ImageAcquisitionData data) {
    LinkedHashMap<String, Object> details = new LinkedHashMap<String, Object>(9);
    Map<String, Object> m;

    if (data == null)
        m = transformObjective(null);
    else
        m = transformObjective(data.getObjective());
    List<String> notSet = (List<String>) m.get(NOT_SET);
    m.remove(NOT_SET);
    details.putAll(m);
    details.put(CORRECTION_COLLAR, Float.valueOf(0));
    details.put(MEDIUM, "");
    details.put(REFRACTIVE_INDEX, Float.valueOf(0));
    details.put(IRIS, null);
    if (data == null) {
        notSet.add(CORRECTION_COLLAR);
        notSet.add(MEDIUM);
        notSet.add(REFRACTIVE_INDEX);
        details.put(NOT_SET, notSet);
        return details;
    }

    double f = data.getCorrectionCollar();
    if (f < 0) {
        f = 0;
        notSet.add(CORRECTION_COLLAR);
    }
    details.put(CORRECTION_COLLAR, f);
    String s = data.getMedium();
    if (StringUtils.isBlank(s))
        notSet.add(MEDIUM);
    details.put(MEDIUM, s);
    f = data.getRefractiveIndex();
    if (f < 0) {
        f = 0;
        notSet.add(REFRACTIVE_INDEX);
    }
    details.put(REFRACTIVE_INDEX, f);
    details.put(NOT_SET, notSet);
    return details;
}

From source file:org.openmicroscopy.shoola.agents.util.EditorUtil.java

/**
 * Transforms the detector and the detector settings.
 *
 * @param data The value to convert.
 * @return See above.
 */
public static Map<String, Object> transformDetectorAndSettings(ChannelAcquisitionData data) {
    LinkedHashMap<String, Object> details = new LinkedHashMap<String, Object>(11);
    Map<String, Object> m;

    if (data == null)
        m = transformDetector(null);
    else
        m = transformDetector(data.getDetector());
    List<String> notSet = (List<String>) m.get(NOT_SET);
    m.remove(NOT_SET);
    details.putAll(m);
    details.put(READ_OUT_RATE, Double.valueOf(0));
    details.put(BINNING, "");
    if (data == null) {
        notSet.add(READ_OUT_RATE);
        notSet.add(BINNING);
        details.put(NOT_SET, notSet);
        return details;
    }

    Double f = data.getDetectorSettingsGain();

    if (f != null) {
        details.put(GAIN, f);
        notSet.remove(GAIN);
    }

    f = data.getDetectorSettingsVoltage();
    if (f != null) {
        notSet.remove(VOLTAGE);
        details.put(VOLTAGE, UIUtilities.roundTwoDecimals(f));
    }

    f = data.getDetectorSettingsOffset();
    if (f != null) {
        notSet.remove(OFFSET);
        details.put(OFFSET, UIUtilities.roundTwoDecimals(f));
    }

    f = data.getDetectorSettingsReadOutRate();
    double v = 0;
    if (f == null) {
        v = 0;
        notSet.add(READ_OUT_RATE);
    } else
        v = UIUtilities.roundTwoDecimals(f);
    details.put(READ_OUT_RATE, v);
    String s = data.getDetectorSettingsBinning();
    if (StringUtils.isBlank(s)) {
        notSet.add(BINNING);
    }
    details.put(BINNING, s);
    details.put(NOT_SET, notSet);
    return details;
}
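
A note on the three EditorUtil transformers above: each one seeds its result with details.putAll(m), copying the fields produced by the nested transform (light source, objective, detector) before the setting-specific keys are added or overridden with put. A LinkedHashMap is used for details, presumably so the acquisition fields keep a stable, predictable order when they are later displayed.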

From source file:org.apache.asterix.optimizer.rules.subplan.InlineSubplanInputForNestedTupleSourceRule.java

/***
 * Deals with operators that are not SubplanOperator.
 *
 * @param op
 *            the operator to consider
 * @param context
 * @return
 * @throws AlgebricksException
 */
private Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> traverseNonSubplanOperator(
        ILogicalOperator op, IOptimizationContext context) throws AlgebricksException {
    Set<LogicalVariable> liveVars = new HashSet<>();
    VariableUtilities.getLiveVariables(op, liveVars);
    LinkedHashMap<LogicalVariable, LogicalVariable> replacedVarMap = new LinkedHashMap<>();
    LinkedHashMap<LogicalVariable, LogicalVariable> replacedVarMapForAncestor = new LinkedHashMap<>();
    boolean changed = false;
    for (Mutable<ILogicalOperator> childrenRef : op.getInputs()) {
        Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> resultFromChild = rewriteSubplanOperator(
                childrenRef, context);
        changed = changed || resultFromChild.first;
        for (Map.Entry<LogicalVariable, LogicalVariable> entry : resultFromChild.second.entrySet()) {
            LogicalVariable oldVar = entry.getKey();
            LogicalVariable newVar = entry.getValue();
            if (liveVars.contains(oldVar)) {
                // Maps live variables for its ancestors.
                replacedVarMapForAncestor.put(oldVar, newVar);
                // Recursively maps live variables for its ancestors.
                oldVar = newVar;
                while ((newVar = resultFromChild.second.get(newVar)) != null) {
                    replacedVarMapForAncestor.put(oldVar, newVar);
                    oldVar = newVar;
                }
            }
        }
        replacedVarMap.putAll(resultFromChild.second);
    }
    VariableUtilities.substituteVariables(op, replacedVarMap, context);
    context.computeAndSetTypeEnvironmentForOperator(op);
    return new Pair<>(changed, replacedVarMapForAncestor);
}

From source file:org.jahia.services.content.nodetypes.ExtendedNodeType.java

public Map<String, ExtendedNodeDefinition> getChildNodeDefinitionsAsMap() {
    if (allNodes == null) {
        LinkedHashMap<String, ExtendedNodeDefinition> allNodesMap = new LinkedHashMap<String, ExtendedNodeDefinition>();
        ExtendedNodeType[] supertypes = getSupertypes();
        for (int i = supertypes.length - 1; i >= 0; i--) {
            ExtendedNodeType nodeType = supertypes[i];
            Map<String, ExtendedNodeDefinition> c = new HashMap<String, ExtendedNodeDefinition>(
                    nodeType.getDeclaredChildNodeDefinitionsAsMap());
            Map<String, ExtendedNodeDefinition> over = new HashMap<String, ExtendedNodeDefinition>(nodes);
            over.keySet().retainAll(c.keySet());
            for (ExtendedNodeDefinition s : over.values()) {
                s.setOverride(true);
            }
            c.keySet().removeAll(over.keySet());
            allNodesMap.putAll(c);
        }
        allNodesMap.putAll(nodes);
        this.allNodes = Collections.unmodifiableMap(allNodesMap);
    }

    return allNodes;
}
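
The method above leans on putAll's overwrite semantics: supertype definitions are merged first (iterating supertypes from last to first), and the final putAll(nodes) lets the type's own definitions win over anything inherited. A minimal sketch of that layering, with invented map contents:

import java.util.LinkedHashMap;

public class LayeredDefsDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, String> defs = new LinkedHashMap<>();

        LinkedHashMap<String, String> fromSupertype = new LinkedHashMap<>();
        fromSupertype.put("jcr:content", "supertype definition");

        LinkedHashMap<String, String> own = new LinkedHashMap<>();
        own.put("jcr:content", "own definition");

        defs.putAll(fromSupertype); // inherited definitions merged first
        defs.putAll(own);           // a later putAll overwrites the shared key
        System.out.println(defs);   // prints {jcr:content=own definition}
    }
}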

From source file:com.datatorrent.stram.engine.StreamingContainer.java

private void setupNode(OperatorDeployInfo ndi) {
    failedNodes.remove(ndi.id);
    final Node<?> node = nodes.get(ndi.id);

    node.setup(node.context);

    /* setup context for all the input ports */
    LinkedHashMap<String, PortContextPair<InputPort<?>>> inputPorts = node
            .getPortMappingDescriptor().inputPorts;
    LinkedHashMap<String, PortContextPair<InputPort<?>>> newInputPorts = new LinkedHashMap<String, PortContextPair<InputPort<?>>>(
            inputPorts.size());
    for (OperatorDeployInfo.InputDeployInfo idi : ndi.inputs) {
        InputPort<?> port = inputPorts.get(idi.portName).component;
        PortContext context = new PortContext(idi.contextAttributes, node.context);
        newInputPorts.put(idi.portName, new PortContextPair<InputPort<?>>(port, context));
        port.setup(context);
    }
    inputPorts.putAll(newInputPorts);

    /* setup context for all the output ports */
    LinkedHashMap<String, PortContextPair<OutputPort<?>>> outputPorts = node
            .getPortMappingDescriptor().outputPorts;
    LinkedHashMap<String, PortContextPair<OutputPort<?>>> newOutputPorts = new LinkedHashMap<String, PortContextPair<OutputPort<?>>>(
            outputPorts.size());
    for (OperatorDeployInfo.OutputDeployInfo odi : ndi.outputs) {
        OutputPort<?> port = outputPorts.get(odi.portName).component;
        PortContext context = new PortContext(odi.contextAttributes, node.context);
        newOutputPorts.put(odi.portName, new PortContextPair<OutputPort<?>>(port, context));
        port.setup(context);
    }
    outputPorts.putAll(newOutputPorts);

    logger.debug("activating {} in container {}", node, containerId);
    /* This introduces a need for synchronization on processNodeRequest, which was solved by adding the deleted field in StramToNodeRequest */
    processNodeRequests(false);
    node.activate();
    eventBus.publish(new NodeActivationEvent(node));
}
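
Since newInputPorts and newOutputPorts are keyed by the same port names that already exist in the node's maps, the two putAll calls above replace each entry in place without disturbing the maps' original iteration order. A stripped-down sketch of this rebuild-and-replace pattern (plain String values stand in for the PortContextPair objects):

import java.util.LinkedHashMap;

public class RebuildDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, String> ports = new LinkedHashMap<>();
        ports.put("in1", "raw");
        ports.put("in2", "raw");

        // Build replacement values for the existing keys...
        LinkedHashMap<String, String> rebuilt = new LinkedHashMap<>(ports.size());
        for (String name : ports.keySet()) {
            rebuilt.put(name, "configured:" + name);
        }
        // ...then swap them in; the iteration order of ports is unchanged.
        ports.putAll(rebuilt);
        System.out.println(ports); // prints {in1=configured:in1, in2=configured:in2}
    }
}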

From source file:org.apache.asterix.translator.LangExpressionToPlanTranslator.java

/**
 * Eliminate shared operator references in a query plan rooted at <code>currentOpRef.getValue()</code>.
 * Deep copy a new query plan subtree whenever there is a shared operator reference.
 *
 * @param currentOpRef,
 *            the operator reference to consider
 * @param opRefSet,
 *            the set storing seen operator references so far.
 * @return a mapping that maps old variables to new variables, for the ancestors of
 *         <code>currentOpRef</code> to replace variables properly.
 * @throws AsterixException
 */
private LinkedHashMap<LogicalVariable, LogicalVariable> eliminateSharedOperatorReference(
        Mutable<ILogicalOperator> currentOpRef, Set<Mutable<ILogicalOperator>> opRefSet)
        throws AsterixException {
    try {
        opRefSet.add(currentOpRef);
        AbstractLogicalOperator currentOperator = (AbstractLogicalOperator) currentOpRef.getValue();

        // Recursively eliminates shared references in nested plans.
        if (currentOperator.hasNestedPlans()) {
            // Since a nested plan tree itself can never be shared with another nested plan tree in
            // another operator, the operation called in the if block does not need to replace
            // any variables further for <code>currentOpRef.getValue()</code> nor its ancestor.
            AbstractOperatorWithNestedPlans opWithNestedPlan = (AbstractOperatorWithNestedPlans) currentOperator;
            for (ILogicalPlan plan : opWithNestedPlan.getNestedPlans()) {
                for (Mutable<ILogicalOperator> rootRef : plan.getRoots()) {
                    Set<Mutable<ILogicalOperator>> nestedOpRefSet = new HashSet<>();
                    eliminateSharedOperatorReference(rootRef, nestedOpRefSet);
                }
            }
        }

        int childIndex = 0;
        LinkedHashMap<LogicalVariable, LogicalVariable> varMap = new LinkedHashMap<>();
        for (Mutable<ILogicalOperator> childRef : currentOperator.getInputs()) {
            if (opRefSet.contains(childRef)) {
                // There is a shared operator reference in the query plan.
                // Deep copies the child plan.
                LogicalOperatorDeepCopyWithNewVariablesVisitor visitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(
                        context, null);
                ILogicalOperator newChild = childRef.getValue().accept(visitor, null);
                LinkedHashMap<LogicalVariable, LogicalVariable> cloneVarMap = visitor
                        .getInputToOutputVariableMapping();

                // Substitute variables according to the deep copy which generates new variables.
                VariableUtilities.substituteVariables(currentOperator, cloneVarMap, null);
                varMap.putAll(cloneVarMap);

                // Sets the new child.
                childRef = new MutableObject<>(newChild);
                currentOperator.getInputs().set(childIndex, childRef);
            }

            // Recursively eliminate shared operator reference for the operator subtree,
            // even if it is a deep copy of some other one.
            LinkedHashMap<LogicalVariable, LogicalVariable> childVarMap = eliminateSharedOperatorReference(
                    childRef, opRefSet);
            // Substitute variables according to the new subtree.
            VariableUtilities.substituteVariables(currentOperator, childVarMap, null);

            // Updates mapping like <$a, $b> in varMap to <$a, $c>, where there is a mapping <$b, $c>
            // in childVarMap.
            for (Map.Entry<LogicalVariable, LogicalVariable> entry : varMap.entrySet()) {
                LogicalVariable newVar = childVarMap.get(entry.getValue());
                if (newVar != null) {
                    entry.setValue(newVar);
                }
            }
            varMap.putAll(childVarMap);
            ++childIndex;
        }

        // Only retain live variables for parent operators to substitute variables.
        Set<LogicalVariable> liveVars = new HashSet<>();
        VariableUtilities.getLiveVariables(currentOperator, liveVars);
        varMap.values().retainAll(liveVars);
        return varMap;
    } catch (AlgebricksException e) {
        throw new AsterixException(e);
    }
}
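
The inner loop and the final putAll in the method above together compose two renamings: every value in varMap is chased through childVarMap before the child's own mappings are merged in. Abstracted away from the Algebricks types (with invented variable names), the composition step behaves roughly like this:

import java.util.LinkedHashMap;
import java.util.Map;

public class ComposeDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, String> varMap = new LinkedHashMap<>();
        varMap.put("$a", "$b"); // earlier substitution: $a -> $b

        LinkedHashMap<String, String> childVarMap = new LinkedHashMap<>();
        childVarMap.put("$b", "$c"); // child substitution: $b -> $c

        // Rewrite <$a, $b> to <$a, $c> using the child's mapping.
        for (Map.Entry<String, String> entry : varMap.entrySet()) {
            String newVar = childVarMap.get(entry.getValue());
            if (newVar != null) {
                entry.setValue(newVar);
            }
        }
        varMap.putAll(childVarMap); // merge the child's mappings, later entries win
        System.out.println(varMap); // prints {$a=$c, $b=$c}
    }
}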