Example usage for java.util LinkedHashMap remove

Introduction

This page collects example usages of java.util.LinkedHashMap.remove from open source projects.

Prototype

V remove(Object key);

Document

Removes the mapping for a key from this map if it is present (optional operation).
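
The method returns the value that was previously associated with the key, or null if the map contained no mapping for it. Before the project examples, here is a minimal standalone sketch (the class name is illustrative) showing the return value and the fact that the remaining entries keep their insertion order:

import java.util.LinkedHashMap;

public class LinkedHashMapRemoveExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<String, Integer>();
        map.put("a", 1);
        map.put("b", 2);
        map.put("c", 3);

        // remove returns the value previously mapped to the key
        Integer removed = map.remove("b");
        System.out.println(removed);         // 2

        // removing an absent key returns null and leaves the map unchanged
        System.out.println(map.remove("x")); // null

        // the remaining entries keep their insertion order
        System.out.println(map.keySet());    // [a, c]
    }
}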

Usage

From source file:org.jahia.ajax.gwt.helper.PublicationHelper.java
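
This helper iterates over a snapshot of the map's keys and removes every entry whose publication info has no i18n UUID, so only translation entries remain.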

public void keepOnlyTranslation(LinkedHashMap<String, GWTJahiaPublicationInfo> all) throws RepositoryException {
    // snapshot the keys so entries can be removed without a ConcurrentModificationException
    Set<String> keys = new HashSet<String>(all.keySet());

    for (String key : keys) {
        GWTJahiaPublicationInfo gwtinfo = all.get(key);
        if (gwtinfo.getI18nUuid() == null) {
            all.remove(key);
        } else {
            gwtinfo.remove("uuid");
        }
    }
}

From source file:org.mycard.net.network.RequestQueue.java
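
This helper dequeues the oldest pending request; once a host's request list is drained, remove deletes that host's entry from the queue map.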

/** helper */
private Request removeFirst(LinkedHashMap<HttpHost, LinkedList<Request>> requestQueue) {
    Request ret = null;
    // LinkedHashMap iterates in insertion order, so the first entry belongs to the oldest host
    Iterator<Map.Entry<HttpHost, LinkedList<Request>>> iter = requestQueue.entrySet().iterator();
    if (iter.hasNext()) {
        Map.Entry<HttpHost, LinkedList<Request>> entry = iter.next();
        LinkedList<Request> reqList = entry.getValue();
        ret = reqList.removeFirst();
        if (reqList.isEmpty()) {
            requestQueue.remove(entry.getKey());
        }
    }
    return ret;
}

From source file:com.concursive.connect.web.modules.members.portlets.InviteMembersPortlet.java
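
This method maintains an ordered member-to-ids map; remove drops an entry once its id has already been matched or the user turns out to be a single, already-present match.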

private LinkedHashMap<String, String> updateMemberList(ActionRequest request, String member, String id,
        LinkedHashMap<String, String> memberMap, LinkedHashMap<String, String> memberPresentMap) {
    //return if the id passed is not valid or empty
    if (NO_MATCH_FOUND.equals(id) || !StringUtils.hasText(id)) {
        return memberMap;
    }

    LinkedHashMap<String, String> members = new LinkedHashMap<String, String>(memberMap);
    //remove the entry if the userid(s) have already been added
    if (members.containsValue(id)) {
        members.remove(member);
        return members;
    }

    User currentUser = PortalUtils.getUser(request);
    Project currentProject = PortalUtils.getProject(request);

    String[] arrUserId = id.split(",");
    String userIds = "";
    for (String userId : arrUserId) {
        //discard the userid if added before
        if (members.containsValue(userId)) {
            continue;
        }
        //discard the userid if it is the current user
        if (Integer.parseInt(userId) == currentUser.getId()) {
            continue;
        }
        //discard the userid if the user is already a member
        if (currentProject.getTeam().getTeamMember(Integer.parseInt(userId)) != null) {
            continue;
        }

        userIds += userId + ",";
    }
    userIds = DimDimUtils.trimComma(userIds);

    //check if there are ids not discarded
    if (StringUtils.hasText(userIds)) {
        //if it's not a multi-match, check whether the user was previously added to any multi-match list
        if (arrUserId.length == 1) {
            checkDuplicates(members, member, userIds);
        }
        members.put(member, userIds);
    } else {
        //remove from the member list if it's not a multi-match
        if (arrUserId.length == 1) {
            memberPresentMap.put(member, id);
            members.remove(member);
        } else {
            members.put(member, NO_MATCH_FOUND);
        }
    }
    return members;
}

From source file:shuffle.fwk.config.ConfigManager.java
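
Here passing a null newValue to setEntry removes the key from the type's mappings instead of storing null.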

/**
 * Sets the value for the given key to the given newValue.
 *
 * @param type
 *           The EntryType for this value
 * @param key
 *           The key for this value
 * @param newValue
 *           The value to store; if null, the mapping for the key is removed
 *           
 * @return True if anything changed. False otherwise.
 */
public boolean setEntry(EntryType type, String key, ConfigEntry newValue) {
    if (key == null || type == null) {
        return false;
    }
    boolean changed = false;
    synchronized (data) {
        if (data.containsKey(type)) {
            LinkedHashMap<String, ConfigEntry> mappings = data.get(type);
            ConfigEntry oldValue = mappings.get(key);
            // null-safe equality check: flag a change when the new value differs from the old
            changed |= !(newValue == oldValue || newValue != null && newValue.equals(oldValue));
            if (newValue == null) {
                mappings.remove(key);
            } else {
                mappings.put(key, newValue);
            }
        }
    }
    return changed;
}

From source file:com.proofpoint.jmx.MBeanRepresentation.java
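
Here remove is used instead of get so that each attribute is emitted at most once, even if the MBean server returns duplicate values.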

public MBeanRepresentation(MBeanServer mbeanServer, ObjectName objectName, ObjectMapper objectMapper)
        throws JMException {
    this.objectName = objectName;

    MBeanInfo mbeanInfo = mbeanServer.getMBeanInfo(objectName);

    className = mbeanInfo.getClassName();
    description = mbeanInfo.getDescription();
    descriptor = toMap(mbeanInfo.getDescriptor());

    //
    // Attributes
    //
    LinkedHashMap<String, MBeanAttributeInfo> attributeInfos = Maps.newLinkedHashMap();
    for (MBeanAttributeInfo attributeInfo : mbeanInfo.getAttributes()) {
        attributeInfos.put(attributeInfo.getName(), attributeInfo);
    }

    String[] attributeNames = attributeInfos.keySet().toArray(new String[attributeInfos.size()]);
    ImmutableList.Builder<AttributeRepresentation> attributes = ImmutableList.builder();
    for (Attribute attribute : mbeanServer.getAttributes(objectName, attributeNames).asList()) {
        String attributeName = attribute.getName();

        // use remove so we only include one value for each attribute
        MBeanAttributeInfo attributeInfo = attributeInfos.remove(attributeName);
        if (attributeInfo == null) {
            // unknown extra attribute, could have been added after MBeanInfo was fetched
            continue;
        }

        Object attributeValue = attribute.getValue();
        AttributeRepresentation attributeRepresentation = new AttributeRepresentation(attributeInfo,
                attributeValue, objectMapper);
        attributes.add(attributeRepresentation);
    }
    this.attributes = attributes.build();

    //
    // Operations
    //
    ImmutableList.Builder<OperationRepresentation> operations = ImmutableList.builder();
    for (MBeanOperationInfo operationInfo : mbeanInfo.getOperations()) {
        operations.add(new OperationRepresentation(operationInfo));
    }
    this.operations = operations.build();
}

From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java
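
A GWT emulation test: removing the only entry must return the map to the empty state.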

public void testIsEmpty() {
    LinkedHashMap<String, String> srcMap = new LinkedHashMap<String, String>();
    checkEmptyLinkedHashMapAssumptions(srcMap);

    LinkedHashMap<String, String> dstMap = new LinkedHashMap<String, String>();
    checkEmptyLinkedHashMapAssumptions(dstMap);

    dstMap.putAll(srcMap);
    assertTrue(dstMap.isEmpty());

    dstMap.put(KEY_KEY, VALUE_VAL);
    assertFalse(dstMap.isEmpty());

    dstMap.remove(KEY_KEY);
    assertTrue(dstMap.isEmpty());
    assertEquals(dstMap.size(), 0);
}

From source file:org.openscore.lang.compiler.utils.ExecutableBuilder.java
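
This compiler step uses remove to pull the inputs, outputs, results, and on_failure sections out of the raw data maps so the remaining entries can be handled generically.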

public Executable transformToExecutable(ParsedSlang parsedSlang, String execName,
        Map<String, Object> executableRawData) {

    Validate.notNull(parsedSlang, "Slang source for " + execName + " is null");
    Validate.notEmpty(executableRawData,
            "Error compiling " + parsedSlang.getName() + ". Executable data for: " + execName + " is empty");

    Map<String, Serializable> preExecutableActionData = new HashMap<>();
    Map<String, Serializable> postExecutableActionData = new HashMap<>();

    transformersHandler.validateKeyWords(execName, executableRawData,
            ListUtils.union(preExecTransformers, postExecTransformers), execAdditionalKeywords);

    preExecutableActionData.putAll(transformersHandler.runTransformers(executableRawData, preExecTransformers));
    postExecutableActionData
            .putAll(transformersHandler.runTransformers(executableRawData, postExecTransformers));

    @SuppressWarnings("unchecked")
    List<Input> inputs = (List<Input>) preExecutableActionData.remove(SlangTextualKeys.INPUTS_KEY);
    @SuppressWarnings("unchecked")
    List<Output> outputs = (List<Output>) postExecutableActionData.remove(SlangTextualKeys.OUTPUTS_KEY);
    @SuppressWarnings("unchecked")
    List<Result> results = (List<Result>) postExecutableActionData.remove(SlangTextualKeys.RESULTS_KEY);

    String namespace = parsedSlang.getNamespace();
    Map<String, String> imports = parsedSlang.getImports();
    resolveSystemProperties(inputs, imports);
    Map<String, SlangFileType> dependencies;
    switch (parsedSlang.getType()) {
    case FLOW:

        if (!executableRawData.containsKey(SlangTextualKeys.WORKFLOW_KEY)) {
            throw new RuntimeException("Error compiling " + parsedSlang.getName() + ". Flow: " + execName
                    + " has no workflow property");
        }
        LinkedHashMap<String, Map<String, Object>> workFlowRawData;
        try {
            workFlowRawData = (LinkedHashMap) executableRawData.get(SlangTextualKeys.WORKFLOW_KEY);
        } catch (ClassCastException ex) {
            throw new RuntimeException("Flow: '" + execName
                    + "' syntax is illegal.\nBelow 'workflow' property there should be a map of tasks and not a list");
        }
        if (MapUtils.isEmpty(workFlowRawData)) {
            throw new RuntimeException("Error compiling " + parsedSlang.getName() + ". Flow: " + execName
                    + " has no workflow data");
        }

        Workflow onFailureWorkFlow = null;
        LinkedHashMap<String, Map<String, Object>> onFailureData;
        try {
            onFailureData = (LinkedHashMap) workFlowRawData.remove(SlangTextualKeys.ON_FAILURE_KEY);
        } catch (ClassCastException ex) {
            throw new RuntimeException("Flow: '" + execName
                    + "' syntax is illegal.\nBelow 'on_failure' property there should be a map of tasks and not a list");
        }
        if (MapUtils.isNotEmpty(onFailureData)) {
            onFailureWorkFlow = compileWorkFlow(onFailureData, imports, null, true);
        }

        Workflow workflow = compileWorkFlow(workFlowRawData, imports, onFailureWorkFlow, false);
        //todo: add system properties dependencies?
        dependencies = fetchDirectTasksDependencies(workflow);
        return new Flow(preExecutableActionData, postExecutableActionData, workflow, namespace, execName,
                inputs, outputs, results, dependencies);

    case OPERATIONS:
        Map<String, Object> actionRawData;
        try {
            actionRawData = (Map<String, Object>) executableRawData.get(SlangTextualKeys.ACTION_KEY);
        } catch (ClassCastException ex) {
            throw new RuntimeException("Operation: '" + execName
                    + "' syntax is illegal.\nBelow 'action' property there should be a map of values such as: 'python_script:' or 'java_action:'");
        }

        if (MapUtils.isEmpty(actionRawData)) {
            throw new RuntimeException("Error compiling " + parsedSlang.getName() + ". Operation: " + execName
                    + " has no action data");
        }
        Action action = compileAction(actionRawData);
        //todo: add system properties dependencies?
        dependencies = new HashMap<>();
        return new Operation(preExecutableActionData, postExecutableActionData, action, namespace, execName,
                inputs, outputs, results, dependencies);
    default:
        throw new RuntimeException(
                "Error compiling " + parsedSlang.getName() + ". It is not of flow or operations type");
    }
}

From source file:org.apache.hadoop.hive.ql.optimizer.MapJoinProcessor.java
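
This optimizer uses remove both to drop input paths whose alias lists become empty and, after iteration, to detach the small-table aliases from aliasToWork.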

/**
 * Generate the MapRed Local Work for the given map-join operator
 *
 * @param newWork
 *          the map-reduce work to which the generated local work is attached
 * @param mapJoinOp
 *          map-join operator for which local work needs to be generated.
 * @param bigTablePos
 *          position of the big table among the map-join operator's parent operators
 * @throws SemanticException
 */
private static void genMapJoinLocalWork(MapredWork newWork, MapJoinOperator mapJoinOp, int bigTablePos)
        throws SemanticException {
    // keep the small table alias to avoid concurrent modification exception
    ArrayList<String> smallTableAliasList = new ArrayList<String>();

    // create a new  MapredLocalWork
    MapredLocalWork newLocalWork = new MapredLocalWork(
            new LinkedHashMap<String, Operator<? extends OperatorDesc>>(),
            new LinkedHashMap<String, FetchWork>());

    for (Map.Entry<String, Operator<? extends OperatorDesc>> entry : newWork.getMapWork().getAliasToWork()
            .entrySet()) {
        String alias = entry.getKey();
        Operator<? extends OperatorDesc> op = entry.getValue();

        // skip the table scan that feeds the big table: trace down the
        // operator tree from the table scan until the map-join operator is reached
        Operator<? extends OperatorDesc> parentOp = op;
        Operator<? extends OperatorDesc> childOp = op.getChildOperators().get(0);
        while ((childOp != null) && (!childOp.equals(mapJoinOp))) {
            parentOp = childOp;
            assert parentOp.getChildOperators().size() == 1;
            childOp = parentOp.getChildOperators().get(0);
        }
        if (childOp == null) {
            throw new SemanticException("Cannot find join op by tracing down the table scan operator tree");
        }
        // skip the big table pos
        int i = childOp.getParentOperators().indexOf(parentOp);
        if (i == bigTablePos) {
            continue;
        }
        // set alias to work and put into smallTableAliasList
        newLocalWork.getAliasToWork().put(alias, op);
        smallTableAliasList.add(alias);
        // get input path and remove this alias from pathToAlias
        // because this file will be fetched by fetch operator
        LinkedHashMap<String, ArrayList<String>> pathToAliases = newWork.getMapWork().getPathToAliases();

        // keep record all the input path for this alias
        HashSet<String> pathSet = new HashSet<String>();
        HashSet<String> emptyPath = new HashSet<String>();
        for (Map.Entry<String, ArrayList<String>> entry2 : pathToAliases.entrySet()) {
            String path = entry2.getKey();
            ArrayList<String> list = entry2.getValue();
            if (list.contains(alias)) {
                // add to path set
                pathSet.add(path);
                //remove this alias from the alias list
                list.remove(alias);
                if (list.size() == 0) {
                    emptyPath.add(path);
                }
            }
        }
        // remove paths that no longer have any associated alias
        for (String path : emptyPath) {
            pathToAliases.remove(path);
        }

        // create fetch work
        FetchWork fetchWork = null;
        List<Path> partDir = new ArrayList<Path>();
        List<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();

        for (String tablePath : pathSet) {
            PartitionDesc partitionDesc = newWork.getMapWork().getPathToPartitionInfo().get(tablePath);
            // create fetchwork for non partitioned table
            if (partitionDesc.getPartSpec() == null || partitionDesc.getPartSpec().size() == 0) {
                fetchWork = new FetchWork(new Path(tablePath), partitionDesc.getTableDesc());
                break;
            }
            // if the table is partitioned, add partDir and partitionDesc
            partDir.add(new Path(tablePath));
            partDesc.add(partitionDesc);
        }
        // create fetchwork for partitioned table
        if (fetchWork == null) {
            TableDesc table = newWork.getMapWork().getAliasToPartnInfo().get(alias).getTableDesc();
            fetchWork = new FetchWork(partDir, partDesc, table);
        }
        // set alias to fetch work
        newLocalWork.getAliasToFetchWork().put(alias, fetchWork);
    }
    // remove small table aliases from aliasToWork to avoid concurrent modification
    for (String alias : smallTableAliasList) {
        newWork.getMapWork().getAliasToWork().remove(alias);
    }

    // set up local work
    newWork.getMapWork().setMapRedLocalWork(newLocalWork);
    // remove reducer
    newWork.setReduceWork(null);
}

From source file:com.streamsets.pipeline.stage.destination.hive.HiveMetastoreTargetIT.java
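
This test removes values from the partition map to produce fewer partitions than the table is configured with, expecting a HIVE_27 error record.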

@Test
public void testPartitionMismatch() throws Exception {
    HiveMetastoreTarget hiveTarget = new HiveMetastoreTargetBuilder().build();

    TargetRunner runner = new TargetRunner.Builder(HiveMetastoreTarget.class, hiveTarget)
            .setOnRecordError(OnRecordError.TO_ERROR).build();
    runner.runInit();

    LinkedHashMap<String, HiveTypeInfo> columns = new LinkedHashMap<>();
    columns.put("name", HiveType.STRING.getSupport().generateHiveTypeInfoFromResultSet("STRING"));

    LinkedHashMap<String, HiveTypeInfo> partitions = new LinkedHashMap<>();
    partitions.put("dt1", HiveType.STRING.getSupport().generateHiveTypeInfoFromResultSet("STRING"));
    partitions.put("dt2", HiveType.STRING.getSupport().generateHiveTypeInfoFromResultSet("STRING"));

    Field newTableField = HiveMetastoreUtil.newSchemaMetadataFieldBuilder("default", "tbl", columns, partitions,
            true, BaseHiveIT.getDefaultWareHouseDir(), HiveMetastoreUtil.generateAvroSchema(columns, "tbl"));

    Record record = RecordCreator.create();
    record.set(newTableField);
    Assert.assertTrue(HiveMetastoreUtil.isSchemaChangeRecord(record));

    runner.runWrite(ImmutableList.of(record));
    Assert.assertEquals("There should be no error records", 0, runner.getErrorRecords().size());

    //More Partitions (3) than configured
    LinkedHashMap<String, String> partitionVals = new LinkedHashMap<>();
    partitionVals.put("dt1", "2016");
    partitionVals.put("dt2", "2017");
    partitionVals.put("dt3", "2018");

    Field newPartitionField1 = HiveMetastoreUtil.newPartitionMetadataFieldBuilder("default", "tbl",
            partitionVals, "/user/hive/warehouse/tbl/dt1=2016/dt2=2017/dt3=2018");
    record = RecordCreator.create();
    record.set(newPartitionField1);
    runner.runWrite(ImmutableList.of(record));
    Assert.assertEquals("There should be one error record", 1, runner.getErrorRecords().size());
    Record errorRecord = runner.getErrorRecords().get(0);
    Assert.assertEquals(errorRecord.getHeader().getErrorCode(), Errors.HIVE_27.name());

    //Resetting the runner
    runner.getErrorRecords().clear();
    //Remove two of the three partition values, leaving fewer partitions than configured.
    partitionVals.remove("dt2");
    partitionVals.remove("dt3");

    Field newPartitionField2 = HiveMetastoreUtil.newPartitionMetadataFieldBuilder("default", "tbl",
            partitionVals, "/user/hive/warehouse/tbl/dt1=2016/dt2=2017/dt3=2018");

    record = RecordCreator.create();
    record.set(newPartitionField2);
    runner.runWrite(ImmutableList.of(record));
    Assert.assertEquals("There should be one error record", 1, runner.getErrorRecords().size());
    errorRecord = runner.getErrorRecords().get(0);
    Assert.assertEquals(errorRecord.getHeader().getErrorCode(), Errors.HIVE_27.name());
}

From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java
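
This test uses keys designed to collide, verifying that remove deletes only the requested mapping and leaves the others intact.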

public void testKeysConflict() {
    LinkedHashMap<Object, String> hashMap = new LinkedHashMap<Object, String>();

    hashMap.put(STRING_ZERO_KEY, STRING_ZERO_VALUE);
    hashMap.put(INTEGER_ZERO_KEY, INTEGER_ZERO_VALUE);
    hashMap.put(ODD_ZERO_KEY, ODD_ZERO_VALUE);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), INTEGER_ZERO_VALUE);
    assertEquals(hashMap.get(ODD_ZERO_KEY), ODD_ZERO_VALUE);
    assertEquals(hashMap.get(STRING_ZERO_KEY), STRING_ZERO_VALUE);
    hashMap.remove(INTEGER_ZERO_KEY);
    assertEquals(hashMap.get(ODD_ZERO_KEY), ODD_ZERO_VALUE);
    assertEquals(hashMap.get(STRING_ZERO_KEY), STRING_ZERO_VALUE);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), null);
    hashMap.remove(ODD_ZERO_KEY);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), null);
    assertEquals(hashMap.get(ODD_ZERO_KEY), null);
    assertEquals(hashMap.get(STRING_ZERO_KEY), STRING_ZERO_VALUE);
    hashMap.remove(STRING_ZERO_KEY);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), null);
    assertEquals(hashMap.get(ODD_ZERO_KEY), null);
    assertEquals(hashMap.get(STRING_ZERO_KEY), null);
    assertEquals(hashMap.size(), 0);
}