List of usage examples for java.util LinkedHashMap remove
V remove(Object key);
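Before the project examples below, a minimal, self-contained sketch of the method's behavior: remove(Object key) deletes the mapping for the key if one is present, returns the previous value (or null if there was no mapping), and the insertion order of the remaining entries is preserved.

import java.util.LinkedHashMap;

public class RemoveDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<String, Integer>();
        map.put("a", 1);
        map.put("b", 2);
        map.put("c", 3);

        Integer removed = map.remove("b");   // returns the previous value, 2
        Integer missing = map.remove("x");   // no mapping for "x": returns null

        System.out.println(removed);  // 2
        System.out.println(missing);  // null
        System.out.println(map);      // {a=1, c=3} -- insertion order of the rest is kept
    }
}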
From source file:com.datatorrent.stram.plan.physical.PhysicalPlan.java
/**
 * Remove all physical operators for the given logical operator.
 * All connected streams must have been previously removed.
 * @param om
 */
public void removeLogicalOperator(OperatorMeta om) {
    PMapping opers = this.logicalToPTOperator.get(om);
    if (opers == null) {
        throw new AssertionError("Operator not in physical plan: " + om.getName());
    }

    for (PTOperator oper : opers.partitions) {
        removePartition(oper, opers);
    }

    for (StreamMapping ug : opers.outputStreams.values()) {
        for (PTOperator oper : ug.cascadingUnifiers) {
            removePTOperator(oper);
        }
        if (ug.finalUnifier != null) {
            removePTOperator(ug.finalUnifier);
        }
    }

    LinkedHashMap<OperatorMeta, PMapping> copyMap = Maps.newLinkedHashMap(this.logicalToPTOperator);
    copyMap.remove(om);
    this.logicalToPTOperator = copyMap;
}
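The last three lines above use a copy-then-swap idiom: instead of mutating logicalToPTOperator in place, the code copies it into a new LinkedHashMap, removes the entry from the copy, and reassigns the field, so readers never observe a half-modified map. A minimal sketch of the same idiom using only JDK types (the class and field names here are illustrative, not from the original project):

import java.util.LinkedHashMap;
import java.util.Map;

class CopyOnRemoveExample {
    // Readers see either the old map or the new one, never an in-progress mutation
    private volatile Map<String, Integer> registry = new LinkedHashMap<String, Integer>();

    void unregister(String key) {
        LinkedHashMap<String, Integer> copy = new LinkedHashMap<String, Integer>(registry);
        copy.remove(key);
        registry = copy;
    }
}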
From source file:org.dcm4che.tool.dcmqrscp.DcmQRSCP.java
public Attributes calculateStorageCommitmentResult(String calledAET, Attributes actionInfo)
        throws DicomServiceException {
    Sequence requestSeq = actionInfo.getSequence(Tag.ReferencedSOPSequence);
    int size = requestSeq.size();
    String[] sopIUIDs = new String[size];
    Attributes eventInfo = new Attributes(6);
    eventInfo.setString(Tag.RetrieveAETitle, VR.AE, calledAET);
    eventInfo.setString(Tag.StorageMediaFileSetID, VR.SH, ddReader.getFileSetID());
    eventInfo.setString(Tag.StorageMediaFileSetUID, VR.SH, ddReader.getFileSetUID());
    eventInfo.setString(Tag.TransactionUID, VR.UI, actionInfo.getString(Tag.TransactionUID));
    Sequence successSeq = eventInfo.newSequence(Tag.ReferencedSOPSequence, size);
    Sequence failedSeq = eventInfo.newSequence(Tag.FailedSOPSequence, size);
    LinkedHashMap<String, String> map = new LinkedHashMap<String, String>(size * 4 / 3);
    for (int i = 0; i < sopIUIDs.length; i++) {
        Attributes item = requestSeq.get(i);
        map.put(sopIUIDs[i] = item.getString(Tag.ReferencedSOPInstanceUID),
                item.getString(Tag.ReferencedSOPClassUID));
    }
    DicomDirReader ddr = ddReader;
    try {
        Attributes patRec = ddr.findPatientRecord();
        while (patRec != null) {
            Attributes studyRec = ddr.findStudyRecord(patRec);
            while (studyRec != null) {
                Attributes seriesRec = ddr.findSeriesRecord(studyRec);
                while (seriesRec != null) {
                    Attributes instRec = ddr.findLowerInstanceRecord(seriesRec, true, sopIUIDs);
                    while (instRec != null) {
                        String iuid = instRec.getString(Tag.ReferencedSOPInstanceUIDInFile);
                        String cuid = map.remove(iuid);
                        if (cuid.equals(instRec.getString(Tag.ReferencedSOPClassUIDInFile)))
                            successSeq.add(refSOP(iuid, cuid, Status.Success));
                        else
                            failedSeq.add(refSOP(iuid, cuid, Status.ClassInstanceConflict));
                        instRec = ddr.findNextInstanceRecord(instRec, true, sopIUIDs);
                    }
                    seriesRec = ddr.findNextSeriesRecord(seriesRec);
                }
                studyRec = ddr.findNextStudyRecord(studyRec);
            }
            patRec = ddr.findNextPatientRecord(patRec);
        }
    } catch (IOException e) {
        LOG.info("Failed to M-READ " + dicomDir, e);
        throw new DicomServiceException(Status.ProcessingFailure, e);
    }
    for (Map.Entry<String, String> entry : map.entrySet()) {
        failedSeq.add(refSOP(entry.getKey(), entry.getValue(), Status.NoSuchObjectInstance));
    }
    if (failedSeq.isEmpty())
        eventInfo.remove(Tag.FailedSOPSequence);
    return eventInfo;
}
From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java
public void testEntrySet() {
    LinkedHashMap<String, String> hashMap = new LinkedHashMap<String, String>();
    checkEmptyLinkedHashMapAssumptions(hashMap);

    Set<Entry<String, String>> entrySet = hashMap.entrySet();
    assertNotNull(entrySet);

    // Check that the entry set looks right
    hashMap.put(KEY_TEST_ENTRY_SET, VALUE_TEST_ENTRY_SET_1);
    entrySet = hashMap.entrySet();
    assertEquals(entrySet.size(), SIZE_ONE);
    Iterator<Entry<String, String>> itSet = entrySet.iterator();
    Map.Entry<String, String> entry = itSet.next();
    assertEquals(entry.getKey(), KEY_TEST_ENTRY_SET);
    assertEquals(entry.getValue(), VALUE_TEST_ENTRY_SET_1);

    // Check that entries in the entrySet are updated correctly on overwrites
    hashMap.put(KEY_TEST_ENTRY_SET, VALUE_TEST_ENTRY_SET_2);
    entrySet = hashMap.entrySet();
    assertEquals(entrySet.size(), SIZE_ONE);
    itSet = entrySet.iterator();
    entry = itSet.next();
    assertEquals(entry.getKey(), KEY_TEST_ENTRY_SET);
    assertEquals(entry.getValue(), VALUE_TEST_ENTRY_SET_2);

    // Check that entries are updated on removes
    hashMap.remove(KEY_TEST_ENTRY_SET);
    checkEmptyLinkedHashMapAssumptions(hashMap);
}
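The test relies on entrySet() being a live view of the map: removing a key from the map also removes the corresponding entry from any entry set obtained earlier. A small sketch of that behavior:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class EntrySetViewDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, String> map = new LinkedHashMap<String, String>();
        map.put("k", "v1");

        Set<Map.Entry<String, String>> entries = map.entrySet(); // a live view, not a snapshot
        System.out.println(entries.size()); // 1

        map.remove("k");
        System.out.println(entries.size()); // 0 -- the view reflects the removal
    }
}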
From source file:gate.util.reporting.PRTimeReporter.java
/**
 * Sorts a LinkedHashMap by its values (natural descending order), keeping
 * duplicates as they are.
 *
 * @param passedMap
 *          An object of type LinkedHashMap to be sorted by its values.
 *
 * @return An object containing the sorted LinkedHashMap.
 */
private LinkedHashMap<String, String> sortHashMapByValues(LinkedHashMap<String, String> passedMap) {
    List<String> mapKeys = new ArrayList<String>(passedMap.keySet());
    List<String> mapValues = new ArrayList<String>(passedMap.values());
    Collections.sort(mapValues, new ValueComparator());
    Collections.sort(mapKeys);
    Collections.reverse(mapValues);
    LinkedHashMap<String, String> sortedMap = new LinkedHashMap<String, String>();
    Iterator<String> valueIt = mapValues.iterator();
    while (valueIt.hasNext()) {
        String val = valueIt.next();
        Iterator<String> keyIt = mapKeys.iterator();
        while (keyIt.hasNext()) {
            String key = keyIt.next();
            String comp1 = passedMap.get(key).toString();
            String comp2 = val.toString();
            if (comp1.equals(comp2)) {
                // Remove the matched key so a duplicate value is paired with a different key next time
                passedMap.remove(key);
                mapKeys.remove(key);
                sortedMap.put(key, val);
                break;
            }
        }
    }
    return sortedMap;
}
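A short usage sketch of the helper above, with made-up timing data and assuming GATE's ValueComparator orders these numeric strings ascending before the reverse: because each matched key is removed from passedMap and mapKeys as it is placed into sortedMap, two keys sharing the same value each keep their own entry instead of the first key being matched twice. Note that the helper empties the map passed to it.

LinkedHashMap<String, String> timings = new LinkedHashMap<String, String>();
timings.put("Tokenizer", "12");
timings.put("Gazetteer", "45");
timings.put("Transducer", "45");   // duplicate value

// After sortHashMapByValues(timings) the returned map would be, in descending value order:
// {Gazetteer=45, Transducer=45, Tokenizer=12}
// and timings itself is left empty, since each matched key is removed from it.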
From source file:org.apache.tez.mapreduce.input.TestMultiMRInput.java
@Test(timeout = 5000)
public void testMultipleSplits() throws Exception {
    Path workDir = new Path(TEST_ROOT_DIR, "testMultipleSplits");
    JobConf jobConf = new JobConf(defaultConf);
    jobConf.setInputFormat(org.apache.hadoop.mapred.SequenceFileInputFormat.class);
    FileInputFormat.setInputPaths(jobConf, workDir);

    MRInputUserPayloadProto.Builder builder = MRInputUserPayloadProto.newBuilder();
    builder.setGroupingEnabled(false);
    builder.setConfigurationBytes(TezUtils.createByteStringFromConf(jobConf));
    byte[] payload = builder.build().toByteArray();

    InputContext inputContext = createTezInputContext(payload);
    MultiMRInput input = new MultiMRInput(inputContext, 2);
    input.initialize();

    List<Event> eventList = new ArrayList<Event>();

    LinkedHashMap<LongWritable, Text> data = new LinkedHashMap<LongWritable, Text>();

    String file1 = "file1";
    LinkedHashMap<LongWritable, Text> data1 = createInputData(localFs, workDir, jobConf, file1, 0, 10);

    String file2 = "file2";
    LinkedHashMap<LongWritable, Text> data2 = createInputData(localFs, workDir, jobConf, file2, 10, 20);

    data.putAll(data1);
    data.putAll(data2);

    SequenceFileInputFormat<LongWritable, Text> format = new SequenceFileInputFormat<LongWritable, Text>();
    InputSplit[] splits = format.getSplits(jobConf, 2);
    assertEquals(2, splits.length);

    MRSplitProto splitProto1 = MRInputHelpers.createSplitProto(splits[0]);
    InputDataInformationEvent event1 = InputDataInformationEvent.createWithSerializedPayload(0,
            splitProto1.toByteString().asReadOnlyByteBuffer());

    MRSplitProto splitProto2 = MRInputHelpers.createSplitProto(splits[1]);
    InputDataInformationEvent event2 = InputDataInformationEvent.createWithSerializedPayload(0,
            splitProto2.toByteString().asReadOnlyByteBuffer());

    eventList.clear();
    eventList.add(event1);
    eventList.add(event2);
    input.handleEvents(eventList);

    int readerCount = 0;
    for (KeyValueReader reader : input.getKeyValueReaders()) {
        readerCount++;
        while (reader.next()) {
            if (data.size() == 0) {
                fail("Found more records than expected");
            }
            Object key = reader.getCurrentKey();
            Object val = reader.getCurrentValue();
            assertEquals(val, data.remove(key));
        }
    }
    assertEquals(2, readerCount);
}
From source file:ubic.gemma.datastructure.matrix.ExpressionDataMatrixColumnSort.java
/**
 * Divide the biomaterials up into chunks based on the experimental factor given, keeping everybody in order.
 *
 * @param ef
 * @param bms
 * @return ordered map of fv->bm where fv is of ef, or null if it couldn't be done properly.
 */
private static LinkedHashMap<FactorValue, List<BioMaterial>> chunkOnFactor(ExperimentalFactor ef,
        List<BioMaterial> bms) {

    if (bms == null) {
        return null;
    }

    LinkedHashMap<FactorValue, List<BioMaterial>> chunks = new LinkedHashMap<FactorValue, List<BioMaterial>>();

    /*
     * Get the factor values in the order we have things right now
     */
    for (BioMaterial bm : bms) {
        for (FactorValue fv : bm.getFactorValues()) {
            if (!ef.getFactorValues().contains(fv)) {
                continue;
            }
            if (chunks.keySet().contains(fv)) {
                continue;
            }
            chunks.put(fv, new ArrayList<BioMaterial>());
        }
    }

    /*
     * What if bm doesn't have a value for the factorvalue. Need a dummy value.
     */
    FactorValue dummy = FactorValue.Factory.newInstance(ef);
    dummy.setValue("");
    dummy.setId(-1L);
    chunks.put(dummy, new ArrayList<BioMaterial>());

    for (BioMaterial bm : bms) {
        boolean found = false;
        for (FactorValue fv : bm.getFactorValues()) {
            if (ef.getFactorValues().contains(fv)) {
                found = true;
                assert chunks.containsKey(fv);
                chunks.get(fv).add(bm);
            }
        }
        if (!found) {
            if (log.isDebugEnabled())
                log.debug(bm + " has no value for factor=" + ef + "; using dummy value");
            chunks.get(dummy).add(bm);
        }
    }

    if (chunks.get(dummy).size() == 0) {
        if (log.isDebugEnabled())
            log.debug("removing dummy");
        chunks.remove(dummy);
    }

    log.debug(chunks.size() + " chunks for " + ef + ", from current chunk of size " + bms.size());

    /*
     * Sanity check
     */
    int total = 0;
    for (FactorValue fv : chunks.keySet()) {
        List<BioMaterial> chunk = chunks.get(fv);
        total += chunk.size();
    }

    assert total == bms.size() : "expected " + bms.size() + ", got " + total;

    return chunks;
}
From source file:gate.util.reporting.DocTimeReporter.java
/**
 * Sorts a LinkedHashMap by its values (natural descending order), keeping
 * duplicates as they are.
 *
 * @param passedMap
 *          An object of type LinkedHashMap to be sorted by its values.
 * @return An object containing the sorted LinkedHashMap.
 */
private LinkedHashMap<?, ?> sortHashMapByValues(LinkedHashMap<String, String> passedMap) {
    List<String> mapKeys = new ArrayList<String>(passedMap.keySet());
    List<String> mapValues = new ArrayList<String>(passedMap.values());
    Collections.sort(mapValues, new ValueComparator());
    Collections.sort(mapKeys);
    // Reversing the collection to sort the values in descending order
    Collections.reverse(mapValues);
    LinkedHashMap<String, String> sortedMap = new LinkedHashMap<String, String>();
    Iterator<String> valueIt = mapValues.iterator();
    while (valueIt.hasNext()) {
        String val = valueIt.next();
        Iterator<String> keyIt = mapKeys.iterator();
        while (keyIt.hasNext()) {
            String key = keyIt.next();
            String comp1 = passedMap.get(key).toString();
            String comp2 = val.toString();
            if (comp1.equals(comp2)) {
                passedMap.remove(key);
                mapKeys.remove(key);
                sortedMap.put(key, val);
                break;
            }
        }
    }
    return sortedMap;
}
From source file:edu.umd.ks.cm.util.siscm.dao.impl.SisCmDaoImpl.java
private String getNaturalLanguageForStatement(String booleanExpression,
        List<ReqComponentReference> reqComponentList) throws Exception {
    HashMap reqComponentMap = new HashMap();
    LinkedHashMap<Integer, Integer> parPositionMap = new LinkedHashMap<Integer, Integer>();
    ArrayList<Integer> parLeftList = new ArrayList<Integer>();
    for (ReqComponentReference reqComponent : reqComponentList) {
        String translation = this.reqComponentTranslator.translate(reqComponent.getReqComponent(),
                "KUALI.RULE.CATALOG", "en");
        if (translation != null && translation.length() > 0
                && translation.substring(translation.length() - 1).equals("."))
            translation = translation.substring(0, translation.length() - 1);
        reqComponentMap.put(reqComponent.getBooleanId(), translation);
    }
    BooleanFunction booleanFunction = new BooleanFunction(booleanExpression);
    List<String> funcSymbs = booleanFunction.getSymbols();
    for (int i = 0; i < funcSymbs.size(); i++) {
        if (funcSymbs.get(i).equals("(")) {
            parLeftList.add(i);
        }
        int parLeftLast = parLeftList.size() - 1;
        if (funcSymbs.get(i).equals(")")) {
            parPositionMap.put(parLeftList.get(parLeftLast), i);
            parLeftList.remove(parLeftLast);
        }
    }
    // For the expression (A + B + (C * D)) want to remove outer ()
    if (parPositionMap.containsKey(0) && parPositionMap.get(0) == funcSymbs.size() - 1) {
        parPositionMap.remove(0);
        funcSymbs.set(0, "null");
        funcSymbs.set(funcSymbs.size() - 1, "null");
    }
    if (!parPositionMap.isEmpty()) {
        for (Integer key : parPositionMap.keySet()) {
            StringBuffer funcSymb = new StringBuffer("");
            int pos = 0;
            String expr = "";
            for (int i = key + 1; i < parPositionMap.get(key); i++) {
                String funcSymbAdd = funcSymbs.get(i);
                if (!funcSymbAdd.equals("+") && !funcSymbAdd.equals("*") && !funcSymbAdd.equals("null")) {
                    expr = (String) reqComponentMap.get(funcSymbAdd);
                    if (pos == 0 && !funcSymbAdd.substring(0, 1).equals("V") && expr.length() > 2
                            && expr.substring(0, 1).equals("(")
                            && expr.substring(expr.length() - 1).equals(")")) {
                        expr = expr.substring(1, expr.length() - 1);
                    }
                    pos = 1;
                    // convert the first character of 'expr' to lower case, if necessary
                    if (expr.length() > 0) {
                        char ch0 = expr.charAt(0);
                        if (ch0 <= 'Z' && ch0 >= 'A') {
                            if (expr.length() > 1) {
                                char ch1 = expr.charAt(1);
                                if (ch1 >= 'a' && ch1 <= 'z') {
                                    expr = expr.substring(0, 1).toLowerCase() + expr.substring(1);
                                }
                            } else {
                                expr = expr.toLowerCase();
                            }
                        }
                    }
                    funcSymb.append(expr);
                } else if (funcSymbAdd.equals("+")) {
                    funcSymb.append("; or ");
                } else if (funcSymbAdd.equals("*")) {
                    funcSymb.append("; and ");
                }
            } // for int i
            String id = "V" + Integer.toString(key);
            funcSymb.insert(0, "(");
            funcSymb.append(")");
            reqComponentMap.put(id, funcSymb.toString());
            funcSymbs.set(key, id);
            for (int i = key + 1; i < parPositionMap.get(key) + 1; i++)
                funcSymbs.set(i, "null");
        }
    }
    List<String> funcSymbsNew = new ArrayList<String>();
    for (int i = 0; i < funcSymbs.size(); i++) {
        if (!funcSymbs.get(i).equals("null"))
            funcSymbsNew.add(funcSymbs.get(i));
    }
    String nl = "";
    if (funcSymbsNew.size() == 1) {
        nl = (String) reqComponentMap.get(funcSymbsNew.get(0));
        if (nl.substring(0, 1).equals("(") && nl.substring(nl.length() - 1).equals(")"))
            nl = nl.substring(1, nl.length() - 1);
    } else {
        int pos = 0;
        String expr = "";
        for (int i = 0; i < funcSymbsNew.size(); i++) {
            if (!funcSymbsNew.get(i).equals("*") && !funcSymbsNew.get(i).equals("+")) {
                expr = (String) reqComponentMap.get(funcSymbsNew.get(i));
                if (pos == 0) {
                    if (expr.length() > 2 && expr.substring(0, 1).equals("(")
                            && expr.substring(expr.length() - 1).equals(")"))
                        expr = expr.substring(1, expr.length() - 1);
                    pos = 1;
                } else {
                    if (funcSymbsNew.get(i).substring(0, 1).equals("V") && expr.length() > 2
                            && expr.substring(0, 1).equals("(")
                            && expr.substring(expr.length() - 1).equals(")"))
                        expr = expr.substring(1, expr.length() - 1);
                }
                nl = nl + expr;
            } else if (funcSymbsNew.get(i).equals("+")) {
                if ((i > 0 && funcSymbsNew.get(i - 1).substring(0, 1).equals("V"))
                        || (i < (funcSymbsNew.size() - 1) && funcSymbsNew.get(i + 1).substring(0, 1).equals("V")))
                    nl = nl + ". Or ";
                else
                    nl = nl + "; or ";
            } else if (funcSymbsNew.get(i).equals("*")) {
                if ((i > 0 && funcSymbsNew.get(i - 1).substring(0, 1).equals("V"))
                        || (i < (funcSymbsNew.size() - 1) && funcSymbsNew.get(i + 1).substring(0, 1).equals("V")))
                    nl = nl + ". And ";
                else
                    nl = nl + "; and ";
            }
        }
    }
    // TODO: Fix Capitalization
    nl = nl.substring(0, 1).toUpperCase() + nl.substring(1);
    return nl.trim();
}
From source file:com.vmware.bdd.cli.commands.ClusterCommands.java
private void prettyOutputDetailNodegroups(TopologyType topology,
        LinkedHashMap<String, List<String>> ngColumnNamesWithGetMethodNames, List<NodeGroupRead> nodegroups)
        throws Exception {
    LinkedHashMap<String, List<String>> nColumnNamesWithGetMethodNames = new LinkedHashMap<String, List<String>>();
    nColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_NODE_NAME, Arrays.asList("getName"));
    nColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_NODE_VERSION, Arrays.asList("getVersion"));
    nColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_HOST, Arrays.asList("getHostName"));
    if (topology == TopologyType.RACK_AS_RACK || topology == TopologyType.HVE) {
        nColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_RACK, Arrays.asList("getRack"));
    }
    nColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_IP, Arrays.asList("fetchMgtIp"));
    nColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_HDFS_IP, Arrays.asList("fetchHdfsIp"));
    nColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_MAPRED_IP, Arrays.asList("fetchMapredIp"));
    nColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_STATUS, Arrays.asList("getStatus"));
    nColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_TASK, Arrays.asList("getAction"));

    for (NodeGroupRead nodegroup : nodegroups) {
        CommandsUtils.printInTableFormat(ngColumnNamesWithGetMethodNames, new NodeGroupRead[] { nodegroup },
                Constants.OUTPUT_INDENT);
        List<NodeRead> nodes = nodegroup.getInstances();
        if (nodes != null) {
            LinkedHashMap<String, List<String>> nColumnNamesWithGetMethodNamesClone =
                    (LinkedHashMap<String, List<String>>) nColumnNamesWithGetMethodNames.clone();
            if (!nodes.isEmpty() && (nodes.get(0).getIpConfigs() == null
                    || (!nodes.get(0).getIpConfigs().containsKey(NetTrafficType.HDFS_NETWORK)
                            && !nodes.get(0).getIpConfigs().containsKey(NetTrafficType.MAPRED_NETWORK)))) {
                nColumnNamesWithGetMethodNamesClone.remove(Constants.FORMAT_TABLE_COLUMN_HDFS_IP);
                nColumnNamesWithGetMethodNamesClone.remove(Constants.FORMAT_TABLE_COLUMN_MAPRED_IP);
            }
            System.out.println();
            CommandsUtils.printInTableFormat(nColumnNamesWithGetMethodNamesClone, nodes.toArray(),
                    new StringBuilder().append(Constants.OUTPUT_INDENT).append(Constants.OUTPUT_INDENT)
                            .toString());
        }
        System.out.println();
    }
    CommandsUtils.prettyOutputErrorNode(nodegroups);
}
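A side note on the clone-then-remove step above: LinkedHashMap.clone() returns a shallow copy typed as Object, which is why the unchecked cast is needed. Copying through the constructor gives the same column-map-minus-some-keys result without the cast; a minimal sketch with illustrative column keys (not the real Constants values):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;

class ColumnFilterSketch {
    static LinkedHashMap<String, List<String>> withoutIpColumns(LinkedHashMap<String, List<String>> columns) {
        // The copy constructor preserves insertion order and avoids the unchecked cast needed with clone()
        LinkedHashMap<String, List<String>> copy = new LinkedHashMap<String, List<String>>(columns);
        copy.remove("HDFS IP");    // illustrative keys only
        copy.remove("MAPRED IP");
        return copy;
    }

    public static void main(String[] args) {
        LinkedHashMap<String, List<String>> columns = new LinkedHashMap<String, List<String>>();
        columns.put("NAME", Arrays.asList("getName"));
        columns.put("HDFS IP", Arrays.asList("fetchHdfsIp"));
        columns.put("MAPRED IP", Arrays.asList("fetchMapredIp"));
        System.out.println(withoutIpColumns(columns).keySet()); // [NAME]
    }
}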
From source file:net.sf.jasperreports.engine.fill.DelayedFillActions.java
protected void doCollectElementEvaluations(JRPrintPage page, List<JRPrintElement> elements,
        final ElementEvaluationsCollector collector, boolean clearEmpty) {
    FillPageKey pageKey = new FillPageKey(page);

    for (Map.Entry<JREvaluationTime, LinkedHashMap<FillPageKey, LinkedMap<Object, EvaluationBoundAction>>> boundMapEntry : actionsMap
            .entrySet()) {
        final JREvaluationTime evaluationTime = boundMapEntry.getKey();
        LinkedHashMap<FillPageKey, LinkedMap<Object, EvaluationBoundAction>> map = boundMapEntry.getValue();

        synchronized (map) {
            final LinkedMap<Object, EvaluationBoundAction> actionsMap = map.get(pageKey);

            if (actionsMap != null && !actionsMap.isEmpty()) {
                // FIXME optimize for pages with a single virtual block
                // create a deep element visitor
                PrintElementVisitor<Void> visitor = new UniformPrintElementVisitor<Void>(true) {
                    @Override
                    protected void visitElement(JRPrintElement element, Void arg) {
                        // remove the action from the map because we're saving it as part of the page.
                        // ugly cast but acceptable for now.
                        ElementEvaluationAction action = (ElementEvaluationAction) actionsMap.remove(element);
                        if (action != null) {
                            if (log.isDebugEnabled()) {
                                log.debug(id + " collecting evaluation " + evaluationTime + " of element "
                                        + element);
                            }

                            collector.collect(element, action.element, evaluationTime);
                        }
                    }
                };

                for (JRPrintElement element : elements) {
                    element.accept(visitor, null);
                }

                if (clearEmpty && actionsMap.isEmpty()) {
                    map.remove(pageKey);
                }
            }
        }
    }
}