List of usage examples for java.util.HashMap.values()
public Collection<V> values()
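values() returns a Collection view of the values contained in the map. The view is backed by the map: changes to the map show up in the collection, removing through the view's iterator removes the corresponding mapping, and add/addAll are not supported. Before the examples from real codebases below, a minimal self-contained sketch (the class and data are illustrative only):

import java.util.Collection;
import java.util.HashMap;

public class ValuesDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> scores = new HashMap<String, Integer>();
        scores.put("alice", 3);
        scores.put("bob", 5);

        // values() is a live view of the map, not a copy
        Collection<Integer> values = scores.values();
        for (int v : values) {
            System.out.println(v); // prints 3 and 5 in no particular order
        }

        // removing a mapping from the map is reflected in the view
        scores.remove("alice");
        System.out.println(values.size()); // prints 1
    }
}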
From source file:org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils.java
/**
 * Set the key and value description for all the tasks rooted at the given
 * task. Loops over all the tasks recursively.
 *
 * @param task
 */
public static void setKeyAndValueDescForTaskTree(Task<? extends Serializable> task) {
    if (task instanceof ConditionalTask) {
        List<Task<? extends Serializable>> listTasks = ((ConditionalTask) task).getListTasks();
        for (Task<? extends Serializable> tsk : listTasks) {
            setKeyAndValueDescForTaskTree(tsk);
        }
    } else if (task instanceof ExecDriver) {
        MapredWork work = (MapredWork) task.getWork();
        work.getMapWork().deriveExplainAttributes();
        HashMap<String, Operator<? extends OperatorDesc>> opMap = work.getMapWork().getAliasToWork();
        if (opMap != null && !opMap.isEmpty()) {
            for (Operator<? extends OperatorDesc> op : opMap.values()) {
                setKeyAndValueDesc(work.getReduceWork(), op);
            }
        }
    } else if (task != null && (task.getWork() instanceof TezWork)) {
        TezWork work = (TezWork) task.getWork();
        for (BaseWork w : work.getAllWorkUnsorted()) {
            if (w instanceof MapWork) {
                ((MapWork) w).deriveExplainAttributes();
            }
        }
    } else if (task instanceof SparkTask) {
        SparkWork work = (SparkWork) task.getWork();
        for (BaseWork w : work.getAllWorkUnsorted()) {
            if (w instanceof MapWork) {
                ((MapWork) w).deriveExplainAttributes();
            }
        }
    }
    if (task.getChildTasks() == null) {
        return;
    }
    for (Task<? extends Serializable> childTask : task.getChildTasks()) {
        setKeyAndValueDescForTaskTree(childTask);
    }
}
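In this example the values() loop is guarded by opMap != null && !opMap.isEmpty(). Only the null check is load-bearing: iterating the values() view of an empty map simply runs zero iterations. A minimal sketch of the same guard (runAll and the Runnable value type are illustrative, not part of Hive):

import java.util.HashMap;

class NullGuardSketch {
    // a null guard alone is enough before iterating values();
    // an extra isEmpty() check is harmless but redundant
    static void runAll(HashMap<String, Runnable> opMap) {
        if (opMap != null) {
            for (Runnable op : opMap.values()) {
                op.run();
            }
        }
    }
}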
From source file:gov.llnl.lc.smt.command.port.SmtPort.java
/**
 * Dumps a verbose description of every known port to standard output.
 */
private void dumpAllPorts() {
    HashMap<String, OSM_Port> ports = getOSM_Ports();
    for (OSM_Port p : ports.values()) {
        System.out.println(p.toVerboseString());
    }
}
From source file:org.openmrs.logic.db.hibernate.HibernateLogicObsDAO.java
@SuppressWarnings("unchecked") private List<Obs> logicToHibernate(LogicExpression expression, Cohort who, LogicContext logicContext) throws LogicException { Criteria criteria = sessionFactory.getCurrentSession().createCriteria(Obs.class); Date indexDate = logicContext.getIndexDate(); Operator transformOperator = null;/* w ww.j ava2s .co m*/ LogicTransform transform = expression.getTransform(); Integer numResults = null; if (transform != null) { transformOperator = transform.getTransformOperator(); numResults = transform.getNumResults(); } if (numResults == null) { numResults = 1; } // set the transform and evaluate the right criteria // if there is any if (transformOperator == Operator.LAST) { criteria.addOrder(Order.desc("obsDatetime")).addOrder(Order.desc("dateCreated")) .addOrder(Order.desc("obsId")); } else if (transformOperator == Operator.FIRST) { criteria.addOrder(Order.asc("obsDatetime")).addOrder(Order.asc("dateCreated")) .addOrder(Order.asc("obsId")); } else if (transformOperator == Operator.DISTINCT) { criteria.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY); } else { criteria.addOrder(Order.desc("obsDatetime")); } Criterion c = this.getCriterion(expression, indexDate, criteria); if (c != null) { criteria.add(c); } List<Obs> results = new ArrayList<Obs>(); criteria.add(Restrictions.eq("voided", false)); criteria.add(Restrictions.in("person.personId", who.getMemberIds())); results.addAll(criteria.list()); //return a single result per patient for these operators //I don't see an easy way to do this in hibernate so I am //doing some postprocessing if (transformOperator == Operator.FIRST || transformOperator == Operator.LAST) { HashMap<Integer, ArrayList<Obs>> nResultMap = new HashMap<Integer, ArrayList<Obs>>(); for (Obs currResult : results) { Integer currPersonId = currResult.getPersonId(); ArrayList<Obs> prevResults = nResultMap.get(currPersonId); if (prevResults == null) { prevResults = new ArrayList<Obs>(); nResultMap.put(currPersonId, prevResults); } if (prevResults.size() < numResults) { prevResults.add(currResult); } } if (nResultMap.values().size() > 0) { results.clear(); for (ArrayList<Obs> currPatientObs : nResultMap.values()) { results.addAll(currPatientObs); } } } return results; }
From source file:gov.llnl.lc.smt.command.port.SmtPort.java
/**
 * Dumps a summary of every active port whose link speed matches the given value.
 */
private void dumpAllPorts(OSM_LinkSpeed lspeed) {
    HashMap<String, OSM_Port> ports = getOSM_Ports();
    for (OSM_Port p : ports.values()) {
        if (p.isActive() && lspeed != null && lspeed == OSM_LinkSpeed.get(p)) {
            System.out.println(getPortSummary(OMService, p));
        }
    }
}
From source file:de.uni_potsdam.hpi.bpt.promnicat.persistenceApi.orientdbObj.index.IndexIntersection.java
/**
 * Load the intersecting referenced objects from the specified indices.
 * First load the database ids from all indices, intersect them, and load the remaining ids.
 *
 * @return the resulting {@link IndexCollectionElement}s
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public Collection<IndexCollectionElement<V>> load() {
    // load dbIds only and sort them by result set size
    TreeList rawResults = new TreeList(); // no generics possible
    int maxSize = 0;
    for (AbstractIndex index : indices) {
        ResultSet<V> oneResultSet = new ResultSet<V>(index.loadIdsOnly(), index.getName());
        rawResults.add(oneResultSet);
        maxSize = Math.max(maxSize, oneResultSet.getSize());
    }
    // create a list of intersecting dbIds:
    // start with the smallest result set, intersect it with the second smallest,
    // intersect that result with the third smallest, and so on
    HashSet<String> intersectingDbIds = new HashSet<String>(maxSize);
    for (Object r : rawResults) {
        ResultSet<V> aResult = (ResultSet<V>) r;
        if (intersectingDbIds.isEmpty()) {
            intersectingDbIds.addAll(aResult.getDbIds());
        } else {
            intersectingDbIds.retainAll(aResult.getDbIds());
        }
        if (intersectingDbIds.isEmpty()) {
            break;
        }
    }
    // group IndexElements by referenced id; every group is stored
    // in one IndexCollectionElement
    HashMap<String, IndexCollectionElement<V>> finalElements = new HashMap<String, IndexCollectionElement<V>>(
            indices.size());
    for (Object r : rawResults) {
        ResultSet<V> aResult = (ResultSet<V>) r;
        for (IndexElement indexElement : aResult.getList()) {
            String currentString = indexElement.getDbId();
            if (intersectingDbIds.contains(currentString)) {
                if (!finalElements.containsKey(currentString)) {
                    finalElements.put(currentString, new IndexCollectionElement<V>(currentString));
                }
                finalElements.get(currentString).addIndexElements(indexElement);
            }
        }
    }
    // load pojos
    for (IndexCollectionElement<V> collectionElement : finalElements.values()) {
        collectionElement.loadPojo(papi);
    }
    return finalElements.values();
}
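Note that load() returns finalElements.values() directly, handing the caller a live view backed by the private map: mutations through it (where supported) would write through to the map. Whether that is acceptable depends on the callers; a defensive variant would snapshot or wrap the view. A sketch under simplified types (this is not what the promnicat code does):

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;

class ValuesViewSketch {
    private final HashMap<String, String> finalElements = new HashMap<String, String>();

    // option 1: snapshot copy, detached from later changes to the map
    Collection<String> loadCopy() {
        return new ArrayList<String>(finalElements.values());
    }

    // option 2: read-only wrapper; still reflects later map changes
    Collection<String> loadReadOnly() {
        return Collections.unmodifiableCollection(finalElements.values());
    }
}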
From source file:org.openmrs.logic.db.hibernate.HibernateLogicEncounterDAO.java
@SuppressWarnings("unchecked") private List<Encounter> logicToHibernate(LogicExpression expression, Cohort who, LogicContext logicContext) throws LogicException { Criteria criteria = sessionFactory.getCurrentSession().createCriteria(Encounter.class); Date indexDate = logicContext.getIndexDate(); Operator transformOperator = null;/*from w w w . jav a2 s.c o m*/ LogicTransform transform = expression.getTransform(); Integer numResults = null; if (transform != null) { transformOperator = transform.getTransformOperator(); numResults = transform.getNumResults(); } if (numResults == null) { numResults = 1; } // set the transform and evaluate the right criteria // if there is any if (transformOperator == Operator.LAST) { criteria.addOrder(Order.desc("encounterDatetime")).addOrder(Order.desc("dateCreated")) .addOrder(Order.desc("encounterId")); } else if (transformOperator == Operator.FIRST) { criteria.addOrder(Order.asc("encounterDatetime")).addOrder(Order.asc("encounterId")); } else if (transformOperator == Operator.DISTINCT) { criteria.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY); } else { criteria.addOrder(Order.desc("encounterDatetime")); } Criterion c = this.getCriterion(expression, indexDate, criteria); if (c != null) { criteria.add(c); } List<Encounter> results = new ArrayList<Encounter>(); criteria.add(Restrictions.eq("voided", false)); criteria.add(Restrictions.in("patient.personId", who.getMemberIds())); results.addAll(criteria.list()); //return a single result per patient for these operators //I don't see an easy way to do this in hibernate so I am //doing some postprocessing if (transformOperator == Operator.FIRST || transformOperator == Operator.LAST) { HashMap<Integer, ArrayList<Encounter>> nResultMap = new HashMap<Integer, ArrayList<Encounter>>(); for (Encounter currResult : results) { Integer currPersonId = currResult.getPatient().getPersonId(); ArrayList<Encounter> prevResults = nResultMap.get(currPersonId); if (prevResults == null) { prevResults = new ArrayList<Encounter>(); nResultMap.put(currPersonId, prevResults); } if (prevResults.size() < numResults) { prevResults.add(currResult); } } if (nResultMap.values().size() > 0) { results.clear(); for (ArrayList<Encounter> currPatientEncounter : nResultMap.values()) { results.addAll(currPatientEncounter); } } } return results; }
From source file:com.google.gwt.emultest.java.util.HashMapTest.java
public void testHashMapMap() {
    HashMap<Integer, Integer> srcMap = new HashMap<Integer, Integer>();
    assertNotNull(srcMap);
    checkEmptyHashMapAssumptions(srcMap);

    srcMap.put(INTEGER_1, INTEGER_11);
    srcMap.put(INTEGER_2, INTEGER_22);
    srcMap.put(INTEGER_3, INTEGER_33);

    HashMap<Integer, Integer> hashMap = new HashMap<Integer, Integer>(srcMap);
    assertFalse(hashMap.isEmpty());
    assertTrue(hashMap.size() == SIZE_THREE);

    Collection<Integer> valColl = hashMap.values();
    assertTrue(valColl.contains(INTEGER_11));
    assertTrue(valColl.contains(INTEGER_22));
    assertTrue(valColl.contains(INTEGER_33));

    Collection<Integer> keyColl = hashMap.keySet();
    assertTrue(keyColl.contains(INTEGER_1));
    assertTrue(keyColl.contains(INTEGER_2));
    assertTrue(keyColl.contains(INTEGER_3));
}
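The test only checks containment. One property of values() it does not exercise: unlike keySet(), the values view is a plain Collection rather than a Set, because distinct keys may map to equal values. A small self-contained sketch:

import java.util.HashMap;

public class DuplicateValues {
    public static void main(String[] args) {
        HashMap<String, Integer> map = new HashMap<String, Integer>();
        map.put("a", 1);
        map.put("b", 1); // same value under a different key

        System.out.println(map.keySet().size());      // 2 -- keys are unique
        System.out.println(map.values().size());      // 2 -- duplicate values are kept
        System.out.println(map.values().contains(1)); // true
    }
}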
From source file:com.globalsight.everest.tda.TdaHelper.java
private void writeTranslationUnit(OutputStreamWriter m_outputStream, HashMap needHitMTTuTuvMap)
        throws IOException {
    // put all tus into an array
    Object[] key_tus = needHitMTTuTuvMap.keySet().toArray();
    Tu[] tusInArray = new Tu[key_tus.length];
    for (int key = 0; key < key_tus.length; key++) {
        tusInArray[key] = (Tu) key_tus[key];
    }

    // put all tuvs into an array
    Object[] value_tuvs = needHitMTTuTuvMap.values().toArray();
    Tuv[] tuvsInArray = new Tuv[value_tuvs.length];
    for (int value = 0; value < value_tuvs.length; value++) {
        tuvsInArray[value] = (Tuv) value_tuvs[value];
    }

    for (int i = 0; i < tuvsInArray.length; i++) {
        Tu tu = (Tu) key_tus[i];
        Tuv tuv = (Tuv) value_tuvs[i];
        String m_strEOL = "\r\n";
        String m_space = " ";

        m_outputStream.write("<trans-unit id=\"" + tu.getId() + "\"");
        m_outputStream.write(">");
        m_outputStream.write(m_strEOL);
        m_outputStream.write("<source>");
        m_outputStream.write(tuv.getGxmlExcludeTopTags());
        m_outputStream.write("</source>");
        m_outputStream.write(m_strEOL);
        m_outputStream.write("<target></target>");
        m_outputStream.write(m_strEOL);
        m_outputStream.write("</trans-unit>");
        m_outputStream.write(m_strEOL);
    }
}
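This method walks keySet().toArray() and values().toArray() as parallel arrays and relies on index i in one lining up with index i in the other. HashMap's view iterators do traverse the table in the same order as long as the map is not modified in between, but the Map contract does not guarantee this, so the safer idiom pairs each key with its value through entrySet(). A sketch of the loop in that style (String stands in for the GlobalSight Tu/Tuv types):

import java.util.HashMap;
import java.util.Map;

class EntrySetSketch {
    // entrySet() pairs each key with its own value; no index alignment is needed
    static void writeUnits(HashMap<String, String> needHitMTTuTuvMap) {
        for (Map.Entry<String, String> e : needHitMTTuTuvMap.entrySet()) {
            String tu = e.getKey();    // the Tu in the original
            String tuv = e.getValue(); // the Tuv in the original
            System.out.println("<trans-unit id=\"" + tu + "\"><source>" + tuv + "</source></trans-unit>");
        }
    }
}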
From source file:org.apache.hadoop.hive.ql.parse.mr2.GenMR2Utils.java
/**
 * Set the key and value description for all the tasks rooted at the given
 * task. Loops over all the tasks recursively.
 *
 * @param task
 */
public static void setKeyAndValueDescForTaskTree(Task<? extends Serializable> task) {
    if (task instanceof ConditionalTask) {
        List<Task<? extends Serializable>> listTasks = ((ConditionalTask) task).getListTasks();
        for (Task<? extends Serializable> tsk : listTasks) {
            setKeyAndValueDescForTaskTree(tsk);
        }
    } else if (task instanceof MR2ExecDriver) {
        MR2Work work = (MR2Work) task.getWork();
        work.getMapWork().deriveExplainAttributes();
        HashMap<String, Operator<? extends OperatorDesc>> opMap = work.getMapWork().getAliasToWork();
        if (opMap != null && !opMap.isEmpty()) {
            for (Operator<? extends OperatorDesc> op : opMap.values()) {
                setKeyAndValueDesc(work.getReduceWork(), op);
            }
        }
    } else if (task != null && (task.getWork() instanceof TezWork)) {
        TezWork work = (TezWork) task.getWork();
        for (BaseWork w : work.getAllWorkUnsorted()) {
            if (w instanceof MapWork) {
                ((MapWork) w).deriveExplainAttributes();
            }
        }
    } else if (task instanceof SparkTask) {
        SparkWork work = (SparkWork) task.getWork();
        for (BaseWork w : work.getAllWorkUnsorted()) {
            if (w instanceof MapWork) {
                ((MapWork) w).deriveExplainAttributes();
            }
        }
    }
    if (task.getChildTasks() == null) {
        return;
    }
    for (Task<? extends Serializable> childTask : task.getChildTasks()) {
        setKeyAndValueDescForTaskTree(childTask);
    }
}
From source file:org.apache.nutch.indexer.IndexingFilters.java
public IndexingFilters(Configuration conf) {
    /* Get indexingfilter.order property */
    String order = conf.get(INDEXINGFILTER_ORDER);
    ObjectCache objectCache = ObjectCache.get(conf);
    this.indexingFilters = (IndexingFilter[]) objectCache.getObject(IndexingFilter.class.getName());

    if (this.indexingFilters == null) {
        /*
         * If ordered filters are required, prepare array of filters based on
         * property
         */
        String[] orderedFilters = null;
        if (order != null && !order.trim().equals("")) {
            orderedFilters = order.split("\\s+");
        }
        try {
            ExtensionPoint point = PluginRepository.get(conf).getExtensionPoint(IndexingFilter.X_POINT_ID);
            if (point == null)
                throw new RuntimeException(IndexingFilter.X_POINT_ID + " not found.");
            Extension[] extensions = point.getExtensions();
            HashMap<String, IndexingFilter> filterMap = new HashMap<String, IndexingFilter>();
            for (int i = 0; i < extensions.length; i++) {
                Extension extension = extensions[i];
                IndexingFilter filter = (IndexingFilter) extension.getExtensionInstance();
                LOG.info("Adding " + filter.getClass().getName());
                if (!filterMap.containsKey(filter.getClass().getName())) {
                    filter.addIndexBackendOptions(conf);
                    filterMap.put(filter.getClass().getName(), filter);
                }
            }
            /*
             * If no ordered filters required, just get the filters in an
             * indeterminate order
             */
            if (orderedFilters == null) {
                objectCache.setObject(IndexingFilter.class.getName(),
                        filterMap.values().toArray(new IndexingFilter[0]));
            /* Otherwise run the filters in the required order */
            } else {
                ArrayList<IndexingFilter> filters = new ArrayList<IndexingFilter>();
                for (int i = 0; i < orderedFilters.length; i++) {
                    IndexingFilter filter = filterMap.get(orderedFilters[i]);
                    if (filter != null) {
                        filter.addIndexBackendOptions(conf);
                        filters.add(filter);
                    }
                }
                objectCache.setObject(IndexingFilter.class.getName(),
                        filters.toArray(new IndexingFilter[filters.size()]));
            }
        } catch (PluginRuntimeException e) {
            throw new RuntimeException(e);
        }
        this.indexingFilters = (IndexingFilter[]) objectCache.getObject(IndexingFilter.class.getName());
    }
}
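The call filterMap.values().toArray(new IndexingFilter[0]) is the standard way to materialize a values view as a typed array: the array argument only fixes the component type, and toArray allocates a correctly sized array when the one passed in is too small. A minimal sketch with String values:

import java.util.HashMap;

public class ValuesToArray {
    public static void main(String[] args) {
        HashMap<String, String> filterMap = new HashMap<String, String>();
        filterMap.put("a", "x");
        filterMap.put("b", "y");

        // new String[0] fixes the component type; toArray sizes the result itself
        String[] asArray = filterMap.values().toArray(new String[0]);
        System.out.println(asArray.length); // prints 2
    }
}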