List of usage examples for java.util HashMap values
public Collection<V> values()
From source file:com.shizhefei.view.multitype.MultiTypeAdapter.java
private List<ItemBinder<ITEM_DATA>> toItemData(@NonNull List<? extends ITEM_DATA> addList, boolean refresh) { HashMap<? extends ITEM_DATA, ItemBinder<ITEM_DATA>> removeItemData = null; if (refresh) { removeItemData = data_Providers; data_Providers = new HashMap<>(); }//from w w w .j av a 2 s . com List<ItemBinder<ITEM_DATA>> providers = new ArrayList<>(addList.size()); for (ITEM_DATA object : addList) { ItemBinder<ITEM_DATA> itemBinder; if (refresh) { itemBinder = removeItemData.remove(object); } else { itemBinder = data_Providers.get(object); } if (itemBinder == null) { itemBinder = factory.buildItemData(object); } data_Providers.put(object, itemBinder); providers.add(itemBinder); providerIndex.put(itemBinder.providerType, itemBinder.provider); } if (refresh) { //?Fragment doRemoveItemData(removeItemData.values()); } return providers; }
From source file:org.apache.ranger.service.XResourceService.java
public List<VXPermMap> updatePermMaps(List<VXPermMap> vxPermMaps) { List<VXPermMap> updatedPermList = new ArrayList<VXPermMap>(); List<Long> listOfUser = new ArrayList<Long>(); List<Long> listOfGroup = new ArrayList<Long>(); //[1] : Populate perm maps for user and groups for (VXPermMap vxPermMap : vxPermMaps) { if (vxPermMap.getPermFor() == AppConstants.XA_PERM_FOR_USER) { if (!listOfUser.contains(vxPermMap.getUserId())) { listOfUser.add(vxPermMap.getUserId()); }/*from w w w . ja va 2 s .c o m*/ } else if (vxPermMap.getPermFor() == AppConstants.XA_PERM_FOR_GROUP) { if (!listOfGroup.contains(vxPermMap.getGroupId())) { listOfGroup.add(vxPermMap.getGroupId()); } } } HashMap<Long, HashMap<Integer, VXPermMap>> userPermMap = new HashMap<Long, HashMap<Integer, VXPermMap>>(); for (Long userId : listOfUser) { HashMap<Integer, VXPermMap> userPerm = new HashMap<Integer, VXPermMap>(); for (VXPermMap vxPermMap : vxPermMaps) { if (vxPermMap.getPermFor() == AppConstants.XA_PERM_FOR_USER && vxPermMap.getUserId() == userId) { userPerm.put(vxPermMap.getPermType(), vxPermMap); } } userPermMap.put(userId, userPerm); } //[2] : HashMap<Long, HashMap<Integer, VXPermMap>> groupPermMap = new HashMap<Long, HashMap<Integer, VXPermMap>>(); for (Long groupId : listOfGroup) { HashMap<Integer, VXPermMap> groupPerm = new HashMap<Integer, VXPermMap>(); for (VXPermMap vxPermMap : vxPermMaps) { Long permGroupId = vxPermMap.getGroupId(); int permFor = vxPermMap.getPermFor(); if (permFor == AppConstants.XA_PERM_FOR_GROUP && permGroupId.equals(groupId)) { groupPerm.put(vxPermMap.getPermType(), vxPermMap); } } groupPermMap.put(groupId, groupPerm); } for (Long userId : listOfUser) { HashMap<Integer, VXPermMap> map = userPermMap.get(userId); VXPermMap vxPermMap = null; if (!map.isEmpty()) { vxPermMap = map.values().iterator().next(); } if (vxPermMap == null) { continue; } if (map.get(AppConstants.XA_PERM_TYPE_READ) == null) { vxPermMap.setPermType(AppConstants.XA_PERM_TYPE_READ); 
map.put(AppConstants.XA_PERM_TYPE_READ, vxPermMap); } if (map.size() > 1 && map.get(AppConstants.XA_PERM_TYPE_EXECUTE) == null) { vxPermMap.setPermType(AppConstants.XA_PERM_TYPE_EXECUTE); map.put(AppConstants.XA_PERM_TYPE_EXECUTE, vxPermMap); } userPermMap.put(userId, map); } for (Long groupId : listOfGroup) { HashMap<Integer, VXPermMap> map = groupPermMap.get(groupId); VXPermMap vxPermMap = null; if (!map.isEmpty()) { vxPermMap = map.values().iterator().next(); } if (vxPermMap == null) { continue; } if (map.get(AppConstants.XA_PERM_TYPE_READ) == null) { vxPermMap.setPermType(AppConstants.XA_PERM_TYPE_READ); map.put(AppConstants.XA_PERM_TYPE_READ, vxPermMap); } if (map.size() > 1 && map.get(AppConstants.XA_PERM_TYPE_EXECUTE) == null) { vxPermMap.setPermType(AppConstants.XA_PERM_TYPE_EXECUTE); map.put(AppConstants.XA_PERM_TYPE_EXECUTE, vxPermMap); } groupPermMap.put(groupId, map); } // [3] : for (Entry<Long, HashMap<Integer, VXPermMap>> entry : userPermMap.entrySet()) { for (Entry<Integer, VXPermMap> innerEntry : entry.getValue().entrySet()) { updatedPermList.add(innerEntry.getValue()); } } for (Entry<Long, HashMap<Integer, VXPermMap>> entry : groupPermMap.entrySet()) { for (Entry<Integer, VXPermMap> innerEntry : entry.getValue().entrySet()) { updatedPermList.add(innerEntry.getValue()); } } return updatedPermList; }
From source file:com.sun.labs.aura.fb.DataManager.java
/** * Gets an array of tag ItemInfos that represents a merged cloud made from * the distinctive tags of all the provided artists. * // w w w. ja v a2s. c om * @param artistIDs keys of artists to include * @param size * @return a normalized tag cloud */ public ItemInfo[] getMergedCloud(String[] artistKeys, int size) { String cacheKey = StringUtils.join(artistKeys); ItemInfo[] cached = artistsToCloud.get(cacheKey); if (cached != null) { return cached; } // // For each artist, get its distinctive tags and throw them // into a merged set HashMap<String, Scored<ArtistTag>> merged = new HashMap<String, Scored<ArtistTag>>(); for (String artist : artistKeys) { try { List<Scored<ArtistTag>> tags = mdb.artistGetDistinctiveTags(artist, size); for (Scored<ArtistTag> scored : tags) { ArtistTag tag = scored.getItem(); Scored<ArtistTag> existing = merged.get(tag.getName()); if (existing != null) { existing.setScore(existing.getScore() + scored.getScore()); } else { merged.put(tag.getName(), scored); } } } catch (AuraException e) { logger.info("Failed to get tags for artist " + artist); } } // // Sort the merged values by descending score Collection<Scored<ArtistTag>> values = merged.values(); List<Scored<ArtistTag>> l = new ArrayList<Scored<ArtistTag>>(values.size()); l.addAll(values); Collections.sort(l, new Comparator<Scored<ArtistTag>>() { public int compare(Scored<ArtistTag> o1, Scored<ArtistTag> o2) { return -1 * (new Double(o1.getScore()).compareTo(new Double(o2.getScore()))); } }); // // Get the top N, normalizing as we go int numTags = Math.min(size, values.size()); double maxScore = 1; if (numTags > 0) { maxScore = l.get(0).getScore(); } ItemInfo[] items = new ItemInfo[numTags]; for (int i = 0; i < numTags; i++) { Scored<ArtistTag> scored = l.get(i); ArtistTag tag = scored.getItem(); items[i] = new ItemInfo(tag.getKey(), tag.getName(), scored.getScore() / maxScore, tag.getPopularity(), ItemInfo.CONTENT_TYPE.TAG); } artistsToCloud.put(cacheKey, items); return items; }
From source file:de.tudarmstadt.ukp.dkpro.core.io.conll.ConllUWriter.java
/**
 * Writes every sentence of the CAS to {@code aOut} in CoNLL-U format:
 * one numbered row per token (lemma, POS, coarse POS, morphological
 * features, head id, dependency relation, enhanced deps, misc), preceded
 * by a multi-word range row where a SurfaceForm spans several tokens.
 * Sentences are separated by a blank line.
 */
private void convert(JCas aJCas, PrintWriter aOut) {
    Map<SurfaceForm, Collection<Token>> surfaceIdx = indexCovered(aJCas, SurfaceForm.class, Token.class);
    // Index surface forms by their begin offset for O(1) lookup per token.
    Int2ObjectMap<SurfaceForm> surfaceBeginIdx = new Int2ObjectOpenHashMap<>();
    for (SurfaceForm sf : select(aJCas, SurfaceForm.class)) {
        surfaceBeginIdx.put(sf.getBegin(), sf);
    }
    for (Sentence sentence : select(aJCas, Sentence.class)) {
        // LinkedHashMap preserves token order for the output loop below.
        HashMap<Token, Row> ctokens = new LinkedHashMap<Token, Row>();
        // Tokens: assign 1-based ids and detect "no space after" boundaries.
        List<Token> tokens = selectCovered(Token.class, sentence);
        for (int i = 0; i < tokens.size(); i++) {
            Row row = new Row();
            row.id = i + 1;
            row.token = tokens.get(i);
            // SpaceAfter=No when this token's end touches the next token's begin.
            row.noSpaceAfter = (i + 1 < tokens.size()) && row.token.getEnd() == tokens.get(i + 1).getBegin();
            ctokens.put(row.token, row);
        }
        // Dependencies: basic-flavor relations become the primary deprel,
        // all other flavors go into the enhanced-dependencies list.
        for (Dependency rel : selectCovered(Dependency.class, sentence)) {
            if (StringUtils.isBlank(rel.getFlavor()) || DependencyFlavor.BASIC.equals(rel.getFlavor())) {
                ctokens.get(rel.getDependent()).deprel = rel;
            } else {
                ctokens.get(rel.getDependent()).deps.add(rel);
            }
        }
        // Write sentence in CONLL-U format
        for (Row row : ctokens.values()) {
            String lemma = UNUSED;
            if (writeLemma && (row.token.getLemma() != null)) {
                lemma = row.token.getLemma().getValue();
            }
            String pos = UNUSED;
            if (writePos && (row.token.getPos() != null)) {
                POS posAnno = row.token.getPos();
                pos = posAnno.getPosValue();
            }
            String cpos = UNUSED;
            if (writeCPos && (row.token.getPos() != null) && row.token.getPos().getCoarseValue() != null) {
                POS posAnno = row.token.getPos();
                cpos = posAnno.getCoarseValue();
            }
            int headId = UNUSED_INT;
            String deprel = UNUSED;
            String deps = UNUSED;
            if (writeDependency) {
                if ((row.deprel != null)) {
                    deprel = row.deprel.getDependencyType();
                    headId = ctokens.get(row.deprel.getGovernor()).id;
                    if (headId == row.id) {
                        // ROOT dependencies may be modeled as a loop, ignore these.
                        headId = 0;
                    }
                }
                // Build the enhanced dependencies column: govId:relType pairs
                // joined by '|'.
                StringBuilder depsBuf = new StringBuilder();
                for (Dependency d : row.deps) {
                    if (depsBuf.length() > 0) {
                        depsBuf.append('|');
                    }
                    // Resolve self-looping root to 0-indexed root
                    int govId = ctokens.get(d.getGovernor()).id;
                    if (govId == row.id) {
                        govId = 0;
                    }
                    depsBuf.append(govId);
                    depsBuf.append(':');
                    depsBuf.append(d.getDependencyType());
                }
                if (depsBuf.length() > 0) {
                    deps = depsBuf.toString();
                }
            }
            String head = UNUSED;
            if (headId != UNUSED_INT) {
                head = Integer.toString(headId);
            }
            String feats = UNUSED;
            if (writeMorph && (row.token.getMorph() != null)) {
                feats = row.token.getMorph().getValue();
            }
            String misc = UNUSED;
            if (row.noSpaceAfter) {
                misc = "SpaceAfter=No";
            }
            // If a multi-word surface form starts at this token, emit the
            // "id1-id2" range row before the regular token row.
            SurfaceForm sf = surfaceBeginIdx.get(row.token.getBegin());
            if (sf != null) {
                @SuppressWarnings({ "unchecked", "rawtypes" })
                List<Token> covered = (List) surfaceIdx.get(sf);
                int id1 = ctokens.get(covered.get(0)).id;
                int id2 = ctokens.get(covered.get(covered.size() - 1)).id;
                aOut.printf("%d-%d\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n", id1, id2, sf.getValue(), UNUSED,
                        UNUSED, UNUSED, UNUSED, UNUSED, UNUSED, UNUSED, UNUSED);
            }
            aOut.printf("%d\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n", row.id, row.token.getCoveredText(), lemma,
                    cpos, pos, feats, head, deprel, deps, misc);
        }
        // Blank line terminates the sentence block, per CoNLL-U.
        aOut.println();
    }
}
From source file:de.dfki.madm.anomalydetection.evaluator.cluster_based.CMGOSEvaluator.java
/**
 * Computes a robust covariance estimate in the spirit of the FAST-MCD
 * (Minimum Covariance Determinant) algorithm: among candidate h-point
 * subsets, the covariance matrix with the lowest determinant is chosen.
 *
 * @param data the observations, one row per point
 * @param h    subset size; clamped to the default [(n + p + 1)/2] when
 *             outside the valid range [(n + p + 1)/2, n]
 * @return the covariance matrix with the lowest determinant found
 */
private CovarianceMatrix fastMDC(double[][] data, int h) {
    CovarianceMatrix ret = null;
    int n = data.length;
    // If n is small (say, n <= 600) — threshold is configurable.
    int small = this.fastMCDPoints;
    int p = data[0].length;
    int low = (n + p + 1) / 2;
    // The default h is [(n + p + 1)/2], but the user may choose any integer
    // h with [(n + p + 1)/2] <= h <= n
    if (h < low || h > n) {
        h = low;
    }
    // If h = n, then the MCn location estimate T is the average of the
    // whole dataset, and the MCn scatter estimate S is its covariance
    // matrix. Report these and stop.
    if (h == n) {
        ret = new CovarianceMatrix(data, this.numberOfThreads);
    } else {
        // If p = 1 (univariate data), compute the MCn estimate (T, S) by
        // the exact algorithm of Rousseeuw and Leroy (1987, pp. 171-172) in
        // O(n log n) time; then stop. (Branch intentionally disabled in the
        // original code — kept for reference.)
        // if (p == 1) {
        //     ret = new CovarianceMatrix(data, 1);
        // }
        // else {
        if (n <= small) {
            int[] indexArray = new int[data.length];
            for (int i = 0; i < data.length; i++) {
                indexArray[i] = i;
            }
            // Candidate starts keyed by determinant; each key maps to every
            // candidate that produced that determinant.
            HashMap<Double, LinkedList<CovarianceMatrix>> map = getInit10(data, indexArray, h, n, p);
            HashMap<Double, LinkedList<CovarianceMatrix>> map2 = new HashMap<Double, LinkedList<CovarianceMatrix>>();
            // for the 10 results with lowest det(S_3): iterate candidates to
            // convergence and re-index them by their converged determinant.
            for (LinkedList<CovarianceMatrix> covlist : map.values()) {
                for (CovarianceMatrix covmat : covlist) {
                    CovarianceMatrix pre = convergence(data, indexArray, covmat, h);
                    Matrix pre_mat = new Matrix(pre.getCovMat());
                    double pre_det = pre_mat.det();
                    if (map2.containsKey(pre_det)) {
                        LinkedList<CovarianceMatrix> hilf = map2.get(pre_det);
                        hilf.push(pre);
                        map2.put(pre_det, hilf);
                    } else {
                        LinkedList<CovarianceMatrix> hilf = new LinkedList<CovarianceMatrix>();
                        hilf.push(pre);
                        map2.put(pre_det, hilf);
                    }
                }
            }
            // report the solution (T, S) with lowest det(S): sort the
            // determinant keys ascending and take the first candidate.
            ArrayList<Double> sortedList = new ArrayList<Double>();
            sortedList.addAll(map2.keySet());
            Collections.sort(sortedList);
            ret = map2.get(sortedList.get(0)).getFirst();
        } else {
            // Large n: delegate to the subsampling FAST variant, then pick
            // the lowest-determinant result the same way.
            HashMap<Double, LinkedList<CovarianceMatrix>> map = fast(data, h, n, p);
            ArrayList<Double> sortedList = new ArrayList<Double>();
            sortedList.addAll(map.keySet());
            Collections.sort(sortedList);
            ret = map.get(sortedList.get(0)).getFirst();
        }
    }
    return ret;
}
From source file:com.github.lucapino.jira.GenerateReleaseNotesMojo.java
private HashMap<String, List<JiraIssue>> processIssues(List<JiraIssue> issues) throws MojoFailureException { HashMap<String, List<JiraIssue>> jiraIssues = new HashMap<>(); jiraIssues.put("add", new ArrayList<JiraIssue>()); jiraIssues.put("fix", new ArrayList<JiraIssue>()); jiraIssues.put("update", new ArrayList<JiraIssue>()); for (JiraIssue issue : issues) { String issueCategory;//ww w . j a v a 2 s . com String issueType = issue.getType(); if (issueType.equalsIgnoreCase("new feature") || issueType.equalsIgnoreCase("task") || issueType.equalsIgnoreCase("internaltask") || issueType.equalsIgnoreCase("sub-task")) { // add issueCategory = "add"; } else if (issueType.equalsIgnoreCase("bug") || issueType.equalsIgnoreCase("internalbug")) { // fix issueCategory = "fix"; } else { // update issueCategory = "update"; } List<JiraIssue> currentList = jiraIssues.get(issueCategory); currentList.add(issue); jiraIssues.put(issueCategory, currentList); } for (List<JiraIssue> list : jiraIssues.values()) { Collections.sort(list, new JiraIssueComparator()); } return jiraIssues; }
From source file:org.apache.axis.wsdl.toJava.JavaStubWriter.java
/** * This method returns a set of all the TypeEntry in a given PortType. * The elements of the returned HashSet are Types. * //from ww w. ja v a 2s .co m * @param portType * @return */ private HashSet getTypesInPortType(PortType portType) { HashSet types = new HashSet(); HashSet firstPassTypes = new HashSet(); // Get all the types from all the operations List operations = portType.getOperations(); for (int i = 0; i < operations.size(); ++i) { Operation op = (Operation) operations.get(i); firstPassTypes.addAll(getTypesInOperation(op)); } // Add all the types nested and derived from the types // in the first pass. Iterator i = firstPassTypes.iterator(); while (i.hasNext()) { TypeEntry type = (TypeEntry) i.next(); if (!types.contains(type)) { types.add(type); types.addAll(type.getNestedTypes(symbolTable, true)); } } if (emitter.isAllWanted()) { HashMap rawSymbolTable = symbolTable.getHashMap(); for (Iterator j = rawSymbolTable.values().iterator(); j.hasNext();) { Vector typeVector = (Vector) j.next(); for (Iterator k = typeVector.iterator(); k.hasNext();) { Object symbol = k.next(); if (symbol instanceof DefinedType) { TypeEntry type = (TypeEntry) symbol; if (!types.contains(type)) { types.add(type); } } } } } return types; }
From source file:gov.nih.nci.cabig.caaers.service.synchronizer.StudyAgentsSynchronizer.java
public void migrate(Study dbStudy, Study xmlStudy, DomainObjectImportOutcome<Study> outcome) { //Ignore if the section is empty- Update- This is no longer true since CTEP sync should remove and override the agents // if(CollectionUtils.isEmpty(xmlStudy.getStudyAgents())){ // return; // }// ww w. j a v a 2s . c o m //create an index of existing agents in the dbStudy. HashMap<String, StudyAgent> dbStudyAgentIndexMap = new HashMap<String, StudyAgent>(); for (StudyAgent sa : dbStudy.getActiveStudyAgents()) { dbStudyAgentIndexMap.put(sa.getAgentName(), sa); } //identify new study agents, also update existing ones. for (StudyAgent xmlStudyAgent : xmlStudy.getStudyAgents()) { StudyAgent sa = dbStudyAgentIndexMap.remove(xmlStudyAgent.getAgentName()); if (sa == null) { //newly added one, so add it to study dbStudy.addStudyAgent(xmlStudyAgent); continue; } //existing one - so update if necessary //BJ : the original code did not do anything, so nothing to update. //Update the StudyAgents as it can change between Adders Sync sa.setIndType(xmlStudyAgent.getIndType()); sa.setPartOfLeadIND(xmlStudyAgent.getPartOfLeadIND()); sa.getStudyAgentINDAssociations().clear(); for (StudyAgentINDAssociation ass : xmlStudyAgent.getStudyAgentINDAssociations()) { sa.addStudyAgentINDAssociation(ass); } } //now soft delete, all the ones not present in XML Study AbstractMutableRetireableDomainObject.retire(dbStudyAgentIndexMap.values()); }
From source file:com.cloudera.impala.catalog.CatalogServiceCatalog.java
/** * Returns all user defined functions (aggregate and scalar) in the specified database. * Functions are not returned in a defined order. *///from w w w.ja v a 2 s. c o m public List<Function> getFunctions(String dbName) throws DatabaseNotFoundException { Db db = getDb(dbName); if (db == null) { throw new DatabaseNotFoundException("Database does not exist: " + dbName); } // Contains map of overloaded function names to all functions matching that name. HashMap<String, List<Function>> dbFns = db.getAllFunctions(); List<Function> fns = new ArrayList<Function>(dbFns.size()); for (List<Function> fnOverloads : dbFns.values()) { for (Function fn : fnOverloads) { fns.add(fn); } } return fns; }
From source file:ec.coevolve.MultiPopCoevolutionaryEvaluatorExtra.java
/**
 * Fills {@code eliteIndividuals[whichSubpop]} with up to {@code numElite}
 * cloned individuals drawn, in order, from: the most recent hall-of-fame
 * champions, behaviourally-novel champions, random hall-of-fame entries,
 * per-species NEAT bests, and finally the best of the current population.
 * Also appends the current population's best individual to the hall of fame.
 */
@Override
void loadElites(final EvolutionState state, int whichSubpop) {
    Subpopulation subpop = state.population.subpops[whichSubpop];
    // Update hall of fame with the best individual of the current population.
    if (hallOfFame != null) {
        int best = 0;
        Individual[] oldinds = subpop.individuals;
        for (int x = 1; x < oldinds.length; x++) {
            if (betterThan(oldinds[x], oldinds[best])) {
                best = x;
            }
        }
        hallOfFame[whichSubpop].add((Individual) subpop.individuals[best].clone());
    }
    // `index` tracks the next free elite slot across all sections below.
    int index = 0;
    // Last champions: the most recently added hall-of-fame entries.
    if (lastChampions > 0) {
        for (int i = 1; i <= lastChampions && i <= hallOfFame[whichSubpop].size(); i++) {
            eliteIndividuals[whichSubpop][index++] = (Individual) hallOfFame[whichSubpop]
                    .get(hallOfFame[whichSubpop].size() - i).clone();
        }
    }
    double randChamps = randomChampions;
    // Novel champions: behaviourally distinct elites; any shortfall is
    // compensated by enlarging the random-champion quota.
    if (novelChampions > 0) {
        Individual[] behaviourElite = behaviourElite(state, whichSubpop);
        for (int i = 0; i < behaviourElite.length; i++) {
            eliteIndividuals[whichSubpop][index++] = (Individual) behaviourElite[i].clone();
        }
        randChamps = randomChampions + (novelChampions - behaviourElite.length);
    }
    // Random champions: sampled without replacement from the hall of fame.
    if (randChamps > 0) {
        // Choose random positions
        ArrayList<Integer> pos = new ArrayList<Integer>(hallOfFame[whichSubpop].size());
        for (int i = 0; i < hallOfFame[whichSubpop].size(); i++) {
            pos.add(i);
        }
        Collections.shuffle(pos);
        for (int i = 0; i < pos.size() && i < randChamps; i++) {
            eliteIndividuals[whichSubpop][index++] = (Individual) hallOfFame[whichSubpop].get(pos.get(i))
                    .clone();
        }
    }
    // NEAT Elite: the best individual of each NEAT species.
    if (neatElite > 0) {
        NEATGeneticAlgorithm neat = ((NEATSubpop) subpop).getNEAT();
        if (!neat.getSpecies().specieList().isEmpty()) {
            HashMap<Integer, Individual> specieBests = new HashMap<Integer, Individual>(
                    neat.getSpecies().specieList().size() * 2);
            // NOTE(review): assumes genoTypes[i] corresponds to
            // subpop.individuals[i] — TODO confirm against NEATSubpop.
            Chromosome[] genoTypes = neat.population().genoTypes();
            for (int i = 0; i < genoTypes.length; i++) {
                int specie = ((NEATChromosome) genoTypes[i]).getSpecieId();
                if (!specieBests.containsKey(specie)
                        || betterThan(subpop.individuals[i], specieBests.get(specie))) {
                    specieBests.put(specie, subpop.individuals[i]);
                }
            }
            Individual[] specBests = new Individual[specieBests.size()];
            specieBests.values().toArray(specBests);
            QuickSort.qsort(specBests, new EliteComparator2());
            for (int i = 0; i < specBests.length && i < neatElite; i++) {
                eliteIndividuals[whichSubpop][index++] = (Individual) specBests[i].clone();
            }
        }
    }
    // Fill remaining with the elite of the current pop
    int toFill = numElite - index;
    if (toFill == 1) { // Just one to place: a linear scan avoids the sort.
        Individual best = subpop.individuals[0];
        for (int x = 1; x < subpop.individuals.length; x++) {
            if (betterThan(subpop.individuals[x], best)) {
                best = subpop.individuals[x];
            }
        }
        eliteIndividuals[whichSubpop][index++] = (Individual) best.clone();
    } else if (toFill > 1) {
        // Sort a copy so the subpopulation's own ordering is untouched.
        Individual[] orderedPop = Arrays.copyOf(subpop.individuals, subpop.individuals.length);
        QuickSort.qsort(orderedPop, new EliteComparator2());
        // load the top N individuals
        for (int j = 0; j < toFill; j++) {
            eliteIndividuals[whichSubpop][index++] = (Individual) orderedPop[j].clone();
        }
    }
}