Example usage for java.util.HashMap.values()

List of usage examples for java.util.HashMap.values()

Introduction

On this page you can find example usages of java.util.HashMap.values().

Prototype

public Collection<V> values() 

Document

Returns a Collection view of the values contained in this map.
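
The returned collection is a live view, not a copy: changes to the map show up in the collection, and removing an element from the collection removes the corresponding mapping from the map (the view does not support add or addAll). A minimal, self-contained sketch of this behavior (class and key names are illustrative):

import java.util.Collection;
import java.util.HashMap;

public class ValuesViewDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> map = new HashMap<>();
        map.put("a", 1);
        map.put("b", 2);

        // The collection is a live view of the map, not a snapshot.
        Collection<Integer> values = map.values();
        map.put("c", 3);
        System.out.println(values.size()); // 3

        // Removing from the view removes the matching entry from the map.
        values.remove(2);
        System.out.println(map.containsKey("b")); // false
    }
}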

Usage

From source file:com.sonicle.webtop.vfs.VfsManager.java

private void initFileSystems() throws WTException {
    synchronized (storeFileSystems) {

        List<Store> myStores = listStores();
        for (Store store : myStores) {
            addStoreFileSystemToCache(store);
            /*
            StoreFileSystem sfs = null;
            try {
               sfs = createFileSystem(store);
            } catch(URISyntaxException ex) {
               throw new WTException(ex, "Unable to parse URI");
            }
            storeFileSystems.put(String.valueOf(store.getStoreId()), sfs);
            */
        }

        List<StoreShareRoot> roots = listIncomingStoreRoots();
        for (StoreShareRoot root : roots) {
            HashMap<Integer, StoreShareFolder> folders = listIncomingStoreFolders(root.getShareId());
            for (StoreShareFolder folder : folders.values()) {
                addStoreFileSystemToCache(folder.getStore());

                /*
                StoreFileSystem sfs = null;
                try {
                   sfs = createFileSystem(folder.getStore());
                } catch(URISyntaxException ex) {
                   throw new WTException(ex, "Unable to parse URI");
                }
                storeFileSystems.put(String.valueOf(folder.getStore().getStoreId()), sfs);
                */
            }
        }
    }
}

From source file:org.apache.stratos.usage.agent.persist.UsageDataPersistenceTask.java

/**
 * This method creates a Summarizer object for each tenant and calls accumulate() to
 * accumulate usage statistics.
 *
 * @param jobQueue usage data persistence jobs
 * @throws org.apache.stratos.usage.agent.exception.UsageException
 *
 */

public void persistUsage(Queue<BandwidthUsage> jobQueue) throws UsageException {

    // create a map to hold summarizer objects against tenant id
    HashMap<Integer, Summarizer> summarizerMap = new HashMap<Integer, Summarizer>();

    // process up to the configured number of records, stopping early if the queue empties
    for (int i = 0; i < configuration.getUsageTasksNumberOfRecordsPerExecution() && !jobQueue.isEmpty(); i++) {

        // get the first element from the queue, which is a BandwidthUsage object
        BandwidthUsage usage = jobQueue.poll();

        // get the tenant id
        int tenantId = usage.getTenantId();

        // get the Summarizer object corresponding to the tenant id
        Summarizer summarizer = summarizerMap.get(tenantId);

        // when a tenant invokes the service for the first time, there is no
        // corresponding Summarizer object in the map
        if (summarizer == null) {
            // create a Summarizer object and put it into the summarizerMap
            summarizer = new Summarizer();
            summarizerMap.put(tenantId, summarizer);
        }

        //  now accumulate usage
        summarizer.accumulate(usage);
    }

    //Finished accumulating. Now publish the events

    // get the collection view of values in summarizerMap
    Collection<Summarizer> summarizers = summarizerMap.values();

    // for each summarizer object call the publish method
    for (Summarizer summarizer : summarizers) {
        summarizer.publish();
    }
}
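
The get-or-create block above (get, null-check, put) is the classic pattern for per-key accumulators; on Java 8+ it collapses into computeIfAbsent. Below is a minimal, runnable sketch of the same accumulate-then-publish shape, with StringBuilder standing in for the Summarizer class above:

import java.util.HashMap;
import java.util.Map;

public class AccumulatePattern {
    public static void main(String[] args) {
        Map<Integer, StringBuilder> perTenant = new HashMap<>();
        int[][] usages = { { 1, 10 }, { 2, 5 }, { 1, 7 } };

        for (int[] usage : usages) {
            int tenantId = usage[0];
            // computeIfAbsent replaces the explicit get/null-check/put dance.
            perTenant.computeIfAbsent(tenantId, id -> new StringBuilder())
                     .append(usage[1]).append(' ');
        }

        // Iterate the values view to "publish" each accumulator.
        for (StringBuilder acc : perTenant.values()) {
            System.out.println(acc.toString().trim());
        }
    }
}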

From source file:it.acubelab.smaph.SmaphAnnotator.java

/**
 * Given a list of URLs, creates a mapping from the URL's position to the
 * Wikipedia page ID of that URL. If a URL is not a Wikipedia URL, no
 * mapping is added.
 * 
 * @param urls
 *            a list of URLs.
 * @return a mapping from position to Wikipedia page IDs.
 */
private HashMap<Integer, Integer> urlsToRankID(List<String> urls) {
    HashMap<Integer, Integer> result = new HashMap<>();
    HashMap<Integer, String> rankToTitle = new HashMap<>();
    for (int i = 0; i < urls.size(); i++) {
        String title = decodeWikiUrl(urls.get(i));
        if (title != null)
            rankToTitle.put(i, title);
    }

    try {
        wikiApi.prefetchTitles(new Vector<String>(rankToTitle.values()));
    } catch (XPathExpressionException | IOException | ParserConfigurationException | SAXException e) {
        throw new RuntimeException(e);
    }
    for (int rank : rankToTitle.keySet()) {
        int wid;
        try {
            wid = wikiApi.getIdByTitle(rankToTitle.get(rank));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        if (wid != -1) {
            result.put(rank, wid);
            SmaphAnnotatorDebugger.out.printf("Found Wikipedia url:%s rank:%d id:%d%n", urls.get(rank), rank,
                    wid);
        } else
            SmaphAnnotatorDebugger.out.printf("Discarding Wikipedia url:%s rank:%d id:%d%n", urls.get(rank),
                    rank, wid);
    }
    return result;
}
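
Note that new Vector<String>(rankToTitle.values()) above copies the values view into an independent collection, so later changes to the map do not affect it. A minimal sketch of the difference between the view and such a snapshot (names are illustrative):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class SnapshotValues {
    public static void main(String[] args) {
        HashMap<Integer, String> rankToTitle = new HashMap<>();
        rankToTitle.put(0, "Java");
        rankToTitle.put(1, "HashMap");

        // Copy the view: later map changes do not affect the list.
        List<String> snapshot = new ArrayList<>(rankToTitle.values());
        rankToTitle.put(2, "Collection");

        System.out.println(snapshot.size());             // 2
        System.out.println(rankToTitle.values().size()); // 3
    }
}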

From source file:com.splicemachine.derby.impl.sql.execute.actions.DDLConstantOperation.java

/**
 * Add and drop dependencies of an object on UDTs.
 *
 * @param lcc Interpreter's state variable for this session.
 * @param dd Metadata dictionary.
 * @param dependent Object which depends on the UDT.
 * @param addUdtMap Map of UDTs for which dependencies should be added.
 * @param dropUdtMap Map of UDTs for which dependencies should be dropped.
 */
private void adjustUDTDependencies(LanguageConnectionContext lcc, DataDictionary dd, Dependent dependent,
        HashMap addUdtMap, HashMap dropUdtMap) throws StandardException {
    // again, nothing to do if there are no columns of udt type
    if ((addUdtMap.size() == 0) && (dropUdtMap.size() == 0)) {
        return;
    }

    TransactionController tc = lcc.getTransactionExecute();
    DependencyManager dm = dd.getDependencyManager();
    ContextManager cm = lcc.getContextManager();

    // add new dependencies
    Iterator addIterator = addUdtMap.values().iterator();
    while (addIterator.hasNext()) {
        AliasDescriptor ad = (AliasDescriptor) addIterator.next();

        dm.addDependency(dependent, ad, cm);
    }

    // drop dependencies that are orphaned
    Iterator dropIterator = dropUdtMap.values().iterator();
    while (dropIterator.hasNext()) {
        AliasDescriptor ad = (AliasDescriptor) dropIterator.next();

        DependencyDescriptor dependency = new DependencyDescriptor(dependent, ad);

        dd.dropStoredDependency(dependency, tc);
    }
}
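
Because addUdtMap and dropUdtMap are raw HashMaps, each element pulled from values() must be cast. A minimal sketch contrasting the raw-type iteration above with the generic equivalent, using String as a stand-in for AliasDescriptor:

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class TypedValuesIteration {
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public static void main(String[] args) {
        // Raw style, as in the method above: a cast on every element.
        HashMap rawMap = new HashMap();
        rawMap.put("alias", "descriptor");
        Iterator it = rawMap.values().iterator();
        while (it.hasNext()) {
            String descriptor = (String) it.next();
            System.out.println(descriptor);
        }

        // Generic style: the element type is checked at compile time.
        Map<String, String> typedMap = new HashMap<>();
        typedMap.put("alias", "descriptor");
        for (String descriptor : typedMap.values()) {
            System.out.println(descriptor);
        }
    }
}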

From source file:ca.uhn.fhir.jpa.term.TerminologyLoaderSvc.java

UploadStatistics processSnomedCtFiles(List<byte[]> theZipBytes, RequestDetails theRequestDetails) {
    final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
    final Map<String, TermConcept> id2concept = new HashMap<String, TermConcept>();
    final Map<String, TermConcept> code2concept = new HashMap<String, TermConcept>();
    final Set<String> validConceptIds = new HashSet<String>();

    IRecordHandler handler = new SctHandlerConcept(validConceptIds);
    iterateOverZipFile(theZipBytes, SCT_FILE_CONCEPT, handler, '\t', null);

    ourLog.info("Have {} valid concept IDs", validConceptIds.size());

    handler = new SctHandlerDescription(validConceptIds, code2concept, id2concept, codeSystemVersion);
    iterateOverZipFile(theZipBytes, SCT_FILE_DESCRIPTION, handler, '\t', null);

    ourLog.info("Got {} concepts, cloning map", code2concept.size());
    final HashMap<String, TermConcept> rootConcepts = new HashMap<String, TermConcept>(code2concept);

    handler = new SctHandlerRelationship(codeSystemVersion, rootConcepts, code2concept);
    iterateOverZipFile(theZipBytes, SCT_FILE_RELATIONSHIP, handler, '\t', null);

    theZipBytes.clear();

    ourLog.info("Looking for root codes");
    for (Iterator<Entry<String, TermConcept>> iter = rootConcepts.entrySet().iterator(); iter.hasNext();) {
        if (!iter.next().getValue().getParents().isEmpty()) {
            iter.remove();
        }
    }

    ourLog.info("Done loading SNOMED CT files - {} root codes, {} total codes", rootConcepts.size(),
            code2concept.size());

    Counter circularCounter = new Counter();
    for (TermConcept next : rootConcepts.values()) {
        long count = circularCounter.getThenAdd();
        float pct = ((float) count / rootConcepts.size()) * 100.0f;
        ourLog.info(" * Scanning for circular refs - have scanned {} / {} codes ({}%)", count,
                rootConcepts.size(), pct);
        dropCircularRefs(next, new ArrayList<String>(), code2concept, circularCounter);
    }

    codeSystemVersion.getConcepts().addAll(rootConcepts.values());
    String url = SCT_URL;
    storeCodeSystem(theRequestDetails, codeSystemVersion, url);

    return new UploadStatistics(code2concept.size());
}
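
The root-code loop above removes non-root entries through the entry-set iterator; the values view supports the same kind of structural removal, and on Java 8+ values().removeIf(...) drops the matching entries from the backing map in one call. A minimal sketch (the parent counts are illustrative):

import java.util.HashMap;
import java.util.Map;

public class RemoveNonRoots {
    public static void main(String[] args) {
        // Value = number of parents; 0 means the code is a root.
        Map<String, Integer> parentCount = new HashMap<>();
        parentCount.put("rootCode", 0);
        parentCount.put("childCode", 1);

        // removeIf on the values view also removes the backing map entries.
        parentCount.values().removeIf(count -> count > 0);

        System.out.println(parentCount.keySet()); // [rootCode]
    }
}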

From source file:de.tudarmstadt.ukp.clarin.webanno.conllu.ConllUWriter.java

private void convert(JCas aJCas, PrintWriter aOut) {
    Map<SurfaceForm, Collection<Token>> surfaceIdx = indexCovered(aJCas, SurfaceForm.class, Token.class);
    Int2ObjectMap<SurfaceForm> surfaceBeginIdx = new Int2ObjectOpenHashMap<>();
    for (SurfaceForm sf : select(aJCas, SurfaceForm.class)) {
        surfaceBeginIdx.put(sf.getBegin(), sf);
    }

    for (Sentence sentence : select(aJCas, Sentence.class)) {
        HashMap<Token, Row> ctokens = new LinkedHashMap<Token, Row>();

        // Tokens
        List<Token> tokens = selectCovered(Token.class, sentence);

        for (int i = 0; i < tokens.size(); i++) {
            Row row = new Row();
            row.id = i + 1;
            row.token = tokens.get(i);
            row.noSpaceAfter = (i + 1 < tokens.size()) && row.token.getEnd() == tokens.get(i + 1).getBegin();
            ctokens.put(row.token, row);
        }

        // Dependencies
        for (Dependency rel : selectCovered(Dependency.class, sentence)) {
            String flavor = FSUtil.getFeature(rel, "flavor", String.class);
            if (StringUtils.isBlank(flavor) || DependencyFlavor.BASIC.equals(flavor)) {
                ctokens.get(rel.getDependent()).deprel = rel;
            } else {
                ctokens.get(rel.getDependent()).deps.add(rel);
            }
        }

        // Write sentence in CONLL-U format
        for (Row row : ctokens.values()) {
            String lemma = UNUSED;
            if (writeLemma && (row.token.getLemma() != null)) {
                lemma = row.token.getLemma().getValue();
            }

            String pos = UNUSED;
            String cpos = UNUSED;
            if (writePos && (row.token.getPos() != null)) {
                POS posAnno = row.token.getPos();
                pos = posAnno.getPosValue();
                cpos = dkpro2ud.get(posAnno.getClass());
                if (StringUtils.isBlank(cpos)) {
                    cpos = pos;
                }
            }

            int headId = UNUSED_INT;
            String deprel = UNUSED;
            String deps = UNUSED;
            if (writeDependency) {
                if (row.deprel != null) {
                    deprel = row.deprel.getDependencyType();
                    headId = ctokens.get(row.deprel.getGovernor()).id;
                    if (headId == row.id) {
                        // ROOT dependencies may be modeled as a loop, ignore these.
                        headId = 0;
                    }
                }

                StringBuilder depsBuf = new StringBuilder();
                for (Dependency d : row.deps) {
                    if (depsBuf.length() > 0) {
                        depsBuf.append('|');
                    }
                    // Resolve self-looping root to 0-indexed root
                    int govId = ctokens.get(d.getGovernor()).id;
                    if (govId == row.id) {
                        govId = 0;
                    }
                    depsBuf.append(govId);
                    depsBuf.append(':');
                    depsBuf.append(d.getDependencyType());
                }
                if (depsBuf.length() > 0) {
                    deps = depsBuf.toString();
                }
            }

            String head = UNUSED;
            if (headId != UNUSED_INT) {
                head = Integer.toString(headId);
            }

            String feats = UNUSED;
            if (writeMorph && (row.token.getMorph() != null)) {
                feats = row.token.getMorph().getValue();
            }

            String misc = UNUSED;
            if (row.noSpaceAfter) {
                misc = "SpaceAfter=No";
            }

            SurfaceForm sf = surfaceBeginIdx.get(row.token.getBegin());
            if (sf != null) {
                @SuppressWarnings({ "unchecked", "rawtypes" })
                List<Token> covered = (List) surfaceIdx.get(sf);
                int id1 = ctokens.get(covered.get(0)).id;
                int id2 = ctokens.get(covered.get(covered.size() - 1)).id;
                aOut.printf("%d-%d\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n", id1, id2, sf.getValue(), UNUSED,
                        UNUSED, UNUSED, UNUSED, UNUSED, UNUSED, UNUSED, UNUSED);
            }

            aOut.printf("%d\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n", row.id, row.token.getCoveredText(), lemma,
                    cpos, pos, feats, head, deprel, deps, misc);
        }

        aOut.println();
    }
}
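
The writer above depends on ctokens being a LinkedHashMap (it is declared as HashMap, which compiles because LinkedHashMap extends HashMap): values() then iterates in insertion order, so rows print in token order. A plain HashMap makes no such ordering promise, as this minimal sketch shows:

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class ValuesOrder {
    public static void main(String[] args) {
        Map<Integer, String> ordered = new LinkedHashMap<>();
        Map<Integer, String> unordered = new HashMap<>();
        for (int i = 1; i <= 5; i++) {
            ordered.put(i, "tok" + i);
            unordered.put(i, "tok" + i);
        }

        // LinkedHashMap: values() follows insertion order.
        System.out.println(ordered.values());   // [tok1, tok2, tok3, tok4, tok5]

        // HashMap: iteration order is unspecified and may differ.
        System.out.println(unordered.values());
    }
}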

From source file:com.clustercontrol.poller.impl.MultipleOidsUtils.java

public Collection<VariableBinding> query(Target target, OID[] rootOids) throws IOException {
    HashMap<OID, VariableBinding> result = new HashMap<OID, VariableBinding>();

    if ((rootOids == null) || (rootOids.length == 0)) {
        throw new IllegalArgumentException("No OIDs specified");
    }

    PDU request = pduFactory.createPDU(target);
    // For GETBULK requests, configure MaxRepetitions
    if (request.getType() == PDU.GETBULK) {
        // Apply the MaxRepetitions value configured on the DefaultPDUFactory to the PDU
        request.setMaxRepetitions(this.factory.getMaxRepetitions());
    }

    RootOidAndOidMapping mapping = new RootOidAndOidMapping(rootOids);
    int requestCounter = 0;
    int responseCounter = 0;
    while (!mapping.isEmpty()) {
        ArrayList<OID> oidList = mapping.getOidList();
        log.debug(target.getAddress() + " oidList.size=" + oidList.size());
        RootOidAndOidMapping oldMapping = new RootOidAndOidMapping(mapping);

        PDU response = sendRequest(request, target, oidList);
        requestCounter++;
        if (response == null) {
            log.info(target.getAddress() + " response is null : result.size=" + result.values().size());
            throw new IOException(MessageConstant.MESSAGE_TIME_OUT.getMessage());
        }

        Vector<? extends VariableBinding> vbs = response.getVariableBindings();
    int requestOidSize = request.getVariableBindings().size(); // requestOidSize <= oidList.size()

        for (int i = 0; i < vbs.size(); i++) {
            responseCounter++;
            VariableBinding vb = vbs.get(i);
            log.trace("oid=" + vb.getOid() + ", " + vb.toString());

            int colIndex = i % requestOidSize;
            OID oldOid = oidList.get(colIndex);
            OID rootOid = oldMapping.getRootOidByOid(oldOid);
            if (rootOid == null) {
                continue;
            }

            mapping.removeByRootOid(rootOid);

            if (vb.isException()) {
                log.debug("exception " + target.getAddress() + ", " + vb.toString()); // endOfMibView
                continue;
            }

            OID oid = vb.getOid();
            if (!oid.startsWith(rootOid)) {
                continue;
            }

            if (result.containsKey(oid)) {
                continue;
            }

            result.put(oid, vb);
            mapping.put(rootOid, oid);
        }
    }

    // Log how many SNMP requests were sent and how many responses were received
    String message = target.getAddress() + ", requestCounter=" + requestCounter + ", responseCounter="
            + responseCounter;
    if (requestCounter > 200 || responseCounter > 10000) {
        log.warn(message);
    } else if (requestCounter > 100 || responseCounter > 5000) {
        log.info(message);
    } else {
        log.debug(message);
    }

    return result.values();
}
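
Returning result.values() hands the caller a live view of the method-local map, which is fine here because nothing else retains the map. When the map outlives the call, an unmodifiable wrapper or a defensive copy is safer; a minimal sketch (the cache field and method names are illustrative):

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class ReturnValuesView {
    private final Map<String, String> cache = new HashMap<>();

    // A read-only view: callers cannot modify the cache through it.
    public Collection<String> cachedValuesReadOnly() {
        return Collections.unmodifiableCollection(cache.values());
    }

    // An independent snapshot: later cache changes are not visible in it.
    public Collection<String> cachedValuesSnapshot() {
        return new ArrayList<>(cache.values());
    }
}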

From source file:org.apache.hadoop.hdfs.server.namenode.TestFaultTolerantPlacementPolicyBadHostsAndRacks.java

@Test
public void testChooseTargetRackBasedWithBadRack() throws Exception {
    String fileName = "/hay/" + FILENAME;
    DFSTestUtil.createFile(cluster.getFileSystem(), new Path(fileName),
            (long) STRIPE_SIZE * BLOCK_SIZE * BLOCKS_PER_RACK, (short) 1, // replication
            (long) 0);
    LocatedBlocks blocks = cluster.getNameNode().namesystem.getBlockLocations(fileName, 0, Long.MAX_VALUE);
    // Nothing should be placed on any of the bad hosts or bad racks.
    // Each good host should therefore hold 4x the blocks it would
    // otherwise hold (4 instead of 1).
    HashMap<String, Integer> rackCount = new HashMap<String, Integer>();
    HashMap<String, Integer> hostCount = new HashMap<String, Integer>();
    for (LocatedBlock b : blocks.getLocatedBlocks()) {
        for (DatanodeInfo i : b.getLocations()) {
            String rack = i.getNetworkLocation();
            String host = i.getName();
            // Should not get placed in a bad rack or bad host.
            assertFalse(TestFaultTolerantPlacementPolicyBadHostsAndRacks.BAD_RACKS.contains(rack));
            assertFalse(TestFaultTolerantPlacementPolicyBadHostsAndRacks.BAD_HOSTS.contains(host));
            Integer count = rackCount.get(rack);
            if (count == null) {
                rackCount.put(rack, 1);
            } else {
                rackCount.put(rack, count + 1);
            }
            count = hostCount.get(host);
            if (count == null) {
                hostCount.put(host, 1);
            } else {
                hostCount.put(host, count + 1);
            }
        }
    }

    for (Integer count : hostCount.values()) {
        // Since only a quarter of the hosts are good, the number of
        // blocks per host should be 4X (= 4).
        assertEquals(4, count.intValue());
    }
    for (Integer count : rackCount.values()) {
        // Since only half of the racks are good, the number of blocks
        // per rack should be 2X.
        assertEquals(2 * BLOCKS_PER_RACK, count.intValue());
    }
}
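
The null-check-and-put counting above is another place where Java 8 helpers shorten the code: merge inserts 1 for a new key and adds 1 otherwise. A minimal sketch of the count-then-inspect-values pattern:

import java.util.HashMap;
import java.util.Map;

public class CountWithMerge {
    public static void main(String[] args) {
        String[] racks = { "r1", "r2", "r1", "r1" };

        Map<String, Integer> rackCount = new HashMap<>();
        for (String rack : racks) {
            // merge: insert 1 for a new key, otherwise add 1 to the old count.
            rackCount.merge(rack, 1, Integer::sum);
        }

        for (Integer count : rackCount.values()) {
            System.out.println(count); // 3 and 1, in some order
        }
    }
}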

From source file:freed.utils.AppSettingsManager.java

public void saveMediaProfiles(HashMap<String, VideoMediaProfile> mediaProfileHashMap) {
    SharedPreferences.Editor editor = settings.edit();
    editor.remove(getApiSettingString(SETTING_MEDIAPROFILES));
    editor.commit();
    Set<String> set = new HashSet<String>();
    for (VideoMediaProfile profile : mediaProfileHashMap.values())
        set.add(profile.GetString());
    editor.putStringSet(getApiSettingString(SETTING_MEDIAPROFILES), set);
    if (!settings.getBoolean("tmp", false))
        editor.putBoolean("tmp", true);
    else
        editor.putBoolean("tmp", false);
    editor.commit();
}
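
Building the Set by looping over values() works on any Java version; on Java 8+ the same transformation reads naturally as a stream over the values view. A minimal sketch with illustrative profile data:

import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class ValuesToSet {
    public static void main(String[] args) {
        Map<String, Integer> profiles = new HashMap<>();
        profiles.put("720p", 720);
        profiles.put("1080p", 1080);

        // Map each value to its string form and collect into a Set.
        Set<String> serialized = profiles.values().stream()
                .map(String::valueOf)
                .collect(Collectors.toSet());

        System.out.println(serialized);
    }
}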

From source file:de.tor.tribes.ui.panels.MinimapPanel.java

private DefaultPieDataset buildDataset(HashMap<Object, Marker> marks) {
    DefaultPieDataset dataset = new DefaultPieDataset();

    if (iCurrentView == ID_ALLY_CHART) {
        HashMap<Ally, Integer> allyCount = MapPanel.getSingleton().getMapRenderer().getAllyCount();
        int overallVillages = 0;

        //count all villages
        for (Integer count : allyCount.values()) {
            overallVillages += count;
        }

        double rest = 0;
        for (Ally a : allyCount.keySet()) {
            Integer v = allyCount.get(a);
            Double perc = (double) v / (double) overallVillages * 100;

            if (perc > 5.0) {
                dataset.setValue(a.getTag(), perc);
                Marker m = MarkerManager.getSingleton().getMarker(a);

                if (m != null) {
                    marks.put(a, m);
                }
            } else {
                rest += perc;
            }
        }

        dataset.setValue("Sonstige", rest);
    } else {
        HashMap<Tribe, Integer> tribeCount = MapPanel.getSingleton().getMapRenderer().getTribeCount();

        int overallVillages = 0;
        //count all villages

        for (Integer trbCnt : tribeCount.values()) {
            overallVillages += trbCnt;
        }

        double rest = 0;
        for (Tribe t : tribeCount.keySet()) {
            Integer v = tribeCount.get(t);

            Double perc = (double) v / (double) overallVillages * 100;
            if (perc > 5.0) {
                dataset.setValue(t.getName(), perc);
                Marker m = MarkerManager.getSingleton().getMarker(t);
                if (m != null) {
                    marks.put(t, m);
                }
            } else {
                rest += perc;
            }
        }

        dataset.setValue("Sonstige", rest);
    }
    return dataset;
}
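
Both branches above first sum values() to get the overall village total and then compute percentages against it; with streams (Java 8+) that summation is a one-liner. A minimal sketch with illustrative counts:

import java.util.HashMap;
import java.util.Map;

public class SumValues {
    public static void main(String[] args) {
        Map<String, Integer> villageCount = new HashMap<>();
        villageCount.put("AllyA", 40);
        villageCount.put("AllyB", 60);

        // Sum the values view; equivalent to the accumulation loop above.
        int overall = villageCount.values().stream()
                .mapToInt(Integer::intValue)
                .sum();

        double percA = villageCount.get("AllyA") * 100.0 / overall;
        System.out.println(overall + " villages, AllyA=" + percA + "%");
    }
}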