Example usage for java.util HashMap values

Introduction

This page collects usage examples for java.util.HashMap#values(), gathered from open-source projects.

Prototype

public Collection<V> values() 

Document

Returns a Collection view of the values contained in this map. The collection is backed by the map, so changes to the map are reflected in the collection, and vice-versa.
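
Before the project examples below, here is a minimal, self-contained sketch of those view semantics (the class name and map contents are illustrative only):

import java.util.Collection;
import java.util.HashMap;

public class ValuesExample {
    public static void main(String[] args) {
        HashMap<String, Integer> wordCounts = new HashMap<>();
        wordCounts.put("map", 3);
        wordCounts.put("view", 1);

        // values() returns a live view backed by the map, not a copy.
        Collection<Integer> counts = wordCounts.values();

        // Removing a value from the view removes the matching entry from the map.
        counts.remove(3); // the int is autoboxed; this drops the "map"=3 entry
        System.out.println(wordCounts); // {view=1}

        // Changes to the map are reflected in the view as well.
        wordCounts.put("collection", 2);
        System.out.println(counts.size()); // 2
    }
}

The view does not support add or addAll; the examples below either iterate it, copy it into another collection, or stream over it.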

Usage

From source file:edu.illinois.cs.cogcomp.transliteration.CSPTransliteration.java

public static CSPExampleCounts LearnModel(String word1, String word2, CSPModel model) {
    CSPExampleCounts result = new CSPTransliteration().new CSPExampleCounts();

    int paddingSize = Math.max(model.productionContextSize, model.segContextSize);
    String paddedWord = StringUtils.repeat('_', paddingSize) + word1 + StringUtils.repeat('_', paddingSize);
    HashMap<Triple<Double, String, String>, Pair<SparseDoubleVector<Triple<Integer, String, String>>, Double>> lastArg = new HashMap<>();

    Pair<SparseDoubleVector<Triple<Integer, String, String>>, Double> raw = LearnModel(paddingSize, paddedWord,
            word1, word2, model, lastArg);

    if (raw.getSecond() == 0)
        raw.setFirst(new SparseDoubleVector<Triple<Integer, String, String>>());
    else
        //raw.x = Program.segSums[Math.Min(39,word1.length()-1)][Math.Min(39,word2.length()-1)] * (raw.x);
        raw.setFirst(raw.getFirst().divide(raw.getSecond()));
    //raw.x = raw.y >= 1 ? raw.x : raw.x / raw.y;

    if (model.emMode == CSPModel.EMMode.MaxSourceSeg) {
        HashMap<Pair<Integer, String>, Triple<Integer, String, String>> bestProdProbs = new HashMap<>();
        SparseDoubleVector<Pair<Integer, String>> maxProdProbs = new SparseDoubleVector<>();
        for (Triple<Integer, String, String> key : raw.getFirst().keySet()) {
            Double value = raw.getFirst().get(key);

            Pair<Integer, String> keyXY = new Pair<>(key.getFirst(), key.getSecond());

            if (maxProdProbs.get(keyXY) < value) {
                bestProdProbs.put(keyXY, key);
                maxProdProbs.put(keyXY, value);
            }
        }

        raw.getFirst().clear();
        for (Triple<Integer, String, String> triple : bestProdProbs.values())
            raw.getFirst().put(triple, 1.0);

    } else if (model.emMode == CSPModel.EMMode.BySourceSeg) {
        //Dictionary<Pair<int, String>, Triple<int, String, String>> bestProdProbs = new Dictionary<Pair<int, String>, Triple<int, String, String>>();
        SparseDoubleVector<Pair<Integer, String>> sumProdProbs = new SparseDoubleVector<>();
        for (Triple<Integer, String, String> key : raw.getFirst().keySet()) {
            Double value = raw.getFirst().get(key);
            Pair<Integer, String> keyXY = new Pair<>(key.getFirst(), key.getSecond());
            sumProdProbs.put(keyXY, sumProdProbs.get(keyXY) + value);
        }

        SparseDoubleVector<Triple<Integer, String, String>> newCounts = new SparseDoubleVector<>(
                raw.getFirst().size());
        for (Triple<Integer, String, String> key : raw.getFirst().keySet()) {
            Double value = raw.getFirst().get(key);
            Pair<Integer, String> keyXY = new Pair<>(key.getFirst(), key.getSecond());
            newCounts.put(key, value / sumProdProbs.get(keyXY));
        }
        raw.setFirst(newCounts);
    }

    result.productionCounts = new SparseDoubleVector<>(raw.getFirst().size());
    result.segCounts = new SparseDoubleVector<>(raw.getFirst().size());

    for (Triple<Integer, String, String> key : raw.getFirst().keySet()) {
        Double value = raw.getFirst().get(key);
        Pair<Triple<String, String, String>, String> pckey = new Pair<>(
                new Triple<>(
                        WikiTransliteration
                                .GetLeftContext(paddedWord, key.getFirst(), model.productionContextSize),
                        key.getSecond(),
                        WikiTransliteration.GetRightContext(paddedWord,
                                key.getFirst() + key.getSecond().length(), model.productionContextSize)),
                key.getThird());
        result.productionCounts.put(pckey, result.productionCounts.get(pckey) + value);

        Triple<String, String, String> sckey = new Triple<>(
                WikiTransliteration.GetLeftContext(paddedWord, key.getFirst(), model.segContextSize),
                key.getSecond(), WikiTransliteration.GetRightContext(paddedWord,
                        key.getFirst() + key.getSecond().length(), model.segContextSize));
        result.segCounts.put(sckey, result.segCounts.get(sckey) + value);
    }

    return result;
}

From source file:net.certiv.authmgr.task.section.model.AnalyzeModel.java

private void analyzeWords(String category, String partition, HashMap<String, WordProbabilityPT> wordsMap) {

    // convert to sorted set
    WordProbPTComparator sorter = new WordProbPTComparator();
    TreeSet<WordProbabilityPT> wordProbs = new TreeSet<WordProbabilityPT>(sorter);
    wordProbs.addAll(wordsMap.values());

    // now accumulate and print statistics
    StringBuffer wordlist = new StringBuffer();
    int k = 0;
    for (Iterator<WordProbabilityPT> it = wordProbs.iterator(); it.hasNext() && k < 20; k++) {
        WordProbabilityPT wp = it.next();
        String word = wp.getWord();
        double prob = wp.getProbability();
        double count = wp.getMatchingCount();

        BigDecimal probBD = new BigDecimal(prob).setScale(8, BigDecimal.ROUND_HALF_UP);
        String countStr = Util.rightAlign("" + count, 6);
        String wordAbbr = StringUtils.abbreviate(word, 13);
        wordlist.append(Util.leftAlign(wordAbbr, 14) + probBD + "/" + countStr + "| ");
    }
    Log.info(this, Util.leftAlign(partition + ":", 14) + Util.rightAlign("" + wordProbs.size(), 5) + " = "
            + wordlist.toString());
}

From source file:com.jaeksoft.searchlib.crawler.file.process.CrawlFileThread.java

final private void smartDelete(FileCrawlQueue crawlQueue, FileInfo fileInfo) throws SearchLibException {
    crawlQueue.delete(currentStats, fileInfo.getUri());
    if (fileInfo.getFileType() != FileTypeEnum.directory)
        return;
    HashMap<String, FileInfo> indexFileMap = new HashMap<String, FileInfo>();
    try {
        fileManager.getFileInfoList(new URI(fileInfo.getUri()), indexFileMap);
        for (FileInfo fi : indexFileMap.values())
            smartDelete(crawlQueue, fi);
    } catch (UnsupportedEncodingException e) {
        Logging.warn(e);
    } catch (URISyntaxException e) {
        Logging.warn(e);
    }
}

From source file:de.whs.poodle.controllers.student.StudentStartController.java

@RequestMapping(method = RequestMethod.GET)
public String get(@ModelAttribute Student student, Model model,
        @RequestParam(defaultValue = "0") boolean evaluationSaved) {
    // get all course terms that the student is enrolled in and all the worksheets in them
    HashMap<CourseTerm, CourseTermWorksheets> courseTermWorksheetsMap = worksheetRepo
            .getWorksheetsForStudent(student.getId());

    // create lists of all the exercise / mc worksheets so we can create the "is completed" maps
    List<ExerciseWorksheet> allExerciseWorksheets = courseTermWorksheetsMap.values().stream()
            .flatMap(w -> w.getExerciseWorksheets().stream()).collect(Collectors.toList());

    List<InstructorMcWorksheet> allMcWorksheets = courseTermWorksheetsMap.values().stream()
            .flatMap(w -> w.getMcWorksheets().stream()).collect(Collectors.toList());

    List<EvaluationWorksheet> allEvaluationWorksheets = courseTermWorksheetsMap.values().stream()
            .map(ctws -> ctws.getEvaluationWorksheet()).filter(ws -> ws != null).collect(Collectors.toList());

    // maps that define whether the student has completed the worksheet
    Map<ExerciseWorksheet, Boolean> exerciseWorksheetIsCompletedMap = feedbackRepo
            .getExerciseWorksheetIsCompletedMap(student.getId(), allExerciseWorksheets);

    Map<InstructorMcWorksheet, Boolean> mcWorksheetIsCompletedMap = mcStatisticsRepo
            .getInstructorMcWorksheetIsCompletedMap(student.getId(), allMcWorksheets);

    Map<EvaluationWorksheet, Boolean> evaluationIsCompletedMap = evaluationWorksheetRepo
            .getEvaluationIsCompletedMap(student.getId(), allEvaluationWorksheets);

    Map<Worksheet, Boolean> worksheetIsCompletedMap = new HashMap<>();
    worksheetIsCompletedMap.putAll(exerciseWorksheetIsCompletedMap);
    worksheetIsCompletedMap.putAll(mcWorksheetIsCompletedMap);
    worksheetIsCompletedMap.putAll(evaluationIsCompletedMap);

    // Exercises that contain new comments by an instructor
    List<Statistic> statisticsWithNewComments = statisticsRepo
            .getStatisticsWithNewCommentsForStudent(student.getId());

    model.addAttribute("courseTermWorksheetsMap", courseTermWorksheetsMap);
    model.addAttribute("worksheetIsCompletedMap", worksheetIsCompletedMap);
    model.addAttribute("statisticsWithNewComments", statisticsWithNewComments);

    if (evaluationSaved)
        model.addAttribute("okMessageCode", "evaluationSaved");

    return "student/start";
}

From source file:hu.ppke.itk.nlpg.purepos.decoder.BeamedViterbi.java

private HashMap<NGram<Integer>, Node> prune(final HashMap<NGram<Integer>, Node> beam) {

    HashMap<NGram<Integer>, Node> ret = new HashMap<NGram<Integer>, Node>();
    // System.err.println(beam);
    // try {
    Node maxNode = Collections.max(beam.values());
    Double max = maxNode.getWeight();
    for (NGram<Integer> key : beam.keySet()) {
        Node actNode = beam.get(key);
        Double actVal = actNode.getWeight();
        if (!(actVal < max - logTheta)) {
            ret.put(key, actNode);
        }
    }
    // } catch (Exception e) {
    // e.printStackTrace();
    // System.err.println(beam);
    // }
    return ret;
}

From source file:com.clustercontrol.notify.util.NotifyRelationCache.java

public static void refresh() {
    JpaTransactionManager jtm = new JpaTransactionManager();
    if (!jtm.isNestedEm()) {
        m_log.warn("refresh() : transactioin has not been begined.");
        jtm.close();
        return;
    }

    try {
        _lock.writeLock();

        long start = HinemosTime.currentTimeMillis();
        new JpaTransactionManager().getEntityManager().clear();
        HashMap<String, List<NotifyRelationInfo>> notifyMap = new HashMap<String, List<NotifyRelationInfo>>();
        List<NotifyRelationInfo> nriList = null;
        try {
            nriList = QueryUtil.getAllNotifyRelationInfoWithoutJob();
        } catch (Exception e) {
            m_log.warn("refresh() : " + e.getClass().getSimpleName() + ", " + e.getMessage(), e);
            return;
        }
        for (NotifyRelationInfo nri : nriList) {
            String notifyGroupId = nri.getId().getNotifyGroupId();
            // only cache relations for notify group IDs that belong on the cache
            if (onCache(notifyGroupId)) {
                List<NotifyRelationInfo> notifyList = notifyMap.get(notifyGroupId);
                if (notifyList == null) {
                    notifyList = new ArrayList<NotifyRelationInfo>();
                    notifyList.add(nri);
                    notifyMap.put(notifyGroupId, notifyList);
                } else {
                    notifyList.add(nri);
                }
            }
        }
        for (List<NotifyRelationInfo> notifyList : notifyMap.values()) {
            if (notifyList == null) {
                continue;
            }
            Collections.sort(notifyList);
        }
        storeCache(notifyMap);
        m_log.info("refresh NotifyRelationCache. " + (HinemosTime.currentTimeMillis() - start) + "ms. size="
                + notifyMap.size());
    } finally {
        _lock.writeUnlock();
    }
}

From source file:com.thesmartweb.swebrank.DataManipulation.java

public HashMap sortHashMapByValuesD(HashMap passedMap) {
    List mapKeys = new ArrayList(passedMap.keySet());
    List mapValues = new ArrayList(passedMap.values());
    Collections.sort(mapValues);
    Collections.sort(mapKeys);

    HashMap sortedMap = new HashMap();

    Iterator valueIt = mapValues.iterator();
    while (valueIt.hasNext()) {
        Object val = valueIt.next();
        Iterator keyIt = mapKeys.iterator();

        while (keyIt.hasNext()) {
            Object key = keyIt.next();
            String comp1 = passedMap.get(key).toString();
            String comp2 = val.toString();

            if (comp1.equals(comp2)) {
                passedMap.remove(key);
                mapKeys.remove(key);
                sortedMap.put((String) key, (Double) val);
                break;
            }

        }

    }
    return sortedMap;
}

From source file:com.tacitknowledge.util.migration.DistributedAutoPatchRollbackTest.java

/**
 * Ensure that read-only mode actually works
 *
 * @exception Exception if anything goes wrong
 */
public void testDistributedReadOnlyMode() throws Exception {
    int currentPatchLevel = 12;

    DistributedMigrationProcess process = (DistributedMigrationProcess) getLauncher().getMigrationProcess();
    process.validateTasks(process.getMigrationTasks());

    // need to mock the patch info stores to return the expected patch levels
    HashMap controlledSystems = process.getControlledSystems();
    setReportedPatchLevel(controlledSystems.values(), currentPatchLevel);

    // Make it readonly
    process.setReadOnly(true);

    // Now do the migrations, and make sure we get the right number of events
    try {
        process.doRollbacks(currentPatchInfoStore, ROLLBACK_LEVELS, getContext(), false);
        fail("There should have been an exception - unapplied patches + read-only don't work");
    } catch (MigrationException me) {
        // we expect this
    }

    currentPatchLevel = 13;
    // need to mock the patch info stores to return the expected patch levels
    setReportedPatchLevel(controlledSystems.values(), currentPatchLevel);
    //int patches = process.doRollbacks(currentPatchLevel, rollbackPatchLevel, getContext());
    // assertEquals(0, patches);
    assertEquals(0, getRollbackStartedCount());
    assertEquals(0, getRollbackSuccessCount());
}

From source file:org.apache.geode.geospatial.utils.CachingPutAllMap.java

@Override
public Collection values() {
    readLock.lock();
    try {
        HashMap map = new HashMap(wrappedMap);
        map.putAll(bulkMap);
        return map.values();
    } finally {
        readLock.unlock();
    }
}

From source file:com.taobao.metamorphosis.client.http.SimpleHttpConsumer.java

private List<TopicPartitionRegInfo> getTopicPartitionRegInfos() {
    final List<TopicPartitionRegInfo> rt = new ArrayList<TopicPartitionRegInfo>();
    for (final HashMap<Partition, TopicPartitionRegInfo> subMap : this.topicRegistry.values()) {
        final Collection<TopicPartitionRegInfo> values = subMap.values();
        if (values != null) {
            rt.addAll(values);
        }
    }
    return rt;
}