Example usage for java.util LinkedHashMap entrySet

List of usage examples for java.util LinkedHashMap entrySet

Introduction

On this page you can find example usages of java.util LinkedHashMap entrySet.

Prototype

public Set<Map.Entry<K, V>> entrySet() 

Source Link

Document

Returns a Set view of the mappings contained in this map.

Usage

From source file:de.tudarmstadt.ukp.dkpro.argumentation.sequence.annotator.EmbeddingsAnnotator.java

@Override
public void process(JCas aJCas) throws AnalysisEngineProcessException {
    // Handle every annotation span (sentence, etc.) selected for embeddings.
    for (Annotation annotation : selectAnnotationsForEmbeddings(aJCas)) {
        // Tf-idf weight for each (sufficiently short) covered token, keyed by
        // surface text in encounter order.
        LinkedHashMap<String, Double> tfIdfByToken = new LinkedHashMap<>();

        for (Token token : JCasUtil.selectCovered(Token.class, annotation)) {
            String text = token.getCoveredText();
            // Ignore overly long "tokens" (same 50-char cutoff as before).
            if (text.length() >= 50) {
                continue;
            }

            // Retrieve the tf-idf annotation covering this token.
            List<Tfidf> tfidfAnnotations = JCasUtil.selectCovered(Tfidf.class, token);
            if (tfidfAnnotations.isEmpty()) {
                throw new AnalysisEngineProcessException(
                        new IllegalStateException("Word embeddings annotations require TFIDF annotations"));
            }

            tfIdfByToken.put(text, tfidfAnnotations.get(0).getTfidfValue());
        }

        // Look up the embedding vector of each token, optionally tf-idf scaled.
        List<Vector> tokenVectors = new ArrayList<>();
        for (Map.Entry<String, Double> entry : tfIdfByToken.entrySet()) {
            Embedding embedding = getEmbeddings(entry.getKey());
            if (embedding == null || embedding.getVector() == null) {
                continue;
            }

            // Deep copy so scaling does not mutate the shared embedding data.
            DenseVector vector = new DenseVector(embedding.getVector());
            if (this.tfIdfWeighting) {
                vector.scale(entry.getValue());
            }
            tokenVectors.add(vector);
        }

        // Aggregate the per-token vectors into one fixed-size vector.
        DenseVector finalVector = createFinalVector(tokenVectors);

        // Attach the result to the CAS as an Embeddings annotation.
        Embeddings embeddings = new Embeddings(aJCas, annotation.getBegin(), annotation.getEnd());
        DoubleArray values = new DoubleArray(aJCas, VECTOR_SIZE);
        values.copyFromArray(finalVector.getData(), 0, 0, VECTOR_SIZE);
        embeddings.setVector(values);
        embeddings.addToIndexes();
    }
}

From source file:uk.ac.diamond.scisoft.ncd.calibration.CalibrationMethods.java

/**
 * Assigns an HKL index to each detected peak by trying every combination of
 * the supplied reflections and keeping the assignment with the lowest fit
 * variance. Also rebuilds {@code indexedPeakList} from the winning assignment.
 */
private LinkedHashMap<IPeak, HKL> indexPeaks(LinkedHashMap<HKL, Amount<Angle>> twoTheta) {
    LinkedHashMap<IPeak, HKL> bestAssignment = new LinkedHashMap<IPeak, HKL>(peaks.size());
    CombinationGenerator<HKL> combinations = new CombinationGenerator<HKL>(twoTheta.keySet(), peaks.size());

    double lowestVariance = Double.MAX_VALUE;
    for (List<HKL> candidateIndices : combinations) {
        // NOTE(review): computed but never read afterwards; kept to preserve
        // any side effects of getPosition()/doubleValue().
        ArrayList<Double> projectedDistances = new ArrayList<Double>();
        LinkedHashMap<IPeak, HKL> candidate = new LinkedHashMap<IPeak, HKL>();
        int position = 0;
        for (HKL hkl : candidateIndices) {
            IPeak peak = peaks.get(position++);
            projectedDistances.add(peak.getPosition() / Math.tan(twoTheta.get(hkl).doubleValue(SI.RADIAN)));
            candidate.put(peak, hkl);
        }
        double variance = fitFunctionToData(candidate, false);
        // Accept on ties too — matches "skip only when strictly worse".
        if (variance <= lowestVariance) {
            bestAssignment = candidate;
            lowestVariance = variance;
        }
    }

    // Record the winning assignment as calibration peaks.
    indexedPeakList = new ArrayList<CalibrationPeak>();
    for (Entry<IPeak, HKL> assignment : bestAssignment.entrySet()) {
        HKL idx = assignment.getValue();
        indexedPeakList.add(
                new CalibrationPeak(assignment.getKey().getPosition(), twoTheta.get(idx), idx));
    }

    return bestAssignment;
}

From source file:com.smartitengineering.dao.impl.hbase.CommonDao.java

/**
 * Deletes the HBase rows backing each of the given entity states without
 * optimistic version checking. Deletes are grouped per table name and issued
 * with one executor call per table.
 *
 * @param states entity templates to delete; each must be in a valid state
 * @throws IllegalStateException if any state reports {@code !isValid()}
 */
protected void deleteNonOptimistically(Template[] states) throws IllegalStateException {
    // Table name -> accumulated deletes, preserving first-seen table order.
    LinkedHashMap<String, List<Delete>> allDels = new LinkedHashMap<String, List<Delete>>();
    for (Template state : states) {
        if (!state.isValid()) {
            throw new IllegalStateException("Entity not in valid state!");
        }
        // Converter yields this entity's deletes keyed by table; the flag tells
        // it whether rows are held under pessimistic locks.
        LinkedHashMap<String, Delete> dels = getConverter().objectToDeleteableRows(state, executorService,
                getLockType().equals(LockType.PESSIMISTIC));
        for (Map.Entry<String, Delete> del : dels.entrySet()) {
            final List<Delete> putList;
            if (allDels.containsKey(del.getKey())) {
                putList = allDels.get(del.getKey());
            } else {
                putList = new ArrayList<Delete>();
                allDels.put(del.getKey(), putList);
            }
            putList.add(del.getValue());
        }
    }
    for (final Map.Entry<String, List<Delete>> dels : allDels.entrySet()) {
        try {
            executorService.execute(dels.getKey(), new Callback<Void>() {

                @Override
                public Void call(HTableInterface tableInterface) throws Exception {
                    final List<Delete> value = dels.getValue();
                    if (logger.isInfoEnabled()) {
                        logger.info("Attempting to DELETE " + value);
                    }
                    // Copy defensively before handing the list to the client.
                    final ArrayList<Delete> list = new ArrayList<Delete>(value);
                    tableInterface.delete(list);
                    return null;
                }
            });
        } finally {
            // NOTE(review): this finally runs once per table iteration, so every
            // state is unlocked/evicted after the FIRST table's deletes finish;
            // later iterations then operate on already-unlocked entities. Verify
            // whether the unlock was meant to happen once, after the whole loop.
            for (Template state : states) {
                lockAttainer.unlockAndEvictFromCache(state);
            }
        }
    }
}

From source file:com.github.shareme.gwschips.library.BaseRecipientAdapter.java

/**
 * Constructs an actual list for this Adapter using {@link #mEntryMap}. Also tries to
 * fetch a cached photo for each contact entry (other than separators), or request another
 * thread to get one from directories./*from   w  ww. ja v a 2s.c o m*/
 */
private List<RecipientEntry> constructEntryList(LinkedHashMap<Long, List<RecipientEntry>> entryMap,
        List<RecipientEntry> nonAggregatedEntries) {
    final List<RecipientEntry> entries = new ArrayList<>();
    int validEntryCount = 0;
    for (Map.Entry<Long, List<RecipientEntry>> mapEntry : entryMap.entrySet()) {
        final List<RecipientEntry> entryList = mapEntry.getValue();
        final int size = entryList.size();
        for (int i = 0; i < size; i++) {
            RecipientEntry entry = entryList.get(i);
            entries.add(entry);
            tryFetchPhoto(entry, mContentResolver, this, false, i);
            validEntryCount++;
        }
        //            if (validEntryCount > mPreferredMaxResultCount) {
        //                break;
        //            }
    }
    if (validEntryCount <= mPreferredMaxResultCount) {
        for (int i = 0; i < nonAggregatedEntries.size(); i++) {
            RecipientEntry entry = nonAggregatedEntries.get(i);
            //                if (validEntryCount > mPreferredMaxResultCount) {
            //                    break;
            //                }
            entries.add(entry);
            tryFetchPhoto(entry, mContentResolver, this, false, i);

            validEntryCount++;
        }
    }

    return entries;
}

From source file:net.sf.jasperreports.engine.fill.DelayedFillActions.java

/**
 * Runs all delayed bound actions registered for the given evaluation time,
 * resolving one filled page per lock acquisition until no pages remain.
 *
 * @param evaluationTime the evaluation time whose pending actions should run
 * @param evaluation the expression evaluation type forwarded to each action
 * @throws JRException if executing a bound action fails
 */
public void runActions(JREvaluationTime evaluationTime, byte evaluation) throws JRException {
    if (log.isDebugEnabled()) {
        log.debug(id + " running delayed actions on " + evaluationTime);
    }

    // Per-page queues of bound actions for this evaluation time.
    LinkedHashMap<FillPageKey, LinkedMap<Object, EvaluationBoundAction>> pagesMap = actionsMap
            .get(evaluationTime);

    boolean hasEntry;
    do {
        reportFiller.checkInterrupted();

        // locking once per page so that we don't hold the lock for too long
        // (that would prevent async exporters from getting page data during a long resolve)
        fillContext.lockVirtualizationContext();
        try {
            synchronized (pagesMap) {
                // resolve a single page
                Iterator<Map.Entry<FillPageKey, LinkedMap<Object, EvaluationBoundAction>>> pagesIt = pagesMap
                        .entrySet().iterator();
                hasEntry = pagesIt.hasNext();
                if (hasEntry) {
                    Map.Entry<FillPageKey, LinkedMap<Object, EvaluationBoundAction>> pageEntry = pagesIt.next();
                    int pageIdx = pageEntry.getKey().index;

                    if (log.isDebugEnabled()) {
                        log.debug(
                                id + " running actions for page " + pageEntry.getKey().page + " at " + pageIdx);
                    }

                    // Context shared by all actions executed for this page.
                    StandardBoundActionExecutionContext context = new StandardBoundActionExecutionContext();
                    context.setCurrentPageIndex(pageIdx);
                    JasperPrint jasperPrint = fillContext.getMasterFiller().getJasperPrint();
                    context.setTotalPages(jasperPrint.getPages().size());
                    context.setEvaluationTime(evaluationTime);
                    context.setExpressionEvaluationType(evaluation);

                    LinkedMap<Object, EvaluationBoundAction> boundElementsMap = pageEntry.getValue();
                    // execute the actions, draining the page's queue
                    while (!boundElementsMap.isEmpty()) {
                        EvaluationBoundAction action = boundElementsMap.pop();
                        action.execute(context);
                    }

                    // remove the entry from the pages map
                    pagesIt.remove();

                    // call the listener to signal that the page has been modified
                    if (reportFiller.fillListener != null) {
                        reportFiller.fillListener.pageUpdated(jasperPrint, pageIdx);
                    }
                }
            }
        } finally {
            fillContext.unlockVirtualizationContext();
        }
    } while (hasEntry);
}

From source file:com.android.ex.chips.BaseRecipientAdapter.java

/**
 * Flattens {@code entryMap} into the adapter's list, requesting a photo for
 * each entry, and stops collecting once the count exceeds
 * {@code mPreferredMaxResultCount}. Non-aggregated entries are appended
 * afterwards under the same cap.
 */
private List<RecipientEntry> constructEntryList(LinkedHashMap<Long, List<RecipientEntry>> entryMap,
        List<RecipientEntry> nonAggregatedEntries) {
    final List<RecipientEntry> result = new ArrayList<RecipientEntry>();
    int collected = 0;
    for (Map.Entry<Long, List<RecipientEntry>> grouped : entryMap.entrySet()) {
        for (final RecipientEntry entry : grouped.getValue()) {
            result.add(entry);
            tryFetchPhoto(entry);
            collected++;
        }
        // The cap is only checked between contacts, so one contact's entries
        // are never split.
        if (collected > mPreferredMaxResultCount) {
            break;
        }
    }
    if (collected <= mPreferredMaxResultCount) {
        for (final RecipientEntry entry : nonAggregatedEntries) {
            if (collected > mPreferredMaxResultCount) {
                break;
            }
            result.add(entry);
            tryFetchPhoto(entry);
            collected++;
        }
    }

    return result;
}

From source file:org.bimserver.charting.SupportFunctions.java

/**
 * Builds one raw data row per classification item reference (or notation
 * facet) found in the model, sized by how many objects the owning
 * {@code IfcRelAssociatesClassification} relates, and configures the chart's
 * dimension lookup keys for the resulting tree structure.
 *
 * @param structureKeyword prefix used to generate hierarchy column names
 * @param model the IFC model to scan
 * @param chart chart whose dimension lookup keys are configured here
 * @param includeClassificationSystem whether to prepend a parent column
 *        holding the classification system name
 * @return one map per item reference with hierarchy, "size" and "label" keys
 */
public static ArrayList<LinkedHashMap<String, Object>> getIfcByClassificationReferenceWithTreeStructure(
        String structureKeyword, IfcModelInterface model, Chart chart, boolean includeClassificationSystem) {
    ArrayList<LinkedHashMap<String, Object>> rawData = new ArrayList<>();
    // Prepare for static iteration: total related-object count per relation.
    LinkedHashMap<IfcRelAssociatesClassification, Integer> ifcClassificationWithCounts = new LinkedHashMap<>();
    // Iterate only the products.
    for (IfcRelAssociatesClassification ifcRelAssociatesClassification : model
            .getAllWithSubTypes(IfcRelAssociatesClassification.class)) {
        IfcRelAssociatesClassification key = ifcRelAssociatesClassification;
        Integer value = 0;
        if (ifcClassificationWithCounts.containsKey(key))
            value = ifcClassificationWithCounts.get(key);
        // Count the objects this classification relation covers.
        EList<IfcRoot> a = ifcRelAssociatesClassification.getRelatedObjects();
        ifcClassificationWithCounts.put(key, value + a.size());
    }
    // Derive the column names: structureKeyword1..N; the last one is the leaf.
    ArrayList<String> hierarchyColumnNames = new ArrayList<>();
    int extraColumns = (includeClassificationSystem) ? 1 : 0;
    String leafColumnName = String.format("%s%d", structureKeyword, extraColumns + 1);
    for (int i = 0; i < extraColumns + 1; i++)
        hierarchyColumnNames.add(String.format("%s%d", structureKeyword, i + 1));
    // Update the chart configuration.
    chart.setDimensionLookupKeys(structureKeyword, hierarchyColumnNames);
    chart.setDimensionLookupKey("size", "size");
    chart.setDimensionLookupKey("label", "label");
    chart.setDimensionLookupKey("color", "size");
    // Add each entry.
    for (Entry<IfcRelAssociatesClassification, Integer> countedEntry : ifcClassificationWithCounts.entrySet()) {
        //
        Integer count = countedEntry.getValue();
        IfcRelAssociatesClassification ifcRelAssociatesClassification = countedEntry.getKey();
        // Reference/notation names contributed by this relation.
        LinkedList<String> itemReferenceNames = new LinkedList<>();
        String classificationSystem = (ifcRelAssociatesClassification.isSetName())
                ? ifcRelAssociatesClassification.getName()
                : "(no name)";
        //
        IfcClassificationNotationSelect notationOrReference = ifcRelAssociatesClassification
                .getRelatingClassification();
        if (notationOrReference instanceof IfcClassificationNotation) {
            // Get notation.
            IfcClassificationNotation notation = (IfcClassificationNotation) notationOrReference;
            // Go through the facets of this annotation.
            for (IfcClassificationNotationFacet facet : notation.getNotationFacets()) {
                String notationValue = facet.getNotationValue();
                itemReferenceNames.add(notationValue);
            }
            // TODO: Look up notation in classification. No inverse lookup is available.
        } else if (notationOrReference instanceof IfcClassificationReference) {
            // Get reference.
            IfcClassificationReference reference = (IfcClassificationReference) notationOrReference;
            // Get the reference name.
            String itemReferenceName = reference.getItemReference();
            itemReferenceNames.add(itemReferenceName);
            // Get the classification the reference links out to.
            IfcClassification classification = reference.getReferencedSource();
            // Use it as the system name when available.
            if (classification != null)
                classificationSystem = classification.getName();
        }
        // Emit one row per collected reference name; all rows from the same
        // relation share the same count.
        while (itemReferenceNames.size() > 0) {
            String itemReferenceName = itemReferenceNames.pop();
            // Prepare to store this raw data entry.
            LinkedHashMap<String, Object> dataEntry = new LinkedHashMap<>();
            // Name the group.
            String name = String.format("%s (%s)", itemReferenceName, count);
            dataEntry.put(leafColumnName, name);
            if (includeClassificationSystem)
                dataEntry.put(hierarchyColumnNames.get(0), classificationSystem);
            dataEntry.put("size", count);
            dataEntry.put("label", name);
            // Push the entry into the data pool.
            rawData.add(dataEntry);
        }
    }
    // Send it all back.
    return rawData;
}

From source file:org.cloudifysource.dsl.internal.DSLReader.java

/**
 * Builds the Groovy {@code Binding} used to evaluate a DSL file: the reader's
 * own binding properties first, then (when non-null) the given properties map,
 * plus the standard DSL variables (validation flag, DSL file path, logger).
 */
private Binding createGroovyBinding(final LinkedHashMap<Object, Object> properties) {
    final Binding binding = new Binding();

    // Expose every pre-configured binding property as a Groovy variable.
    for (final Entry<String, Object> bindingProperty : this.bindingProperties.entrySet()) {
        binding.setVariable(bindingProperty.getKey(), bindingProperty.getValue());
    }

    if (properties != null) {
        for (final Entry<Object, Object> property : properties.entrySet()) {
            binding.setVariable((String) property.getKey(), property.getValue());
        }
        // Also publish the whole map so scripts can tell properties apart from
        // other binding variables. This is used when loading an application's
        // service process to hand the application properties to the service
        // through the application's binding.
        binding.setVariable(DSLUtils.DSL_PROPERTIES, properties);
        if (context != null) {
            binding.setVariable("context", context);
        }
    }

    binding.setVariable(DSLUtils.DSL_VALIDATE_OBJECTS_PROPERTY_NAME, validateObjects);
    binding.setVariable(DSLUtils.DSL_FILE_PATH_PROPERTY_NAME, dslFile == null ? null : dslFile.getPath());
    binding.setVariable(DSLReader.DSL_LOGGER_NAME, dslLogger);

    return binding;
}

From source file:com.cloud.utils.db.SqlGenerator.java

/**
 * Builds one DELETE statement per mapped table, each paired with the id
 * attributes of that table, and returns them in reverse table-registration
 * order.
 */
protected List<Pair<String, Attribute[]>> buildDeleteSqls() {
    // Seed one (possibly empty) attribute bucket per table, in table order.
    LinkedHashMap<String, ArrayList<Attribute>> idAttributesByTable =
            new LinkedHashMap<String, ArrayList<Attribute>>();
    for (Class<?> table : _tables) {
        idAttributesByTable.put(DbUtil.getTableName(table), new ArrayList<Attribute>());
    }

    // Route every id attribute into its table's bucket.
    for (Attribute attr : _attributes) {
        if (!attr.isId()) {
            continue;
        }
        ArrayList<Attribute> bucket = idAttributesByTable.get(attr.table);
        assert (bucket != null) : "Null set of attributes for " + attr.table;
        bucket.add(attr);
    }

    List<Pair<String, Attribute[]>> sqls =
            new ArrayList<Pair<String, Attribute[]>>(idAttributesByTable.size());
    for (Map.Entry<String, ArrayList<Attribute>> tableEntry : idAttributesByTable.entrySet()) {
        ArrayList<Attribute> bucket = tableEntry.getValue();
        sqls.add(new Pair<String, Attribute[]>(buildDeleteSql(tableEntry.getKey(), bucket),
                bucket.toArray(new Attribute[bucket.size()])));
    }

    Collections.reverse(sqls);
    return sqls;
}