Example usage for java.util TreeMap size

List of usage examples for java.util TreeMap size

Introduction

On this page you can find example usages of java.util.TreeMap.size().

Prototype

public int size()

Document

Returns the number of key-value mappings in this map.

Usage
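
Before the excerpts from real projects below, here is a minimal, self-contained sketch (the word-count data is made up) showing how size() reflects the number of mappings in a TreeMap:

import java.util.TreeMap;

public class TreeMapSizeDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> wordCounts = new TreeMap<String, Integer>();
        System.out.println(wordCounts.size());   // 0: a new map is empty

        wordCounts.put("apple", 3);
        wordCounts.put("banana", 1);
        wordCounts.put("apple", 5);               // overwrites the existing key, adds no entry
        System.out.println(wordCounts.size());   // 2: one entry per distinct key

        wordCounts.remove("banana");
        System.out.println(wordCounts.size());   // 1
    }
}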

From source file:edu.utexas.cs.tactex.tariffoptimization.TariffOptimizerBinaryOneShot.java

/**
 * evaluate suggestedSpecs.get(index) and record the result to utilToIndex
 * and result.
 * 
 * @param index
 * @param utilToIndex
 * @param result
 * @param suggestedSpecs
 * @param consideredTariffActions
 * @param tariffSubscriptions
 * @param competingTariffs
 * @param costCurvesPredictor
 * @param currentTimeslot
 * @param me
 * @param customer2ShiftedEnergy
 * @param customer2RelevantTariffCharges
 * @param customer2NonShiftedEnergy 
 */
private void evaluateAndRecord(int index, TreeMap<Double, Integer> utilToIndex,
        TreeMap<Double, TariffSpecification> result, List<TariffSpecification> suggestedSpecs,
        ArrayList<TariffSpecification> consideredTariffActions,
        HashMap<TariffSpecification, HashMap<CustomerInfo, Integer>> tariffSubscriptions,
        List<TariffSpecification> competingTariffs, CostCurvesPredictor costCurvesPredictor,
        int currentTimeslot, Broker me,
        HashMap<CustomerInfo, HashMap<TariffSpecification, ShiftedEnergyData>> customer2ShiftedEnergy,
        HashMap<CustomerInfo, ArrayRealVector> customer2NonShiftedEnergy,
        HashMap<CustomerInfo, HashMap<TariffSpecification, Double>> customer2RelevantTariffCharges) {
    TreeMap<Double, TariffSpecification> sortedTariffs;
    consideredTariffActions.clear();
    consideredTariffActions.add(suggestedSpecs.get(index));
    //log.info("computing utilities");
    sortedTariffs = utilityEstimator.estimateUtilities(consideredTariffActions, tariffSubscriptions,
            competingTariffs, customer2RelevantTariffCharges, customer2ShiftedEnergy, customer2NonShiftedEnergy,
            marketPredictionManager, costCurvesPredictor, currentTimeslot, me);
    utilToIndex.put(sortedTariffs.lastEntry().getKey(), index);
    // maintain top 3
    if (utilToIndex.size() > 3) {
        utilToIndex.remove(utilToIndex.firstKey());
    }
    result.putAll(sortedTariffs);
}
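
The size() check in evaluateAndRecord keeps only the three highest utilities seen so far. The same idiom, isolated into a small helper sketch (the class and field names below are illustrative, not part of the original broker code):

import java.util.TreeMap;

/** Keeps at most maxEntries mappings, discarding the smallest key once the cap is exceeded. */
class BoundedTopMap {
    private final TreeMap<Double, Integer> utilToIndex = new TreeMap<Double, Integer>();
    private final int maxEntries;

    BoundedTopMap(int maxEntries) {
        this.maxEntries = maxEntries;
    }

    void record(double utility, int index) {
        utilToIndex.put(utility, index);
        // same pattern as above: trim the lowest utility whenever the cap is exceeded
        if (utilToIndex.size() > maxEntries) {
            utilToIndex.remove(utilToIndex.firstKey());
        }
    }
}

Constructed as new BoundedTopMap(3) and fed one candidate at a time, the map never holds more than the three best utilities, kept in ascending key order.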

From source file:com.clust4j.algo.HDBSCANTests.java

@Test
public void testCondenseAndComputeStability() {
    double[][] slt = new double[][] { new double[] { 0.0, 2.0, 0.3, 2.0 },
            new double[] { 3.0, 1.0, 0.6, 3.0 } };

    ArrayList<CompQuadTup<Integer, Integer, Double, Integer>> h = HDBSCAN.LinkageTreeUtils.condenseTree(slt, 5);
    QuadTup<Integer, Integer, Double, Integer> q = h.get(0);
    assertTrue(q.getFirst() == 3);
    assertTrue(q.getSecond() == 0);
    // the third element is a repeating decimal, so it is not checked for exact equality
    assertTrue(q.getFourth() == 1);

    TreeMap<Integer, Double> computedStability = HDBSCAN.LinkageTreeUtils.computeStability(h);
    assertTrue(computedStability.size() == 1);
    assertTrue(computedStability.get(3) == 5);

    int[] labels = HDBSCAN.getLabels(h, computedStability);
    assertTrue(labels.length == 3);
    assertTrue(labels[0] == -1);
    assertTrue(labels[1] == -1);
    assertTrue(labels[2] == -1);
}

From source file:com.termmed.reconciliation.RelationshipReconciliation.java

/**
 * Compares previous inactive relationships against current active relationships.
 *
 * @param step the step
 * @return the string
 * @throws IOException Signals that an I/O exception has occurred.
 */
private String comparePrevInact(int step) throws IOException {
    // STATISTICS COUNTERS
    int countSameISA = 0;
    int countA_Diff = 0;
    int countB_Diff = 0;
    int countB_DiffISA = 0;
    int countB_Total = 0;
    int sumPrevInact = 0;
    int sumNewNoRec = 0;
    long startTime = System.currentTimeMillis();

    StringBuilder s = new StringBuilder();
    s.append(
            "\r\n::: [Reconciliation by previous inactives vs current actives, without grouping comparation - step:"
                    + step + "]");

    boolean reconciled = false;
    for (Long conceptId : newNoRec.keySet()) {
        TreeMap<String, Relationship> relsPrev = prevInact.get(conceptId);
        ArrayList<Relationship> relsCurr = newNoRec.get(conceptId);
        sumNewNoRec += relsCurr.size();
        if (relsPrev != null) {
            sumPrevInact += relsPrev.size();
            for (Relationship relC : relsCurr) {
                reconciled = false;
                for (String key : relsPrev.descendingKeySet()) {
                    Relationship relP = relsPrev.get(key);
                    if (compareRelsStep(relC, relP, step)) {
                        writeReconciled(bw, relC, relP);

                        countB_Total++;
                        if (relC.typeId == isa) {
                            countSameISA++;
                        }
                        reconciled = true;
                        relsPrev.remove(key);
                        break;
                    }
                }
                if (!reconciled) {
                    countB_Diff++;
                    if (relC.typeId == isa) {
                        countB_DiffISA++;
                    }
                    writeNewNoRec(relC);
                }
            }
            prevInact.put(conceptId, relsPrev);
        } else {
            for (Relationship relC : relsCurr) {
                countB_Diff++;
                if (relC.typeId == isa) {
                    countB_DiffISA++;
                }
                writeNewNoRec(relC);

            }
        }

    }

    s.append("\r\n::: Current active relationships to reconcile = \t" + sumNewNoRec);
    s.append("\r\n::: Candidate previous inactive relationships to match = \t" + sumPrevInact);

    s.append("\r\n::: Partial process statistics:");
    s.append("\r\n::: Reconciled relationships:  \t").append(countB_Total);
    s.append("\r\n::: Reconciled Isa's relationships:  \t").append(countSameISA);
    s.append("\r\n::: Previous relationships without match :   \t").append(countA_Diff);
    s.append("\r\n::: Current relationships without match:   \t").append(countB_Diff);
    s.append("\r\n::: Current Isa's relationships without match:\t").append(countB_DiffISA);
    s.append("\r\n::: ");

    long lapseTime = System.currentTimeMillis() - startTime;
    s.append("\r\n::: [Partial time] Sort/Compare Input & Output: \t").append(lapseTime);
    s.append("\t(mS)\t");
    s.append("\r\n");

    sumB_Total += countB_Total;
    sumSameISA += countSameISA;
    sumA_Diff = countA_Diff;
    sumB_Diff = countB_Diff;
    sumB_DiffISA = countB_DiffISA;

    return s.toString();
}
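
comparePrevInact scans relsPrev in descending key order and removes an entry the moment it is matched, relying on the immediate break to avoid a ConcurrentModificationException. A sketch of the same idea with an explicit iterator, which stays safe even without the break (the matching predicate stands in for compareRelsStep and is not part of the original class):

import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.Predicate;

class DescendingMatchSketch {
    /** Removes and returns the first value, highest key first, accepted by the matcher; null if none. */
    static <V> V takeFirstMatch(TreeMap<String, V> prev, Predicate<V> matches) {
        Iterator<Map.Entry<String, V>> it = prev.descendingMap().entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, V> entry = it.next();
            if (matches.test(entry.getValue())) {
                it.remove();          // structural removal through the iterator is safe here
                return entry.getValue();
            }
        }
        return null;                  // no reconciliation candidate matched
    }
}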

From source file:com.sfs.whichdoctor.dao.RelationshipDAOImpl.java

/**
 * Builds a supervisor map from the given list of prior rotations.
 *
 * @param priorRotations the prior rotations
 *
 * @return the tree map of Integer to RelationshipBean
 */
private TreeMap<Integer, RelationshipBean> loadPriorSupervisorMap(
        final Collection<RotationBean> priorRotations) {
    TreeMap<Integer, RelationshipBean> supervisorMap = new TreeMap<Integer, RelationshipBean>();

    TreeMap<Long, RotationBean> orderedRotations = new TreeMap<Long, RotationBean>();

    for (RotationBean rotation : priorRotations) {
        long key = LARGE_LONG - rotation.getEndDate().getTime();
        orderedRotations.put(key, rotation);
    }

    for (Long key : orderedRotations.keySet()) {
        RotationBean rotation = orderedRotations.get(key);
        dataLogger.info("End date: " + rotation.getEndDate());
        if (supervisorMap.size() == 0) {
            supervisorMap = getSupervisorMap(rotation);
        }
    }
    return supervisorMap;
}
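
loadPriorSupervisorMap inverts the ordering by keying on LARGE_LONG minus the end-date timestamp so that the newest rotation is visited first. A sketch of the same newest-first traversal using a reverse-order comparator instead; the Date keys and String values are stand-ins, where the original stores RotationBean values:

import java.util.Comparator;
import java.util.Date;
import java.util.TreeMap;

class NewestFirstSketch {
    // Keys sort from newest to oldest, so no LARGE_LONG arithmetic is needed.
    private final TreeMap<Date, String> rotationsByEndDate =
            new TreeMap<Date, String>(Comparator.<Date>reverseOrder());

    void add(Date endDate, String rotationName) {
        rotationsByEndDate.put(endDate, rotationName);
    }

    /** The rotation with the latest end date, or null if nothing has been added yet. */
    String mostRecent() {
        return rotationsByEndDate.isEmpty() ? null : rotationsByEndDate.firstEntry().getValue();
    }
}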

From source file:net.pms.util.OpenSubtitle.java

public static Map<String, Object> findSubs(String hash, long size, String imdb, String query,
        RendererConfiguration r) throws IOException {
    TreeMap<String, Object> res = new TreeMap<>();
    if (!login()) {
        return res;
    }
    String lang = UMSUtils.getLangList(r, true);
    URL url = new URL(OPENSUBS_URL);
    String hashStr = "";
    String imdbStr = "";
    String qStr = "";
    if (!StringUtils.isEmpty(hash)) {
        hashStr = "<member><name>moviehash</name><value><string>" + hash + "</string></value></member>\n"
                + "<member><name>moviebytesize</name><value><double>" + size + "</double></value></member>\n";
    } else if (!StringUtils.isEmpty(imdb)) {
        imdbStr = "<member><name>imdbid</name><value><string>" + imdb + "</string></value></member>\n";
    } else if (!StringUtils.isEmpty(query)) {
        qStr = "<member><name>query</name><value><string>" + query + "</string></value></member>\n";
    } else {
        return res;
    }
    String req = null;
    tokenLock.readLock().lock();
    try {
        req = "<methodCall>\n<methodName>SearchSubtitles</methodName>\n" + "<params>\n<param>\n<value><string>"
                + token + "</string></value>\n</param>\n"
                + "<param>\n<value>\n<array>\n<data>\n<value><struct><member><name>sublanguageid"
                + "</name><value><string>" + lang + "</string></value></member>" + hashStr + imdbStr + qStr
                + "\n" + "</struct></value></data>\n</array>\n</value>\n</param>"
                + "</params>\n</methodCall>\n";
    } finally {
        tokenLock.readLock().unlock();
    }
    Pattern re = Pattern.compile(
            "SubFileName</name>.*?<string>([^<]+)</string>.*?SubLanguageID</name>.*?<string>([^<]+)</string>.*?SubDownloadLink</name>.*?<string>([^<]+)</string>",
            Pattern.DOTALL);
    String page = postPage(url.openConnection(), req);
    Matcher m = re.matcher(page);
    while (m.find()) {
        LOGGER.debug("found subtitle " + m.group(2) + " name " + m.group(1) + " zip " + m.group(3));
        res.put(m.group(2) + ":" + m.group(1), m.group(3));
        if (res.size() > PMS.getConfiguration().liveSubtitlesLimit()) {
            // limit the number of hits somewhat
            break;
        }
    }
    return res;
}

From source file:org.lockss.servlet.DisplayContentTab.java

private Page doHtmlStatusTable0() throws IOException {
    page = new Page();
    addJS("js/DisplayContentTab.js");
    Table divTable = createTabDiv(auStart);
    TreeMap<String, TreeMap<String, TreeSet<ArchivalUnit>>> aus;
    if ("plugin".equals(groupKey)) {
        aus = getAusByPluginName(auStart, auEnd, type, filterKey);
    } else {
        aus = getAusByPublisherName(auStart, auEnd, type, filterKey);
    }
    for (Map.Entry<String, TreeMap<String, TreeSet<ArchivalUnit>>> entry : aus.entrySet()) {
        createTabContent(divTable, entry.getKey(), entry.getValue());
    }
    Form tabForm = new Form();
    tabForm.attribute("onsubmit", "return confirm('Do you wish to delete the selected items?');");
    tabForm.method("GET");
    tabForm.add(divTable);
    if (aus.size() > 0) {
        //          Input formAdd = new Input("submit", "addSubmit");
        //          formAdd.attribute("value", "Add selected");
        //          formAdd.attribute("id", "add-submit");
        //          formAdd.attribute("class", "submit-button");
        //          tabForm.add(formAdd);
        Input formDelete = new Input("submit", "deleteSubmit");
        formDelete.attribute("value", "Delete selected");
        formDelete.attribute("id", "delete-submit");
        formDelete.attribute("class", "submit-button");
        tabForm.add(formDelete);
    }
    page.add(tabForm);
    return page;
}
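
As an aside, the aus.size() > 0 guard in this example is equivalent to !aus.isEmpty() on a TreeMap, which can read more directly when only emptiness matters.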

From source file:org.opendatakit.database.data.ColumnDefinitionTest.java

@SuppressWarnings("unchecked")
private void recursiveMatch(String parent, TreeMap<String, Object> value, Map<String, Object> xlsxValue) {
    for (String key : value.keySet()) {
        assertTrue("Investigating " + parent + "." + key, xlsxValue.containsKey(key));
        Object ov = value.get(key);
        Object oxlsxv = xlsxValue.get(key);
        if (ov instanceof Map) {
            TreeMap<String, Object> rv = (TreeMap<String, Object>) ov;
            Map<String, Object> xlsrv = (Map<String, Object>) oxlsxv;
            List<String> ignoredKeys = new ArrayList<String>();
            for (String rvkey : xlsrv.keySet()) {
                if (rvkey.startsWith("_")) {
                    ignoredKeys.add(rvkey);
                }
                if (rvkey.equals("prompt_type_name")) {
                    ignoredKeys.add(rvkey);
                }
            }
            for (String rvkey : ignoredKeys) {
                xlsrv.remove(rvkey);
            }
            assertEquals("Investigating " + parent + "." + key, rv.size(), xlsrv.size());
            recursiveMatch(parent + "." + key, rv, xlsrv);

        } else {
            assertEquals("Investigating " + parent + "." + key, ov, oxlsxv);
        }
    }
}

From source file:ffx.potential.parameters.ForceField.java

/**
 * <p>
 * getForceFieldTypeCount</p>
 *
 * @param type a {@link ffx.potential.parameters.ForceField.ForceFieldType}
 * object.
 * @return an int.
 */
@SuppressWarnings("unchecked")
public int getForceFieldTypeCount(ForceFieldType type) {
    TreeMap<String, BaseType> treeMap = (TreeMap<String, BaseType>) forceFieldTypes.get(type);
    if (treeMap == null) {
        logger.log(Level.WARNING, "Unrecognized Force Field Type: {0}", type);
        return 0;
    }
    return treeMap.size();
}

From source file:com.sfs.whichdoctor.beans.PersonBean.java

/**
 * Gets the curriculum year.
 *
 * @return the curriculum year
 */
public final String getCurriculumYear() {

    TreeMap<Integer, Integer> years = new TreeMap<Integer, Integer>();

    if (this.getSpecialtyList() != null) {
        for (SpecialtyBean specialty : this.getSpecialtyList()) {
            if (StringUtils.equalsIgnoreCase(specialty.getStatus(), "In training")
                    && specialty.getTrainingProgramYear() > 0) {
                years.put(specialty.getTrainingProgramYear(), 1);
            }
        }
    }

    String value = "";
    int count = 1;

    for (int year : years.keySet()) {
        if (count > 1) {
            if (count == years.size()) {
                value += " and ";
            } else {
                value += ", ";
            }
        }
        value += String.valueOf(year);
        ++count;
    }
    return value;
}
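
getCurriculumYear relies on years.size() to decide whether the final year should be joined with " and " instead of a comma. The separator logic on its own, over a sorted key set (the sample data is hypothetical):

import java.util.TreeMap;

public class YearListSketch {
    static String describeYears(TreeMap<Integer, Integer> years) {
        StringBuilder value = new StringBuilder();
        int count = 1;
        for (int year : years.keySet()) {                       // ascending order, courtesy of TreeMap
            if (count > 1) {
                value.append(count == years.size() ? " and " : ", ");
            }
            value.append(year);
            ++count;
        }
        return value.toString();
    }

    public static void main(String[] args) {
        TreeMap<Integer, Integer> years = new TreeMap<Integer, Integer>();
        years.put(2021, 1);
        years.put(2019, 1);
        years.put(2020, 1);
        System.out.println(describeYears(years));               // prints "2019, 2020 and 2021"
    }
}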

From source file:com.datatorrent.contrib.hdht.HDHTWriter.java

/**
 * Flush changes from write cache to disk. New data files will be written and meta data replaced atomically. The flush
 * frequency determines availability of changes to external readers.
 *
 * @throws IOException
 */
private void writeDataFiles(Bucket bucket) throws IOException {
    BucketIOStats ioStats = getOrCretaStats(bucket.bucketKey);
    LOG.debug("Writing data files in bucket {}", bucket.bucketKey);
    // copy meta data on write
    BucketMeta bucketMetaCopy = kryo.copy(getMeta(bucket.bucketKey));

    /** Process purge requests before flushing data from the cache to maintain
     * the order of purge and put operations. This ensures that purged data is
     * removed from files before new data is added to them. */
    HashSet<String> filesToDelete = Sets.newHashSet();
    bucketMetaCopy = processPurge(bucket, bucketMetaCopy, filesToDelete);

    // bucket keys by file
    TreeMap<Slice, BucketFileMeta> bucketSeqStarts = bucketMetaCopy.files;
    Map<BucketFileMeta, Map<Slice, Slice>> modifiedFiles = Maps.newHashMap();

    for (Map.Entry<Slice, byte[]> entry : bucket.frozenWriteCache.entrySet()) {
        // find file for key
        Map.Entry<Slice, BucketFileMeta> floorEntry = bucketSeqStarts.floorEntry(entry.getKey());
        BucketFileMeta floorFile;
        if (floorEntry != null) {
            floorFile = floorEntry.getValue();
        } else {
            floorEntry = bucketSeqStarts.firstEntry();
            if (floorEntry == null || floorEntry.getValue().name != null) {
                // no existing file or file with higher key
                floorFile = new BucketFileMeta();
            } else {
                // placeholder for new keys, move start key
                floorFile = floorEntry.getValue();
                bucketSeqStarts.remove(floorEntry.getKey());
            }
            floorFile.startKey = entry.getKey();
            if (floorFile.startKey.length != floorFile.startKey.buffer.length) {
                // normalize key for serialization
                floorFile.startKey = new Slice(floorFile.startKey.toByteArray());
            }
            bucketSeqStarts.put(floorFile.startKey, floorFile);
        }

        Map<Slice, Slice> fileUpdates = modifiedFiles.get(floorFile);
        if (fileUpdates == null) {
            modifiedFiles.put(floorFile, fileUpdates = Maps.newHashMap());
        }
        fileUpdates.put(entry.getKey(), new Slice(entry.getValue()));
    }

    // write modified files
    for (Map.Entry<BucketFileMeta, Map<Slice, Slice>> fileEntry : modifiedFiles.entrySet()) {
        BucketFileMeta fileMeta = fileEntry.getKey();
        TreeMap<Slice, Slice> fileData = new TreeMap<Slice, Slice>(getKeyComparator());

        if (fileMeta.name != null) {
            // load existing file
            long start = System.currentTimeMillis();
            FileReader reader = store.getReader(bucket.bucketKey, fileMeta.name);
            reader.readFully(fileData);
            ioStats.dataBytesRead += store.getFileSize(bucket.bucketKey, fileMeta.name);
            ioStats.dataReadTime += System.currentTimeMillis() - start;
            /* these keys are re-written */
            ioStats.dataKeysRewritten += fileData.size();
            ioStats.filesReadInCurrentWriteCycle++;
            ioStats.dataFilesRead++;
            reader.close();
            filesToDelete.add(fileMeta.name);
        }

        // apply updates
        fileData.putAll(fileEntry.getValue());
        // new file
        writeFile(bucket, bucketMetaCopy, fileData);
    }

    LOG.debug("Files written {} files read {}", ioStats.filesWroteInCurrentWriteCycle,
            ioStats.filesReadInCurrentWriteCycle);
    // flush meta data for new files
    try {
        LOG.debug("Writing {} with {} file entries", FNAME_META, bucketMetaCopy.files.size());
        OutputStream os = store.getOutputStream(bucket.bucketKey, FNAME_META + ".new");
        Output output = new Output(os);
        bucketMetaCopy.committedWid = bucket.committedLSN;
        bucketMetaCopy.recoveryStartWalPosition = bucket.recoveryStartWalPosition;
        kryo.writeClassAndObject(output, bucketMetaCopy);
        output.close();
        os.close();
        store.rename(bucket.bucketKey, FNAME_META + ".new", FNAME_META);
    } catch (IOException e) {
        throw new RuntimeException("Failed to write bucket meta data " + bucket.bucketKey, e);
    }

    // clear pending changes
    ioStats.dataKeysWritten += bucket.frozenWriteCache.size();
    // switch to new version
    this.metaCache.put(bucket.bucketKey, bucketMetaCopy);

    // delete old files
    for (String fileName : filesToDelete) {
        store.delete(bucket.bucketKey, fileName);
    }
    invalidateReader(bucket.bucketKey, filesToDelete);
    // clearing cache after invalidating readers
    bucket.frozenWriteCache.clear();

    // cleanup WAL files which are not needed anymore.
    minimumRecoveryWalPosition = bucketMetaCopy.recoveryStartWalPosition;
    for (Long bucketId : this.bucketKeys) {
        BucketMeta meta = getMeta(bucketId);
        if (meta.recoveryStartWalPosition.fileId < minimumRecoveryWalPosition.fileId
                || (meta.recoveryStartWalPosition.fileId == minimumRecoveryWalPosition.fileId
                        && meta.recoveryStartWalPosition.offset < minimumRecoveryWalPosition.offset)) {
            minimumRecoveryWalPosition = meta.recoveryStartWalPosition;
        }
    }
    this.wal.cleanup(minimumRecoveryWalPosition.fileId);
    ioStats.filesReadInCurrentWriteCycle = 0;
    ioStats.filesWroteInCurrentWriteCycle = 0;
}
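
writeDataFiles routes each cached key to a data file with bucketSeqStarts.floorEntry(key), i.e. the file whose start key is the greatest one not above the lookup key. A stripped-down sketch of that routing; String keys and file names stand in for the Slice and BucketFileMeta types used above:

import java.util.Map;
import java.util.TreeMap;

class KeyRoutingSketch {
    // start key of each file -> file name; illustrative data only
    private final TreeMap<String, String> fileStarts = new TreeMap<String, String>();

    void addFile(String startKey, String fileName) {
        fileStarts.put(startKey, fileName);
    }

    /** The file whose start key is the greatest key not above the lookup key, or null if none covers it. */
    String fileFor(String key) {
        Map.Entry<String, String> entry = fileStarts.floorEntry(key);
        return entry == null ? null : entry.getValue();
    }

    int fileCount() {
        return fileStarts.size();   // analogous to the bucketMetaCopy.files.size() logged above
    }
}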