Example usage for java.util TreeMap size

List of usage examples for java.util TreeMap size

Introduction

On this page you can find an example usage for java.util TreeMap size.

Prototype

int size()

Follow the link below to view the source code for java.util TreeMap size.

Click Source Link

Document

The number of entries in the tree

Usage

From source file:de.thingweb.desc.ThingDescriptionParser.java

/**
 * Parses a Thing Description in the old (pre-standard) JSON layout.
 *
 * @param td root node of the Thing Description document
 * @return the parsed {@link Thing}
 * @throws IOException if the document cannot be parsed
 * @deprecated kept only for backward compatibility with legacy TD documents
 */
@Deprecated
private static Thing parseOld(JsonNode td) throws IOException {
    try {
        Thing thing = new Thing(td.get("metadata").get("name").asText());

        Iterator<String> tdIterator = td.fieldNames();
        while (tdIterator.hasNext()) {
            switch (tdIterator.next()) {
            case "metadata":
                Iterator<String> metaIterator = td.get("metadata").fieldNames();
                while (metaIterator.hasNext()) {
                    switch (metaIterator.next()) {
                    case "encodings":
                        for (JsonNode encoding : td.get("metadata").get("encodings")) {
                            thing.getMetadata().add("encodings", encoding);
                        }
                        break;

                    case "protocols":
                        // TreeMap orders the URIs by ascending protocol priority.
                        TreeMap<Long, String> orderedURIs = new TreeMap<>();
                        for (JsonNode protocol : td.get("metadata").get("protocols")) {
                            orderedURIs.put(protocol.get("priority").asLong(), protocol.get("uri").asText());
                        }
                        if (orderedURIs.size() == 1) {
                            // BUG FIX: get(0) looked up the Integer key 0 in a Long-keyed
                            // TreeMap (ClassCastException at runtime; also the single entry's
                            // priority need not be 0). Use the sole (first) entry instead.
                            thing.getMetadata().add("uris", factory.textNode(orderedURIs.firstEntry().getValue()));
                        } else {
                            ArrayNode an = factory.arrayNode();
                            for (String uri : orderedURIs.values()) {
                                // values returned in ascending order
                                an.add(uri);
                            }
                            thing.getMetadata().add("uris", an);
                        }

                        break;
                    }
                }
                break;

            case "interactions":
                // Each interaction is one of Property / Action / Event, dispatched on "@type".
                for (JsonNode inter : td.get("interactions")) {
                    if (inter.get("@type").asText().equals("Property")) {
                        Property.Builder builder = Property.getBuilder(inter.get("name").asText());
                        Iterator<String> propIterator = inter.fieldNames();
                        while (propIterator.hasNext()) {
                            switch (propIterator.next()) {
                            case "outputData":
                                builder.setValueType(inter.get("outputData"));
                                break;
                            case "writable":
                                builder.setWriteable(inter.get("writable").asBoolean());
                                break;
                            }
                        }
                        thing.addProperty(builder.build());
                    } else if (inter.get("@type").asText().equals("Action")) {
                        Action.Builder builder = Action.getBuilder(inter.get("name").asText());
                        Iterator<String> actionIterator = inter.fieldNames();
                        while (actionIterator.hasNext()) {
                            switch (actionIterator.next()) {
                            case "inputData":
                                builder.setInputType(inter.get("inputData").asText());
                                break;
                            case "outputData":
                                builder.setOutputType(inter.get("outputData").asText());
                                break;
                            }
                        }
                        thing.addAction(builder.build());
                    } else if (inter.get("@type").asText().equals("Event")) {
                        Event.Builder builder = Event.getBuilder(inter.get("name").asText());
                        Iterator<String> actionIterator = inter.fieldNames();
                        while (actionIterator.hasNext()) {
                            switch (actionIterator.next()) {
                            case "outputData":
                                builder.setValueType(inter.get("outputData"));
                                break;
                            }
                        }
                        thing.addEvent(builder.build());
                    }
                }
                break;
            }
        }

        return thing;
    } catch (Exception e) { // anything could happen here
        // FIX: preserve the original cause instead of discarding it.
        throw new IOException("unable to parse Thing Description", e);
    }
}

From source file:org.apache.apex.malhar.lib.state.managed.ManagedStateTestUtils.java

/**
 * Reads back every persisted time-bucket file for {@code bucketId} and asserts that
 * the merged on-disk contents match {@code unsavedBucket}.
 *
 * @param fileAccess file access used to list and read the bucket files
 * @param bucketId the bucket under test
 * @param unsavedBucket expected key/value data for the bucket
 * @param keysPerTimeBucket number of keys each time-bucket file is expected to hold
 */
static void transferBucketHelper(FileAccess fileAccess, long bucketId,
        Map<Slice, Bucket.BucketedValue> unsavedBucket, int keysPerTimeBucket) throws IOException {
    RemoteIterator<LocatedFileStatus> files = fileAccess.listFiles(bucketId);
    TreeMap<Slice, Slice> diskEntries = Maps.newTreeMap(new SliceComparator());
    int expectedKeys = 0;
    while (files.hasNext()) {
        LocatedFileStatus status = files.next();
        String timeBucketName = status.getPath().getName();

        // Skip the bucket meta file and any in-progress temporary files.
        if (timeBucketName.equals(BucketsFileSystem.META_FILE_NAME) || timeBucketName.endsWith(".tmp")) {
            continue;
        }

        LOG.debug("bucket {} time-bucket {}", bucketId, timeBucketName);

        // Accumulate this time-bucket's entries into the merged on-disk view.
        fileAccess.getReader(bucketId, timeBucketName).readFully(diskEntries);
        expectedKeys += keysPerTimeBucket;
        Assert.assertEquals("size of bucket " + bucketId, expectedKeys, diskEntries.size());
    }

    Assert.assertEquals("size of bucket " + bucketId, unsavedBucket.size(), diskEntries.size());

    // Project the expected bucket down to plain values for comparison with disk data.
    Map<Slice, Slice> expectedEntries = Maps.transformValues(unsavedBucket,
            new Function<Bucket.BucketedValue, Slice>() {
                @Override
                public Slice apply(@Nullable Bucket.BucketedValue bucketedValue) {
                    assert bucketedValue != null;
                    return bucketedValue.getValue();
                }
            });
    Assert.assertEquals("data of bucket" + bucketId, expectedEntries, diskEntries);
}

From source file:org.apache.hadoop.hbase.master.procedure.MasterDDLOperationHelper.java

/**
 * Reopen all regions from a table after a schema change operation.
 *
 * @param env the master procedure environment
 * @param tableName the table whose regions should be reopened
 * @param regionInfoList the regions to reopen
 * @return true if every region was reopened, false if the bulk reopen was interrupted
 * @throws IOException if the region locations cannot be fetched
 **/
public static boolean reOpenAllRegions(final MasterProcedureEnv env, final TableName tableName,
        final List<HRegionInfo> regionInfoList) throws IOException {
    boolean done = false;
    LOG.info("Bucketing regions by region server...");
    List<HRegionLocation> regionLocations = null;
    Connection connection = env.getMasterServices().getConnection();
    try (RegionLocator locator = connection.getRegionLocator(tableName)) {
        regionLocations = locator.getAllRegionLocations();
    }
    // Convert List<HRegionLocation> to Map<HRegionInfo, ServerName>.
    NavigableMap<HRegionInfo, ServerName> hri2Sn = new TreeMap<HRegionInfo, ServerName>();
    for (HRegionLocation location : regionLocations) {
        hri2Sn.put(location.getRegionInfo(), location.getServerName());
    }
    TreeMap<ServerName, List<HRegionInfo>> serverToRegions = Maps.newTreeMap();
    List<HRegionInfo> reRegions = new ArrayList<HRegionInfo>();
    for (HRegionInfo hri : regionInfoList) {
        ServerName sn = hri2Sn.get(hri);
        // Skip the offlined split parent region
        // See HBASE-4578 for more information.
        if (null == sn) {
            LOG.info("Skip " + hri);
            continue;
        }
        // FIX: single map lookup instead of containsKey/put/get (three lookups).
        List<HRegionInfo> serverRegions = serverToRegions.get(sn);
        if (serverRegions == null) {
            serverRegions = Lists.newLinkedList();
            serverToRegions.put(sn, serverRegions);
        }
        reRegions.add(hri);
        serverRegions.add(hri);
    }

    LOG.info("Reopening " + reRegions.size() + " regions on " + serverToRegions.size() + " region servers.");
    AssignmentManager am = env.getMasterServices().getAssignmentManager();
    am.setRegionsToReopen(reRegions);
    BulkReOpen bulkReopen = new BulkReOpen(env.getMasterServices(), serverToRegions, am);
    // Keep retrying on timeout; stop only on success or interruption.
    while (true) {
        try {
            if (bulkReopen.bulkReOpen()) {
                done = true;
                break;
            } else {
                LOG.warn("Timeout before reopening all regions");
            }
        } catch (InterruptedException e) {
            LOG.warn("Reopen was interrupted");
            // Preserve the interrupt.
            Thread.currentThread().interrupt();
            break;
        }
    }
    return done;
}

From source file:org.orekit.models.earth.GeoMagneticFieldFactory.java

/** Loads the geomagnetic model files from the given filename. The loaded
 * models are inserted in a {@link TreeMap} with their epoch as key in order
 * to retrieve them in a sorted manner./*from   w w  w  . j  a  v  a  2  s  .c  o  m*/
 * @param supportedNames a regular expression for valid filenames
 * @return a {@link TreeMap} of all loaded models
 * @throws OrekitException if the models could not be loaded
 */
private static TreeMap<Integer, GeoMagneticField> loadModels(final String supportedNames)
        throws OrekitException {

    TreeMap<Integer, GeoMagneticField> loadedModels = null;
    final GeoMagneticModelLoader loader = new GeoMagneticModelLoader();
    DataProvidersManager.getInstance().feed(supportedNames, loader);

    if (!loader.stillAcceptsData()) {
        final Collection<GeoMagneticField> models = loader.getModels();
        if (models != null) {
            loadedModels = new TreeMap<Integer, GeoMagneticField>();
            for (GeoMagneticField model : models) {
                // round to a precision of two digits after the comma
                final int epoch = (int) FastMath.round(model.getEpoch() * 100d);
                loadedModels.put(epoch, model);
            }
        }
    }

    // if no models could be loaded -> throw exception
    if (loadedModels == null || loadedModels.size() == 0) {
        throw new OrekitException(OrekitMessages.UNABLE_TO_FIND_RESOURCE, supportedNames);
    }

    return loadedModels;
}

From source file:org.ow2.proactive.scheduler.authentication.ManageUsers.java

/**
 * Stores the logins into login.cfg/*w  w w .  j av  a  2s . c om*/
 */
private static void storeLoginFile(String loginFilePath, Properties props) throws IOException {
    try (BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(loginFilePath)))) {
        props.store(writer, null);
    }
    List<String> lines = null;

    try (FileInputStream stream = new FileInputStream(loginFilePath)) {
        lines = IOUtils.readLines(stream);
    }

    TreeMap<String, String> sortedUsers = new TreeMap<>();
    for (String line : lines) {
        if (!(line.isEmpty() || line.startsWith("#"))) {
            String[] loginAndPwd = line.split("=", 2);
            sortedUsers.put(loginAndPwd[0], loginAndPwd[1]);
        }
    }
    List<String> modifiedLines = new ArrayList<>(sortedUsers.size());
    for (Map.Entry entry : sortedUsers.entrySet()) {
        modifiedLines.add(entry.getKey() + ":" + entry.getValue());
    }
    try (BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(loginFilePath)))) {
        IOUtils.writeLines(modifiedLines, System.getProperty("line.separator"), writer);
    }
    System.out.println("Stored login file in " + loginFilePath);
}

From source file:cit360.sandbox.BackEndMenu.java

/**
 * Demonstrates common TreeMap operations (insert, iterate, first/last key, clear)
 * using movie ticket prices keyed by age group.
 */
public static void ticketPrices() {
    // FIX: parameterized TreeMap instead of the raw type — keys and values are type-checked.
    TreeMap<String, Double> ageGroup = new TreeMap<String, Double>();

    // Add some ageGroup.
    ageGroup.put("Adult", 8.75);
    ageGroup.put("Child", 5.50);
    ageGroup.put("Senior Citizen", 5.25);
    ageGroup.put("Military Veteran", 5.00);

    // Iterate over all ageGroup, using the keySet method.
    for (String key : ageGroup.keySet())
        System.out.println(key + " - $" + ageGroup.get(key));
    System.out.println();

    // Keys are sorted lexicographically, so first/last keys are the extremes.
    System.out.println("Highest key: " + ageGroup.lastKey());
    System.out.println("Lowest key: " + ageGroup.firstKey());

    System.out.println("\nPrinting all values: ");
    for (Double val : ageGroup.values())
        System.out.println("$" + val);
    System.out.println();

    // Clear all values.
    ageGroup.clear();

    // Equals to zero.
    System.out.println("After clear operation, size: " + ageGroup.size());
}

From source file:Main.java

/**
 * Picks the most suitable window to own a new child window.
 *
 * Preference order: the focused window, then the active window, then the
 * best-ranked visible window (modal dialogs over non-modal dialogs over frames,
 * windows with no children getting a small bonus), then any visible window.
 *
 * @return the chosen owner window, or null if no visible window exists
 */
public static Window getOwnerForChildWindow() {
    Window w = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusedWindow();
    if (w != null) {
        return w;
    }
    w = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
    if (w != null) {
        return w;
    }
    /*
     * Priority level1
     * modal dialog: +200
     * non-modal dialog: +100
     * frame: +0
     *
     * Priority level2
     * no owned windows: +10
     */
    TreeMap<Integer, Window> prioMap = new TreeMap<Integer, Window>();
    for (Window cand : Window.getWindows()) {
        // Only visible, showing windows are candidates.
        if (cand == null || !cand.isVisible() || !cand.isShowing()) {
            continue;
        }
        int prio = 0;
        Window[] children = cand.getOwnedWindows();
        if (children == null || children.length == 0) {
            prio += 10;
        }
        if (cand instanceof Dialog) {
            Dialog dlg = (Dialog) cand;
            prio += dlg.isModal() ? 200 : 100;
            prioMap.put(prio, cand);
        } else if (cand instanceof Frame) {
            // A frame never replaces an equally-ranked earlier candidate.
            if (!prioMap.containsKey(prio)) {
                prioMap.put(prio, cand);
            }
        }
    }
    if (!prioMap.isEmpty()) {
        // Highest priority wins; single lookup via lastEntry.
        return prioMap.lastEntry().getValue();
    }
    // Last line of defense: any visible window at all.
    // (FIX: the original re-checked prioMap.size() == 0 here, which is always true
    // after the branch above returned — the redundant guard is removed.)
    for (Window cand : Window.getWindows()) {
        if (cand == null) {
            continue;
        }
        if (cand.isVisible()) {
            return cand;
        }
    }
    return null;
}

From source file:com.sfs.DataFilter.java

/**
 * Parses the text data./*  ww  w.j  a  va 2 s  .c om*/
 *
 * @param text the text
 *
 * @return the tree map< integer, tree map< integer, string>>
 */
public static TreeMap<Integer, TreeMap<Integer, String>> parseTextData(final String text) {

    TreeMap<Integer, TreeMap<Integer, String>> parsedData = new TreeMap<Integer, TreeMap<Integer, String>>();

    // This counter holds the maximum number of columns provided
    int maxNumberOfTokens = 0;

    if (text != null) {
        StringTokenizer tokenizer = new StringTokenizer(text, "\n");

        int lineCounter = 1;

        while (tokenizer.hasMoreTokens()) {
            String line = tokenizer.nextToken();
            TreeMap<Integer, String> parsedLine = new TreeMap<Integer, String>();

            final StringTokenizer tabTokenizer = new StringTokenizer(line, "\t");
            if (tabTokenizer.countTokens() > 1) {
                parsedLine = tokenizerToMap(tabTokenizer);
            } else {
                final StringTokenizer commaTokenizer = new StringTokenizer(line, ",");
                parsedLine = tokenizerToMap(commaTokenizer);
            }
            if (parsedLine.size() > maxNumberOfTokens) {
                maxNumberOfTokens = parsedLine.size();
            }

            parsedData.put(lineCounter, parsedLine);
            lineCounter++;
        }
    }

    // Now cycle through all the parsed data
    // Ensure that each row has the same (max) number of tokens
    for (int rowIndex : parsedData.keySet()) {
        TreeMap<Integer, String> parsedLine = parsedData.get(rowIndex);

        // This map holds the final values
        TreeMap<Integer, String> columnTokens = new TreeMap<Integer, String>();

        for (int i = 0; i < maxNumberOfTokens; i++) {
            int columnIndex = i + 1;
            if (parsedLine.containsKey(columnIndex)) {
                String value = parsedLine.get(columnIndex);
                columnTokens.put(columnIndex, value);
            } else {
                columnTokens.put(columnIndex, "");
            }
        }
        parsedData.put(rowIndex, columnTokens);
    }

    return parsedData;
}

From source file:se.sics.kompics.p2p.experiment.cyclon.CyclonDataPoint.java

/**
 * Captures one experiment data point: structural metrics computed over the
 * Cyclon overlay graph formed by the currently alive peers.
 *
 * @param index sequence number of this data point
 * @param alivePeers alive peers keyed by overlay address, with their Cyclon neighbor sets
 */
public CyclonDataPoint(int index, TreeMap<OverlayAddress, CyclonNeighbors> alivePeers) {
    this.index = index;
    // Network size is the number of alive peers at sampling time.
    this.networkSize = alivePeers.size();
    // Build a graph view of the overlay to derive the metrics below.
    GraphUtil g = new GraphUtil(alivePeers);
    averagePathLength = g.getMeanPathLength();
    diameter = g.getDiameter();
    clusteringCoefficient = g.getMeanClusteringCoefficient();
    inDegree = g.getInStats();
    outDegree = g.getOutStats();
}

From source file:com.romeikat.datamessie.core.base.dao.impl.AbstractEntityWithIdAndVersionDaoTest.java

/**
 * Verifies that getIdsWithVersion honours the requested id collection: an empty
 * request yields an empty map, and an explicit id list yields exactly those ids
 * (in the same order, since the returned TreeMap sorts its keys).
 */
@Test
public void getIdsWithVersion_ids() {
    // Empty input -> empty result.
    Collection<Long> ids = Lists.newArrayList();
    TreeMap<Long, Long> idsWithVersion = dao.getIdsWithVersion(sessionProvider.getStatelessSession(), ids);
    assertEquals(0, idsWithVersion.size());
    assertTrue(CollectionUtils.isEqualCollection(ids, idsWithVersion.keySet()));

    // FIX: use the uppercase long-literal suffix (1L); lowercase 'l' is easily read as '1'.
    ids = Lists.newArrayList(1L, 2L, 3L);
    idsWithVersion = dao.getIdsWithVersion(sessionProvider.getStatelessSession(), ids);
    assertEquals(3, idsWithVersion.size());
    assertEquals(ids, Lists.newArrayList(idsWithVersion.keySet()));

    // No data was modified, so the next launch can skip the DB setup.
    dbSetupTracker.skipNextLaunch();
}