Example usage for java.util TreeMap get

List of usage examples for java.util TreeMap get

Introduction

This page presents example usages of the java.util TreeMap get method.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
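
For example, a minimal sketch of the two outcomes (the class name and map contents here are illustrative, not from the library):

import java.util.TreeMap;

public class TreeMapGetDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> ages = new TreeMap<>();
        ages.put("alice", 30);

        System.out.println(ages.get("alice")); // prints 30
        System.out.println(ages.get("bob"));   // prints null: no mapping for "bob"
    }
}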

Usage

From source file:org.apdplat.superword.tools.SentenceScorer.java

public static TreeMap<Float, Map<String, List<String>>> score(String path, int limit) {
    // collect the training text file names
    Set<String> fileNames = TextAnalyzer.getFileNames(path);
    // count word frequencies across the training texts
    Map<String, AtomicInteger> frequency = TextAnalyzer.frequency(fileNames);
    // sentences that passed scoring, keyed by score
    TreeMap<Float, Map<String, List<String>>> sentences = new TreeMap<>();
    // for de-duplicating sentences
    Set<Integer> hashes = new HashSet<>();
    Set<String> repeat = new HashSet<>();
    // cap on the number of scored sentences
    int count = 0;
    for (String fileName : fileNames) {
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(new BufferedInputStream(new FileInputStream(fileName))))) {
            String book = Paths.get(fileName).toFile().getName().replace(".txt", "");
            String line = null;
            while ((line = reader.readLine()) != null) {
                if (StringUtils.isBlank(line)) {
                    continue;
                }
                int hc = line.hashCode();
                if (hashes.contains(hc)) {
                    repeat.add(line);
                    continue;
                }
                hashes.add(hc);
                // score the sentence against the word frequencies
                float score = score(line, frequency);
                if (score > 0) {
                    if (count >= limit) {
                        LOGGER.debug("?????" + limit + "?");
                        return sentences;
                    }
                    count++;
                    sentences.putIfAbsent(score, new HashMap<>());
                    sentences.get(score).putIfAbsent(book, new ArrayList<>());
                    sentences.get(score).get(book).add(line);
                }
            }
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }
    LOGGER.debug("????" + repeat.size());
    AtomicInteger i = new AtomicInteger();
    repeat.forEach(r -> {
        LOGGER.debug("\t" + i.incrementAndGet() + "?" + r);
    });
    LOGGER.debug("???" + count);
    return sentences;
}
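
A side note on the nested insert above: the putIfAbsent/get pairs perform several lookups per line. On Java 8+, the same get-or-create logic can be collapsed into one chained expression; a sketch, not the original code:

sentences.computeIfAbsent(score, s -> new HashMap<>())
        .computeIfAbsent(book, b -> new ArrayList<>())
        .add(line);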

From source file:edu.illinois.cs.cogcomp.transliteration.WikiTransliteration.java

/**
 * Helper function.
 * @param word1 the word to generate predictions for
 * @param maxSubstringLength maximum length of source substrings to consider
 * @param probMap map from source substrings to their candidate target substrings
 * @param probs probability of each source-to-target production
 * @param memoizationTable cache of results for previously seen words
 * @param pruneToSize maximum number of candidate predictions to keep
 * @return map from predicted strings to their probabilities
 */
public static HashMap<String, Double> Predict2(String word1, int maxSubstringLength,
        Map<String, HashSet<String>> probMap, HashMap<Production, Double> probs,
        HashMap<String, HashMap<String, Double>> memoizationTable, int pruneToSize) {
    HashMap<String, Double> result;
    if (word1.length() == 0) {
        result = new HashMap<>(1);
        result.put("", 1.0);
        return result;
    }

    if (memoizationTable.containsKey(word1)) {
        return memoizationTable.get(word1);
    }

    result = new HashMap<>();

    int maxSubstringLength1 = Math.min(word1.length(), maxSubstringLength);

    for (int i = 1; i <= maxSubstringLength1; i++) {
        String substring1 = word1.substring(0, i);

        if (probMap.containsKey(substring1)) {

            // recursion right here.
            HashMap<String, Double> appends = Predict2(word1.substring(i), maxSubstringLength, probMap, probs,
                    memoizationTable, pruneToSize);

            //int segmentations = Segmentations( word1.Length - i );

            for (String tgt : probMap.get(substring1)) {
                Production alignment = new Production(substring1, tgt);

                double alignmentProb = probs.get(alignment);

                for (String key : appends.keySet()) {
                    Double value = appends.get(key);
                    String word = alignment.getSecond() + key;
                    //double combinedProb = (pair.Value/segmentations) * alignmentProb;
                    double combinedProb = (value) * alignmentProb;

                    // I hope this is an accurate translation...
                    Dictionaries.IncrementOrSet(result, word, combinedProb, combinedProb);
                }
            }

        }
    }

    if (result.size() > pruneToSize) {
        Double[] valuesArray = result.values().toArray(new Double[result.values().size()]);
        String[] data = result.keySet().toArray(new String[result.size()]);

        //Array.Sort<Double, String> (valuesArray, data);

        TreeMap<Double, String> sorted = new TreeMap<>();
        for (int i = 0; i < valuesArray.length; i++) {
            sorted.put(valuesArray[i], data[i]);
        }

        // FIXME: is this sorted in the correct order???

        //double sum = 0;
        //for (int i = data.Length - pruneToSize; i < data.Length; i++)
        //    sum += valuesArray[i];

        result = new HashMap<>(pruneToSize);
        //            for (int i = data.length - pruneToSize; i < data.length; i++)
        //                result.put(data[i], valuesArray[i]);

        int i = 0;
        for (Double d : sorted.descendingKeySet()) {
            result.put(sorted.get(d), d);
            if (++i >= pruneToSize) { // stop once pruneToSize entries are kept
                break;
            }
        }
    }

    memoizationTable.put(word1, result);
    return result;
}
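
On the FIXME above: a TreeMap keeps its keys in ascending natural order, so descendingKeySet() does iterate from the highest probability downward and the loop keeps the top candidates. Note, however, that keying the TreeMap by probability silently drops candidates that share a score. A standalone sketch (illustrative names, not part of the original source) that prunes to the top entries without that collapse:

import java.util.*;

class TopNPruning {
    // Keep the pruneToSize highest-scoring entries. Sorting the entries
    // directly avoids losing candidates that have duplicate scores.
    static Map<String, Double> pruneToTop(Map<String, Double> scores, int pruneToSize) {
        List<Map.Entry<String, Double>> entries = new ArrayList<>(scores.entrySet());
        entries.sort(Map.Entry.<String, Double>comparingByValue().reversed());
        Map<String, Double> pruned = new HashMap<>(pruneToSize);
        for (Map.Entry<String, Double> e : entries.subList(0, Math.min(pruneToSize, entries.size()))) {
            pruned.put(e.getKey(), e.getValue());
        }
        return pruned;
    }
}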

From source file:org.starfishrespect.myconsumption.server.business.sensors.SensorsDataRetriever.java

/**
 * Retrieves and stores the data for one user
 *
 * @param onlyThisSensorId retrieve only data for one sensor with this id
 * @return false if something goes wrong; true otherwise
 */
public boolean retrieve(List<Sensor> sensors, String onlyThisSensorId) {
    boolean allSuccessful = true;
    for (Sensor sensor : sensors) {
        System.out.println("Retrieve data for sensor " + sensor.getId());
        try {
            valuesRepository.setSensor(sensor.getId());
            valuesRepository.init();
            if (onlyThisSensorId != null) {
                if (!sensor.getId().equals(onlyThisSensorId)) {
                    continue;
                }
            }
            HashMap<Integer, HashMap<Integer, Integer>> sortedValues = new HashMap<Integer, HashMap<Integer, Integer>>();
            Date lastValue = sensor.getLastValue();
            SensorRetriever retriever = null;
            if (sensor instanceof FluksoSensor) {
                retriever = new FluksoRetriever((FluksoSensor) sensor);
            }
            if (retriever == null) {
                System.out.println("This sensor type has not been found!");
                continue;
            }
            TreeMap<Integer, Integer> data = retriever.getDataSince(lastValue).getData();
            if (data.size() != 0) {
                for (int key : data.keySet()) {
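                    // round the epoch-second key down to the start of its hour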
                    int hour = key - key % 3600;
                    HashMap<Integer, Integer> hourData = sortedValues.get(hour);
                    if (hourData == null) {
                        hourData = new HashMap<Integer, Integer>();
                        sortedValues.put(hour, hourData);
                    }
                    hourData.put(key % 3600, data.get(key));
                }

                for (int key : sortedValues.keySet()) {
                    Date dateKey = new Date(key * 1000L);
                    SensorDataset newValue = new SensorDataset(dateKey);
                    newValue.addAllValues(sortedValues.get(key));
                    valuesRepository.insertOrUpdate(newValue);
                }
                if (sensor.getLastValue().before(new Date(data.lastKey() * 1000L))) {
                    sensor.setLastValue(new Date(data.lastKey() * 1000L));
                }
                if (sensor.getFirstValue().after(new Date(data.firstKey() * 1000L))
                        || sensor.getFirstValue().getTime() == 0) {
                    sensor.setFirstValue(new Date(data.firstKey() * 1000L));
                }
                // sync operation: this avoids re-inserting a sensor that was deleted
                // while its data was being retrieved
                int currentUsageCount = sensorRepository.getUsageCount(sensor.getId());
                if (currentUsageCount > -1) {
                    // update: the field may have been incremented while data was being retrieved
                    sensor.setUsageCount(currentUsageCount);
                    sensor.setDead(false);
                    sensorRepository.updateSensor(sensor);
                }
                System.out.println("Retrieve successful");
            } else {
                System.out.println("No values retrieved for this sensor");
                if (!sensor.isDead()) {
                    // check whether the sensor has gone dead
                    Calendar cal = new GregorianCalendar();
                    cal.add(Calendar.HOUR, -6);
                    if (sensor.getLastValue().before(new Date(cal.getTimeInMillis()))) {
                        System.out.println(
                                "Sensor has not sent anything in the last 6 hours! Set its status as dead.");
                        sensor.setDead(true);
                        sensorRepository.updateSensor(sensor);
                    }
                } else {
                    System.out.println("Sensor is still dead");
                }
            }
        } catch (RetrieveException | DaoException e) {
            System.err.println(e.getMessage());
            allSuccessful = false;
        }
    }

    return allSuccessful;
}

From source file:monasca.persister.repository.cassandra.CassandraMetricRepo.java

@Override
public void addToBatch(MetricEnvelope metricEnvelope, String id) {
    Metric metric = metricEnvelope.metric;
    Map<String, Object> metaMap = metricEnvelope.meta;

    String tenantId = getMeta(TENANT_ID, metric, metaMap, id);
    String region = getMeta(REGION, metric, metaMap, id);
    String metricName = metric.getName();
    TreeMap<String, String> dimensions = metric.getDimensions() == null ? new TreeMap<String, String>()
            : new TreeMap<>(metric.getDimensions());

    StringBuilder sb = new StringBuilder(region).append(tenantId).append(metricName);

    Iterator<String> it = dimensions.keySet().iterator();
    while (it.hasNext()) {
        String k = it.next();
        sb.append(k).append(dimensions.get(k));
    }

    byte[] defIdSha = DigestUtils.sha(sb.toString());
    Sha1HashId defIdShaHash = new Sha1HashId(defIdSha);

    if (cluster.getMetricIdCache().getIfPresent(defIdShaHash.toHexString()) == null) {
        addDefinitionToBatch(defIdShaHash, metricName, dimensions, tenantId, region, id, metric.getTimestamp());
        batches.addMeasurementQuery(buildMeasurementInsertQuery(defIdShaHash, metric.getTimestamp(),
                metric.getValue(), metric.getValueMeta(), region, tenantId, metricName, dimensions, id));
    } else {
        metricCacheHitMeter.mark();
        batches.addMetricQuery(cluster.getMetricUpdateStmt().bind(retention,
                new Timestamp(metric.getTimestamp()), region, tenantId, metricName,
                getDimensionList(dimensions), new ArrayList<>(dimensions.keySet())));
        batches.addMeasurementQuery(buildMeasurementUpdateQuery(defIdShaHash, metric.getTimestamp(),
                metric.getValue(), metric.getValueMeta(), id));
    }

    metricCount++;
}
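
Because dimensions is a TreeMap, the loop above visits dimension names in sorted order, so the string fed into the SHA-1 digest, and with it the definition id, is stable no matter what order the dimensions arrived in.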

From source file:org.starfishrespect.myconsumption.server.business.repositories.repositoriesimpl.ValuesRepositoryImpl.java

@Override
public void insertOrUpdate(SensorDataset value) throws DaoException {
    Update update = new Update();
    Query existingQuery = new Query(new Criteria("timestamp").is(value.getTimestamp()));

    if (mongoOperation.exists(existingQuery, SensorDataset.class, collectionName)) {
        TreeMap<Integer, MinuteValues> minuteValues = value.getValues();
        for (Integer minuteTs : minuteValues.keySet()) {
            Query existingMinute = new Query(
                    new Criteria().andOperator(Criteria.where("timestamp").is(value.getTimestamp()),
                            Criteria.where("values." + minuteTs)));
            MinuteValues minute;
            if (mongoOperation.exists(existingMinute, MinuteValues.class, collectionName)) {
                minute = mongoOperation.findOne(existingMinute, MinuteValues.class, collectionName);
                minute.merge(minuteValues.get(minuteTs));
            } else {
                minute = minuteValues.get(minuteTs);
            }
            update.set("values." + minuteTs, minute);
        }
        mongoOperation.updateFirst(existingQuery, update, collectionName);
    } else {
        mongoOperation.save(value, collectionName);
    }
}
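
Since the loop both reads each key and fetches its value, iterating minuteValues.entrySet() instead of keySet() would avoid the repeated get calls, each of which costs O(log n) on a TreeMap.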

From source file:be.cytomine.client.HttpClient.java

public void authorize(String action, String url, String contentType, String accept) throws IOException {
    url = url.replace(host, "");
    url = url.replace("http://" + host, "");
    url = url.replace("https://" + host, "");

    TreeMap<String, String> headers = new TreeMap<String, String>();
    headers.put("accept", accept);
    headers.put("date", getActualDateStr());

    log.debug("AUTHORIZE: " + action + "\\n\\n" + contentType + "\\n" + headers.get("date") + "\n");

    String canonicalHeaders = action + "\n\n" + contentType + "\n" + headers.get("date") + "\n";

    String messageToSign = canonicalHeaders + url;

    log.debug("publicKey=" + publicKey);
    log.debug("privateKey=" + privateKey);
    log.debug("messageToSign=" + messageToSign);

    SecretKeySpec privateKeySign = new SecretKeySpec(privateKey.getBytes(), "HmacSHA1");

    try {
        Mac mac = Mac.getInstance("HmacSHA1");
        mac.init(privateKeySign);
        byte[] rawHmac = mac.doFinal(new String(messageToSign.getBytes(), "UTF-8").getBytes());

        byte[] signatureBytes = Base64.encodeBase64(rawHmac);

        String signature = new String(signatureBytes);

        String authorization = "CYTOMINE " + publicKey + ":" + signature;

        log.debug("signature=" + signature);
        log.debug("authorization=" + authorization);

        headers.put("authorization", authorization);

        for (String key : headers.keySet()) {
            addHeader(key, headers.get(key));
        }

    } catch (GeneralSecurityException e) {
        throw new IOException(e);
    }

}
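
Incidentally, because headers is a TreeMap, the loop at the end adds the headers to the request in sorted-by-name order.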

From source file:com.netflix.discovery.shared.Applications.java

/**
 * Populates the provided instance count map.  The instance count map is used as part of the general
 * app list synchronization mechanism.
 * @param instanceCountMap the map to populate
 */
public void populateInstanceCountMap(TreeMap<String, AtomicInteger> instanceCountMap) {
    for (Application app : this.getRegisteredApplications()) {
        for (InstanceInfo info : app.getInstancesAsIsFromEureka()) {
            AtomicInteger instanceCount = instanceCountMap.get(info.getStatus().name());
            if (instanceCount == null) {
                instanceCount = new AtomicInteger(0);
                instanceCountMap.put(info.getStatus().name(), instanceCount);
            }
            instanceCount.incrementAndGet();
        }
    }
}
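
The null-check-then-put sequence above is the classic pre-Java-8 counter idiom. On Java 8+, the body of the inner loop could collapse to a single line (a sketch, not the library's code):

instanceCountMap.computeIfAbsent(info.getStatus().name(), k -> new AtomicInteger(0)).incrementAndGet();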

From source file:org.apache.hadoop.raid.TestRaidHistogram.java

/**
 * Have three stages. Each stage spawns nPercents threads.
 * Each thread iterates $rounds rounds and sends random numbers for
 * each monitor dir to the raidnode, covering both succeeded and failed files.
 * Set two windows: The first window covers stage3 only.
 * The second window covers stage2 and stage3 only.
 * Calling getBlockFixStatus should be able to filter out all stage1 points
 * The histogram counts for the second window should be double those of
 * the first window.
 */
public void testHistograms() throws Exception {
    int rounds = 10000;
    int range = 1000000;
    int dividedRange = range / 1000;
    float step = 1.0f / nPercents;
    try {
        mySetup();
        cnode = RaidNode.createRaidNode(null, conf);
        ArrayList<Float> percents = new ArrayList<Float>();

        for (int i = 0; i <= nPercents; i++) {
            percents.add(step * i);
        }
        Collections.shuffle(percents);
        // submit some old data
        sendRecoveryTimes(nPercents, range * (nPercents + 1), range, rounds);
        Thread.sleep(100);
        long ckpTime1 = System.currentTimeMillis();

        sendRecoveryTimes(nPercents, 0, range, rounds);
        Thread.sleep(100);
        long ckpTime2 = System.currentTimeMillis();

        sendRecoveryTimes(nPercents, 0, range, rounds);
        long endTime = System.currentTimeMillis();
        ArrayList<Long> newWindows = new ArrayList<Long>();
        newWindows.add(endTime - ckpTime2);
        newWindows.add(endTime - ckpTime1);
        HashMap<String, RaidHistogram> recoveryTimes = cnode.blockIntegrityMonitor.getRecoveryTimes();
        for (RaidHistogram histogram : recoveryTimes.values()) {
            histogram.setNewWindows(newWindows);
        }
        for (int i = 0; i <= monitorDirs.length; i++) {
            String monitorDir;
            if (i < monitorDirs.length) {
                monitorDir = monitorDirs[i];
            } else {
                monitorDir = BlockIntegrityMonitor.OTHERS;
            }
            assertEquals("Stale entries are not filtered", rounds * nPercents * 3 * 2,
                    cnode.blockIntegrityMonitor.getNumberOfPoints(monitorDir));
            TreeMap<Long, BlockFixStatus> status = cnode.blockIntegrityMonitor.getBlockFixStatus(monitorDir,
                    nPercents, percents, endTime);
            assertTrue(status.containsKey(newWindows.get(0)));
            assertTrue(status.containsKey(newWindows.get(1)));
            BlockFixStatus bfs = status.get(newWindows.get(0));
            assertEquals("Stale entries are not filtered", rounds * nPercents * 2 * 2,
                    cnode.blockIntegrityMonitor.getNumberOfPoints(monitorDir));
            // Verify failed recovered files for the first window
            assertEquals("The number of failed recovery files should match", rounds * nPercents,
                    bfs.failedPaths);
            // Verify histogram for the first window
            assertEquals(nPercents, bfs.counters.length);
            for (int j = 0; j < nPercents; j++) {
                assertEquals(rounds, bfs.counters[j]);
            }
            // Verify percent values for the first window
            assertEquals(nPercents + 1, bfs.percentValues.length);
            assertEquals(0, bfs.percentValues[0]);
            for (int j = 1; j <= nPercents; j++) {
                assertEquals(dividedRange * j - 1, bfs.percentValues[j]);
            }
            bfs = status.get(newWindows.get(1));
            // Verify failed recovered files for the second window
            assertEquals("The number of failed recovery files should match", rounds * nPercents,
                    bfs.failedPaths);
            // Verify histogram for the second window
            assertEquals(nPercents, bfs.counters.length);
            for (int j = 0; j < nPercents; j++) {
                assertEquals(rounds * 2, bfs.counters[j]);
            }
            // Verify percent values for the second window
            assertEquals(nPercents + 1, bfs.percentValues.length);
            assertEquals(0, bfs.percentValues[0]);
            for (int j = 1; j <= nPercents; j++) {
                assertEquals(dividedRange * j - 1, bfs.percentValues[j]);
            }
        }
    } finally {
        myTearDown();
    }
}

From source file:org.apache.hadoop.net.unix.DomainSocketWatcher.java

/**
 * Send callback and return whether or not the domain socket was closed as a
 * result of processing.
 *
 * @param caller reason for call
 * @param entries mapping of file descriptor to entry
 * @param fdSet set of file descriptors
 * @param fd file descriptor
 * @return true if the domain socket was closed as a result of processing
 */
private boolean sendCallback(String caller, TreeMap<Integer, Entry> entries, FdSet fdSet, int fd) {
    if (LOG.isTraceEnabled()) {
        LOG.trace(this + ": " + caller + " starting sendCallback for fd " + fd);
    }
    Entry entry = entries.get(fd);
    Preconditions.checkNotNull(entry, this + ": fdSet contained " + fd + ", which we were " + "not tracking.");
    DomainSocket sock = entry.getDomainSocket();
    if (entry.getHandler().handle(sock)) {
        if (LOG.isTraceEnabled()) {
            LOG.trace(this + ": " + caller + ": closing fd " + fd + " at the request of the handler.");
        }
        if (toRemove.remove(fd) != null) {
            if (LOG.isTraceEnabled()) {
                LOG.trace(this + ": " + caller + " : sendCallback processed fd " + fd + " in toRemove.");
            }
        }
        try {
            sock.refCount.unreferenceCheckClosed();
        } catch (IOException e) {
            Preconditions.checkArgument(false, this + ": file descriptor " + sock.fd + " was closed while "
                    + "still in the poll(2) loop.");
        }
        IOUtils.cleanup(LOG, sock);
        fdSet.remove(fd);
        return true;
    } else {
        if (LOG.isTraceEnabled()) {
            LOG.trace(this + ": " + caller + ": sendCallback not " + "closing fd " + fd);
        }
        return false;
    }
}

From source file:org.opendatakit.common.android.data.ColumnDefinition.java

private static void getDataModelHelper(TreeMap<String, Object> jsonSchema, ColumnDefinition c,
        boolean nestedInsideUnitOfRetention) {
    ElementType type = c.getType();
    ElementDataType dataType = type.getDataType();

    // this is a user-defined field
    jsonSchema.put(JSON_SCHEMA_ELEMENT_SET, JSON_SCHEMA_INSTANCE_DATA_VALUE);
    jsonSchema.put(JSON_SCHEMA_ELEMENT_NAME, c.getElementName());
    jsonSchema.put(JSON_SCHEMA_ELEMENT_KEY, c.getElementKey());

    if (nestedInsideUnitOfRetention) {
        jsonSchema.put(JSON_SCHEMA_NOT_UNIT_OF_RETENTION, Boolean.TRUE);
    }

    if (dataType == ElementDataType.array) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
        ColumnDefinition ch = c.getChildren().get(0);
        TreeMap<String, Object> itemSchema = new TreeMap<String, Object>();
        jsonSchema.put(JSON_SCHEMA_ITEMS, itemSchema);
        itemSchema.put(JSON_SCHEMA_ELEMENT_PATH,
                ((String) jsonSchema.get(JSON_SCHEMA_ELEMENT_PATH)) + '.' + ch.getElementName());
        // if it isn't already nested within a unit of retention,
        // an array is always itself a unit of retention
        getDataModelHelper(itemSchema, ch, true); // recursion...
    } else if (dataType == ElementDataType.bool) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
    } else if (dataType == ElementDataType.configpath) {
        jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
        jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
    } else if (dataType == ElementDataType.integer) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
    } else if (dataType == ElementDataType.number) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
    } else if (dataType == ElementDataType.object) {
        jsonSchema.put(JSON_SCHEMA_TYPE, dataType.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
        TreeMap<String, Object> propertiesSchema = new TreeMap<String, Object>();
        jsonSchema.put(JSON_SCHEMA_PROPERTIES, propertiesSchema);
        for (ColumnDefinition ch : c.getChildren()) {
            TreeMap<String, Object> itemSchema = new TreeMap<String, Object>();
            propertiesSchema.put(ch.getElementName(), itemSchema);
            itemSchema.put(JSON_SCHEMA_ELEMENT_PATH,
                    ((String) jsonSchema.get(JSON_SCHEMA_ELEMENT_PATH)) + '.' + ch.getElementName());
            // objects are not units of retention -- propagate retention status.
            getDataModelHelper(itemSchema, ch, nestedInsideUnitOfRetention); // recursion...
        }
    } else if (dataType == ElementDataType.rowpath) {
        jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
        jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, ElementDataType.rowpath.name());
    } else if (dataType == ElementDataType.string) {
        jsonSchema.put(JSON_SCHEMA_TYPE, ElementDataType.string.name());
        if (!c.getElementType().equals(dataType.name())) {
            jsonSchema.put(JSON_SCHEMA_ELEMENT_TYPE, c.getElementType());
        }
    } else {
        throw new IllegalStateException("unexpected alternative ElementDataType");
    }
}
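
Note that jsonSchema is a TreeMap, so the schema keys stay in sorted order, which, assuming the serializer preserves map iteration order, gives the emitted data model a deterministic key layout.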