Example usage for java.util TreeMap get

List of usage examples for java.util TreeMap get

Introduction

On this page you can find usage examples for java.util TreeMap get.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
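Below is a minimal, self-contained sketch (not taken from any of the projects listed under Usage) illustrating exactly that contract: get returns the mapped value when the key is present, and null when it is not.

import java.util.TreeMap;

public class TreeMapGetExample {
    public static void main(String[] args) {
        TreeMap<String, Integer> ages = new TreeMap<String, Integer>();
        ages.put("alice", 30);
        ages.put("bob", 25);

        // key present: get returns the mapped value
        System.out.println(ages.get("alice")); // 30

        // key absent: get returns null rather than throwing
        System.out.println(ages.get("carol")); // null
    }
}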

Usage

From source file:com.impetus.ankush2.framework.monitor.AbstractMonitor.java

/**
 * Recursive method to split the dot-separated string and populate the tree
 * map with a nested tree structure.
 * 
 * @param treeMap
 *            the tree map
 * @param root
 *            the root
 * @param rest
 *            the rest
 */
private static void treePopulation(TreeMap treeMap, String root, String rest) {
    // split the remaining path into two parts at the first '.'
    String[] tmp = rest.split("\\.", 2);

    // get the existing subtree for this root, if any.
    TreeMap rootValue = (TreeMap) treeMap.get(root);

    // if no subtree exists yet, create one.
    if (rootValue == null) {
        rootValue = new TreeMap();
        treeMap.put(root, rootValue);
    }
    // only one part remains: this is the end of the path
    if (tmp.length == 1) {
        rootValue.put(tmp[0], null);
    } else {
        // recursive call for the rest of the string.
        treePopulation(rootValue, tmp[0], tmp[1]);
    }
}
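The lookup-then-create idiom above (call get, and put a fresh container when the result is null) recurs in several of the examples below. A minimal standalone sketch of that pattern, with hypothetical names:

import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;

public class GetOrCreateExample {
    public static void main(String[] args) {
        TreeMap<String, List<String>> groups = new TreeMap<String, List<String>>();

        String key = "fruits";
        // get returns null because no mapping exists yet
        List<String> values = groups.get(key);
        if (values == null) {
            values = new ArrayList<String>();
            groups.put(key, values);
        }
        values.add("apple");

        System.out.println(groups); // {fruits=[apple]}
    }
}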

From source file:chatbot.Chatbot.java

/** *************************************************************************************************
 * @return a list of matches ranked by relevance to the input.
 */
public TreeMap<Float, ArrayList<Integer>> matchInputFull(String input) {

    //System.out.println("Info in TFIDF.matchInputFull(): input: " + input);
    //System.out.println("Info in TFIDF.matchInputFull(): lines: " + lines);
    ArrayList<String> result = new ArrayList<String>();
    if (isNullOrEmpty(input))
        System.exit(0);
    Integer negone = new Integer(-1);
    processDoc(input, negone);
    calcIDF(lines.size() + 1);
    calcOneTFIDF(negone);
    calcDocSim();
    TreeMap<Float, ArrayList<Integer>> sortedSim = new TreeMap<Float, ArrayList<Integer>>();
    if (docSim == null)
        return sortedSim;
    Iterator<Integer> it = docSim.keySet().iterator();
    while (it.hasNext()) {
        Integer i = it.next();
        Float f = docSim.get(i);
        if (sortedSim.containsKey(f)) {
            ArrayList<Integer> vals = sortedSim.get(f);
            vals.add(i);
        } else {
            ArrayList<Integer> vals = new ArrayList<Integer>();
            vals.add(i);
            sortedSim.put(f, vals);
        }
    }
    return sortedSim;
}

From source file:org.apache.hadoop.hbase.rest.client.RemoteHTable.java

public void put(List<Put> puts) throws IOException {
    // this is a trick: The gateway accepts multiple rows in a cell set and
    // ignores the row specification in the URI

    // separate puts by row
    TreeMap<byte[], List<Cell>> map = new TreeMap<byte[], List<Cell>>(Bytes.BYTES_COMPARATOR);
    for (Put put : puts) {
        byte[] row = put.getRow();
        List<Cell> cells = map.get(row);
        if (cells == null) {
            cells = new ArrayList<Cell>();
            map.put(row, cells);
        }
        for (List<Cell> l : put.getFamilyCellMap().values()) {
            cells.addAll(l);
        }
    }

    // build the cell set
    CellSetModel model = new CellSetModel();
    for (Map.Entry<byte[], List<Cell>> e : map.entrySet()) {
        RowModel row = new RowModel(e.getKey());
        for (Cell cell : e.getValue()) {
            KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
            row.addCell(new CellModel(kv));
        }
        model.addRow(row);
    }

    // build path for multiput
    StringBuilder sb = new StringBuilder();
    sb.append('/');
    sb.append(Bytes.toStringBinary(name));
    sb.append("/$multiput"); // can be any nonexistent row
    for (int i = 0; i < maxRetries; i++) {
        Response response = client.put(sb.toString(), Constants.MIMETYPE_PROTOBUF,
                model.createProtobufOutput());
        int code = response.getCode();
        switch (code) {
        case 200:
            return;
        case 509:
            try {
                Thread.sleep(sleepTime);
            } catch (InterruptedException e) {
                throw (InterruptedIOException) new InterruptedIOException().initCause(e);
            }
            break;
        default:
            throw new IOException("multiput request failed with " + code);
        }
    }
    throw new IOException("multiput request timed out");
}

From source file:disko.flow.analyzers.ParseSelectAnalyzer.java

public void process(AnalysisContext<TextDocument> ctx, Ports ports) throws InterruptedException {
    final HyperGraph graph = this.graph != null ? this.graph : ctx.getGraph();

    RelationCountFactory.createCountingIndices(graph);

    InputPort<EntityMaintainer> entityInput = ports.getInput(EntityAnalyzer.ENTITY_CHANNEL);
    InputPort<Set<SentenceInterpretation>> sentenceInput = ports
            .getInput(ToRelOccAnalyzer.SENTENCE_INTERPRETATIONS);
    OutputPort<SentenceInterpretation> out = ports.getOutput(SELECTED_PARSE_CHANNEL);

    for (Set<SentenceInterpretation> iset = sentenceInput.take(); !sentenceInput
            .isEOS(iset); iset = sentenceInput.take()) {
        EntityMaintainer em = entityInput.take();
        if (entityInput.isEOS(em))
            break;

        HashMap<String, String> entityTypes = RelationCountFactory.getEntityTypes(em);

        TreeMap<Double, SentenceInterpretation> ranked = new TreeMap<Double, SentenceInterpretation>();
        for (SentenceInterpretation i : iset) {
            ArrayList<RelationCount> relationCounts = new ArrayList<RelationCount>();
            for (RelOccurrence occ : i.getRelOccs()) {
                if (occ.getArity() < 3)
                    continue;
                List<String> components = occ.getComponents(ctx.getGraph());
                relationCounts.add(
                        RelationCountFactory.getRelationCount(entityTypes, components, occ.getPositions()));
            }
            //                    RelationCountFactory.getRelationCounts(entityTypes, i.getParse());

            double score = computeScores(graph, relationCounts);
            ranked.put(score, i);
        }
        SentenceInterpretation best = ranked.get(ranked.lastKey());
        out.put(best);
    }
    out.close();
}
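The example above retrieves the best-scoring interpretation with ranked.get(ranked.lastKey()), relying on the fact that a TreeMap keeps its keys in ascending order. A minimal standalone sketch of that selection pattern:

import java.util.TreeMap;

public class HighestScoreExample {
    public static void main(String[] args) {
        TreeMap<Double, String> ranked = new TreeMap<Double, String>();
        ranked.put(0.42, "interpretation A");
        ranked.put(0.91, "interpretation B");
        ranked.put(0.17, "interpretation C");

        // keys are stored in ascending order, so the last key has the highest score
        String best = ranked.get(ranked.lastKey());
        System.out.println(best); // interpretation B

        // ranked.lastEntry().getValue() is equivalent and avoids the second lookup
    }
}

Because a TreeMap holds one value per key, entries with identical scores overwrite each other; the Chatbot example above sidesteps this by mapping each score to a list of values.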

From source file:net.unconventionalthinking.hierarchy.langsymboltable.LangSymbolTable_File.java

private void addSymbolto__Maps_w_DuplicateSymNamesAtScope(LangSymbol langSymbol,
        Map<String, TreeMap<Integer, List<LangSymbol>>> symbolTable) {

    TreeMap<Integer, List<LangSymbol>> symbol_TreeMap = symbolTable.get(langSymbol.name);
    if (symbol_TreeMap == null) {
        symbol_TreeMap = new TreeMap<Integer, List<LangSymbol>>();
        symbolTable.put(langSymbol.name, symbol_TreeMap);
    }

    // now, see if there is already a list of symbols (the list contains all the symbols with the same name at the same scope level)
    Integer scopeID = new Integer(langSymbol.scopeID);
    List<LangSymbol> symbolsList__hasSameName_forScopeLevel = symbol_TreeMap.get(scopeID);
    if (symbolsList__hasSameName_forScopeLevel == null) {
        symbolsList__hasSameName_forScopeLevel = new ArrayList<LangSymbol>();
        symbol_TreeMap.put(scopeID, symbolsList__hasSameName_forScopeLevel);
    }

    // add the sym to the symbol list
    symbolsList__hasSameName_forScopeLevel.add(langSymbol);

}

From source file:net.spfbl.core.Analise.java

protected static void dumpClusterMask(StringBuilder builder) {
    TreeMap<String, Short[]> map = getClusterMap();
    for (String token : map.keySet()) {
        if (token.contains("#") || token.contains(".H.")) {
            Short[] dist = map.get(token);
            int spam = dist[1];
            if (spam > 512) {
                int ham = dist[0];
                float total = ham + spam;
                float reputation = spam / total;
                if (reputation > CLUSTER_RED) {
                    if (!Generic.containsGenericExact(token)) {
                        String hostname = token.replace("#", "0");
                        hostname = hostname.replace(".H.", ".0a.");
                        if (!Block.contains(hostname)) {
                            builder.append(token);
                            builder.append(' ');
                            builder.append(ham);
                            builder.append(' ');
                            builder.append(spam);
                            builder.append('\n');
                        }
                    }
                }
            }
        }
    }
}

From source file:com.sfs.DataFilter.java

/**
 * Parses the text data.
 *
 * @param text the text
 *
 * @return a map of row number to a map of column number to cell value
 */
public static TreeMap<Integer, TreeMap<Integer, String>> parseTextData(final String text) {

    TreeMap<Integer, TreeMap<Integer, String>> parsedData = new TreeMap<Integer, TreeMap<Integer, String>>();

    // This counter holds the maximum number of columns provided
    int maxNumberOfTokens = 0;

    if (text != null) {
        StringTokenizer tokenizer = new StringTokenizer(text, "\n");

        int lineCounter = 1;

        while (tokenizer.hasMoreTokens()) {
            String line = tokenizer.nextToken();
            TreeMap<Integer, String> parsedLine = new TreeMap<Integer, String>();

            final StringTokenizer tabTokenizer = new StringTokenizer(line, "\t");
            if (tabTokenizer.countTokens() > 1) {
                parsedLine = tokenizerToMap(tabTokenizer);
            } else {
                final StringTokenizer commaTokenizer = new StringTokenizer(line, ",");
                parsedLine = tokenizerToMap(commaTokenizer);
            }
            if (parsedLine.size() > maxNumberOfTokens) {
                maxNumberOfTokens = parsedLine.size();
            }

            parsedData.put(lineCounter, parsedLine);
            lineCounter++;
        }
    }

    // Now cycle through all the parsed data
    // Ensure that each row has the same (max) number of tokens
    for (int rowIndex : parsedData.keySet()) {
        TreeMap<Integer, String> parsedLine = parsedData.get(rowIndex);

        // This map holds the final values
        TreeMap<Integer, String> columnTokens = new TreeMap<Integer, String>();

        for (int i = 0; i < maxNumberOfTokens; i++) {
            int columnIndex = i + 1;
            if (parsedLine.containsKey(columnIndex)) {
                String value = parsedLine.get(columnIndex);
                columnTokens.put(columnIndex, value);
            } else {
                columnTokens.put(columnIndex, "");
            }
        }
        parsedData.put(rowIndex, columnTokens);
    }

    return parsedData;
}

From source file:com.sa.npopa.samples.hbase.rest.client.RemoteHTable.java

@Override
public void put(List<Put> puts) throws IOException {
    // this is a trick: The gateway accepts multiple rows in a cell set and
    // ignores the row specification in the URI

    // separate puts by row
    TreeMap<byte[], List<Cell>> map = new TreeMap<byte[], List<Cell>>(Bytes.BYTES_COMPARATOR);
    for (Put put : puts) {
        byte[] row = put.getRow();
        List<Cell> cells = map.get(row);
        if (cells == null) {
            cells = new ArrayList<Cell>();
            map.put(row, cells);
        }
        for (List<Cell> l : put.getFamilyCellMap().values()) {
            cells.addAll(l);
        }
    }

    // build the cell set
    CellSetModel model = new CellSetModel();
    for (Map.Entry<byte[], List<Cell>> e : map.entrySet()) {
        RowModel row = new RowModel(e.getKey());
        for (Cell cell : e.getValue()) {
            row.addCell(new CellModel(cell));
        }
        model.addRow(row);
    }

    // build path for multiput
    StringBuilder sb = new StringBuilder();
    sb.append('/');
    sb.append(Bytes.toStringBinary(name));
    sb.append("/$multiput"); // can be any nonexistent row
    for (int i = 0; i < maxRetries; i++) {
        Response response = client.put(sb.toString(), Constants.MIMETYPE_PROTOBUF,
                model.createProtobufOutput());
        int code = response.getCode();
        switch (code) {
        case 200:
            return;
        case 509:
            try {
                Thread.sleep(sleepTime);
            } catch (InterruptedException e) {
                throw (InterruptedIOException) new InterruptedIOException().initCause(e);
            }
            break;
        default:
            throw new IOException("multiput request failed with " + code);
        }
    }
    throw new IOException("multiput request timed out");
}

From source file:com.linkedin.pinot.integration.tests.HybridClusterScanComparisonIntegrationTest.java

protected void runTestLoop(Callable<Object> testMethod, boolean useMultipleThreads) throws Exception {
    // Clean up the Kafka topic
    // TODO jfim: Re-enable this once PINOT-2598 is fixed
    // purgeKafkaTopicAndResetRealtimeTable();

    List<Pair<File, File>> enabledRealtimeSegments = new ArrayList<>();

    // Sort the realtime segments based on their segment name so they get added from earliest to latest
    TreeMap<File, File> sortedRealtimeSegments = new TreeMap<File, File>(new Comparator<File>() {
        @Override
        public int compare(File o1, File o2) {
            return _realtimeAvroToSegmentMap.get(o1).getName()
                    .compareTo(_realtimeAvroToSegmentMap.get(o2).getName());
        }
    });
    sortedRealtimeSegments.putAll(_realtimeAvroToSegmentMap);

    for (File avroFile : sortedRealtimeSegments.keySet()) {
        enabledRealtimeSegments.add(Pair.of(avroFile, sortedRealtimeSegments.get(avroFile)));

        if (useMultipleThreads) {
            _queryExecutor = new ThreadPoolExecutor(4, 4, 5, TimeUnit.SECONDS,
                    new ArrayBlockingQueue<Runnable>(50), new ThreadPoolExecutor.CallerRunsPolicy());
        }

        // Push avro for the new segment
        LOGGER.info("Pushing Avro file {} into Kafka", avroFile);
        pushAvroIntoKafka(Collections.singletonList(avroFile), KafkaStarterUtils.DEFAULT_KAFKA_BROKER,
                KAFKA_TOPIC);

        // Configure the scan based comparator to use the distinct union of the offline and realtime segments
        configureScanBasedComparator(enabledRealtimeSegments);

        QueryResponse queryResponse = _scanBasedQueryProcessor.processQuery("select count(*) from mytable");

        int expectedRecordCount = queryResponse.getNumDocsScanned();
        waitForRecordCountToStabilizeToExpectedCount(expectedRecordCount,
                System.currentTimeMillis() + getStabilizationTimeMs());

        // Run the actual tests
        LOGGER.info("Running queries");
        testMethod.call();

        if (useMultipleThreads) {
            if (_nQueriesRead == -1) {
                _queryExecutor.shutdown();
                _queryExecutor.awaitTermination(5, TimeUnit.MINUTES);
            } else {
                int totalQueries = _failedQueries.get() + _successfulQueries.get();
                while (totalQueries < _nQueriesRead) {
                    LOGGER.info("Completed " + totalQueries + " out of " + _nQueriesRead + " - waiting");
                    Uninterruptibles.sleepUninterruptibly(20, TimeUnit.SECONDS);
                    totalQueries = _failedQueries.get() + _successfulQueries.get();
                }
                if (totalQueries > _nQueriesRead) {
                    throw new RuntimeException("Executed " + totalQueries + " more than " + _nQueriesRead);
                }
                _queryExecutor.shutdown();
            }
        }
        int totalQueries = _failedQueries.get() + _successfulQueries.get();
        doDisplayStatus(totalQueries);

        // Release resources
        _scanBasedQueryProcessor.close();
        _compareStatusFileWriter.write("Status after push of " + avroFile + ":" + System.currentTimeMillis()
                + ":Executed " + _nQueriesRead + " queries, " + _failedQueries + " failures,"
                + _emptyResults.get() + " empty results\n");
    }
}
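The last example constructs the TreeMap with a custom Comparator so that iterating its key set visits the Avro files in segment-name order, and then reads each value back with get. A minimal standalone sketch of that construct-with-comparator pattern, using hypothetical data:

import java.util.Comparator;
import java.util.TreeMap;

public class ComparatorOrderExample {
    public static void main(String[] args) {
        // order keys by length first, falling back to natural order as a tiebreaker
        TreeMap<String, Integer> byLength = new TreeMap<String, Integer>(new Comparator<String>() {
            @Override
            public int compare(String a, String b) {
                int cmp = Integer.compare(a.length(), b.length());
                return cmp != 0 ? cmp : a.compareTo(b);
            }
        });
        byLength.put("segment-10", 10);
        byLength.put("seg-1", 1);
        byLength.put("segment-2", 2);

        for (String key : byLength.keySet()) {
            // get uses the same comparator that orders the map
            System.out.println(key + " -> " + byLength.get(key));
        }
        // prints seg-1, segment-2, segment-10 (shortest key first)
    }
}

Note that keys the comparator considers equal are treated as the same key, which is why the comparator above breaks ties with natural ordering.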