Example usage for com.google.common.collect.Maps.newTreeMap()

Introduction

This page collects example usages of com.google.common.collect.Maps.newTreeMap() from open source projects.

Prototype

public static <K extends Comparable, V> TreeMap<K, V> newTreeMap() 

Document

Creates a mutable, empty TreeMap instance using the natural ordering of its elements.
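
A minimal, runnable sketch of the behavior described above (the class name NewTreeMapDemo is ours, not Guava's): keys must implement Comparable, and iteration follows their natural order.

import com.google.common.collect.Maps;
import java.util.TreeMap;

public class NewTreeMapDemo {
    public static void main(String[] args) {
        // The returned map is mutable and sorts keys by their natural ordering.
        TreeMap<String, Integer> counts = Maps.newTreeMap();
        counts.put("banana", 2);
        counts.put("apple", 5);
        counts.put("cherry", 1);

        // Prints in sorted key order: apple=5, banana=2, cherry=1
        counts.forEach((k, v) -> System.out.println(k + "=" + v));
    }
}

For a custom ordering, Guava also offers the overload Maps.newTreeMap(Comparator).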

Usage

From source file: iterator.util.Config.java

private Config(File override) {
    this.override = override;
    this.config = Maps.newTreeMap();
}

From source file: org.apache.druid.query.metadata.SegmentMetadataQueryRunnerFactory.java

@Override
public QueryRunner<SegmentAnalysis> createRunner(final Segment segment) {
    return new QueryRunner<SegmentAnalysis>() {
        @Override
        public Sequence<SegmentAnalysis> run(QueryPlus<SegmentAnalysis> inQ,
                Map<String, Object> responseContext) {
            SegmentMetadataQuery updatedQuery = ((SegmentMetadataQuery) inQ.getQuery())
                    .withFinalizedAnalysisTypes(toolChest.getConfig());
            final SegmentAnalyzer analyzer = new SegmentAnalyzer(updatedQuery.getAnalysisTypes());
            final Map<String, ColumnAnalysis> analyzedColumns = analyzer.analyze(segment);
            final long numRows = analyzer.numRows(segment);
            long totalSize = 0;

            if (analyzer.analyzingSize()) {
                // Initialize with the size of the whitespace: one byte per column per row
                totalSize = analyzedColumns.size() * numRows;
            }

            Map<String, ColumnAnalysis> columns = Maps.newTreeMap();
            ColumnIncluderator includerator = updatedQuery.getToInclude();
            for (Map.Entry<String, ColumnAnalysis> entry : analyzedColumns.entrySet()) {
                final String columnName = entry.getKey();
                final ColumnAnalysis column = entry.getValue();

                if (!column.isError()) {
                    totalSize += column.getSize();
                }
                if (includerator.include(columnName)) {
                    columns.put(columnName, column);
                }
            }
            List<Interval> retIntervals = updatedQuery.analyzingInterval()
                    ? Collections.singletonList(segment.getDataInterval())
                    : null;

            final Map<String, AggregatorFactory> aggregators;
            Metadata metadata = null;
            if (updatedQuery.hasAggregators()) {
                metadata = segment.asStorageAdapter().getMetadata();
                if (metadata != null && metadata.getAggregators() != null) {
                    aggregators = Maps.newHashMap();
                    for (AggregatorFactory aggregator : metadata.getAggregators()) {
                        aggregators.put(aggregator.getName(), aggregator);
                    }
                } else {
                    aggregators = null;
                }
            } else {
                aggregators = null;
            }

            final TimestampSpec timestampSpec;
            if (updatedQuery.hasTimestampSpec()) {
                if (metadata == null) {
                    metadata = segment.asStorageAdapter().getMetadata();
                }
                timestampSpec = metadata != null ? metadata.getTimestampSpec() : null;
            } else {
                timestampSpec = null;
            }

            final Granularity queryGranularity;
            if (updatedQuery.hasQueryGranularity()) {
                if (metadata == null) {
                    metadata = segment.asStorageAdapter().getMetadata();
                }
                queryGranularity = metadata != null ? metadata.getQueryGranularity() : null;
            } else {
                queryGranularity = null;
            }

            Boolean rollup = null;
            if (updatedQuery.hasRollup()) {
                if (metadata == null) {
                    metadata = segment.asStorageAdapter().getMetadata();
                }
                rollup = metadata != null ? metadata.isRollup() : null;
                if (rollup == null) {
                    // this segment was built before the no-rollup feature existed,
                    // so it was built with rollup
                    rollup = Boolean.TRUE;
                }
            }

            return Sequences.simple(Collections
                    .singletonList(new SegmentAnalysis(segment.getIdentifier(), retIntervals, columns,
                            totalSize, numRows, aggregators, timestampSpec, queryGranularity, rollup)));
        }
    };
}

From source file: com.huawei.streaming.cql.tasks.CreateFunctionTask.java

private TreeMap<String, String> analyzeStreamProperties(StreamPropertiesContext streamPropertiesContext) {
    if (streamPropertiesContext == null) {
        return Maps.newTreeMap();
    }

    return parseStreamProperties(streamPropertiesContext);
}

From source file: org.codetrack.database.file.FileProject.java

/**
 * This method creates an instance of SortedMap if one does not
 * exist in the project control itemsMap
 *
 * @param clazz Class
 * @return SortedMap<String, ProjectItem> instance
 */
private SortedMap<String, ProjectItem> lazyMap(Class clazz) {

    SortedMap<String, ProjectItem> map = itemsMap.get(clazz);

    if (map == null) {
        map = Maps.newTreeMap();
        itemsMap.put(clazz, map);
    }

    return map;

}
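
As an aside, on Java 8+ the same lazy-initialization pattern can be written with Map.computeIfAbsent. A sketch under the assumption that itemsMap is a Map<Class, SortedMap<String, ProjectItem>> used from a single thread:

private SortedMap<String, ProjectItem> lazyMap(Class clazz) {
    // Create and cache the TreeMap on first access for this class.
    return itemsMap.computeIfAbsent(clazz, key -> Maps.newTreeMap());
}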

From source file: com.google.auto.value.processor.escapevelocity.Reparser.java

Reparser(ImmutableList<Node> nodes) {
    this.nodes = removeSpaceBeforeSet(nodes);
    this.nodeIndex = 0;
    this.macros = Maps.newTreeMap();
}

From source file: siftscience.kafka.tools.KafkaAssignmentStrategy.java

private static void fillNodesFromAssignment(Map<Integer, List<Integer>> assignment,
        Map<Integer, Node> nodeMap) {
    // Assign existing partitions back to nodes in a round-robin fashion. This prevents
    // (when possible) multiple replicas of the same partition from moving around the
    // cluster at the same time. It also helps ensure that the replicas left orphaned are
    // ones that nodes can accept.
    Map<Integer, Iterator<Integer>> assignmentIterators = Maps.newTreeMap();
    for (Map.Entry<Integer, List<Integer>> e : assignment.entrySet()) {
        assignmentIterators.put(e.getKey(), e.getValue().iterator());
    }
    boolean filled = false;
    while (!filled) {
        Iterator<Integer> roundRobin = assignmentIterators.keySet().iterator();
        while (roundRobin.hasNext()) {
            int partition = roundRobin.next();
            Iterator<Integer> nodeIt = assignmentIterators.get(partition);
            if (nodeIt.hasNext()) {
                int nodeId = nodeIt.next();
                Node node = nodeMap.get(nodeId);
                if (node != null && node.canAccept(partition)) {
                    // The node from the current assignment must still exist and be able to
                    // accept the partition.
                    node.accept(partition);
                }
            } else {
                roundRobin.remove();
            }
        }
        filled = assignmentIterators.isEmpty();
    }
}
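
The round-robin drain that the comment above describes can be seen in isolation in this minimal sketch (class and variable names are ours): each key's iterator contributes one element per pass, and exhausted iterators are removed until the map is empty.

import com.google.common.collect.Maps;

import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;

public class RoundRobinDrainDemo {
    public static void main(String[] args) {
        Map<String, Iterator<Integer>> iterators = Maps.newTreeMap();
        iterators.put("a", Arrays.asList(1, 2).iterator());
        iterators.put("b", Arrays.asList(3).iterator());

        while (!iterators.isEmpty()) {
            Iterator<String> roundRobin = iterators.keySet().iterator();
            while (roundRobin.hasNext()) {
                String key = roundRobin.next();
                Iterator<Integer> it = iterators.get(key);
                if (it.hasNext()) {
                    System.out.println(key + " -> " + it.next()); // one element per pass
                } else {
                    roundRobin.remove(); // drop exhausted iterators
                }
            }
        }
        // Output: a -> 1, b -> 3, a -> 2
    }
}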

From source file: org.opengeoportal.harvester.api.service.IngestReportErrorServiceImpl.java

@Override
@Transactional(readOnly = true)
public Map<String, Long> getCountErrorsByReportId(Long id, IngestReportErrorType errorType) {
    List<Object[]> errorList = reportErrorRepository.getCountErrorsByReportId(id, errorType);
    Map<String, Long> result = Maps.newTreeMap();
    for (Object[] fieldError : errorList) {
        String fieldName = (String) fieldError[0];
        Long errorCount = (Long) fieldError[1];

        result.put(fieldName, errorCount);

    }
    return result;
}

From source file: org.sosy_lab.cpachecker.cpa.predicate.persistence.PredicateAbstractionsStorage.java

private void parseAbstractionTree() throws IOException, PredicateParsingFailedException {
    Multimap<Integer, Integer> resultTree = LinkedHashMultimap.create();
    Map<Integer, AbstractionNode> resultAbstractions = Maps.newTreeMap();
    Set<Integer> abstractionsWithParents = Sets.newTreeSet();

    String source = abstractionsFile.getName();
    try (BufferedReader reader = abstractionsFile.asCharSource(StandardCharsets.US_ASCII)
            .openBufferedStream()) {

        // first, read the first section with the initial set of function definitions
        Pair<Integer, String> defParsingResult = PredicatePersistenceUtils.parseCommonDefinitions(reader,
                abstractionsFile.toString());
        int lineNo = defParsingResult.getFirst();
        String commonDefinitions = defParsingResult.getSecond();

        String currentLine;
        int currentAbstractionId = -1;
        Optional<Integer> currentLocationId = Optional.absent();
        Set<Integer> currentSuccessors = Sets.newTreeSet();

        AbstractionsParserState parserState = AbstractionsParserState.EXPECT_NODE_DECLARATION;
        while ((currentLine = reader.readLine()) != null) {
            lineNo++;
            currentLine = currentLine.trim();

            if (currentLine.isEmpty()) {
                // blank lines separate sections
                continue;
            }

            if (currentLine.startsWith("//")) {
                // comment
                continue;
            }

            if (parserState == AbstractionsParserState.EXPECT_NODE_DECLARATION) {
                // we expect a new section header
                if (!currentLine.endsWith(":")) {
                    throw new PredicateParsingFailedException(
                            currentLine + " is not a valid abstraction header", source, lineNo);
                }

                currentLine = currentLine.substring(0, currentLine.length() - 1).trim(); // strip off ":"
                if (currentLine.isEmpty()) {
                    throw new PredicateParsingFailedException("empty header is not allowed", source, lineNo);
                }

                if (!NODE_DECLARATION_PATTERN.matcher(currentLine).matches()) {
                    throw new PredicateParsingFailedException(
                            currentLine + " is not a valid abstraction header", source, lineNo);
                }

                currentLocationId = Optional.absent();
                StringTokenizer declarationTokenizer = new StringTokenizer(currentLine, " (,):");
                currentAbstractionId = Integer.parseInt(declarationTokenizer.nextToken());
                while (declarationTokenizer.hasMoreTokens()) {
                    String token = declarationTokenizer.nextToken().trim();
                    if (token.length() > 0) {
                        if (token.startsWith("@")) {
                            currentLocationId = Optional.of(Integer.parseInt(token.substring(1)));
                        } else {
                            int successorId = Integer.parseInt(token);
                            currentSuccessors.add(successorId);
                        }
                    }
                }

                parserState = AbstractionsParserState.EXPECT_NODE_ABSTRACTION;

            } else if (parserState == AbstractionsParserState.EXPECT_NODE_ABSTRACTION) {
                if (!(currentLine.startsWith("(assert ") && currentLine.endsWith(")"))) {
                    throw new PredicateParsingFailedException("unexpected line " + currentLine, source, lineNo);
                }

                BooleanFormula f;
                try {
                    f = fmgr.parse(commonDefinitions + currentLine);
                } catch (IllegalArgumentException e) {
                    throw new PredicateParsingFailedException(e, "Formula parsing", lineNo);
                }

                AbstractionNode abstractionNode = new AbstractionNode(currentAbstractionId, f,
                        currentLocationId);
                resultAbstractions.put(currentAbstractionId, abstractionNode);
                resultTree.putAll(currentAbstractionId, currentSuccessors);
                abstractionsWithParents.addAll(currentSuccessors);
                currentAbstractionId = -1;
                currentSuccessors.clear();

                parserState = AbstractionsParserState.EXPECT_NODE_DECLARATION;
            }
        }
    }

    // Determine root node
    Set<Integer> nodesWithNoParents = Sets.difference(resultAbstractions.keySet(), abstractionsWithParents);
    assert nodesWithNoParents.size() <= 1;
    if (!nodesWithNoParents.isEmpty()) {
        this.rootAbstractionId = nodesWithNoParents.iterator().next();
    } else {
        this.rootAbstractionId = null;
    }

    // Set results
    this.abstractions = ImmutableMap.copyOf(resultAbstractions);
    this.abstractionTree = ImmutableMultimap.copyOf(resultTree);
}

From source file: org.locationtech.geogig.model.impl.LegacyTreeBuilder.java

/**
 * Only useful to {@link #build() build} the named {@link #empty() empty} tree
 */
private LegacyTreeBuilder() {
    obStore = null;
    treeChanges = Maps.newTreeMap();
    featureChanges = Maps.newTreeMap();
    deletes = Sets.newTreeSet();
    bucketTreesByBucket = Maps.newTreeMap();
    pendingWritesCache = Maps.newTreeMap();
    original = RevTree.EMPTY;
}

From source file: indigo.runtime.AnalysisContext.java

private AnalysisContext(Collection<Operation> operations, ConflictResolutionPolicy policy,
        GenericPredicateFactory factory) {
    this.resolutionPolicy = policy;
    this.parentContext = null;
    this.predicateSizeConstraints = Maps.newHashMap();
    this.transformedOps = Maps.newTreeMap();
    this.transformedOpsPre = Maps.newTreeMap();
    this.factory = factory;

    this.operations = operations.stream().collect(Collectors.toMap(Operation::opName, Function.identity()));

    this.opEffects = operations.stream().collect(Collectors.toMap(Operation::opName, Operation::getEffects));

    this.predicateToOpsIncludingPre = computePredicateToOpsIndex();
}