Example usage for com.google.common.collect Maps newTreeMap

List of usage examples for com.google.common.collect Maps newTreeMap

Introduction

On this page you can find example usage for com.google.common.collect Maps newTreeMap.

Prototype

public static <K extends Comparable, V> TreeMap<K, V> newTreeMap() 

Document

Creates a mutable, empty TreeMap instance using the natural ordering of its elements.
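
A minimal sketch of how the returned map behaves (class and variable names here are illustrative, not from the examples below): the keys must be Comparable, and iteration follows their natural ordering.

import com.google.common.collect.Maps;

import java.util.TreeMap;

public class NewTreeMapSketch {
    public static void main(String[] args) {
        // Keys must be Comparable; the map sorts them by natural ordering.
        TreeMap<String, Integer> counts = Maps.newTreeMap();
        counts.put("banana", 2);
        counts.put("apple", 5);
        counts.put("cherry", 1);

        // Iteration order follows the keys' natural ordering: apple, banana, cherry.
        counts.forEach((fruit, count) -> System.out.println(fruit + " = " + count));
    }
}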

Usage

From source file:indigo.impl.javaclass.JavaClassSpecification.java

public JavaClassSpecification(Class<?> javaClass) {
    super(javaClass.getName());
    this.javaClass = javaClass;
    this.dependenciesForPredicateName = Maps.newTreeMap();
    this.constrainedSets = Sets.newTreeSet();
    init();
}

From source file:org.apache.pulsar.common.policies.data.TopicStats.java

public TopicStats() {
    this.publishers = Lists.newArrayList();
    this.subscriptions = Maps.newHashMap();
    this.replication = Maps.newTreeMap();
}

From source file:org.codetrack.database.file.FileDatabase.java

public FileDatabase() {
    projectMap = Maps.newTreeMap();
}

From source file:com.metamx.druid.query.metadata.SegmentAnalyzer.java

public Map<String, ColumnAnalysis> analyze(QueryableIndex index) {
    Preconditions.checkNotNull(index, "Index cannot be null");

    Map<String, ColumnAnalysis> columns = Maps.newTreeMap();

    for (String columnName : index.getColumnNames()) {
        final Column column = index.getColumn(columnName);
        final ColumnCapabilities capabilities = column.getCapabilities();

        final ColumnAnalysis analysis;
        final ValueType type = capabilities.getType();
        switch (type) {
        case LONG:
            analysis = analyzeLongColumn(column);
            break;
        case FLOAT:
            analysis = analyzeFloatColumn(column);
            break;
        case STRING:
            analysis = analyzeStringColumn(column);
            break;
        case COMPLEX:
            analysis = analyzeComplexColumn(column);
            break;
        default:
            log.warn("Unknown column type[%s].", type);
            analysis = ColumnAnalysis.error(String.format("unknown_type_%s", type));
        }

        columns.put(columnName, analysis);
    }

    columns.put("__time", lengthBasedAnalysis(index.getTimeColumn(), NUM_BYTES_IN_TIMESTAMP));

    return columns;
}

From source file:org.geogit.api.plumbing.diff.MutableTree.java

private MutableTree(Node node) {
    this.node = node;
    this.childTrees = Maps.newTreeMap();
}

From source file:org.eclipse.wb.internal.swt.model.layout.LayoutNameSupport.java

@Override
protected Map<String, String> getValueMap() {
    // prepare variables
    Map<String, String> valueMap = Maps.newTreeMap();
    {
        valueMap.put("layoutAcronym", getAcronym());
        valueMap.put("layoutClassName", getClassName());
        valueMap.put("compositeName", getParentName());
        valueMap.put("compositeName-cap", getParentNameCap());
    }
    return valueMap;
}

From source file:org.eclipse.wb.internal.swing.model.layout.LayoutNameSupport.java

@Override
protected Map<String, String> getValueMap() {
    // prepare variables
    Map<String, String> valueMap = Maps.newTreeMap();
    {
        valueMap.put("layoutAcronym", getAcronym());
        valueMap.put("layoutClassName", getClassName());
        valueMap.put("containerName", getParentName());
        valueMap.put("containerName-cap", getParentNameCap());
    }
    return valueMap;
}

From source file:org.onosproject.store.primitives.impl.FederatedDistributedPrimitiveCreator.java

public FederatedDistributedPrimitiveCreator(Map<PartitionId, DistributedPrimitiveCreator> members) {
    this.members = Maps.newTreeMap();
    this.members.putAll(checkNotNull(members));
    this.sortedMemberPartitionIds = Lists.newArrayList(members.keySet());
}

From source file:org.sonatype.nexus.yum.internal.capabilities.GenerateMetadataCapabilityConfiguration.java

public GenerateMetadataCapabilityConfiguration(final Map<String, String> properties) {
    super(properties);

    this.aliases = Maps.newTreeMap();
    aliases.putAll(new AliasMappings(properties.get(ALIASES)).aliases());

    boolean processDeletes = true;
    if (properties.containsKey(DELETE_PROCESSING)) {
        processDeletes = Boolean.parseBoolean(properties.get(DELETE_PROCESSING));
    }
    this.processDeletes = processDeletes;

    long deleteProcessingDelay = Yum.DEFAULT_DELETE_PROCESSING_DELAY;
    try {
        deleteProcessingDelay = Long.parseLong(properties.get(DELETE_PROCESSING_DELAY));
    } catch (NumberFormatException e) {
        // will use default
    }
    this.deleteProcessingDelay = deleteProcessingDelay;

    this.yumGroupsDefinitionFile = properties.get(YUM_GROUPS_DEFINITION_FILE);
}

From source file:org.graylog2.indexer.results.FieldHistogramResult.java

@Override
public Map<Long, Map<String, Number>> getResults() {
    if (result.getBuckets().isEmpty()) {
        return Collections.emptyMap();
    }

    final Map<Long, Map<String, Number>> results = Maps.newTreeMap();
    for (Histogram.Bucket b : result.getBuckets()) {
        final ImmutableMap.Builder<String, Number> resultMap = ImmutableMap.builder();
        resultMap.put("total_count", b.getDocCount());

        final Stats stats = b.getAggregations().get(Searches.AGG_STATS);
        resultMap.put("count", stats.getCount());
        resultMap.put("min", stats.getMin());
        resultMap.put("max", stats.getMax());
        resultMap.put("total", stats.getSum());
        resultMap.put("mean", stats.getAvg());

        // cardinality is only calculated if it was explicitly requested, so this might be null
        final Cardinality cardinality = b.getAggregations().get(Searches.AGG_CARDINALITY);
        resultMap.put("cardinality", cardinality == null ? 0 : cardinality.getValue());

        final DateTime keyAsDate = (DateTime) b.getKey();
        final long timestamp = keyAsDate.getMillis() / 1000L;
        results.put(timestamp, resultMap.build());
    }

    final long minTimestamp = Collections.min(results.keySet());
    final long maxTimestamp = Collections.max(results.keySet());
    final MutableDateTime currentTime = new MutableDateTime(minTimestamp, DateTimeZone.UTC);

    while (currentTime.getMillis() < maxTimestamp) {
        final Map<String, Number> entry = results.get(currentTime.getMillis());

        // advance timestamp by the interval's seconds value
        currentTime.add(interval.getPeriod());

        if (entry == null) {
            // synthesize a 0 value for this timestamp
            results.put(currentTime.getMillis(), EMPTY_RESULT);
        }
    }
    return results;
}