Example usage for com.google.common.collect Maps newTreeMap

Introduction

This page lists example usages of com.google.common.collect.Maps.newTreeMap, drawn from open-source projects.

Prototype

public static <C, K extends C, V> TreeMap<K, V> newTreeMap(@Nullable Comparator<C> comparator) 

Document

Creates a mutable, empty TreeMap instance using the given comparator.
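
A minimal sketch of the call itself (the class name is illustrative, not from the library's documentation): the comparator decides key order, so the same entries can iterate in a different sequence than natural ordering would give. Per the prototype, the comparator is @Nullable; passing null falls back to the keys' natural ordering.

import java.util.Comparator;
import java.util.TreeMap;
import com.google.common.collect.Maps;

public class NewTreeMapDemo {
    public static void main(String[] args) {
        // reverseOrder() flips the natural ascending iteration to descending.
        TreeMap<Integer, String> map = Maps.newTreeMap(Comparator.<Integer>reverseOrder());
        map.put(1, "one");
        map.put(2, "two");
        System.out.println(map.keySet()); // [2, 1]
    }
}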

Usage

From source file:co.cask.cdap.data2.transaction.stream.leveldb.LevelDBStreamFileConsumer.java

@Override
protected void updateState(Iterable<byte[]> rows, int size, byte[] value) throws IOException {
    NavigableMap<byte[], NavigableMap<byte[], byte[]>> changes = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    for (byte[] row : rows) {
        NavigableMap<byte[], byte[]> values = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
        values.put(stateColumnName, value);
        changes.put(row, values);
    }
    tableCore.persist(changes, KeyValue.LATEST_TIMESTAMP);
}
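
Worth noting: byte[] has no natural ordering, so a TreeMap keyed on byte arrays always needs an explicit comparator (Bytes.BYTES_COMPARATOR above is HBase's unsigned lexicographic one). Below is a standalone sketch of the same pattern using Guava's own comparator; the class name is illustrative.

import java.util.NavigableMap;
import com.google.common.collect.Maps;
import com.google.common.primitives.UnsignedBytes;

public class ByteArrayKeysDemo {
    public static void main(String[] args) {
        // Without a comparator, a TreeMap<byte[], ...> throws ClassCastException
        // on the first put, because byte[] does not implement Comparable.
        NavigableMap<byte[], String> rows =
                Maps.newTreeMap(UnsignedBytes.lexicographicalComparator());
        rows.put(new byte[] { 0x02 }, "second");
        rows.put(new byte[] { 0x01 }, "first");
        System.out.println(rows.firstEntry().getValue()); // first
    }
}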

From source file:org.nanoframework.orm.jdbc.record.AbstractJdbcRecord.java

protected void initColumnNames() {
    final Collection<Field> fields = instance.fields();
    if (Result.JDBC_JSTL_CASE_INSENSITIVE_ORDER) {
        columnMapper = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
        fieldColumnMapper = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
    } else {
        columnMapper = Maps.newTreeMap();
        fieldColumnMapper = Maps.newTreeMap();
    }

    fields.stream().filter(field -> field.isAnnotationPresent(Column.class)).forEach(field -> {
        final String columnName = field.getAnnotation(Column.class).value();
        columnMapper.put(columnName, field);
        fieldColumnMapper.put(field.getName(), columnName);
    });

    Assert.notEmpty(columnMapper,
            "Record[ " + this.entity.getName() + " ], @Column");
}
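
The case-insensitive branch fits JDBC because SQL column identifiers are typically case-insensitive: under String.CASE_INSENSITIVE_ORDER, different casings of a column name resolve to the same entry. A small sketch (names are illustrative):

import java.util.TreeMap;
import com.google.common.collect.Maps;

public class CaseInsensitiveLookupDemo {
    public static void main(String[] args) {
        TreeMap<String, String> columns = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
        columns.put("USER_ID", "userId");
        // The comparator treats "user_id" and "USER_ID" as the same key.
        System.out.println(columns.get("user_id")); // userId
    }
}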

From source file:org.apache.isis.core.runtime.services.ServicesInstallerFromConfiguration.java

@Override
public List<Object> getServices() {
    LOG.info("installing " + this.getClass().getName());

    // rather nasty, lazily copy over the configuration to the instantiator
    serviceInstantiator.setConfiguration(getConfiguration());

    if (serviceList == null) {

        final SortedMap<String, SortedSet<String>> positionedServices = Maps
                .newTreeMap(new DeweyOrderComparator());
        appendServices(positionedServices);

        serviceList = ServicesInstallerUtils.instantiateServicesFrom(positionedServices, serviceInstantiator);
    }
    return serviceList;
}
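
DeweyOrderComparator is an Isis class whose source is not shown here; presumably it orders position strings such as "1.2" and "1.10" numerically per segment rather than lexicographically. A hypothetical sketch of such an ordering (the real implementation may differ):

import java.util.Comparator;
import java.util.SortedMap;
import com.google.common.collect.Maps;

public class DeweyOrderSketch {
    // Hypothetical: compare dot-separated segments numerically, so that
    // "1.2" sorts before "1.10" (plain string order would reverse them).
    static final Comparator<String> DEWEY = (a, b) -> {
        String[] as = a.split("\\."), bs = b.split("\\.");
        for (int i = 0; i < Math.min(as.length, bs.length); i++) {
            int c = Integer.compare(Integer.parseInt(as[i]), Integer.parseInt(bs[i]));
            if (c != 0) {
                return c;
            }
        }
        return Integer.compare(as.length, bs.length);
    };

    public static void main(String[] args) {
        SortedMap<String, String> positioned = Maps.newTreeMap(DEWEY);
        positioned.put("1.10", "later");
        positioned.put("1.2", "earlier");
        System.out.println(positioned.firstKey()); // 1.2
    }
}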

From source file:org.apache.druid.server.http.IntervalsResource.java

@GET
@Path("/{interval}")
@Produces(MediaType.APPLICATION_JSON)
public Response getSpecificIntervals(@PathParam("interval") String interval,
        @QueryParam("simple") String simple, @QueryParam("full") String full,
        @Context final HttpServletRequest req) {
    final Interval theInterval = Intervals.of(interval.replace("_", "/"));
    final Set<ImmutableDruidDataSource> datasources = InventoryViewUtils.getSecuredDataSources(req,
            serverInventoryView, authorizerMapper);

    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());

    if (full != null) {
        final Map<Interval, Map<String, Map<String, Object>>> retVal = Maps.newTreeMap(comparator);
        for (ImmutableDruidDataSource dataSource : datasources) {
            for (DataSegment dataSegment : dataSource.getSegments()) {
                if (theInterval.contains(dataSegment.getInterval())) {
                    Map<String, Map<String, Object>> dataSourceInterval = retVal.get(dataSegment.getInterval());
                    if (dataSourceInterval == null) {
                        Map<String, Map<String, Object>> tmp = Maps.newHashMap();
                        retVal.put(dataSegment.getInterval(), tmp);
                    }
                    setProperties(retVal, dataSource, dataSegment);
                }
            }
        }

        return Response.ok(retVal).build();
    }

    if (simple != null) {
        final Map<Interval, Map<String, Object>> retVal = Maps.newHashMap();
        for (ImmutableDruidDataSource dataSource : datasources) {
            for (DataSegment dataSegment : dataSource.getSegments()) {
                if (theInterval.contains(dataSegment.getInterval())) {
                    Map<String, Object> properties = retVal.get(dataSegment.getInterval());
                    if (properties == null) {
                        properties = Maps.newHashMap();
                        properties.put("size", dataSegment.getSize());
                        properties.put("count", 1);

                        retVal.put(dataSegment.getInterval(), properties);
                    } else {
                        properties.put("size",
                                MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
                        properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
                    }
                }
            }
        }

        return Response.ok(retVal).build();
    }

    final Map<String, Object> retVal = Maps.newHashMap();
    for (ImmutableDruidDataSource dataSource : datasources) {
        for (DataSegment dataSegment : dataSource.getSegments()) {
            if (theInterval.contains(dataSegment.getInterval())) {
                retVal.put("size", MapUtils.getLong(retVal, "size", 0L) + dataSegment.getSize());
                retVal.put("count", MapUtils.getInt(retVal, "count", 0) + 1);
            }
        }
    }

    return Response.ok(retVal).build();
}
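
The inverted comparator makes the response list the most recent interval first. An equivalent spelling, if the map is built with natural ordering anyway, is TreeMap's descendingMap() view; here is a minimal sketch with plain string keys (the interval strings are illustrative):

import java.util.NavigableMap;
import java.util.TreeMap;
import com.google.common.collect.Maps;

public class NewestFirstDemo {
    public static void main(String[] args) {
        TreeMap<String, Long> byInterval = Maps.newTreeMap(); // natural, oldest-first order
        byInterval.put("2017-01-01/2017-02-01", 10L);
        byInterval.put("2017-03-01/2017-04-01", 20L);
        // descendingMap() is a live view with the reversed ordering.
        NavigableMap<String, Long> newestFirst = byInterval.descendingMap();
        System.out.println(newestFirst.firstKey()); // 2017-03-01/2017-04-01
    }
}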

From source file:io.druid.server.http.IntervalsResource.java

@GET
@Path("/{interval}")
@Produces(MediaType.APPLICATION_JSON)
public Response getSpecificIntervals(@PathParam("interval") String interval,
        @QueryParam("simple") String simple, @QueryParam("full") String full,
        @Context final HttpServletRequest req) {
    final Interval theInterval = new Interval(interval.replace("_", "/"));
    final Set<DruidDataSource> datasources = authConfig.isEnabled()
            ? InventoryViewUtils.getSecuredDataSources(serverInventoryView,
                    (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN))
            : InventoryViewUtils.getDataSources(serverInventoryView);

    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());

    if (full != null) {
        final Map<Interval, Map<String, Map<String, Object>>> retVal = Maps.newTreeMap(comparator);
        for (DruidDataSource dataSource : datasources) {
            for (DataSegment dataSegment : dataSource.getSegments()) {
                if (theInterval.contains(dataSegment.getInterval())) {
                    Map<String, Map<String, Object>> dataSourceInterval = retVal.get(dataSegment.getInterval());
                    if (dataSourceInterval == null) {
                        Map<String, Map<String, Object>> tmp = Maps.newHashMap();
                        retVal.put(dataSegment.getInterval(), tmp);
                    }
                    setProperties(retVal, dataSource, dataSegment);
                }
            }
        }

        return Response.ok(retVal).build();
    }

    if (simple != null) {
        final Map<Interval, Map<String, Object>> retVal = Maps.newHashMap();
        for (DruidDataSource dataSource : datasources) {
            for (DataSegment dataSegment : dataSource.getSegments()) {
                if (theInterval.contains(dataSegment.getInterval())) {
                    Map<String, Object> properties = retVal.get(dataSegment.getInterval());
                    if (properties == null) {
                        properties = Maps.newHashMap();
                        properties.put("size", dataSegment.getSize());
                        properties.put("count", 1);

                        retVal.put(dataSegment.getInterval(), properties);
                    } else {
                        properties.put("size",
                                MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
                        properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
                    }
                }
            }
        }

        return Response.ok(retVal).build();
    }

    final Map<String, Object> retVal = Maps.newHashMap();
    for (DruidDataSource dataSource : datasources) {
        for (DataSegment dataSegment : dataSource.getSegments()) {
            if (theInterval.contains(dataSegment.getInterval())) {
                retVal.put("size", MapUtils.getLong(retVal, "size", 0L) + dataSegment.getSize());
                retVal.put("count", MapUtils.getInt(retVal, "count", 0) + 1);
            }
        }
    }

    return Response.ok(retVal).build();
}

From source file:org.apache.gobblin.metrics.performance.PerformanceUtils.java

/**
 * Runs a set of performance tests. The method will take the Cartesian product of the values of each input parameter,
 * and run a performance test for each combination of parameters. At the end, it will print out the results.
 *
 * <p>
 *   All parameters (except for queries) are a set of integers, meaning that separate tests will be run for all
 *   the values provided. The number of queries will be identical for all tests.
 * </p>
 *
 * @param threads Number of threads to spawn. Each thread will have an {@link Incrementer} and update metrics.
 * @param depth Depth of the {@link org.apache.gobblin.metrics.MetricContext} tree.
 * @param forkAtDepth If multiple threads, each thread has its own {@link org.apache.gobblin.metrics.MetricContext}. This
 *                    parameter sets the first level in the tree where the per-thread MetricContexts branch off.
 * @param counters Number of counters to generate per thread.
 * @param meters Number of meters to generate per thread.
 * @param histograms Number of histograms to generate per thread.
 * @param timers Number of timers to generate per thread.
 * @param queries Number of increments to do, divided among all threads.
 * @throws Exception
 */
@Builder(buildMethodName = "run", builderMethodName = "multiTest")
public static void _multiTest(@Singular("threads") Set<Integer> threads, @Singular("depth") Set<Integer> depth,
        @Singular("forkAtDepth") Set<Integer> forkAtDepth, @Singular("counters") Set<Integer> counters,
        @Singular("meters") Set<Integer> meters, @Singular("histograms") Set<Integer> histograms,
        @Singular("timers") Set<Integer> timers, long queries, String name) throws Exception {

    if (threads.isEmpty()) {
        threads = Sets.newHashSet(1);
    }
    if (forkAtDepth.isEmpty()) {
        forkAtDepth = Sets.newHashSet(0);
    }
    if (depth.isEmpty()) {
        depth = Sets.newHashSet(0);
    }
    if (counters.isEmpty()) {
        counters = Sets.newHashSet(0);
    }
    if (meters.isEmpty()) {
        meters = Sets.newHashSet(0);
    }
    if (histograms.isEmpty()) {
        histograms = Sets.newHashSet(0);
    }
    if (timers.isEmpty()) {
        timers = Sets.newHashSet(0);
    }
    if (queries == 0) {
        queries = 50000000L;
    }
    if (Strings.isNullOrEmpty(name)) {
        name = "Test";
    }

    Set<List<Integer>> parameters = Sets.cartesianProduct(threads, depth, forkAtDepth, counters, meters,
            histograms, timers);

    Comparator<List<Integer>> comparator = new Comparator<List<Integer>>() {
        @Override
        public int compare(List<Integer> o1, List<Integer> o2) {
            Iterator<Integer> it1 = o1.iterator();
            Iterator<Integer> it2 = o2.iterator();

            while (it1.hasNext() && it2.hasNext()) {
                int compare = Integer.compare(it1.next(), it2.next());
                if (compare != 0) {
                    return compare;
                }
            }
            if (it1.hasNext()) {
                return 1;
            } else if (it2.hasNext()) {
                return -1;
            } else {
                return 0;
            }
        }
    };

    TreeMap<List<Integer>, Double> results = Maps.newTreeMap(comparator);

    for (List<Integer> p : parameters) {
        Preconditions.checkArgument(p.size() == 7, "Parameter list should be of size 7.");
        results.put(p, singleTest().threads(p.get(0)).depth(p.get(1)).forkAtDepth(p.get(2)).counters(p.get(3))
                .meters(p.get(4)).histograms(p.get(5)).timers(p.get(6)).queries(queries).run());
    }

    System.out.println("===========================");
    System.out.println(name);
    System.out.println("===========================");
    System.out.println("Threads\tDepth\tForkAtDepth\tCounters\tMeters\tHistograms\tTimers\tQPS");
    for (Map.Entry<List<Integer>, Double> result : results.entrySet()) {
        List<Integer> p = result.getKey();
        System.out.println(String.format("%d\t%d\t%d\t%d\t%d\t%d\t%d\t%f", p.get(0), p.get(1), p.get(2),
                p.get(3), p.get(4), p.get(5), p.get(6), result.getValue()));
    }
}
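
The hand-written comparator above appears equivalent to Guava's built-in lexicographical ordering (pairwise element comparison, with a shorter list sorting first when it is a prefix of the other), so the results map could presumably also be built as in this sketch:

import java.util.List;
import java.util.TreeMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;

public class LexicographicalDemo {
    public static void main(String[] args) {
        // Ordering.lexicographical() compares iterables element by element.
        TreeMap<List<Integer>, Double> results =
                Maps.newTreeMap(Ordering.<Integer>natural().lexicographical());
        results.put(ImmutableList.of(1, 2), 0.5);
        results.put(ImmutableList.of(1), 0.25);
        System.out.println(results.firstKey()); // [1] (prefixes sort first)
    }
}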

From source file:co.cask.cdap.metrics.data.AggregatesTable.java

/**
 * Updates aggregates for the given iterator of {@link MetricsRecord}.
 *
 * @throws OperationException When there is an error updating the table.
 */
public void update(Iterator<MetricsRecord> records) throws OperationException {
    try {
        while (records.hasNext()) {
            MetricsRecord record = records.next();
            byte[] rowKey = getKey(record.getContext(), record.getName(), record.getRunId());
            Map<byte[], Long> increments = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);

            // The no tag value
            increments.put(Bytes.toBytes(MetricsConstants.EMPTY_TAG), (long) record.getValue());

            // For each tag, increments corresponding values
            for (TagMetric tag : record.getTags()) {
                increments.put(Bytes.toBytes(tag.getTag()), (long) tag.getValue());
            }
            aggregatesTable.increment(rowKey, increments);
        }
    } catch (Exception e) {
        throw new OperationException(StatusCode.INTERNAL_ERROR, e.getMessage(), e);
    }
}

From source file:co.cask.tephra.hbase94.coprocessor.TransactionVisibilityFilter.java

/**
 * Creates a new {@link org.apache.hadoop.hbase.filter.Filter} for returning data only from visible transactions.
 *
 * @param tx the current transaction to apply.  Only data visible to this transaction will be returned.
 * @param ttlByFamily map of time-to-live (TTL) (in milliseconds) by column family name
 * @param allowEmptyValues if {@code true} cells with empty {@code byte[]} values will be returned, if {@code false}
 *                         these will be interpreted as "delete" markers and the column will be filtered out
 * @param scanType the type of scan operation being performed
 * @param cellFilter if non-null, this filter will be applied to all cells visible to the current transaction, by
 *                   calling {@link Filter#filterKeyValue(org.apache.hadoop.hbase.KeyValue)}.  If null, then
 *                   {@link Filter.ReturnCode#INCLUDE_AND_NEXT_COL} will be returned instead.
 */
public TransactionVisibilityFilter(Transaction tx, Map<byte[], Long> ttlByFamily, boolean allowEmptyValues,
        ScanType scanType, @Nullable Filter cellFilter) {
    this.tx = tx;
    this.oldestTsByFamily = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    for (Map.Entry<byte[], Long> ttlEntry : ttlByFamily.entrySet()) {
        long familyTTL = ttlEntry.getValue();
        oldestTsByFamily.put(ttlEntry.getKey(),
                familyTTL <= 0 ? 0 : tx.getVisibilityUpperBound() - familyTTL * TxConstants.MAX_TX_PER_MS);
    }
    this.allowEmptyValues = allowEmptyValues;
    this.clearDeletes = scanType == ScanType.MAJOR_COMPACT || scanType == ScanType.USER_SCAN;

    this.cellFilter = cellFilter;
}

From source file:org.atlasapi.persistence.content.mongo.MongoDBQueryBuilder.java

DBObject buildQuery(ContentQuery query) {

    // handle attributes that are not part of a list structure
    Multimap<List<String>, ConstrainedAttribute> attributeConstraints = HashMultimap.create();
    for (ConstrainedAttribute constraint : buildQueries(query)) {
        if (constraint == null) {
            continue;
        }
        attributeConstraints.put(entityPath(constraint.attribute), constraint);
    }

    // sort the keys by length so that versions are dealt with before broadcasts etc.
    TreeMap<List<String>, Collection<ConstrainedAttribute>> map = Maps.newTreeMap(LENGTH_ORDER);
    map.putAll(attributeConstraints.asMap());

    DBObject finalQuery = new BasicDBObject();

    Map<List<String>, DBObject> queries = Maps.newHashMap();
    for (Entry<List<String>, Collection<ConstrainedAttribute>> entry : map.entrySet()) {

        List<String> entityPath = entry.getKey();

        Collection<ConstrainedAttribute> constraints = entry.getValue();

        if (entityPath.isEmpty()) {
            finalQuery.putAll(buildQueryForSingleLevelEntity(constraints));
            continue;
        }

        DBObject parentDbObject = null;

        List<String> parentPath = entityPath;
        while (!parentPath.isEmpty()) {
            parentPath = parentPath.subList(0, parentPath.size() - 1);
            if (queries.get(parentPath) != null) {
                parentDbObject = queries.get(parentPath);
                break;
            }
        }
        if (parentDbObject == null) {
            parentDbObject = finalQuery;
            parentPath = ImmutableList.of();
        }

        DBObject rhs = buildQueryForSingleLevelEntity(constraints);
        String key = DOTTED_MONGO_ATTRIBUTE_PATH.join(entityPath.subList(parentPath.size(), entityPath.size()));
        DBObject attrObj = new BasicDBObject(key, new BasicDBObject(MongoConstants.ELEM_MATCH, rhs));
        parentDbObject.putAll(attrObj);
        queries.put(entityPath, rhs);
    }
    return finalQuery;
}
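
LENGTH_ORDER is not shown in this snippet. Given the comment about sorting keys by length, a plausible shape is a size-based comparator with a tie-break, since a TreeMap would otherwise treat distinct equal-length paths as duplicate keys. A hypothetical sketch:

import java.util.Comparator;
import java.util.List;
import java.util.SortedMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;

public class LengthOrderSketch {
    // Hypothetical: shortest path first; the tie-break keeps distinct
    // equal-length paths from colliding as TreeMap keys.
    static final Comparator<List<String>> LENGTH_ORDER = Comparator
            .<List<String>>comparingInt(List::size)
            .thenComparing((List<String> path) -> String.join(".", path));

    public static void main(String[] args) {
        SortedMap<List<String>, String> queries = Maps.newTreeMap(LENGTH_ORDER);
        queries.put(ImmutableList.of("versions", "broadcasts"), "inner");
        queries.put(ImmutableList.of("versions"), "outer");
        System.out.println(queries.firstKey()); // [versions]
    }
}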

From source file:pt.ist.fenixedu.cmscomponents.domain.unit.components.CompetenceCourseComponent.java

private Map<ExecutionSemester, String> executionSemesterUrls(CompetenceCourse competenceCourse, Page page) {
    Map<ExecutionSemester, String> semesterUrl = Maps
            .newTreeMap(ExecutionSemester.COMPARATOR_BY_SEMESTER_AND_YEAR);
    for (ExecutionSemester semester : executionSemesters(competenceCourse)) {
        semesterUrl.put(semester, String.format("%s/%s/%s", page.getAddress(), competenceCourse.getExternalId(),
                semester.getExternalId()));
    }
    return semesterUrl;
}