Example usage for com.google.common.collect Maps newTreeMap

Introduction

This page collects example usages of com.google.common.collect.Maps.newTreeMap, drawn from the open-source projects listed below.

Prototype

public static <C, K extends C, V> TreeMap<K, V> newTreeMap(@Nullable Comparator<C> comparator) 

Document

Creates a mutable, empty TreeMap instance using the given comparator.
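
A minimal, self-contained sketch (the class name and sample data are illustrative, not taken from the projects below):

import java.util.TreeMap;

import com.google.common.collect.Maps;

public class NewTreeMapExample {
    public static void main(String[] args) {
        // The comparator may compare any supertype of the key type,
        // which is why the signature reads <C, K extends C, V>.
        TreeMap<String, Integer> counts = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
        counts.put("banana", 2);
        counts.put("Apple", 1);
        // Keys iterate in comparator order: {Apple=1, banana=2}
        System.out.println(counts);
    }
}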

Usage

From source file:co.cask.cdap.metrics.data.TimeSeriesTable.java
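This example stages row/column/value updates in a nested NavigableMap ordered by Bytes.BYTES_COMPARATOR, so byte[] keys sort lexicographically, then writes everything with a single put.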

public void save(Iterator<MetricsRecord> records) throws OperationException {
    if (!records.hasNext()) {
        return;
    }

    // Simply collecting all rows/cols/values that need to be put to the underlying table.
    NavigableMap<byte[], NavigableMap<byte[], byte[]>> table = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);

    while (records.hasNext()) {
        getUpdates(records.next(), table);
    }

    try {
        timeSeriesTable.put(table);
    } catch (Exception e) {
        throw new OperationException(StatusCode.INTERNAL_ERROR, e.getMessage(), e);
    }
}

From source file:com.palantir.atlasdb.keyvalue.partition.util.RowResultUtil.java
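Here a SortedMap ordered by UnsignedBytes.lexicographicalComparator() accumulates, per column, the union of all timestamps seen for a single row.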

public static RowResult<Set<Long>> allTimestamps(PeekingIterator<RowResult<Set<Long>>> it) {
    Preconditions.checkArgument(it.hasNext());

    final byte[] row = it.peek().getRowName();
    final SortedMap<byte[], Set<Long>> result = Maps.newTreeMap(UnsignedBytes.lexicographicalComparator());
    while (it.hasNext() && Arrays.equals(row, it.peek().getRowName())) {
        RowResult<Set<Long>> kvsResult = it.next();
        for (Map.Entry<Cell, Set<Long>> e : kvsResult.getCells()) {
            if (!result.containsKey(e.getKey().getColumnName())) {
                result.put(e.getKey().getColumnName(), Sets.<Long>newHashSet());
            }
            result.get(e.getKey().getColumnName()).addAll(e.getValue());
        }
    }
    return RowResult.create(row, result);
}

From source file:org.apache.crunch.impl.mr.plan.MSCRPlanner.java
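The planner keys its target-dependency map with a custom DEPTH_COMPARATOR (presumably ordering PCollections by dependency depth) so outputs can be planned stage by stage.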

public MRExecutor plan(Class<?> jarClass, Configuration conf) throws IOException {

    DotfileUtil dotfileUtil = new DotfileUtil(jarClass, conf);

    // Generate the debug lineage dotfiles (if configuration is enabled)
    dotfileUtil.buildLineageDotfile(outputs);

    Map<PCollectionImpl<?>, Set<Target>> targetDeps = Maps.newTreeMap(DEPTH_COMPARATOR);
    for (PCollectionImpl<?> pcollect : outputs.keySet()) {
        targetDeps.put(pcollect, pcollect.getTargetDependencies());
    }

    Multimap<Target, JobPrototype> assignments = HashMultimap.create();

    while (!targetDeps.isEmpty()) {
        Set<Target> allTargets = Sets.newHashSet();
        for (PCollectionImpl<?> pcollect : targetDeps.keySet()) {
            allTargets.addAll(outputs.get(pcollect));
        }
        GraphBuilder graphBuilder = new GraphBuilder();

        // Walk the current plan tree and build a graph in which the vertices are
        // sources, targets, and GBK operations.
        Set<PCollectionImpl<?>> currentStage = Sets.newHashSet();
        for (PCollectionImpl<?> output : targetDeps.keySet()) {
            Set<Target> deps = Sets.intersection(allTargets, targetDeps.get(output));
            if (deps.isEmpty()) {
                graphBuilder.visitOutput(output);
                currentStage.add(output);
            }
        }

        Graph baseGraph = graphBuilder.getGraph();
        boolean hasInputs = false;
        for (Vertex v : baseGraph) {
            if (v.isInput()) {
                hasInputs = true;
                break;
            }
        }
        if (!hasInputs) {
            LOG.warn("No input sources for pipeline, nothing to do...");
            return new MRExecutor(conf, jarClass, outputs, toMaterialize, appendedTargets, pipelineCallables);
        }

        // Create a new graph that splits up dependent GBK nodes.
        Graph graph = prepareFinalGraph(baseGraph);

        // Break the graph up into connected components.
        List<List<Vertex>> components = graph.connectedComponents();

        // Generate the debug graph dotfiles (if configuration is enabled)
        dotfileUtil.buildBaseGraphDotfile(outputs, graph);
        dotfileUtil.buildSplitGraphDotfile(outputs, graph, components);

        // For each component, we will create one or more job prototypes,
        // depending on its profile.
        // For dependency handling, we only need to care about which
        // job prototype a particular GBK is assigned to.
        Multimap<Vertex, JobPrototype> newAssignments = HashMultimap.create();
        for (List<Vertex> component : components) {
            newAssignments.putAll(constructJobPrototypes(component));
        }

        // Add in the job dependency information here.
        for (Map.Entry<Vertex, JobPrototype> e : newAssignments.entries()) {
            JobPrototype current = e.getValue();
            for (Vertex parent : graph.getParents(e.getKey())) {
                for (JobPrototype parentJobProto : newAssignments.get(parent)) {
                    current.addDependency(parentJobProto);
                }
            }
        }

        ImmutableMultimap<Target, JobPrototype> previousStages = ImmutableMultimap.copyOf(assignments);
        for (Map.Entry<Vertex, JobPrototype> e : newAssignments.entries()) {
            if (e.getKey().isOutput()) {
                PCollectionImpl<?> pcollect = e.getKey().getPCollection();
                JobPrototype current = e.getValue();

                // Add in implicit dependencies via SourceTargets that are read into memory
                for (Target pt : pcollect.getTargetDependencies()) {
                    for (JobPrototype parentJobProto : assignments.get(pt)) {
                        current.addDependency(parentJobProto);
                    }
                }

                // Add this to the set of output assignments
                for (Target t : outputs.get(pcollect)) {
                    assignments.put(t, e.getValue());
                }
            } else {
                Source source = e.getKey().getSource();
                if (source != null && source instanceof Target) {
                    JobPrototype current = e.getValue();
                    Collection<JobPrototype> parentJobPrototypes = previousStages.get((Target) source);
                    if (parentJobPrototypes != null) {
                        for (JobPrototype parentJobProto : parentJobPrototypes) {
                            current.addDependency(parentJobProto);
                        }
                    }
                }
            }
        }

        // Remove completed outputs and mark materialized output locations
        // for subsequent job processing.
        for (PCollectionImpl<?> output : currentStage) {
            if (toMaterialize.containsKey(output)) {
                MaterializableIterable mi = toMaterialize.get(output);
                if (mi.isSourceTarget()) {
                    output.materializeAt((SourceTarget) mi.getSource());
                }
            }
            targetDeps.remove(output);
        }
    }

    // Finally, construct the jobs from the prototypes and return.
    MRExecutor exec = new MRExecutor(conf, jarClass, outputs, toMaterialize, appendedTargets,
            pipelineCallables);

    // Generate the debug Plan dotfiles
    dotfileUtil.buildPlanDotfile(exec, assignments, pipeline, lastJobID);

    for (JobPrototype proto : Sets.newHashSet(assignments.values())) {
        exec.addJob(proto.getCrunchJob(jarClass, conf, pipeline, lastJobID));
    }

    // Generate the debug RTNode dotfiles (if configuration is enabled)
    dotfileUtil.buildRTNodesDotfile(exec);

    // Attach the dotfiles to the MRExecutor context
    dotfileUtil.addDotfilesToContext(exec);

    return exec;
}

From source file:co.cask.cdap.data2.transaction.stream.leveldb.LevelDBStreamFileConsumer.java
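Two nested newTreeMap(Bytes.BYTES_COMPARATOR) maps stage the per-row state columns whose changes are to be undone.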

@Override
protected void undoState(Iterable<byte[]> rows, int size) throws IOException {
    NavigableMap<byte[], NavigableMap<byte[], byte[]>> changes = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    for (byte[] row : rows) {
        NavigableMap<byte[], byte[]> values = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
        values.put(stateColumnName, DUMMY_STATE_CONTENT);
        changes.put(row, values);
    }
    tableCore.undo(changes, KeyValue.LATEST_TIMESTAMP);
}

From source file:co.cask.cdap.data2.increment.hbase98.IncrementHandler.java
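Because byte arrays compare by identity under equals/hashCode, a TreeMap over Bytes.BYTES_COMPARATOR is used here to get content-based key lookup when converting the writable map.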

private Map<byte[], byte[]> convertFamilyValues(
        Map<ImmutableBytesWritable, ImmutableBytesWritable> writableValues) {
    Map<byte[], byte[]> converted = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e : writableValues.entrySet()) {
        converted.put(e.getKey().get(), e.getValue().get());
    }
    return converted;
}

From source file:com.griddynamics.jagger.engine.e1.scenario.DefaultWorkloadSuggestionMaker.java
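An inline reversing Comparator makes the TreeMap iterate from the largest key downward, so the first two entries taken from the iterator correspond to the two largest keys.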

private static Integer findClosestPoint(BigDecimal desiredTps, Map<Integer, Pair<Long, BigDecimal>> stats) {
    SortedMap<Long, Integer> map = Maps.newTreeMap(new Comparator<Long>() {
        @Override
        public int compare(Long first, Long second) {
            return second.compareTo(first);
        }
    });
    for (Map.Entry<Integer, Pair<Long, BigDecimal>> entry : stats.entrySet()) {
        map.put(entry.getValue().getFirst(), entry.getKey());
    }

    if (map.size() < 2) {
        throw new IllegalArgumentException("Not enough stats to calculate point");
    }

    Iterator<Map.Entry<Long, Integer>> iterator = map.entrySet().iterator();
    Integer firstPoint = iterator.next().getValue();
    Integer secondPoint = iterator.next().getValue();

    if (firstPoint > secondPoint) {
        Integer temp = secondPoint;
        secondPoint = firstPoint;
        firstPoint = temp;
    }

    BigDecimal x1 = new BigDecimal(firstPoint);
    BigDecimal z1 = stats.get(firstPoint).getSecond();

    BigDecimal x2 = new BigDecimal(secondPoint);
    BigDecimal z2 = stats.get(secondPoint).getSecond();

    BigDecimal a = x2.subtract(x1);
    BigDecimal c = z2.subtract(z1);

    if (areEqual(c, BigDecimal.ZERO)) {
        return firstPoint;
    }

    // Line equation
    // y - y1 = ((y2 - y1)/(x2 - x1))*(x-x1)
    BigDecimal approxPoint = desiredTps.subtract(z1).multiply(a).divide(c, 3, BigDecimal.ROUND_HALF_UP).add(x1);

    Integer result = 0;
    if (DecimalUtil.compare(approxPoint, BigDecimal.ZERO) > 0) {
        approxPoint = approxPoint.divide(BigDecimal.ONE, 0, BigDecimal.ROUND_UP);
        result = approxPoint.intValue();
    }
    return result;
}

From source file:com.enonic.cms.domain.content.ContentEntity.java
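In this copy constructor, newTreeMap copies the source's content-home map; since the copying overload of Maps.newTreeMap takes a SortedMap, the copy keeps the original comparator and ordering.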

/**
 * Constructor that creates a new instance as a copy of the given content.
 */
public ContentEntity(ContentEntity source) {
    this();

    this.key = source.getKey();
    this.createdAt = source.getCreatedAt();
    this.timestamp = source.getTimestamp();
    this.deleted = source.getDeleted();
    this.name = source.getName();
    this.priority = source.getPriority();
    this.availableFrom = source.getAvailableFrom();
    this.availableTo = source.getAvailableTo();
    this.owner = source.getOwner();
    this.assignee = source.getAssignee();
    this.assigner = source.getAssigner();
    this.assignmentDueDate = source.getAssignmentDueDate();
    this.assignmentDescription = source.getAssignmentDescription();
    this.category = source.getCategory();
    this.language = source.getLanguage();
    this.source = source.getSource();
    this.mainVersion = source.getMainVersion();
    this.draftVersion = source.getDraftVersion();
    this.contentHomes = source.getContentHomesAsMap() != null ? Maps.newTreeMap(source.getContentHomesAsMap())
            : null;
    this.versions = source.getVersions() != null ? Lists.newArrayList(source.getVersions()) : null;
    this.relatedParents = source.getRelatedParentContentVersions() != null
            ? Sets.newHashSet(source.getRelatedParentContentVersions())
            : null;
    this.sectionContents = source.getSectionContents() != null ? Sets.newHashSet(source.getSectionContents())
            : null;
    this.directMenuItemPlacements = source.getDirectMenuItemPlacements() != null
            ? new TreeSet<MenuItemEntity>(source.getDirectMenuItemPlacements())
            : null;
}

From source file:org.obeonetwork.dsl.smartdesigner.design.actions.AbstractShadeGraphicalElement.java
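This example sorts at every level of the tree model by building TreeMaps (and a TreeSet) with name-based comparators for architectures, metatypes, and graphical elements.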

/**
 * Construct the model to display in the tree of the dialog.
 * <p>
 * The structure of the model is the following:
 * {@code Map<Architecture, Map<MetaType, Set<GraphicalElement>>>}
 * 
 * @param graphicalElements the graphical elements, grouped by metatype
 * @return the model to display, grouped by architecture and metatype
 */
private final Map<EObject, Map<EClass, Set<GraphicalElement>>> getModel(
        Map<EClass, List<GraphicalElement>> graphicalElements) {
    Map<EObject, Map<EClass, Set<GraphicalElement>>> result = Maps.newTreeMap(new Comparator<EObject>() {
        @Override
        public int compare(EObject o1, EObject o2) {
            return EMFUtil.retrieveNameFrom(o1).compareTo(EMFUtil.retrieveNameFrom(o2));
        }
    });

    Map<EClass, List<EClass>> architectures = this.getArchitectures();

    for (Entry<EClass, List<EClass>> architectureEntry : architectures.entrySet()) {
        Map<EClass, Set<GraphicalElement>> metaType = Maps.newTreeMap(new Comparator<EClass>() {
            @Override
            public int compare(EClass o1, EClass o2) {
                return o1.getName().compareTo(o2.getName());
            }
        });
        for (EClass metaTypeEntry : architectureEntry.getValue()) {
            List<GraphicalElement> elements = graphicalElements.get(metaTypeEntry);
            if (elements != null) {
                Set<GraphicalElement> set = Sets.newTreeSet(new Comparator<GraphicalElement>() {
                    @Override
                    public int compare(GraphicalElement o1, GraphicalElement o2) {
                        return EMFUtil.retrieveNameFrom(o1.getSemanticElement())
                                .compareTo(EMFUtil.retrieveNameFrom(o2.getSemanticElement()));
                    }
                });
                set.addAll(elements);
                metaType.put(metaTypeEntry, set);
            }
        }
        result.put(architectureEntry.getKey(), metaType);
    }
    return result;
}

From source file:co.cask.tephra.hbase98.coprocessor.TransactionVisibilityFilter.java
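The constructor precomputes, per column family, the oldest visible timestamp derived from the family's TTL, stored in a byte[]-ordered TreeMap.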

/**
 * Creates a new {@link org.apache.hadoop.hbase.filter.Filter} for returning data only from visible transactions.
 *
 * @param tx the current transaction to apply.  Only data visible to this transaction will be returned.
 * @param ttlByFamily map of time-to-live (TTL) (in milliseconds) by column family name
 * @param allowEmptyValues if {@code true} cells with empty {@code byte[]} values will be returned, if {@code false}
 *                         these will be interpreted as "delete" markers and the column will be filtered out
 * @param scanType the type of scan operation being performed
 * @param cellFilter if non-null, this filter will be applied to all cells visible to the current transaction, by
 *                   calling {@link Filter#filterKeyValue(org.apache.hadoop.hbase.Cell)}.  If null, then
 *                   {@link Filter.ReturnCode#INCLUDE_AND_NEXT_COL} will be returned instead.
 */
public TransactionVisibilityFilter(Transaction tx, Map<byte[], Long> ttlByFamily, boolean allowEmptyValues,
        ScanType scanType, @Nullable Filter cellFilter) {
    this.tx = tx;
    this.oldestTsByFamily = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    for (Map.Entry<byte[], Long> ttlEntry : ttlByFamily.entrySet()) {
        long familyTTL = ttlEntry.getValue();
        oldestTsByFamily.put(ttlEntry.getKey(),
                familyTTL <= 0 ? 0 : tx.getVisibilityUpperBound() - familyTTL * TxConstants.MAX_TX_PER_MS);
    }
    this.allowEmptyValues = allowEmptyValues;
    this.clearDeletes = scanType == ScanType.COMPACT_DROP_DELETES || (scanType == ScanType.USER_SCAN
            && tx.getVisibilityLevel() != Transaction.VisibilityLevel.SNAPSHOT_ALL);
    this.cellFilter = cellFilter;
}

From source file:co.cask.cdap.data2.transaction.queue.AbstractQueueConsumer.java
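Both the dequeue entry cache and the set of in-flight consuming entries are byte[]-keyed tree maps ordered by Bytes.BYTES_COMPARATOR.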

protected AbstractQueueConsumer(CConfiguration cConf, ConsumerConfig consumerConfig, QueueName queueName,
        @Nullable byte[] startRow) {
    this.consumerConfig = consumerConfig;
    this.queueName = queueName;
    this.entryCache = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    this.consumingEntries = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    this.queueRowPrefix = QueueEntryRow.getQueueRowPrefix(queueName);
    this.scanStartRow = (startRow == null || startRow.length == 0)
            ? QueueEntryRow.getQueueEntryRowKey(queueName, 0L, 0)
            : startRow;
    this.stateColumnName = Bytes.add(QueueEntryRow.STATE_COLUMN_PREFIX,
            Bytes.toBytes(consumerConfig.getGroupId()));

    // Maximum time to spend in dequeue.
    int dequeuePercent = cConf.getInt(QueueConstants.ConfigKeys.DEQUEUE_TX_PERCENT);
    Preconditions.checkArgument(dequeuePercent > 0 && dequeuePercent <= 100, "Invalid value for %s",
            QueueConstants.ConfigKeys.DEQUEUE_TX_PERCENT);
    long txTimeout = TimeUnit.SECONDS.toMillis(cConf.getLong(TxConstants.Manager.CFG_TX_TIMEOUT));
    this.maxDequeueMillis = txTimeout * dequeuePercent / 100;
}