Example usage for com.google.common.collect Maps newTreeMap

Introduction

On this page you can find usage examples for com.google.common.collect.Maps#newTreeMap().

Prototype

public static <K extends Comparable, V> TreeMap<K, V> newTreeMap() 

Document

Creates a mutable, empty TreeMap instance using the natural ordering of its elements.
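
A minimal, self-contained sketch of the call (the class name and the sample keys and values are illustrative, not taken from the examples below):

import java.util.TreeMap;

import com.google.common.collect.Maps;

public class NewTreeMapExample {
    public static void main(String[] args) {
        // The returned TreeMap keeps its keys in natural (here alphabetical) order.
        TreeMap<String, Integer> counts = Maps.newTreeMap();
        counts.put("banana", 2);
        counts.put("apple", 1);
        counts.put("cherry", 3);
        // Prints {apple=1, banana=2, cherry=3}
        System.out.println(counts);
    }
}

On Java 7 and later the diamond operator makes new TreeMap<>() an equivalent alternative; the factory mainly saves typing in older code.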

Usage

From source file: com.metamx.druid.query.metadata.SegmentMetadataQueryQueryToolChest.java

@Override
public QueryRunner<SegmentAnalysis> mergeResults(final QueryRunner<SegmentAnalysis> runner) {
    return new ResultMergeQueryRunner<SegmentAnalysis>(runner) {
        @Override
        protected Ordering<SegmentAnalysis> makeOrdering(Query<SegmentAnalysis> query) {
            if (((SegmentMetadataQuery) query).isMerge()) {
                // Merge everything always
                return new Ordering<SegmentAnalysis>() {
                    @Override
                    public int compare(@Nullable SegmentAnalysis left, @Nullable SegmentAnalysis right) {
                        return 0;
                    }
                };
            }

            return getOrdering(); // No two elements should be equal, so it should never merge
        }

        @Override
        protected BinaryFn<SegmentAnalysis, SegmentAnalysis, SegmentAnalysis> createMergeFn(
                final Query<SegmentAnalysis> inQ) {
            return new BinaryFn<SegmentAnalysis, SegmentAnalysis, SegmentAnalysis>() {
                private final SegmentMetadataQuery query = (SegmentMetadataQuery) inQ;

                @Override
                public SegmentAnalysis apply(SegmentAnalysis arg1, SegmentAnalysis arg2) {
                    if (arg1 == null) {
                        return arg2;
                    }

                    if (arg2 == null) {
                        return arg1;
                    }

                    if (!query.isMerge()) {
                        throw new ISE("Merging when a merge isn't supposed to happen[%s], [%s]", arg1, arg2);
                    }

                    List<Interval> newIntervals = JodaUtils
                            .condenseIntervals(Iterables.concat(arg1.getIntervals(), arg2.getIntervals()));

                    final Map<String, ColumnAnalysis> leftColumns = arg1.getColumns();
                    final Map<String, ColumnAnalysis> rightColumns = arg2.getColumns();
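                    // TreeMap keeps the merged column analyses sorted by column name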
                    Map<String, ColumnAnalysis> columns = Maps.newTreeMap();

                    Set<String> rightColumnNames = Sets.newHashSet(rightColumns.keySet());
                    for (Map.Entry<String, ColumnAnalysis> entry : leftColumns.entrySet()) {
                        final String columnName = entry.getKey();
                        columns.put(columnName, entry.getValue().fold(rightColumns.get(columnName)));
                        rightColumnNames.remove(columnName);
                    }

                    for (String columnName : rightColumnNames) {
                        columns.put(columnName, rightColumns.get(columnName));
                    }

                    return new SegmentAnalysis("merged", newIntervals, columns,
                            arg1.getSize() + arg2.getSize());
                }
            };
        }
    };
}

From source file: com.kolich.aws.services.s3.impl.KolichS3Signer.java

/**
  * Calculate the canonical string for a REST/HTTP request to S3.
  */
private static final String getS3CanonicalString(final AwsHttpRequest request) {
    // A few standard headers we extract for convenience.
    final String contentType = CONTENT_TYPE.toLowerCase(), contentMd5 = CONTENT_MD5.toLowerCase(),
            date = DATE.toLowerCase();
    // Start with the empty string ("").
    final StringBuilder buf = new StringBuilder();
    // Next is the HTTP verb and a newline.
    buf.append(request.getMethod() + LINE_SEPARATOR_UNIX);
    // Add all interesting headers to a list, then sort them.
    // "Interesting" is defined as Content-MD5, Content-Type, Date,
    // and x-amz-... headers.
    final Map<String, String> headersMap = getHeadersAsMap(request);
    final SortedMap<String, String> interesting = Maps.newTreeMap();
    if (!headersMap.isEmpty()) {
        Iterator<Map.Entry<String, String>> it = headersMap.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, String> entry = it.next();
            final String key = entry.getKey(), value = entry.getValue();
            if (key == null) {
                continue;
            }
            final String lk = key.toLowerCase(Locale.getDefault());
            // Ignore any headers that are not interesting.
            if (lk.equals(contentType) || lk.equals(contentMd5) || lk.equals(date)
                    || lk.startsWith(AMAZON_PREFIX)) {
                interesting.put(lk, value);
            }
        }
    }
    // Remove default date timestamp if "x-amz-date" is set.
    if (interesting.containsKey(S3_ALTERNATE_DATE)) {
        interesting.put(date, "");
    }
    // These headers require that we still put a new line in after them,
    // even if they don't exist.
    if (!interesting.containsKey(contentType)) {
        interesting.put(contentType, "");
    }
    if (!interesting.containsKey(contentMd5)) {
        interesting.put(contentMd5, "");
    }
    // Add all the interesting headers
    for (Iterator<Map.Entry<String, String>> i = interesting.entrySet().iterator(); i.hasNext();) {
        final Map.Entry<String, String> entry = i.next();
        final String key = entry.getKey();
        final Object value = entry.getValue();
        if (key.startsWith(AMAZON_PREFIX)) {
            buf.append(key).append(':').append(value);
        } else {
            buf.append(value);
        }
        buf.append(LINE_SEPARATOR_UNIX);
    }
    // The CanonicalizedResource this request is working with.
    // If the request specifies a bucket using the HTTP Host header
    // (virtual hosted-style), append the bucket name preceded by a
    // "/" (e.g., "/bucketname"). For path-style requests and requests
    // that don't address a bucket, do nothing.
    if (request.getResource() != null) {
        buf.append("/" + request.getResource() + request.getURI().getRawPath());
    } else {
        buf.append(request.getURI().getRawPath());
    }
    // Amazon requires us to sort the query string parameters.
    final List<SortableBasicNameValuePair> params = sortParams(URLEncodedUtils.parse(request.getURI(), UTF_8));
    String separator = "?";
    for (final NameValuePair pair : params) {
        final String name = pair.getName(), value = pair.getValue();
        // Skip any parameters that aren't part of the
        // canonical signed string.
        if (!INTERESTING_PARAMETERS.contains(name)) {
            continue;
        }
        buf.append(separator).append(name);
        if (value != null) {
            buf.append("=").append(value);
        }
        separator = "&";
    }
    return buf.toString();
}

From source file: org.smartdeveloperhub.harvesters.it.testing.util.Application.java

private static <T extends Map<?, ?>> TreeMap<String, String> sort(final T properties) {
    final TreeMap<String, String> orderedProperties = Maps.newTreeMap();
    for (final Entry<?, ?> entry : properties.entrySet()) {
        orderedProperties.put(entry.getKey().toString(), entry.getValue().toString());
    }
    return orderedProperties;
}

From source file: com.google.gitiles.DefaultAccess.java

@Override
public Map<String, RepositoryDescription> listRepositories(Set<String> branches) throws IOException {
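    // TreeMap keeps the listed repositories sorted by repository name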
    Map<String, RepositoryDescription> repos = Maps.newTreeMap();
    for (Repository repo : scanRepositories(basePath, req)) {
        repos.put(getRepositoryName(repo), buildDescription(repo, branches));
        repo.close();
    }
    return repos;
}

From source file: co.cask.tephra.hbase10.coprocessor.TransactionVisibilityFilter.java

/**
 * Creates a new {@link org.apache.hadoop.hbase.filter.Filter} for returning data only from visible transactions.
 *
 * @param tx the current transaction to apply.  Only data visible to this transaction will be returned.
 * @param ttlByFamily map of time-to-live (TTL) (in milliseconds) by column family name
 * @param allowEmptyValues if {@code true} cells with empty {@code byte[]} values will be returned, if {@code false}
 *                         these will be interpreted as "delete" markers and the column will be filtered out
 * @param scanType the type of scan operation being performed
 * @param cellFilter if non-null, this filter will be applied to all cells visible to the current transaction, by
 *                   calling {@link Filter#filterKeyValue(org.apache.hadoop.hbase.Cell)}.  If null, then
 *                   {@link Filter.ReturnCode#INCLUDE_AND_NEXT_COL} will be returned instead.
 */
public TransactionVisibilityFilter(Transaction tx, Map<byte[], Long> ttlByFamily, boolean allowEmptyValues,
        ScanType scanType, @Nullable Filter cellFilter) {
    this.tx = tx;
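    // sorted map of the oldest timestamp to keep per column family, computed from each family's TTL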
    this.oldestTsByFamily = Maps.newTreeMap();
    for (Map.Entry<byte[], Long> ttlEntry : ttlByFamily.entrySet()) {
        long familyTTL = ttlEntry.getValue();
        oldestTsByFamily.put(new ImmutableBytesWritable(ttlEntry.getKey()),
                familyTTL <= 0 ? 0 : tx.getVisibilityUpperBound() - familyTTL * TxConstants.MAX_TX_PER_MS);
    }
    this.allowEmptyValues = allowEmptyValues;
    this.clearDeletes = scanType == ScanType.COMPACT_DROP_DELETES || (scanType == ScanType.USER_SCAN
            && tx.getVisibilityLevel() != Transaction.VisibilityLevel.SNAPSHOT_ALL);
    this.cellFilter = cellFilter;
}

From source file: io.mapzone.arena.analytics.graph.SingleSourceNodeGraphFunction.java

@Override
public void createContents(final MdToolkit tk, final Composite parent, final Graph graph) {
    try {
        super.createContents(tk, parent, graph);
        final FeaturePropertySelectorUI sourcePropertiesUI = new FeaturePropertySelectorUI(tk, parent, prop -> {
            this.selectedSourcePropertyDescriptor = prop;
            EventManager.instance().publish(new GraphFunctionConfigurationChangedEvent(
                    SingleSourceNodeGraphFunction.this, "sourcePropertyDescriptor", prop));
        });
        final FeatureSourceSelectorUI sourceFeaturesUI = new FeatureSourceSelectorUI(tk, parent, fs -> {
            this.selectedSourceFeatureSource = fs;
            EventManager.instance().publish(new GraphFunctionConfigurationChangedEvent(
                    SingleSourceNodeGraphFunction.this, "sourceFeatureSource", fs));
            sourcePropertiesUI.setFeatureSource(fs);
        });

        final TreeMap<String, EdgeFunction> edgeFunctions = Maps.newTreeMap();
        for (Class<EdgeFunction> cl : availableFunctions) {
            try {
                EdgeFunction function = cl.newInstance();
                function.init(this);
                edgeFunctions.put(function.title(), function);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }

        final Composite edgeFunctionContainer = tk.createComposite(parent, SWT.NONE);
        edgeFunctionContainer.setLayout(FormLayoutFactory.defaults().create());

        final ComboViewer edgeFunctionsUI = new ComboViewer(parent,
                SWT.SINGLE | SWT.BORDER | SWT.DROP_DOWN | SWT.READ_ONLY);
        edgeFunctionsUI.setContentProvider(new ArrayContentProvider());
        edgeFunctionsUI.setInput(edgeFunctions.keySet());
        edgeFunctionsUI.addSelectionChangedListener(ev -> {
            String selected = SelectionAdapter.on(ev.getSelection()).first(String.class).get();
            EdgeFunction function = edgeFunctions.get(selected);

            // FormDataFactory.on( edgeFunctionContainer ).top(
            // edgeFunctionsUI.getCombo(), 4 )
            // .height( function.preferredHeight() ).left( COLUMN_2 ).right( 100
            // );
            FormDataFactory.on(edgeFunctionContainer).height(function.preferredHeight());

            UIUtils.disposeChildren(edgeFunctionContainer);
            // create panel
            function.createContents(tk, edgeFunctionContainer, selectedSourceFeatureSource);
            // FormDataFactory.on( edgeFunctionContainer ).fill();

            // resize also the top container
            // XXX depends on the parent structure
            ((FormData) parent.getParent().getParent().getLayoutData()).height = preferredHeight()
                    + function.preferredHeight();
            parent.getParent().getParent().getParent().layout();

            this.selectedEdgeFunction = function;
        });

        final Label selectSourceTableLabel = tk.createLabel(parent, i18n.get("selectSourceTable"), SWT.NONE);
        FormDataFactory.on(selectSourceTableLabel).top(15).left(1);
        FormDataFactory.on(sourceFeaturesUI.control()).top(selectSourceTableLabel, 2).left(1);
        final Label selectSourcePropertiesLabel = tk.createLabel(parent, i18n.get("selectSourceProperties"),
                SWT.NONE);
        FormDataFactory.on(selectSourcePropertiesLabel).top(sourceFeaturesUI.control(), 4).left(COLUMN_2);
        FormDataFactory.on(sourcePropertiesUI.control()).top(selectSourcePropertiesLabel, 2).left(COLUMN_2);

        final Label selectEdgeFunctionLabel = tk.createLabel(parent, i18n.get("selectEdgeFunction"), SWT.NONE);
        FormDataFactory.on(selectEdgeFunctionLabel).top(sourcePropertiesUI.control(), 6).left(1);
        FormDataFactory.on(edgeFunctionsUI.getCombo()).top(selectEdgeFunctionLabel, 2).left(1);
        FormDataFactory.on(edgeFunctionContainer).fill().top(edgeFunctionsUI.getCombo(), 4).left(COLUMN_2);

        // event listener
        EventManager.instance().subscribe(this, ifType(EdgeFunctionConfigurationDoneEvent.class,
                ev -> ev.status.get() == Boolean.TRUE && ev.getSource().equals(selectedEdgeFunction)));

        EventManager.instance().subscribe(this, ifType(GraphFunctionConfigurationChangedEvent.class,
                ev -> ev.getSource().equals(SingleSourceNodeGraphFunction.this)));
    } catch (Exception e) {
        StatusDispatcher.handleError("", e);
    }
}

From source file: io.druid.query.metadata.SegmentMetadataQueryQueryToolChest.java

@Override
public QueryRunner<SegmentAnalysis> mergeResults(final QueryRunner<SegmentAnalysis> runner) {
    return new ResultMergeQueryRunner<SegmentAnalysis>(runner) {
        @Override
        protected Ordering<SegmentAnalysis> makeOrdering(Query<SegmentAnalysis> query) {
            if (((SegmentMetadataQuery) query).isMerge()) {
                // Merge everything always
                return new Ordering<SegmentAnalysis>() {
                    @Override
                    public int compare(@Nullable SegmentAnalysis left, @Nullable SegmentAnalysis right) {
                        return 0;
                    }
                };
            }

            return getOrdering(); // No two elements should be equal, so it should never merge
        }

        @Override
        protected BinaryFn<SegmentAnalysis, SegmentAnalysis, SegmentAnalysis> createMergeFn(
                final Query<SegmentAnalysis> inQ) {
            return new BinaryFn<SegmentAnalysis, SegmentAnalysis, SegmentAnalysis>() {
                private final SegmentMetadataQuery query = (SegmentMetadataQuery) inQ;

                @Override
                public SegmentAnalysis apply(SegmentAnalysis arg1, SegmentAnalysis arg2) {
                    if (arg1 == null) {
                        return arg2;
                    }

                    if (arg2 == null) {
                        return arg1;
                    }

                    List<Interval> newIntervals = JodaUtils
                            .condenseIntervals(Iterables.concat(arg1.getIntervals(), arg2.getIntervals()));

                    final Map<String, ColumnAnalysis> leftColumns = arg1.getColumns();
                    final Map<String, ColumnAnalysis> rightColumns = arg2.getColumns();
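                    // TreeMap keeps the merged column analyses sorted by column name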
                    Map<String, ColumnAnalysis> columns = Maps.newTreeMap();

                    Set<String> rightColumnNames = Sets.newHashSet(rightColumns.keySet());
                    for (Map.Entry<String, ColumnAnalysis> entry : leftColumns.entrySet()) {
                        final String columnName = entry.getKey();
                        columns.put(columnName, entry.getValue().fold(rightColumns.get(columnName)));
                        rightColumnNames.remove(columnName);
                    }

                    for (String columnName : rightColumnNames) {
                        columns.put(columnName, rightColumns.get(columnName));
                    }

                    return new SegmentAnalysis("merged", newIntervals, columns,
                            arg1.getSize() + arg2.getSize());
                }
            };
        }
    };
}

From source file: uk.co.unclealex.executable.generator.CodeGeneratorImpl.java

/**
 * {@inheritDoc}
 */
@Override
public void generate(ClassLoader classLoader, Path sourceDirectory, Path targetDirectory)
        throws IOException, ExecutableScanException {
    ClassLoader generatedClassLoader = new URLClassLoader(new URL[] { sourceDirectory.toUri().toURL() },
            classLoader);
    List<String> allClassNames = getAllClassNamesCollector().listAllClassNames(sourceDirectory);
    List<Class<?>> allClasses = Lists
            .newArrayList(Iterables.transform(allClassNames, new ClassFunction(generatedClassLoader)));
    List<ExecutableAnnotationInformation> executableAnnotationInformations = getExecutableAnnotationInformationFinder()
            .findExecutableAnnotationInformation(allClasses);
    log.info("Found the following scripts: " + Joiner.on(", ").join(
            Sets.newTreeSet(Iterables.transform(executableAnnotationInformations, new ScriptNameFunction()))));
    GeneratedClassWriter generatedClassWriter = getGeneratedClassWriter();
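    // TreeMap keeps the generated class names sorted by script name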
    Map<String, String> commandRunnerClassNamesByScriptName = Maps.newTreeMap();
    for (ExecutableAnnotationInformation executableAnnotationInformation : executableAnnotationInformations) {
        String commandRunnerClassName = generatedClassWriter
                .writeClass(executableAnnotationInformation, generatedClassLoader, targetDirectory)
                .getClassName();
        String scriptName = executableAnnotationInformation.getScriptName();
        commandRunnerClassNamesByScriptName.put(scriptName, commandRunnerClassName);
        log.info("Written class " + commandRunnerClassName + " for script " + scriptName);
    }
    generatedClassWriter.writeEntryPointClass(targetDirectory, commandRunnerClassNamesByScriptName);
}

From source file: org.locationtech.geogig.porcelain.InitOp.java

/**
 * Executes the Init operation.
 * 
 * @return the initialized repository
 * @throws IllegalStateException if a repository cannot be created on the current directory or
 *         re-initialized in the current dir or one if its parents as determined by
 *         {@link ResolveGeogigURI}
 */
@Override
protected Repository _call() {
    final Platform platform = platform();
    Optional<URI> resolvedURI = new ResolveGeogigURI(platform, hints).call();
    if (!resolvedURI.isPresent()) {
        resolvedURI = Optional.of(platform.pwd().getAbsoluteFile().toURI());
    }

    URI repoURI = resolvedURI.get();

    final RepositoryResolver repoInitializer = RepositoryResolver.lookup(repoURI);
    final boolean repoExisted = repoInitializer.repoExists(repoURI);

    repoInitializer.initialize(repoURI, context());

    Map<String, String> effectiveConfigBuilder = Maps.newTreeMap();
    Optional<Serializable> repoName = hints.get(Hints.REPOSITORY_NAME);
    if (repoName.isPresent()) {
        effectiveConfigBuilder.put("repo.name", String.valueOf(repoName.get()));
    }

    if (filterFile != null) {
        try {

            File oldFilterFile = new File(filterFile);
            if (!oldFilterFile.exists()) {
                throw new FileNotFoundException("No filter file found at " + filterFile + ".");
            }

            repository().blobStore().putBlob(AbstractMappedRemoteRepo.SPARSE_FILTER_BLOB_KEY,
                    Files.toByteArray(oldFilterFile));
        } catch (Exception e) {
            throw new IllegalStateException(
                    "Unable to copy filter file at path " + filterFile + " to the new repository.", e);
        }
    }

    Repository repository;
    try {
        if (!repoExisted) {
            // use a config database appropriate for the kind of repo URI
            try (ConfigDatabase configDB = repoInitializer.getConfigDatabase(repoURI, context)) {
                PluginDefaults defaults = context.pluginDefaults();
                addDefaults(configDB, defaults, effectiveConfigBuilder);
                if (config != null) {
                    effectiveConfigBuilder.putAll(config);
                }
                try {
                    for (Entry<String, String> pair : effectiveConfigBuilder.entrySet()) {
                        String key = pair.getKey();
                        String value = pair.getValue();
                        configDB.put(key, value);
                    }
                    repository = repository();
                    repository.configure();
                } catch (RepositoryConnectionException e) {
                    throw new IllegalStateException(
                            "Unable to initialize repository for the first time: " + e.getMessage(), e);
                }
            }
        } else {
            repository = repository();
        }
        try {
            repository.open();
            // make sure the repo has the empty tree
            ObjectStore objectDatabase = repository.objectDatabase();
            objectDatabase.put(RevTree.EMPTY);
        } catch (RepositoryConnectionException e) {
            throw new IllegalStateException("Error opening repository databases: " + e.getMessage(), e);
        }
    } catch (ConfigException e) {
        throw e;
    } catch (Exception e) {
        Throwables.propagateIfInstanceOf(e, IllegalStateException.class);
        throw new IllegalStateException("Can't access repository at '" + repoURI + "'", e);
    }

    if (!repoExisted) {
        try {
            createDefaultRefs();
        } catch (IllegalStateException e) {
            Throwables.propagate(e);
        }
    }
    return repository;
}

From source file: ezbake.services.graph.cmd.GraphClient.java

public void sendBasicGraph(EzGraphService.Iface client, EzSecurityToken token, boolean runCreateSchema)
        throws IOException, TException {
    log("Starting sendBasicGraph()");

    log("Obtained clients\nCreating Schema");
    if (runCreateSchema) {
        createSchema(client, token);
        log("Created Schema");
    }

    Graph graph = new Graph();

    log("Created Vertex");
    ElementId id1 = ElementId.localId("1");
    Vertex v1 = new Vertex(id1);
    Map<String, List<Property>> props = Maps.newHashMap();
    List<Property> propsList = Lists.newArrayList();
    propsList.add((new Property(PropValue.string_val("stevejobs"))).setVisibility(visibility));
    props.put(KEY_NAME, propsList);
    v1.setProperties(props);

    ElementId id2 = ElementId.localId("2");
    Vertex v2 = new Vertex(id2);
    Map<String, List<Property>> props2 = Maps.newHashMap();
    propsList = Lists.newArrayList();
    propsList.add((new Property(PropValue.string_val("stevewoz"))).setVisibility(visibility));
    props2.put(KEY_NAME, propsList);
    v2.setProperties(props2);

    log("Creating Edge");

    Edge edge = new Edge(id1, id2, "friend");
    edge.setVisibility(visibility);
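    // sorted map of edge properties keyed by property name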
    Map<String, Property> map = Maps.newTreeMap();
    map.put(KEY_NAME, new Property(PropValue.string_val("friendz")));
    map.get(KEY_NAME).setVisibility(visibility);
    edge.setProperties(map);
    graph.addToEdges(edge);
    graph.addToVertices(v1);
    graph.addToVertices(v2);

    log("Created Graph");

    writeGraph(client, token, graph);
}