Example usage for com.google.common.collect Maps newHashMapWithExpectedSize

Introduction

This page collects example usages of com.google.common.collect.Maps.newHashMapWithExpectedSize from open-source projects.

Prototype

public static <K, V> HashMap<K, V> newHashMapWithExpectedSize(int expectedSize) 

Document

Creates a HashMap instance, with a high enough "initial capacity" that it should hold expectedSize elements without growth.
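
For context, here is a minimal sketch (not taken from any of the projects below) contrasting the factory with passing the expected size straight to the HashMap constructor:

import com.google.common.collect.Maps;
import java.util.HashMap;
import java.util.Map;

public class ExpectedSizeDemo {
    public static void main(String[] args) {
        // new HashMap<>(100) treats 100 as a raw capacity: OpenJDK rounds
        // it up to 128 and rehashes once the 97th entry is inserted
        // (resize threshold = 128 * 0.75 = 96).
        Map<Integer, String> plain = new HashMap<>(100);

        // The Guava factory instead picks an initial capacity large enough
        // that 100 entries fit without any intermediate rehash.
        Map<Integer, String> sized = Maps.newHashMapWithExpectedSize(100);
        for (int i = 0; i < 100; i++) {
            sized.put(i, "value-" + i);
        }
        System.out.println(sized.size()); // 100
    }
}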

Usage

From source file: com.google.devtools.build.skyframe.QueryableGraphBackedSkyFunctionEnvironment.java

@Override
protected Map<SkyKey, ValueOrUntypedException> getValueOrUntypedExceptions(Iterable<SkyKey> depKeys)
        throws InterruptedException {
    Map<SkyKey, ? extends NodeEntry> resultMap = queryableGraph.getBatch(null, Reason.DEP_REQUESTED, depKeys);
    // resultMap will be smaller than what we actually return if some of depKeys were not found in
    // the graph. Pad to a minimum of 16 to avoid excessive resizing.
    Map<SkyKey, ValueOrUntypedException> result = Maps
            .newHashMapWithExpectedSize(Math.max(16, resultMap.size()));
    for (SkyKey dep : depKeys) {
        result.put(dep, toUntypedValue(resultMap.get(dep)));
    }
    return result;
}

From source file: client.DockerPackageClient.java

/**
 * Gets all of the packages in the registry.
 *
 * @return a response with all of the packages
 */
public F.Promise<PackageListResponse> getAllPackages() {

    final F.Promise<List<String>> repoNamesPromise = WS.client().url(uri("/v1/search").toString()).get()
            .map(searchResponse -> {
                // searchResponse = { results: [ {name: "", description: "" }, ... ] }
                final Spliterator<JsonNode> resultsIter = searchResponse.asJson().get("results").spliterator();

                return StreamSupport.stream(resultsIter, false)
                        .map(jsonResult -> jsonResult.get("name").asText()).collect(Collectors.toList());

            });

    final F.Promise<List<WSResponse>> tagListJsonNodes = repoNamesPromise.flatMap(repoNames -> {

        final List<WSRequest> requests = repoNames.stream()
                .map(name -> WS.client().url(uri(String.format("/v1/repositories/%s/tags", name)).toString()))
                .collect(Collectors.toList());

        return concurrentExecute(requests, 5);

    });

    // Combine the repo names and the tag lists into a Map, and package in a PackageListResponse:
    return repoNamesPromise.zip(tagListJsonNodes).map(namesAndJsonTuple -> {

        final Map<String, List<ImageMetadata>> packages = Maps
                .newHashMapWithExpectedSize(namesAndJsonTuple._1.size());

        final Iterator<String> repoNamesIter = namesAndJsonTuple._1.iterator();
        final Iterator<WSResponse> tagListsIter = namesAndJsonTuple._2.iterator();

        while (repoNamesIter.hasNext() && tagListsIter.hasNext()) {
            final String nextName = repoNamesIter.next();
            final WSResponse tagListResponse = tagListsIter.next();
            final List<ImageMetadata> tagList = repoTagListToImages(tagListResponse, nextName);

            packages.put(nextName, tagList);
        }

        return new PackageListResponse(packages);
    });
}
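
The while loop above is a manual zip over two parallel lists. Since both sides are Lists here, an index-based loop is an equivalent sketch (reusing the tuple names from the example; _1 and _2 are the two halves of Play's F.Tuple):

final List<String> names = namesAndJsonTuple._1;
final List<WSResponse> tagLists = namesAndJsonTuple._2;
// Size the map for one entry per repository name.
final int count = Math.min(names.size(), tagLists.size());
final Map<String, List<ImageMetadata>> packages = Maps.newHashMapWithExpectedSize(count);
for (int i = 0; i < count; i++) {
    packages.put(names.get(i), repoTagListToImages(tagLists.get(i), names.get(i)));
}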

From source file: voldemort.store.stats.ClusterWideCounter.java

public Map<Integer, Long> byZone() {
    Map<Integer, Long> map = Maps.newHashMapWithExpectedSize(cluster.getNumberOfNodes());
    for (Map.Entry<Integer, AtomicLong> entry : values.entrySet()) {
        try {
            Node node = cluster.getNodeById(entry.getKey());
            int zoneId = node.getZoneId();
            Long count = map.get(zoneId);
            if (count == null)
                count = 0L;
            count += entry.getValue().get();
            map.put(zoneId, count);
        } catch (VoldemortException e) {
            logger.warn("Can't get zone information for node id " + entry.getKey(), e);
        }
    }
    return Collections.unmodifiableMap(map);
}
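
The get, null-check, add, put sequence is the pre-Java 8 way to accumulate a tally. On Java 8 and later the same method body can be written with Map.merge; a sketch reusing the names from the example above (cluster, values, and logger come from the surrounding class):

Map<Integer, Long> map = Maps.newHashMapWithExpectedSize(cluster.getNumberOfNodes());
for (Map.Entry<Integer, AtomicLong> entry : values.entrySet()) {
    try {
        int zoneId = cluster.getNodeById(entry.getKey()).getZoneId();
        // merge() inserts the value for a new key, or combines it with the
        // existing value using Long::sum.
        map.merge(zoneId, entry.getValue().get(), Long::sum);
    } catch (VoldemortException e) {
        logger.warn("Can't get zone information for node id " + entry.getKey(), e);
    }
}
return Collections.unmodifiableMap(map);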

From source file: org.gradle.internal.component.external.model.ivy.RealisedIvyModuleResolveMetadata.java

private static Map<String, ConfigurationMetadata> realiseConfigurations(
        DefaultIvyModuleResolveMetadata metadata, VariantMetadataRules variantMetadataRules) {
    Map<Artifact, ModuleComponentArtifactMetadata> artifacts = new IdentityHashMap<Artifact, ModuleComponentArtifactMetadata>();
    IvyConfigurationHelper configurationHelper = new IvyConfigurationHelper(metadata.getArtifactDefinitions(),
            artifacts, metadata.getExcludes(), metadata.getDependencies(), metadata.getId());

    Map<String, ConfigurationMetadata> configurations = Maps
            .newHashMapWithExpectedSize(metadata.getConfigurationNames().size());
    ImmutableMap<String, Configuration> configurationDefinitions = metadata.getConfigurationDefinitions();
    for (String configurationName : metadata.getConfigurationNames()) {
        Configuration configuration = configurationDefinitions.get(configurationName);
        ImmutableSet<String> hierarchy = LazyToRealisedModuleComponentResolveMetadataHelper
                .constructHierarchy(configuration, configurationDefinitions);

        NameOnlyVariantResolveMetadata variant = new NameOnlyVariantResolveMetadata(configurationName);
        ImmutableAttributes variantAttributes = variantMetadataRules.applyVariantAttributeRules(variant,
                metadata.getAttributes());

        CapabilitiesMetadata capabilitiesMetadata = variantMetadataRules.applyCapabilitiesRules(variant,
                ImmutableCapabilities.EMPTY);

        configurations.put(configurationName,
                createConfiguration(configurationHelper, variantMetadataRules, metadata.getId(),
                        configurationName, configuration.isTransitive(), configuration.isVisible(), hierarchy,
                        configurationHelper.filterArtifacts(configurationName, hierarchy),
                        configurationHelper.filterExcludes(hierarchy), variantAttributes,
                        ImmutableCapabilities.of(capabilitiesMetadata.getCapabilities())));
    }
    return configurations;
}

From source file: org.apache.kylin.tool.metrics.systemcube.CubeDescCreator.java

public static CubeDesc generateKylinCubeDescForMetricsQuery(KylinConfig config, SinkTool sinkTool) {
    String tableName = sinkTool.getTableNameForMetrics(config.getKylinMetricsSubjectQuery());

    //Set for dimensions
    List<String> dimensions = ModelCreator.getDimensionsForMetricsQuery();
    dimensions.remove(TimePropertyEnum.DAY_TIME.toString());
    dimensions.remove(RecordEvent.RecordReserveKeyEnum.TIME.toString());

    List<DimensionDesc> dimensionDescList = Lists.newArrayListWithExpectedSize(dimensions.size());
    for (String dimensionName : dimensions) {
        dimensionDescList.add(getDimensionDesc(tableName, dimensionName));
    }

    //Set for measures
    List<String> measures = ModelCreator.getMeasuresForMetricsQuery();
    measures.remove(QueryPropertyEnum.ID_CODE.toString());
    List<MeasureDesc> measureDescList = Lists.newArrayListWithExpectedSize(measures.size() * 2 + 1 + 1);

    List<Pair<String, String>> measureTypeList = HiveTableCreator.getHiveColumnsForMetricsQuery();
    Map<String, String> measureTypeMap = Maps.newHashMapWithExpectedSize(measureTypeList.size());
    for (Pair<String, String> entry : measureTypeList) {
        measureTypeMap.put(entry.getKey(), entry.getValue());
    }
    measureDescList.add(getMeasureCount());
    measureDescList.add(getMeasureMin(QueryPropertyEnum.TIME_COST.toString(),
            measureTypeMap.get(QueryPropertyEnum.TIME_COST.toString())));
    for (String measure : measures) {
        measureDescList.add(getMeasureSum(measure, measureTypeMap.get(measure)));
        measureDescList.add(getMeasureMax(measure, measureTypeMap.get(measure)));
    }
    measureDescList.add(getMeasureHLL(QueryPropertyEnum.ID_CODE.toString()));
    measureDescList.add(getMeasurePercentile(QueryPropertyEnum.TIME_COST.toString()));

    //Set for row key
    RowKeyColDesc[] rowKeyColDescs = new RowKeyColDesc[dimensionDescList.size()];
    int idx = getTimeRowKeyColDesc(tableName, rowKeyColDescs);
    rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QueryPropertyEnum.USER.toString(), idx + 1);
    idx++;
    rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QueryPropertyEnum.PROJECT.toString(), idx + 1);
    idx++;
    rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QueryPropertyEnum.REALIZATION.toString(), idx + 1);
    idx++;
    rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QueryPropertyEnum.REALIZATION_TYPE.toString(), idx + 1);
    idx++;
    rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QueryPropertyEnum.EXCEPTION.toString(), idx + 1);
    idx++;
    rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QueryPropertyEnum.TYPE.toString(), idx + 1);
    idx++;
    rowKeyColDescs[idx] = getRowKeyColDesc(tableName, RecordEvent.RecordReserveKeyEnum.HOST.toString(),
            idx + 1);
    idx++;

    RowKeyDesc rowKeyDesc = new RowKeyDesc();
    rowKeyDesc.setRowkeyColumns(rowKeyColDescs);

    //Set for aggregation group
    String[][] hierarchy_dims = new String[2][];
    hierarchy_dims[0] = getTimeHierarchy();
    hierarchy_dims[1] = new String[2];
    hierarchy_dims[1][0] = QueryPropertyEnum.REALIZATION_TYPE.toString();
    hierarchy_dims[1][1] = QueryPropertyEnum.REALIZATION.toString();
    for (int i = 0; i < hierarchy_dims.length; i++) {
        hierarchy_dims[i] = refineColumnWithTable(tableName, hierarchy_dims[i]);
    }

    SelectRule selectRule = new SelectRule();
    selectRule.mandatoryDims = new String[0];
    selectRule.hierarchyDims = hierarchy_dims;
    selectRule.jointDims = new String[0][0];

    AggregationGroup aggGroup = new AggregationGroup();
    aggGroup.setIncludes(refineColumnWithTable(tableName, dimensions));
    aggGroup.setSelectRule(selectRule);

    //Set for hbase mapping
    HBaseMappingDesc hBaseMapping = new HBaseMappingDesc();
    hBaseMapping.setColumnFamily(getHBaseColumnFamily(measureDescList));

    return generateKylinCubeDesc(tableName, sinkTool.getStorageType(), dimensionDescList, measureDescList,
            rowKeyDesc, aggGroup, hBaseMapping, sinkTool.getCubeDescOverrideProperties());
}

From source file: org.elasticsearch.search.suggest.completion.CompletionSuggester.java

@Override
protected Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(
        String name, CompletionSuggestionContext suggestionContext, IndexReader indexReader, CharsRef spare)
        throws IOException {
    if (suggestionContext.mapper() == null || !(suggestionContext.mapper() instanceof CompletionFieldMapper)) {
        throw new ElasticsearchException(
                "Field [" + suggestionContext.getField() + "] is not a completion suggest field");
    }

    CompletionSuggestion completionSuggestion = new CompletionSuggestion(name, suggestionContext.getSize());
    UnicodeUtil.UTF8toUTF16(suggestionContext.getText(), spare);

    CompletionSuggestion.Entry completionSuggestEntry = new CompletionSuggestion.Entry(
            new StringText(spare.toString()), 0, spare.length());
    completionSuggestion.addTerm(completionSuggestEntry);

    String fieldName = suggestionContext.getField();
    Map<String, CompletionSuggestion.Entry.Option> results = Maps
            .newHashMapWithExpectedSize(indexReader.leaves().size() * suggestionContext.getSize());
    for (AtomicReaderContext atomicReaderContext : indexReader.leaves()) {
        AtomicReader atomicReader = atomicReaderContext.reader();
        Terms terms = atomicReader.fields().terms(fieldName);
        if (terms instanceof Completion090PostingsFormat.CompletionTerms) {
            final Completion090PostingsFormat.CompletionTerms lookupTerms = (Completion090PostingsFormat.CompletionTerms) terms;
            final Lookup lookup = lookupTerms.getLookup(suggestionContext.mapper(), suggestionContext);
            if (lookup == null) {
                // we don't have a lookup for this segment.. this might be possible if a merge dropped all
                // docs from the segment that had a value in this segment.
                continue;
            }
            List<Lookup.LookupResult> lookupResults = lookup.lookup(spare, false, suggestionContext.getSize());
            for (Lookup.LookupResult res : lookupResults) {

                final String key = res.key.toString();
                final float score = res.value;
                final Option value = results.get(key);
                if (value == null) {
                    final Option option = new CompletionSuggestion.Entry.Option(new StringText(key), score,
                            res.payload == null ? null : new BytesArray(res.payload));
                    results.put(key, option);
                } else if (value.getScore() < score) {
                    value.setScore(score);
                    value.setPayload(res.payload == null ? null : new BytesArray(res.payload));
                }
            }
        }
    }
    final List<CompletionSuggestion.Entry.Option> options = new ArrayList<>(results.values());
    CollectionUtil.introSort(options, scoreComparator);

    int optionCount = Math.min(suggestionContext.getSize(), options.size());
    for (int i = 0; i < optionCount; i++) {
        completionSuggestEntry.addOption(options.get(i));
    }

    return completionSuggestion;
}

From source file: com.google.devtools.depan.view_doc.layout.LayoutContext.java

/**
 * Populate internal table of node locations from supplied positions.
 * Only positions for movable and fixed nodes are used.
 */
public void setNodeLocations(Map<GraphNode, Point2D> currPositions) {
    nodeLocations = Maps.newHashMapWithExpectedSize(movableNodes.size() + fixedNodes.size());

    for (GraphNode node : movableNodes) {
        Point2D point = currPositions.get(node);
        if (null != point) {
            nodeLocations.put(node, point);
        }
    }

    for (GraphNode node : fixedNodes) {
        Point2D point = currPositions.get(node);
        if (null != point) {
            nodeLocations.put(node, point);
        }
    }
}

From source file: org.apache.nifi.processors.kite.AvroRecordConverter.java

/**
 * @param inputSchema
 *            Schema of input record objects
 * @param outputSchema
 *            Schema of output record objects
 * @param fieldMapping
 *            Map from field name in input record to field name in output
 *            record.
 * @param locale
 *            Locale to use
 */
public AvroRecordConverter(Schema inputSchema, Schema outputSchema, Map<String, String> fieldMapping,
        Locale locale) {
    this.inputSchema = inputSchema;
    this.outputSchema = outputSchema;
    // Need to reverse this map.
    this.fieldMapping = Maps.newHashMapWithExpectedSize(fieldMapping.size());
    for (Map.Entry<String, String> entry : fieldMapping.entrySet()) {
        this.fieldMapping.put(entry.getValue(), entry.getKey());
    }
    this.locale = locale;
}
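
One thing the reversal loop silently tolerates is two input fields mapped to the same output field name: the later entry overwrites the earlier one. A sketch of a fail-fast inversion, as a hypothetical helper that is not part of the NiFi source:

private static Map<String, String> invert(Map<String, String> mapping) {
    Map<String, String> inverted = Maps.newHashMapWithExpectedSize(mapping.size());
    for (Map.Entry<String, String> entry : mapping.entrySet()) {
        // put() returns the previous value for the key, so a non-null
        // result means two inputs collided on the same target field.
        String previous = inverted.put(entry.getValue(), entry.getKey());
        if (previous != null) {
            throw new IllegalArgumentException(
                    "Two fields map to the same target: " + entry.getValue());
        }
    }
    return inverted;
}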

From source file: monasca.log.api.app.LogService.java

@Inject
public LogService(final ApiConfig config, final Producer<String, String> producer,
        final LogSerializer logSerializer) {
    this.config = config;
    this.producer = producer;
    this.serializer = logSerializer;
    this.payloadTransformers = Maps.newHashMapWithExpectedSize(2);
}

From source file: models.Cloud.java

public Map<String, String> properties() {
    Map<String, String> resultMap = Maps.newHashMapWithExpectedSize(cloudProperties.size());
    for (CloudProperty cloudProperty : cloudProperties) {
        resultMap.put(cloudProperty.key(), cloudProperty.value());
    }
    return resultMap;
}
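
On Java 8 and later the same projection can be written as a stream collect; a sketch reusing the names above. This trades the explicit pre-sizing for brevity, and unlike the loop (where a later duplicate key would silently win), Collectors.toMap throws IllegalStateException on duplicate keys:

public Map<String, String> properties() {
    // requires: import java.util.stream.Collectors;
    return cloudProperties.stream()
            .collect(Collectors.toMap(CloudProperty::key, CloudProperty::value));
}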