Example usage for com.google.common.collect Maps newHashMapWithExpectedSize

Introduction

This page lists example usages of com.google.common.collect.Maps.newHashMapWithExpectedSize, collected from open-source projects.

Prototype

public static <K, V> HashMap<K, V> newHashMapWithExpectedSize(int expectedSize) 

Document

Creates a HashMap instance, with a high enough "initial capacity" that it should hold expectedSize elements without growth.
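
For example, a minimal self-contained sketch (class and variable names are illustrative) that sizes a map for a known number of entries up front:

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import com.google.common.collect.Maps;

public class ExpectedSizeExample {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("alpha", "beta", "gamma");

        // Unlike new HashMap<>(n), whose argument is a raw initial capacity that
        // the default 0.75 load factor can exhaust, newHashMapWithExpectedSize(n)
        // chooses a capacity large enough to hold n entries without rehashing.
        Map<String, Integer> lengths = Maps.newHashMapWithExpectedSize(words.size());
        for (String word : words) {
            lengths.put(word, word.length());
        }
        System.out.println(lengths); // e.g. {alpha=5, beta=4, gamma=5}
    }
}

The usage examples below follow the same pattern: the expected size is known at call time (a collection's size, a serialized count), so the map can be allocated once without intermediate resizes.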

Usage

From source file:org.apache.shindig.gadgets.uri.DefaultConcatUriManager.java

private ConcatData makeConcatUri(ConcatUri ctx, boolean isAdjacent, String version) {
    // TODO: Consider per-bundle isAdjacent plus first-bundle direct evaluation

    if (!isAdjacent && ctx.getType() != Type.JS) {
        // Split-concat is only supported for JS at the moment.
        // This situation should never occur due to ConcatLinkRewriter's implementation.
        throw new UnsupportedOperationException("Split concatenation only supported for JS");
    }

    UriBuilder uriBuilder = ctx.makeQueryParams(null, version);

    String concatHost = getReqVal(ctx.getContainer(), CONCAT_HOST_PARAM);
    String concatPath = getReqVal(ctx.getContainer(), CONCAT_PATH_PARAM);
    uriBuilder.setAuthority(concatHost);
    uriBuilder.setPath(concatPath);

    uriBuilder.addQueryParameter(Param.TYPE.getKey(), ctx.getType().getType());
    List<Uri> resourceUris = ctx.getBatch();
    Map<Uri, String> snippets = Maps.newHashMapWithExpectedSize(resourceUris.size());

    String splitParam = config.getString(ctx.getContainer(), CONCAT_JS_SPLIT_PARAM);
    boolean doSplit = false;
    if (!isAdjacent && splitParam != null && !"false".equalsIgnoreCase(splitParam)) {
        uriBuilder.addQueryParameter(Param.JSON.getKey(), splitParam);
        doSplit = true;
    }

    Integer i = Integer.valueOf(START_INDEX);
    for (Uri resource : resourceUris) {
        uriBuilder.addQueryParameter(i.toString(), resource.toString());
        i++;
        if (doSplit) {
            snippets.put(resource, getJsSnippet(splitParam, resource));
        }
    }

    return new ConcatData(uriBuilder.toUri(), snippets);
}

From source file:org.apache.beam.runners.apex.translation.ParDoTranslator.java

@Override
public void translate(ParDo.MultiOutput<InputT, OutputT> transform, TranslationContext context) {
    DoFn<InputT, OutputT> doFn = transform.getFn();
    DoFnSignature signature = DoFnSignatures.getSignature(doFn.getClass());

    if (signature.processElement().isSplittable()) {
        throw new UnsupportedOperationException(String.format("%s does not support splittable DoFn: %s",
                ApexRunner.class.getSimpleName(), doFn));
    }
    if (signature.stateDeclarations().size() > 0) {
        throw new UnsupportedOperationException(
                String.format("Found %s annotations on %s, but %s cannot yet be used with state in the %s.",
                        DoFn.StateId.class.getSimpleName(), doFn.getClass().getName(),
                        DoFn.class.getSimpleName(), ApexRunner.class.getSimpleName()));
    }

    if (signature.timerDeclarations().size() > 0) {
        throw new UnsupportedOperationException(
                String.format("Found %s annotations on %s, but %s cannot yet be used with timers in the %s.",
                        DoFn.TimerId.class.getSimpleName(), doFn.getClass().getName(),
                        DoFn.class.getSimpleName(), ApexRunner.class.getSimpleName()));
    }

    Map<TupleTag<?>, PValue> outputs = context.getOutputs();
    PCollection<InputT> input = context.getInput();
    List<PCollectionView<?>> sideInputs = transform.getSideInputs();
    Coder<InputT> inputCoder = input.getCoder();
    WindowedValueCoder<InputT> wvInputCoder = FullWindowedValueCoder.of(inputCoder,
            input.getWindowingStrategy().getWindowFn().windowCoder());

    ApexParDoOperator<InputT, OutputT> operator = new ApexParDoOperator<>(context.getPipelineOptions(), doFn,
            transform.getMainOutputTag(), transform.getAdditionalOutputTags().getAll(),
            input.getWindowingStrategy(), sideInputs, wvInputCoder, context.getStateBackend());

    Map<PCollection<?>, OutputPort<?>> ports = Maps.newHashMapWithExpectedSize(outputs.size());
    for (Entry<TupleTag<?>, PValue> output : outputs.entrySet()) {
        checkArgument(output.getValue() instanceof PCollection, "%s %s outputs non-PCollection %s of type %s",
                ParDo.MultiOutput.class.getSimpleName(), context.getFullName(), output.getValue(),
                output.getValue().getClass().getSimpleName());
        PCollection<?> pc = (PCollection<?>) output.getValue();
        if (output.getKey().equals(transform.getMainOutputTag())) {
            ports.put(pc, operator.output);
        } else {
            int portIndex = 0;
            for (TupleTag<?> tag : transform.getAdditionalOutputTags().getAll()) {
                if (tag.equals(output.getKey())) {
                    ports.put(pc, operator.additionalOutputPorts[portIndex]);
                    break;
                }
                portIndex++;
            }
        }
    }
    context.addOperator(operator, ports);
    context.addStream(context.getInput(), operator.input);
    if (!sideInputs.isEmpty()) {
        addSideInputs(operator.sideInput1, sideInputs, context);
    }
}

From source file:com.google.gerrit.server.change.RelatedChangesSorter.java

private Map<String, PatchSetData> collectById(List<ChangeData> in) throws OrmException, IOException {
    Project.NameKey project = in.get(0).change().getProject();
    Map<String, PatchSetData> result = Maps.newHashMapWithExpectedSize(in.size() * 3);
    try (Repository repo = repoManager.openRepository(project); RevWalk rw = new RevWalk(repo)) {
        rw.setRetainBody(true);
        for (ChangeData cd : in) {
            checkArgument(cd.change().getProject().equals(project),
                    "Expected change %s in project %s, found %s", cd.getId(), project,
                    cd.change().getProject());
            for (PatchSet ps : cd.patchSets()) {
                String id = ps.getRevision().get();
                RevCommit c = rw.parseCommit(ObjectId.fromString(id));
                PatchSetData psd = PatchSetData.create(cd, ps, c);
                result.put(id, psd);
            }
        }
    }
    return result;
}

From source file:com.kylinolap.storage.hbase.HBaseKeyRange.java

private void init(Collection<ColumnValueRange> andDimensionRanges) {
    int size = andDimensionRanges.size();
    Map<TblColRef, String> startValues = Maps.newHashMapWithExpectedSize(size);
    Map<TblColRef, String> stopValues = Maps.newHashMapWithExpectedSize(size);
    Map<TblColRef, Set<String>> fuzzyValues = Maps.newHashMapWithExpectedSize(size);
    for (ColumnValueRange dimRange : andDimensionRanges) {
        TblColRef column = dimRange.getColumn();
        startValues.put(column, dimRange.getBeginValue());
        stopValues.put(column, dimRange.getEndValue());
        fuzzyValues.put(column, dimRange.getEqualValues());

        TblColRef partitionDateColumnRef = cubeSeg.getCubeDesc().getCubePartitionDesc()
                .getPartitionDateColumnRef();
        if (column.equals(partitionDateColumnRef)) {
            initPartitionRange(dimRange);
        }
    }

    AbstractRowKeyEncoder encoder = AbstractRowKeyEncoder.createInstance(cubeSeg, cuboid);
    encoder.setBlankByte(RowConstants.ROWKEY_LOWER_BYTE);
    this.startKey = encoder.encode(startValues);

    encoder.setBlankByte(RowConstants.ROWKEY_UPPER_BYTE);
    // In order to make stopRow inclusive add a trailing 0 byte. #See
    // Scan.setStopRow(byte [] stopRow)
    this.stopKey = Bytes.add(encoder.encode(stopValues), ZERO_TAIL_BYTES);

    // restore encoder defaults for later reuse (note
    // AbstractRowKeyEncoder.createInstance() caches instances)
    encoder.setBlankByte(AbstractRowKeyEncoder.DEFAULT_BLANK_BYTE);

    // always fuzzy match cuboid ID to lock on the selected cuboid
    this.fuzzyKeys = buildFuzzyKeys(fuzzyValues);
}

From source file:com.android.tools.idea.editors.theme.ProjectThemeResolver.java

@NotNull
private static List<StyleResourceValue> getThemesFromResources(@NotNull Map<String, ResourceValue> styles,
        @NotNull Configuration configuration) {
    // Collect the themes out of all the styles.
    Collection<ResourceValue> values = styles.values();
    List<StyleResourceValue> themes = new ArrayList<StyleResourceValue>(values.size());

    // Try a little harder to see if the user has themes that don't have the normal naming convention
    ResourceResolver resolver = configuration.getResourceResolver();
    assert resolver != null;

    Map<ResourceValue, Boolean> cache = Maps.newHashMapWithExpectedSize(values.size());
    for (ResourceValue value : values) {
        if (value instanceof StyleResourceValue) {
            StyleResourceValue styleValue = (StyleResourceValue) value;
            if (resolver.isTheme(styleValue, cache)) {
                themes.add(styleValue);
            }
        }
    }

    return themes;
}

From source file:org.eclipse.elk.gmf.GmfLayoutConfigurationStore.java

/**
 * Return a map of default options. The base implementation only assigns the option
 * {@link LayoutOptions#COMMENT_BOX} to instances of {@link NoteEditPart}. More defaults can
 * be specified in subclasses.
 */
protected Map<String, Object> getDefaultOptions() {
    Map<String, Object> result = null;
    if (editPart instanceof NoteEditPart) {
        result = Maps.newHashMapWithExpectedSize(1);
        result.put(LayoutOptions.COMMENT_BOX.getId(), true);
    }
    return result;
}

From source file:com.opengamma.engine.view.compilation.CompiledViewDefinitionWithGraphsImpl.java

private CompiledViewDefinitionWithGraphsImpl(VersionCorrection versionCorrection, String identifier,
        ViewDefinition viewDefinition, Collection<DependencyGraph> graphs,
        Map<ComputationTargetReference, UniqueId> resolutions, Portfolio portfolio, long functionInitId,
        Collection<CompiledViewCalculationConfiguration> compiledCalcConfigs,
        Pair<Instant, Instant> validityRange) {
    super(versionCorrection, identifier, viewDefinition, portfolio, compiledCalcConfigs,
            validityRange.getFirst(), validityRange.getSecond());
    ArgumentChecker.notNull(resolutions, "resolutions");
    _functionInitId = functionInitId;
    final Map<String, DependencyGraphExplorer> graphsByConfiguration = Maps
            .newHashMapWithExpectedSize(graphs.size());
    for (DependencyGraph graph : graphs) {
        graphsByConfiguration.put(graph.getCalculationConfigurationName(),
                new DependencyGraphExplorerImpl(graph));
    }
    _graphsByConfiguration = Collections.unmodifiableMap(graphsByConfiguration);
    _resolutions = Collections.unmodifiableMap(resolutions);
}

From source file:com.romeikat.datamessie.core.base.ui.panel.AbstractStatisticsPanel.java

public AbstractStatisticsPanel(final String id, final StatisticsPage statisticsPage) {
    super(id);
    this.statisticsPage = statisticsPage;

    sourcesModel = new LoadableDetachableModel<Map<Long, SourceOverviewDto>>() {
        private static final long serialVersionUID = 1L;

        @Override
        protected Map<Long, SourceOverviewDto> load() {
            final HibernateSessionProvider sessionProvider = new HibernateSessionProvider(sessionFactory);
            final Long userId = DataMessieSession.get().getUserId();
            final DocumentsFilterSettings dfs = DataMessieSession.get().getDocumentsFilterSettings();
            final Long projectId = dfs.getProjectId();
            final Long sourceId = dfs.getSourceId();
            final Collection<Long> sourceTypeIds = dfs.getSourceTypeIds();
            final List<SourceOverviewDto> sources = sourceDao.getAsOverviewDtos(
                    sessionProvider.getStatelessSession(), userId, projectId, sourceId, sourceTypeIds);
            sessionProvider.closeStatelessSession();

            final Map<Long, SourceOverviewDto> sourcesMap = Maps.newHashMapWithExpectedSize(sources.size());
            for (final SourceOverviewDto source : sources) {
                sourcesMap.put(source.getId(), source);
            }
            return sourcesMap;
        }
    };
}

From source file:com.opengamma.core.AbstractSourceWithExternalBundle.java

public static <V extends UniqueIdentifiable & ExternalBundleIdentifiable> Map<ExternalIdBundle, V> getSingleMultiThread(
        final PoolExecutor executor, final SourceWithExternalBundle<V> source,
        final Collection<ExternalIdBundle> bundles, final VersionCorrection versionCorrection) {
    final PoolExecutor.Service<Void> jobs = executor.createService(null);
    final Map<ExternalIdBundle, V> results = Maps.newHashMapWithExpectedSize(bundles.size());
    for (final ExternalIdBundle bundle : bundles) {
        jobs.execute(new Runnable() {
            @Override
            public void run() {
                final V result = source.getSingle(bundle, versionCorrection);
                if (result != null) {
                    results.put(bundle, result);
                }
            }
        });
    }
    try {
        jobs.join();
    } catch (InterruptedException e) {
        throw new OpenGammaRuntimeException("Interrupted", e);
    }
    return results;
}

From source file:tv.floe.metronome.classification.logisticregression.POLRModelParameters.java

/**
 * Read appropriate fields from the InputStream
 */
@Override
public void readFields(DataInput in) throws IOException {
    targetVariable = in.readUTF();
    int typeMapSize = in.readInt();
    typeMap = Maps.newHashMapWithExpectedSize(typeMapSize);
    for (int i = 0; i < typeMapSize; i++) {
        String key = in.readUTF();
        String value = in.readUTF();
        typeMap.put(key, value);
    }
    numFeatures = in.readInt();
    useBias = in.readBoolean();
    maxTargetCategories = in.readInt();
    int targetCategoriesSize = in.readInt();
    targetCategories = Lists.newArrayListWithCapacity(targetCategoriesSize);
    for (int i = 0; i < targetCategoriesSize; i++) {
        targetCategories.add(in.readUTF());
    }
    lambda = in.readDouble();
    learningRate = in.readDouble();
    System.out.println("read lambda: " + lambda);
    // csv = null;
    polr = new ParallelOnlineLogisticRegression();
    polr.readFields(in);
}