Example usage for com.google.common.collect Maps newHashMapWithExpectedSize

Introduction

This page collects example usages of com.google.common.collect.Maps.newHashMapWithExpectedSize, drawn from open-source projects.

Prototype

public static <K, V> HashMap<K, V> newHashMapWithExpectedSize(int expectedSize) 

Document

Creates a HashMap instance, with a high enough "initial capacity" that it should hold expectedSize elements without growth.
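
For orientation, here is a minimal, self-contained sketch of typical usage (the names list and the lengths map are hypothetical):

import com.google.common.collect.Maps;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class ExpectedSizeExample {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");

        // Sized so that inserting names.size() entries should not force a rehash.
        Map<String, Integer> lengths = Maps.newHashMapWithExpectedSize(names.size());
        for (String name : names) {
            lengths.put(name, name.length());
        }

        System.out.println(lengths); // e.g. {gamma=5, alpha=5, beta=4}
    }
}

Note the difference from new HashMap<>(expectedSize): the HashMap constructor treats its argument as a raw initial capacity, so with the default load factor of 0.75 it may still resize before expectedSize entries are added, whereas this factory method computes a capacity large enough up front.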

Usage

From source file: com.attribyte.essem.DefaultResponseGenerator.java
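Here parseGraph groups search hits into one samples array per MetricKey; the lookup map of output graphs is created with a small fixed expected size of four.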

protected void parseGraph(JsonNode sourceParent, List<String> fields, RateUnit rateUnit, ObjectNode targetMeta,
        ArrayNode targetGraph) {

    DateTimeFormatter parser = ISODateTimeFormat.basicDateTime();

    Map<MetricKey, ArrayNode> outputGraphs = Maps.newHashMapWithExpectedSize(4);

    JsonNode hitsObj = sourceParent.get("hits");
    if (hitsObj != null) {
        JsonNode hitsArr = hitsObj.get("hits");
        if (hitsArr != null) {

            for (JsonNode hitObj : hitsArr) {
                JsonNode fieldsObj = hitObj.get("fields");
                if (fieldsObj != null) {
                    MetricKey key = new MetricKey(getStringField(fieldsObj, "name"),
                            getStringField(fieldsObj, "application"), getStringField(fieldsObj, "host"),
                            getStringField(fieldsObj, "instance"));

                    ArrayNode samplesArr = outputGraphs.get(key);
                    if (samplesArr == null) {
                        ObjectNode graphObj = targetGraph.addObject();
                        addMeta(key, graphObj, targetMeta);
                        samplesArr = graphObj.putArray("samples");
                        outputGraphs.put(key, samplesArr);
                    }

                    ArrayNode sampleArr = samplesArr.addArray();

                    DateTime timestamp = parser.parseDateTime(getStringField(fieldsObj, "ts"));
                    sampleArr.add(timestamp.getMillis());
                    sampleArr.add(1); // Sample count for this timestamp.

                    for (String field : fields) {
                        if (!graphIgnoreProperties.contains(field)) {
                            JsonNode fieldNode = getFieldNode(fieldsObj, field);
                            if (rateUnit == RAW_RATE_UNIT || fieldNode == null || !rateFields.contains(field)) {
                                if (fieldNode != null) {
                                    sampleArr.add(fieldNode);
                                } else {
                                    sampleArr.addNull();
                                }
                            } else {
                                sampleArr.add(fieldNode.doubleValue() * rateUnit.mult);
                            }
                        }
                    }
                }
            }
        }
    }
}

From source file: com.opengamma.integration.marketdata.manipulator.dsl.Scenario.java
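createDefinition sizes the parameter map to the total number of recorded manipulations, then collapses each selector's manipulators into a single CompositeStructureManipulator.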

/**
 * @return A {@link ScenarioDefinition} created from this scenario's selectors and manipulators
 */
@SuppressWarnings("unchecked")
public ScenarioDefinition createDefinition() {
    Map<DistinctMarketDataSelector, FunctionParameters> params = Maps
            .newHashMapWithExpectedSize(_manipulations.size());
    for (Map.Entry<DistinctMarketDataSelector, Collection<StructureManipulator<?>>> entry : _manipulations
            .asMap().entrySet()) {
        DistinctMarketDataSelector selector = entry.getKey();
        // ListMultimap always has Lists as entries even if the signature doesn't say so
        List<StructureManipulator<?>> manipulators = (List<StructureManipulator<?>>) entry.getValue();
        CompositeStructureManipulator compositeManipulator = new CompositeStructureManipulator(manipulators);
        SimpleFunctionParameters functionParameters = new SimpleFunctionParameters();
        functionParameters.setValue(StructureManipulationFunction.EXPECTED_PARAMETER_NAME,
                compositeManipulator);
        params.put(selector, functionParameters);
    }
    return new ScenarioDefinition(_name, params);
}

From source file: org.eclipse.xtext.generator.trace.AbstractTraceRegion.java
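invertAll sizes the result map from the map of matching locations, producing one list of inverted trace regions per source URI.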

public Map<SourceRelativeURI, List<AbstractTraceRegion>> invertAll(SourceRelativeURI myPath) {
    Map<SourceRelativeURI, List<Pair<ILocationData, AbstractTraceRegion>>> matchingLocations = collectMatchingLocations(
            null);
    Map<SourceRelativeURI, List<AbstractTraceRegion>> result = Maps
            .newHashMapWithExpectedSize(matchingLocations.size());
    for (SourceRelativeURI uri : matchingLocations.keySet()) {
        List<Pair<ILocationData, AbstractTraceRegion>> expectedMatchingLocations = matchingLocations.get(uri);
        if (expectedMatchingLocations != null) {
            inplaceSortByOffset(expectedMatchingLocations);
            List<AbstractTraceRegion> resultPerURI = toInvertedTraceRegions(expectedMatchingLocations, myPath);
            result.put(uri, resultPerURI);
        }
    }
    return result;
}

From source file: org.zenoss.zep.index.impl.MultiBackendEventIndexDao.java
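processTasks pre-sizes both the pending-task map and the event lookup set to the size of the incoming batch, since in the worst case every task is an INDEX_EVENT.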

/** Only call this within a backendsUse.lock() block. */
private void processTasks(String backendId, List<EventIndexBackendTask> tasks, WorkQueue q)
        throws ZepException {
    final EventIndexBackendConfiguration configuration = backends.get(backendId);
    if (configuration == null)
        throw new ZepException("Tried to process tasks for unknown backend: " + backendId);
    final EventIndexBackend backend = configuration.getBackend();
    if (backend == null)
        throw new ZepException("Tried to process tasks for unknown backend: " + backendId);
    logger.debug("Processing {} tasks for backend {}", tasks.size(), backendId);

    final Set<EventIndexBackendTask> flushes = Sets.newHashSet();
    final Map<String, EventIndexBackendTask> indexTasks = Maps.newHashMapWithExpectedSize(tasks.size());
    final Set<EventSummary> toIndex = Sets.newHashSetWithExpectedSize(tasks.size());

    for (EventIndexBackendTask task : tasks) {
        switch (task.op) {
        case FLUSH:
            flushes.add(task);
            break;
        case INDEX_EVENT:
            indexTasks.put(task.uuid, task);
            toIndex.add(EventSummary.newBuilder().setUuid(task.uuid).setLastSeenTime(task.lastSeen).build());
            break;
        default:
            logger.error("UNEXPECTED TASK OPERATION: {}", task.op);
            q.complete(task);
        }
    }

    try {
        if (!toIndex.isEmpty()) {
            logger.debug(String.format("Looking up %d events by primary key", toIndex.size()));
            List<EventSummary> events = eventDao.findByKey(toIndex);
            if (events.size() != toIndex.size())
                logger.info("Found {} of {} events by primary key", events.size(), toIndex.size());
            else
                logger.debug("Found {} of {} events by primary key", events.size(), toIndex.size());
            try {
                backend.index(events);
                logger.debug("Indexed {} events", events.size());
            } catch (ZepException e) {
                if (logger.isDebugEnabled())
                    logger.warn(String.format("failed to process task to index events (%d) for backend %s",
                            events.size(), backendId), e);
                else
                    logger.warn(String.format("failed to process task to index events (%d) for backend %s",
                            events.size(), backendId));
            }
            List<EventIndexBackendTask> completedTasks = Lists.newArrayListWithExpectedSize(events.size());
            for (EventSummary event : events) {
                EventIndexBackendTask task = indexTasks.remove(event.getUuid());
                if (task != null) // should always be true
                    completedTasks.add(task);
            }
            q.completeAll(completedTasks);

            if (!indexTasks.isEmpty()) {
                try {
                    if (configuration.isHonorDeletes()) {
                        logger.debug(
                                "Removing {} events from the index since they weren't found by primary key in the database",
                                indexTasks.size());
                        backend.delete(indexTasks.keySet());
                    }
                    q.completeAll(indexTasks.values());
                } catch (ZepException e) {
                    if (logger.isDebugEnabled())
                        logger.warn(String.format("failed to delete %d events from backend %s", toIndex.size(),
                                backendId), e);
                    else
                        logger.warn(String.format("failed to delete %d events from backend %s", toIndex.size(),
                                backendId));
                }
            }
        }

        if (!flushes.isEmpty()) {
            try {
                logger.debug("flushing backend");
                backend.flush();
                q.completeAll(flushes);
            } catch (ZepException e) {
                if (logger.isDebugEnabled())
                    logger.warn(String.format("failed to process tasks %s for backend %s", flushes, backendId),
                            e);
                else
                    logger.warn(String.format("failed to process tasks %s for backend %s", flushes, backendId));

            }
        }
    } catch (ZepException e) {
        if (logger.isDebugEnabled())
            logger.warn(String.format("failed to find events for UUIDs %s for backend %s", indexTasks.keySet(),
                    backendId), e);
        else
            logger.warn(String.format("failed to find events for UUIDs %s for backend %s", indexTasks.keySet(),
                    backendId));
    }
}

From source file: com.android.tools.idea.wizard.ParameterDefaultValueComputer.java
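getStaticParameterValues sizes the map of known values to hold the implicit and static parameters combined.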

private Map<String, Object> getStaticParameterValues(@NotNull Map<Parameter, Object> values,
        @NotNull Map<String, Object> implicitParameters) {
    final Map<String, Object> knownValues = Maps
            .newHashMapWithExpectedSize(myStaticParameters.size() + implicitParameters.size());
    knownValues.putAll(implicitParameters);
    for (Parameter parameter : myStaticParameters) {
        Object value;
        if (values.containsKey(parameter)) {
            value = values.get(parameter);
        } else {
            String initial = parameter.initial;
            value = decodeInitialValue(parameter, initial);
        }
        knownValues.put(parameter.id, value);
    }
    return knownValues;
}

From source file: org.sosy_lab.cpachecker.core.algorithm.pcc.ProofSlicer.java
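sliceProof allocates the per-state variable map with one expected entry per state in the reached set.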

public UnmodifiableReachedSet sliceProof(final UnmodifiableReachedSet pReached) {
    AbstractState first = pReached.getFirstState();
    if (first != null && first instanceof ARGState && AbstractStates.extractLocation(first) != null
            && AbstractStates.extractStateByType(first, ValueAnalysisState.class) != null
            && AbstractStates.extractStateByType(first, CallstackState.class) != null
            && ((ARGState) first).getWrappedState() instanceof CompositeState) {
        numNotCovered = 0;
        HashMap<ARGState, Set<String>> varMap = Maps.newHashMapWithExpectedSize(pReached.size());

        computeRelevantVariablesPerState((ARGState) first, varMap);

        assert (numNotCovered == pReached.size());
        return buildSlicedARG(varMap, pReached);

    }

    return pReached;
}

From source file: org.apache.kylin.engine.mr.steps.MergeStatisticsWithOldStep.java
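In this step the merged statistics map is sized to the set of recommended cuboids before the filtered counts are written back to the metadata store.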

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final CubeManager mgr = CubeManager.getInstance(context.getConfig());
    final CubeInstance cube = mgr.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
    final CubeSegment optimizeSegment = cube
            .getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));

    CubeSegment oldSegment = optimizeSegment.getCubeInstance().getOriginalSegmentToOptimize(optimizeSegment);
    Preconditions.checkNotNull(oldSegment,
            "cannot find the original segment to be optimized by " + optimizeSegment);

    KylinConfig kylinConf = cube.getConfig();
    Configuration conf = HadoopUtil.getCurrentConfiguration();
    ResourceStore rs = ResourceStore.getStore(kylinConf);
    int averageSamplingPercentage = 0;

    try {
        //1. Add statistics from optimized segment
        Path statisticsDirPath = new Path(CubingExecutableUtil.getStatisticsPath(this.getParams()));
        FileSystem hdfs = FileSystem.get(conf);
        if (!hdfs.exists(statisticsDirPath)) {
            throw new IOException("StatisticsFilePath " + statisticsDirPath + " does not exists");
        }

        if (!hdfs.isDirectory(statisticsDirPath)) {
            throw new IOException("StatisticsFilePath " + statisticsDirPath + " is not a directory");
        }

        Path[] statisticsFiles = HadoopUtil.getFilteredPath(hdfs, statisticsDirPath,
                BatchConstants.CFG_STATISTICS_CUBOID_ESTIMATION_FILENAME);
        if (statisticsFiles == null) {
            throw new IOException("fail to find the statistics file in base dir: " + statisticsDirPath);
        }

        for (Path item : statisticsFiles) {
            CubeStatsReader optimizeSegmentStatsReader = new CubeStatsReader(optimizeSegment, null,
                    optimizeSegment.getConfig(), item);
            averageSamplingPercentage += optimizeSegmentStatsReader.getSamplingPercentage();
            addFromCubeStatsReader(optimizeSegmentStatsReader);
        }

        //2. Add statistics from old segment
        CubeStatsReader oldSegmentStatsReader = new CubeStatsReader(oldSegment, null, oldSegment.getConfig());
        averageSamplingPercentage += oldSegmentStatsReader.getSamplingPercentage();
        addFromCubeStatsReader(oldSegmentStatsReader);

        logger.info("Cuboid set with stats info: " + cuboidHLLMap.keySet().toString());
        //3. Store merged statistics for recommend cuboids
        averageSamplingPercentage = averageSamplingPercentage / 2;
        Set<Long> cuboidsRecommend = cube.getCuboidsRecommend();

        Map<Long, HLLCounter> resultCuboidHLLMap = Maps.newHashMapWithExpectedSize(cuboidsRecommend.size());
        for (Long cuboid : cuboidsRecommend) {
            HLLCounter hll = cuboidHLLMap.get(cuboid);
            if (hll == null) {
                logger.warn("Cannot get the row count stats for cuboid " + cuboid);
            } else {
                resultCuboidHLLMap.put(cuboid, hll);
            }
        }

        String resultDir = CubingExecutableUtil.getMergedStatisticsPath(this.getParams());
        CubeStatsWriter.writeCuboidStatistics(conf, new Path(resultDir), resultCuboidHLLMap,
                averageSamplingPercentage);

        try (FSDataInputStream mergedStats = hdfs
                .open(new Path(resultDir, BatchConstants.CFG_STATISTICS_CUBOID_ESTIMATION_FILENAME))) {
            // put the statistics to metadata store
            String statisticsFileName = optimizeSegment.getStatisticsResourcePath();
            rs.putResource(statisticsFileName, mergedStats, System.currentTimeMillis());
        }

        //By default, the cube optimization will use in-memory cubing
        CubingJob cubingJob = (CubingJob) getManager()
                .getJob(CubingExecutableUtil.getCubingJobId(this.getParams()));
        StatisticsDecisionUtil.decideCubingAlgorithm(cubingJob, optimizeSegment);

        return new ExecuteResult();
    } catch (IOException e) {
        logger.error("fail to merge cuboid statistics", e);
        return ExecuteResult.createError(e);
    }

}

From source file: org.elasticsearch.action.search.type.TransportSearchHelper.java
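parseScrollId sizes the attribute map to the exact count encoded in the scroll id, and avoids allocation entirely with ImmutableMap.of() when there are no attributes.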

public static ParsedScrollId parseScrollId(String scrollId) {
    CharsRef spare = new CharsRef();
    try {
        byte[] decode = Base64.decode(scrollId, Base64.URL_SAFE);
        UnicodeUtil.UTF8toUTF16(decode, 0, decode.length, spare);
    } catch (Exception e) {
        throw new ElasticsearchIllegalArgumentException("Failed to decode scrollId", e);
    }
    String[] elements = Strings.splitStringToArray(spare, ';');
    if (elements.length < 2) {
        throw new ElasticsearchIllegalArgumentException("Malformed scrollId [" + scrollId + "]");
    }

    int index = 0;
    String type = elements[index++];
    int contextSize = Integer.parseInt(elements[index++]);
    if (elements.length < contextSize + 2) {
        throw new ElasticsearchIllegalArgumentException("Malformed scrollId [" + scrollId + "]");
    }

    @SuppressWarnings({ "unchecked" })
    Tuple<String, Long>[] context = new Tuple[contextSize];
    for (int i = 0; i < contextSize; i++) {
        String element = elements[index++];
        int sep = element.indexOf(':');
        if (sep == -1) {
            throw new ElasticsearchIllegalArgumentException("Malformed scrollId [" + scrollId + "]");
        }
        context[i] = new Tuple<>(element.substring(sep + 1), Long.parseLong(element.substring(0, sep)));
    }
    Map<String, String> attributes;
    int attributesSize = Integer.parseInt(elements[index++]);
    if (attributesSize == 0) {
        attributes = ImmutableMap.of();
    } else {
        attributes = Maps.newHashMapWithExpectedSize(attributesSize);
        for (int i = 0; i < attributesSize; i++) {
            String element = elements[index++];
            int sep = element.indexOf(':');
            attributes.put(element.substring(0, sep), element.substring(sep + 1));
        }
    }
    return new ParsedScrollId(scrollId, type, context, attributes);
}

From source file: nallar.tickthreading.patcher.remapping.Deobfuscator.java
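setup sizes the method and field name caches to match the raw SRG mappings parsed from the map data zip.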

public void setup(File mapData) {
    try {
        mapData = mapData.getCanonicalFile();
        //noinspection IOResourceOpenedButNotSafelyClosed
        ZipFile mapZip = new ZipFile(mapData);
        ZipEntry classData = mapZip.getEntry("joined.srg");
        ZipInputSupplier zis = new ZipInputSupplier(mapZip, classData);
        InputSupplier<InputStreamReader> srgSupplier = CharStreams.newReaderSupplier(zis, Charsets.UTF_8);
        List<String> srgList = CharStreams.readLines(srgSupplier);
        rawMethodMaps = Maps.newHashMap();
        rawFieldMaps = Maps.newHashMap();
        Builder<String, String> builder = ImmutableBiMap.builder();
        Builder<String, String> mcpBuilder = ImmutableBiMap.builder();
        Splitter splitter = Splitter.on(CharMatcher.anyOf(": ")).omitEmptyStrings().trimResults();
        for (String line : srgList) {
            String[] parts = Iterables.toArray(splitter.split(line), String.class);
            String typ = parts[0];
            if ("CL".equals(typ)) {
                parseClass(builder, parts);
                parseMCPClass(mcpBuilder, parts);
            } else if ("MD".equals(typ)) {
                parseMethod(parts);
            } else if ("FD".equals(typ)) {
                parseField(parts);
            }
        }
        classNameBiMap = builder.build();
        // Special case some mappings for modloader mods
        mcpBuilder.put("BaseMod", "net/minecraft/src/BaseMod");
        mcpBuilder.put("ModLoader", "net/minecraft/src/ModLoader");
        mcpBuilder.put("EntityRendererProxy", "net/minecraft/src/EntityRendererProxy");
        mcpBuilder.put("MLProp", "net/minecraft/src/MLProp");
        mcpBuilder.put("TradeEntry", "net/minecraft/src/TradeEntry");
        mcpNameBiMap = mcpBuilder.build();
    } catch (IOException ioe) {
        Log.severe("An error occurred loading the deobfuscation map data", ioe);
    }
    methodNameMaps = Maps.newHashMapWithExpectedSize(rawMethodMaps.size());
    fieldNameMaps = Maps.newHashMapWithExpectedSize(rawFieldMaps.size());
}