Usage examples for com.google.common.collect.Maps#newHashMapWithExpectedSize
public static <K, V> HashMap<K, V> newHashMapWithExpectedSize(int expectedSize)
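Before the collected examples, a minimal sketch of the call in isolation may help; the class name and map contents here are illustrative only, not drawn from any of the source files below. The point to note is that the argument is an expected number of entries, not a raw initial capacity: Guava sizes the backing table so that this many entries fit without a resize, whereas new HashMap<>(n) treats n as the capacity before the load factor is applied.

import com.google.common.collect.Maps;
import java.util.HashMap;

public class ExpectedSizeDemo {
    public static void main(String[] args) {
        // Sized so that 3 entries fit with no intermediate rehashing.
        HashMap<String, Integer> counts = Maps.newHashMapWithExpectedSize(3);
        counts.put("a", 1);
        counts.put("b", 2);
        counts.put("c", 3);
        System.out.println(counts.size()); // 3
    }
}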
From source file:com.android.tools.idea.wizard.ParameterDefaultValueComputer.java
/**
 * Returns a map of parameter values.
 *
 * @return mapping between parameter and its current value
 * @throws CircularParameterDependencyException if there is a circular dependency between
 *         parameters preventing us from computing the default values.
 */
public Map<Parameter, Object> getParameterValues() throws CircularParameterDependencyException {
    Map<String, Object> staticValues = getStaticParameterValues(myUserValues, myImplicitParameters);
    Map<String, Object> computedValues = computeParameterValues(staticValues);
    HashMap<Parameter, Object> allValues =
            Maps.newHashMapWithExpectedSize(computedValues.size() + staticValues.size());
    for (Parameter parameter : Iterables.concat(myStaticParameters, myComputedParameters)) {
        allValues.put(parameter, computedValues.get(parameter.id));
    }
    return allValues;
}
From source file:org.graylog2.rest.resources.system.logs.LoggersResource.java
@GET
@Timed
@ApiOperation(value = "List all loggers and their current levels")
@Produces(MediaType.APPLICATION_JSON)
public LoggersSummary loggers() {
    final Collection<LoggerConfig> loggerConfigs = getLoggerConfigs();
    final Map<String, SingleLoggerSummary> loggers = Maps.newHashMapWithExpectedSize(loggerConfigs.size());
    for (LoggerConfig config : loggerConfigs) {
        if (!isPermitted(RestPermissions.LOGGERS_READ, config.getName())) {
            continue;
        }
        final Level level = config.getLevel();
        loggers.put(config.getName(),
                SingleLoggerSummary.create(level.toString().toLowerCase(Locale.ENGLISH), level.intLevel()));
    }
    return LoggersSummary.create(loggers);
}
From source file:net.minecraftforge.fml.common.asm.transformers.deobf.FMLDeobfuscatingRemapper.java
public void setupLoadOnly(String deobfFileName, boolean loadAll) {
    try {
        File mapData = new File(deobfFileName);
        LZMAInputSupplier zis = new LZMAInputSupplier(new FileInputStream(mapData));
        CharSource srgSource = zis.asCharSource(Charsets.UTF_8);
        List<String> srgList = srgSource.readLines();
        rawMethodMaps = Maps.newHashMap();
        rawFieldMaps = Maps.newHashMap();
        Builder<String, String> builder = ImmutableBiMap.<String, String>builder();
        Splitter splitter = Splitter.on(CharMatcher.anyOf(": ")).omitEmptyStrings().trimResults();
        for (String line : srgList) {
            String[] parts = Iterables.toArray(splitter.split(line), String.class);
            String typ = parts[0];
            if ("CL".equals(typ)) {
                parseClass(builder, parts);
            } else if ("MD".equals(typ) && loadAll) {
                parseMethod(parts);
            } else if ("FD".equals(typ) && loadAll) {
                parseField(parts);
            }
        }
        classNameBiMap = builder.build();
    } catch (IOException ioe) {
        FMLRelaunchLog.log(Level.ERROR, "An error occurred loading the deobfuscation map data", ioe);
    }
    methodNameMaps = Maps.newHashMapWithExpectedSize(rawMethodMaps.size());
    fieldNameMaps = Maps.newHashMapWithExpectedSize(rawFieldMaps.size());
}
From source file:org.opendaylight.yangtools.util.MapAdaptor.java
/**
 * Creates an initial snapshot. The backing map is selected according to
 * the expected size.
 *
 * @param expectedSize Expected map size
 * @return An empty mutable map.
 */
public <K, V> Map<K, V> initialSnapshot(final int expectedSize) {
    Preconditions.checkArgument(expectedSize >= 0);
    if (expectedSize > persistMinItems) {
        return new ReadWriteTrieMap<>();
    }
    if (expectedSize < 2) {
        return new HashMap<>(1);
    }
    if (expectedSize == 2) {
        return new HashMap<>(2);
    }
    return Maps.newHashMapWithExpectedSize(expectedSize);
}
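Note the tiered selection around the call in this example: tiny snapshots get a minimally sized plain HashMap, sizes above persistMinItems get a concurrent ReadWriteTrieMap, and only the middle range uses newHashMapWithExpectedSize to pre-size the table for the expected entry count. This reading is inferred from the code shown; the surrounding class is not reproduced here.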
From source file:org.opentripplanner.routing.algorithm.strategies.InterleavedBidirectionalHeuristic.java
@Override
public void initialize(State s, Vertex target, long abortTime) {
    if (target == this.target) {
        LOG.debug("reusing existing heuristic");
        return;
    }
    long start = System.currentTimeMillis();
    this.target = target;
    // int nVertices = AbstractVertex.getMaxIndex(); // will be ever increasing?
    int nVertices = graph.countVertices();
    weights = Maps.newHashMapWithExpectedSize(((int) Math.log(nVertices)) + 1);
    this.options = s.getOptions();
    this.origin = s.getVertex();
    // do not use soft limiting in long-distance mode
    options.softWalkLimiting = false;
    options.softPreTransitLimiting = false;
    // make sure distance table is initialized before starting thread
    LOG.debug("initializing heuristic computation thread");
    // forward street search first, sets values around origin to 0
    List<State> search = streetSearch(options, false, abortTime); // ~30 msec
    if (search == null) {
        return; // Search timed out
    }
    LOG.debug("end forward street search {} ms", System.currentTimeMillis() - start);
    // create a new priority queue
    q = new BinHeap<Vertex>();
    // enqueue states for each stop within walking distance of the destination
    search = streetSearch(options, true, abortTime); // backward street search
    if (search == null) {
        return; // Search timed out
    }
    for (State stopState : search) {
        q.insert(stopState.getVertex(), stopState.getWeight());
    }
    LOG.debug("end backward street search {} ms", System.currentTimeMillis() - start);
    // once street searches are done, raise the limits to max
    // because hard walk limiting is incorrect and is observed to cause problems
    // for trips near the cutoff
    options.setMaxWalkDistance(Double.POSITIVE_INFINITY);
    options.setMaxPreTransitTime(Integer.MAX_VALUE);
    LOG.debug("initialized SSSP");
    s.getOptions().rctx.debugOutput.finishedPrecalculating();
}
From source file:com.google.gitiles.LogSoyData.java
private Map<String, Object> toFooterSoyData(Paginator paginator, @Nullable String revision) {
    Map<String, Object> data = Maps.newHashMapWithExpectedSize(1);
    ObjectId next = paginator.getNextStart();
    if (next != null) {
        data.put("nextUrl", copyAndCanonicalizeView(revision)
                .replaceParam(LogServlet.START_PARAM, next.name()).toUrl());
    }
    return data;
}
From source file:com.google.devtools.j2objc.pipeline.FileProcessor.java
private void processBatch() {
    if (batchInputs.isEmpty()) {
        return;
    }
    List<String> paths = Lists.newArrayListWithCapacity(batchInputs.size());
    final Map<String, ProcessingContext> inputMap = Maps.newHashMapWithExpectedSize(batchInputs.size());
    for (ProcessingContext input : batchInputs) {
        String path = input.getFile().getPath();
        paths.add(path);
        inputMap.put(path, input);
    }
    JdtParser.Handler handler = new JdtParser.Handler() {
        @Override
        public void handleParsedUnit(String path, CompilationUnit unit) {
            ProcessingContext input = inputMap.get(path);
            try {
                String source = FileUtil.readFile(input.getFile());
                processCompiledSource(input, source, unit);
                batchInputs.remove(input);
            } catch (IOException e) {
                ErrorUtil.error(e.getMessage());
            }
        }
    };
    logger.finest("Processing batch of size " + batchInputs.size());
    parser.parseFiles(paths, handler, Options.getSourceVersion());
    // Any remaining files in batchInputs have some kind of error.
    for (ProcessingContext input : batchInputs) {
        handleError(input);
    }
    batchInputs.clear();
}
From source file:com.edmunds.etm.loadbalancer.impl.LoadBalancerDataAccessService.java
private Map<String, AvailabilityStatus> readAvailabilityStatusIncrementally(List<String> serverNames) {
    Map<String, AvailabilityStatus> statusMap = Maps.newHashMapWithExpectedSize(serverNames.size());
    for (String name : serverNames) {
        try {
            Map<String, AvailabilityStatus> result;
            result = loadBalancerConnection.getAvailabilityStatus(Collections.singletonList(name));
            AvailabilityStatus status = result.get(name);
            if (status != null) {
                statusMap.put(name, status);
            }
        } catch (VirtualServerNotFoundException e) {
            logger.error(String.format("Availability status not found for server %s", name), e);
        } catch (RemoteException e) {
            logger.error("Unable to read virtual server status", e);
        }
    }
    return statusMap;
}
From source file:cpw.mods.fml.common.asm.transformers.deobf.FMLDeobfuscatingRemapper.java
public void setupLoadOnly(String deobfFileName, boolean loadAll) {
    try {
        File mapData = new File(deobfFileName);
        LZMAInputSupplier zis = new LZMAInputSupplier(new FileInputStream(mapData));
        InputSupplier<InputStreamReader> srgSupplier = CharStreams.newReaderSupplier(zis, Charsets.UTF_8);
        List<String> srgList = CharStreams.readLines(srgSupplier);
        rawMethodMaps = Maps.newHashMap();
        rawFieldMaps = Maps.newHashMap();
        Builder<String, String> builder = ImmutableBiMap.<String, String>builder();
        Splitter splitter = Splitter.on(CharMatcher.anyOf(": ")).omitEmptyStrings().trimResults();
        for (String line : srgList) {
            String[] parts = Iterables.toArray(splitter.split(line), String.class);
            String typ = parts[0];
            if ("CL".equals(typ)) {
                parseClass(builder, parts);
            } else if ("MD".equals(typ) && loadAll) {
                parseMethod(parts);
            } else if ("FD".equals(typ) && loadAll) {
                parseField(parts);
            }
        }
        classNameBiMap = builder.build();
    } catch (IOException ioe) {
        FMLRelaunchLog.log(Level.ERROR, "An error occurred loading the deobfuscation map data", ioe);
    }
    methodNameMaps = Maps.newHashMapWithExpectedSize(rawMethodMaps.size());
    fieldNameMaps = Maps.newHashMapWithExpectedSize(rawFieldMaps.size());
}
From source file:com.j2swift.pipeline.FileProcessor.java
private void processBatch() {
    if (batchInputs.isEmpty()) {
        return;
    }
    List<String> paths = Lists.newArrayListWithCapacity(batchInputs.size());
    final Map<String, ProcessingContext> inputMap = Maps.newHashMapWithExpectedSize(batchInputs.size());
    for (ProcessingContext input : batchInputs) {
        String path = input.getFile().getPath();
        paths.add(path);
        inputMap.put(path, input);
    }
    JdtParser.Handler handler = new JdtParser.Handler() {
        @Override
        public void handleParsedUnit(String path, CompilationUnit unit) {
            ProcessingContext input = inputMap.get(path);
            try {
                String source = FileUtil.readFile(input.getFile());
                processCompiledSource(input, source, unit);
                batchInputs.remove(input);
            } catch (IOException e) {
                ErrorUtil.error(e.getMessage());
            }
        }
    };
    logger.finest("Processing batch of size " + batchInputs.size());
    parser.parseFiles(paths, handler);
    // Any remaining files in batchInputs have some kind of error.
    for (ProcessingContext input : batchInputs) {
        handleError(input);
    }
    batchInputs.clear();
}