Usage examples for `com.google.common.collect.Maps#newHashMapWithExpectedSize(int)`:
public static <K, V> HashMap<K, V> newHashMapWithExpectedSize(int expectedSize)
From source file:org.gradle.plugins.ide.idea.model.ProjectLibrary.java
/**
 * Writes this library definition under the given parent node as an IDEA
 * {@code <library>} element, with CLASSES, JAVADOC and SOURCES root lists
 * and, when non-empty, the compiler classpath.
 *
 * @param parentNode  the XML node to append the {@code library} element to
 * @param pathFactory converts files into IDEA path URLs
 */
public void addToNode(Node parentNode, PathFactory pathFactory) {
    Map<String, Object> libraryAttributes = Maps.newHashMapWithExpectedSize(2);
    libraryAttributes.put("name", name);
    if (type != null) {
        libraryAttributes.put("type", type);
    }
    Node libraryNode = parentNode.appendNode("library", libraryAttributes);
    // The three root sections have identical structure; write each via the helper.
    appendRoots(libraryNode.appendNode("CLASSES"), classes, pathFactory, false);
    appendRoots(libraryNode.appendNode("JAVADOC"), javadoc, pathFactory, false);
    appendRoots(libraryNode.appendNode("SOURCES"), sources, pathFactory, false);
    if (!compilerClasspath.isEmpty()) {
        Node properties = libraryNode.appendNode("properties");
        Node compilerClasspathNode = properties.appendNode("compiler-classpath");
        // Compiler classpath roots use the file-URL form of PathFactory.path.
        appendRoots(compilerClasspathNode, compilerClasspath, pathFactory, true);
    }
}

/**
 * Appends one {@code <root url="..."/>} child to {@code parent} per file.
 *
 * @param useFileUrl when true, resolve the path via
 *                   {@code pathFactory.path(file, true)} (as the original code
 *                   did for the compiler classpath), otherwise via
 *                   {@code pathFactory.path(file)}
 */
private void appendRoots(Node parent, Iterable<File> files, PathFactory pathFactory, boolean useFileUrl) {
    for (File file : files) {
        Map<String, Object> attributes = Maps.newHashMapWithExpectedSize(1);
        String url = useFileUrl ? pathFactory.path(file, true).getUrl() : pathFactory.path(file).getUrl();
        attributes.put("url", url);
        parent.appendNode("root", attributes);
    }
}
From source file:org.elasticsearch.common.cli.CheckFileCommand.java
/**
 * Snapshots the POSIX permissions, owner and group of the files this command
 * may write, runs the command, then warns on the terminal if any of those
 * attributes changed as a side effect of the write.
 *
 * @return the exit status of the wrapped {@code doExecute} call
 * @throws Exception if the wrapped command or attribute reads fail
 */
@Override
public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
    Path[] paths = pathsForPermissionsCheck(settings, env);
    if (paths == null || paths.length == 0) {
        // Nothing to check: just run the command.
        return doExecute(settings, env);
    }
    Map<Path, Set<PosixFilePermission>> permissions = Maps.newHashMapWithExpectedSize(paths.length);
    Map<Path, String> owners = Maps.newHashMapWithExpectedSize(paths.length);
    Map<Path, String> groups = Maps.newHashMapWithExpectedSize(paths.length);
    // Note: the guard above already returned for null/empty paths, so the
    // original's second `paths != null && paths.length > 0` check was redundant
    // and has been removed.
    for (Path path : paths) {
        try {
            boolean supportsPosixPermissions = Files.getFileStore(path)
                    .supportsFileAttributeView(PosixFileAttributeView.class);
            if (supportsPosixPermissions) {
                PosixFileAttributes attributes = Files.readAttributes(path, PosixFileAttributes.class);
                permissions.put(path, attributes.permissions());
                owners.put(path, attributes.owner().getName());
                groups.put(path, attributes.group().getName());
            }
        } catch (IOException ignored) {
            // Best-effort snapshot: file stores without POSIX support simply
            // skip the before/after comparison — no need to log.
        }
    }

    CliTool.ExitStatus status = doExecute(settings, env);

    // check if permissions differ
    for (Map.Entry<Path, Set<PosixFilePermission>> entry : permissions.entrySet()) {
        if (!Files.exists(entry.getKey())) {
            continue;
        }
        Set<PosixFilePermission> permissionsBeforeWrite = entry.getValue();
        Set<PosixFilePermission> permissionsAfterWrite = Files.getPosixFilePermissions(entry.getKey());
        if (!permissionsBeforeWrite.equals(permissionsAfterWrite)) {
            terminal.printWarn("The file permissions of [%s] have changed from [%s] to [%s]", entry.getKey(),
                    PosixFilePermissions.toString(permissionsBeforeWrite),
                    PosixFilePermissions.toString(permissionsAfterWrite));
            terminal.printWarn(
                    "Please ensure that the user account running Elasticsearch has read access to this file!");
        }
    }

    // check if owner differs
    for (Map.Entry<Path, String> entry : owners.entrySet()) {
        if (!Files.exists(entry.getKey())) {
            continue;
        }
        String ownerBeforeWrite = entry.getValue();
        String ownerAfterWrite = Files.getOwner(entry.getKey()).getName();
        if (!ownerAfterWrite.equals(ownerBeforeWrite)) {
            terminal.printWarn("WARN: Owner of file [%s] used to be [%s], but now is [%s]", entry.getKey(),
                    ownerBeforeWrite, ownerAfterWrite);
        }
    }

    // check if group differs
    for (Map.Entry<Path, String> entry : groups.entrySet()) {
        if (!Files.exists(entry.getKey())) {
            continue;
        }
        String groupBeforeWrite = entry.getValue();
        String groupAfterWrite = Files.readAttributes(entry.getKey(), PosixFileAttributes.class).group()
                .getName();
        if (!groupAfterWrite.equals(groupBeforeWrite)) {
            terminal.printWarn("WARN: Group of file [%s] used to be [%s], but now is [%s]", entry.getKey(),
                    groupBeforeWrite, groupAfterWrite);
        }
    }
    return status;
}
From source file:org.opennms.netmgt.measurements.api.FetchResults.java
/**
 * Used when applying filters.
 *
 * Converts a sparse, row-sorted filter result table into dense per-column
 * arrays plus a timestamp array. The table's row keys must form a contiguous
 * index range (enforced below); missing cells become NaN.
 *
 * @param table     row-indexed table containing a {@code Filter.TIMESTAMP_COLUMN_NAME} column
 * @param step      step between rows, stored as-is in {@code m_step}
 * @param constants constants to carry along with these results
 */
public FetchResults(final RowSortedTable<Long, String, Double> table, final long step,
        final Map<String, Object> constants) {
    Preconditions.checkNotNull(table, "table argument");
    Preconditions.checkNotNull(constants, "constants argument");
    m_step = step;
    m_constants = constants;
    if (table.size() < 1) {
        // No rows
        m_timestamps = new long[0];
        m_columns = Maps.newHashMapWithExpectedSize(0);
        return;
    }
    // Walk the (sorted) row indices of the timestamp column, verifying they
    // are contiguous and capturing the first and last index.
    Long firstIndex = null;
    Long lastIndex = null;
    Map<Long, Double> timestampsByIndex = table.column(Filter.TIMESTAMP_COLUMN_NAME);
    for (Long index : timestampsByIndex.keySet()) {
        if (firstIndex == null) {
            firstIndex = index;
        } else {
            Preconditions.checkState(index == (lastIndex + 1), "filter timestamps must be contiguous");
        }
        lastIndex = index;
    }
    int numRows = (int) (lastIndex - firstIndex) + 1;
    m_columns = Maps.newLinkedHashMap(); // preserve ordering
    m_timestamps = new long[numRows];
    // NOTE(review): rows are fetched with table.get(Long.valueOf(k), ...) for
    // k in [0, numRows), which implicitly assumes firstIndex == 0. If the
    // contiguous range can start at a non-zero index, the lookup would need to
    // be firstIndex + k — confirm with callers before changing.
    for (int k = 0; k < numRows; k++) {
        for (String columnName : table.columnKeySet()) {
            Double value = table.get(Long.valueOf(k), columnName);
            if (Filter.TIMESTAMP_COLUMN_NAME.equals(columnName)) {
                // A timestamp cell must exist for every row in the range.
                Preconditions.checkNotNull(value, "filter timestamps must be contiguous");
                m_timestamps[k] = value.longValue();
            } else {
                double column[] = m_columns.get(columnName);
                if (column == null) {
                    // Lazily allocate one dense array per data column.
                    column = new double[numRows];
                    m_columns.put(columnName, column);
                }
                if (value == null) {
                    // Missing cells are represented as NaN.
                    column[k] = Double.NaN;
                } else {
                    column[k] = value;
                }
            }
        }
    }
}
From source file:com.opengamma.engine.target.resolver.AbstractIdentifierResolver.java
/**
 * Resolves each object identifier to a unique identifier in parallel using the
 * supplied pool executor. Identifiers that fail to resolve are simply omitted
 * from the returned map.
 *
 * @param executor          pool to run the resolution jobs on
 * @param resolver          resolver invoked once per identifier
 * @param identifiers       identifiers to resolve
 * @param versionCorrection version/correction to resolve at
 * @return map from object identifier to its resolved unique identifier
 */
public static Map<ObjectId, UniqueId> resolveObjectIdsMultiThread(final PoolExecutor executor,
        final IdentifierResolver resolver, final Collection<ObjectId> identifiers,
        final VersionCorrection versionCorrection) {
    final PoolExecutor.Service<Void> service = executor.createService(null);
    final Map<ObjectId, UniqueId> resolved = Maps.newHashMapWithExpectedSize(identifiers.size());
    for (final ObjectId objectId : identifiers) {
        service.execute(new Runnable() {
            @Override
            public void run() {
                final UniqueId uniqueId = resolver.resolveObjectId(objectId, versionCorrection);
                if (uniqueId == null) {
                    return;
                }
                // The result map is shared across worker threads; guard each write.
                synchronized (resolved) {
                    resolved.put(objectId, uniqueId);
                }
            }
        });
    }
    try {
        service.join();
    } catch (final InterruptedException e) {
        throw new OpenGammaRuntimeException("Interrupted", e);
    }
    return resolved;
}
From source file:tachyon.master.file.journal.DependencyEntry.java
/**
 * Serializes this dependency journal entry into a field-name to value map.
 *
 * @return a mutable map containing all fourteen journal fields of this entry
 */
@Override
public Map<String, Object> getParameters() {
    // Sized exactly for the fourteen entries written below.
    final Map<String, Object> entries = Maps.newHashMapWithExpectedSize(14);
    entries.put("id", mId);
    entries.put("parentFiles", mParentFiles);
    entries.put("childrenFiles", mChildrenFiles);
    entries.put("commandPrefix", mCommandPrefix);
    entries.put("data", mData);
    entries.put("comment", mComment);
    entries.put("framework", mFramework);
    entries.put("frameworkVersion", mFrameworkVersion);
    entries.put("dependencyType", mDependencyType);
    entries.put("parentDependencies", mParentDependencies);
    entries.put("childrenDependencies", mChildrenDependencies);
    entries.put("creationTimeMs", mCreationTimeMs);
    entries.put("uncheckpointedFiles", mUncheckpointedFiles);
    entries.put("lostFileIds", mLostFileIds);
    return entries;
}
From source file:com.android.ide.common.layout.relative.DeletionHandler.java
/**
 * Creates a new {@link DeletionHandler}.
 *
 * @param deleted the deleted nodes
 * @param moved   nodes that were moved (e.g. deleted, but also inserted elsewhere)
 * @param layout  the parent layout of the deleted nodes
 */
public DeletionHandler(@NonNull List<INode> deleted, @NonNull List<INode> moved, @NonNull INode layout) {
    mDeleted = deleted;
    mMoved = moved;
    mLayout = layout;
    mChildren = mLayout.getChildren();

    // Index every child of the layout by its id (with the @id/ prefix stripped).
    mNodeMap = Maps.newHashMapWithExpectedSize(mChildren.length);
    for (INode child : mChildren) {
        String childId = child.getStringAttr(ANDROID_URI, ATTR_ID);
        if (childId != null) {
            mNodeMap.put(stripIdPrefix(childId), child);
        }
    }

    // Collect the ids of all deleted nodes.
    mDeletedIds = Sets.newHashSetWithExpectedSize(mDeleted.size());
    for (INode deletedNode : mDeleted) {
        String deletedId = deletedNode.getStringAttr(ANDROID_URI, ATTR_ID);
        if (deletedId != null) {
            mDeletedIds.add(stripIdPrefix(deletedId));
        }
    }

    // Any widgets that remain (e.g. typically because they were moved) should
    // keep their incoming dependencies, so drop their ids from the deleted set.
    for (INode movedNode : mMoved) {
        String movedId = movedNode.getStringAttr(ANDROID_URI, ATTR_ID);
        if (movedId != null) {
            mDeletedIds.remove(stripIdPrefix(movedId));
        }
    }
}
From source file:org.apache.brooklyn.rest.util.FormMapProvider.java
@Override public Map<String, Object> readFrom(Class<Map<String, Object>> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, String> httpHeaders, InputStream entityStream) throws IOException, WebApplicationException { FormMultivaluedMapProvider delegate = new FormMultivaluedMapProvider(); MultivaluedMap<String, String> multi = new MultivaluedMapImpl(); multi = delegate.readFrom(multi, mediaType, entityStream); Map<String, Object> map = Maps.newHashMapWithExpectedSize(multi.keySet().size()); for (String key : multi.keySet()) { List<String> value = multi.get(key); if (value.size() > 1) { map.put(key, Lists.newArrayList(value)); } else if (value.size() == 1) { map.put(key, Iterables.getOnlyElement(value)); } else {// w ww . j av a2 s . c o m map.put(key, null); } } return map; }
From source file:org.pentaho.di.trans.dataservice.optimization.paramgen.ui.ParameterGenerationModel.java
protected void updateParameterMap() { ImmutableList<PushDownOptimizationMeta> list = dialogModel .getPushDownOptimizations(ParameterGeneration.class); Map<String, PushDownOptimizationMeta> map = Maps.newHashMapWithExpectedSize(list.size()); for (PushDownOptimizationMeta meta : list) { ParameterGeneration parameterGeneration = (ParameterGeneration) meta.getType(); String parameterName = parameterGeneration.getParameterName(); // If parameter already exists, add a unique suffix int offset = 0; while (map.containsKey(parameterName)) { parameterName = String.format("%s_%d", parameterGeneration.getParameterName(), ++offset); }//from w w w . j ava 2 s .c om if (offset > 0) { parameterGeneration.setParameterName(parameterName); } map.put(parameterName, meta); } setParameterMap(map); if (!map.containsKey(getSelectedParameter())) { setSelectedParameter(null); } }
From source file:org.elasticsearch.search.suggest.completion.old.CompletionSuggester.java
/**
 * Runs the completion suggester over every leaf of the index, merging the
 * per-segment lookup results by suggestion key (keeping the highest score per
 * key) and emitting at most {@code size} options sorted by score.
 *
 * @throws ElasticsearchException if the target field is not a completion field
 * @throws IOException            on index access failure
 */
@Override
protected Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(
        String name, CompletionSuggestionContext suggestionContext, IndexSearcher searcher, CharsRefBuilder spare)
        throws IOException {
    if (suggestionContext.fieldType() == null) {
        throw new ElasticsearchException(
                "Field [" + suggestionContext.getField() + "] is not a completion suggest field");
    }
    final IndexReader indexReader = searcher.getIndexReader();
    CompletionSuggestion completionSuggestion = new CompletionSuggestion(name, suggestionContext.getSize());
    spare.copyUTF8Bytes(suggestionContext.getText());
    CompletionSuggestion.Entry completionSuggestEntry = new CompletionSuggestion.Entry(
            new StringText(spare.toString()), 0, spare.length());
    completionSuggestion.addTerm(completionSuggestEntry);
    String fieldName = suggestionContext.getField();
    // Results are de-duplicated across segments by key; sized for the worst
    // case of `size` distinct hits in every leaf.
    Map<String, CompletionSuggestion.Entry.Option> results = Maps
            .newHashMapWithExpectedSize(indexReader.leaves().size() * suggestionContext.getSize());
    for (LeafReaderContext atomicReaderContext : indexReader.leaves()) {
        LeafReader atomicReader = atomicReaderContext.reader();
        Terms terms = atomicReader.fields().terms(fieldName);
        if (terms instanceof Completion090PostingsFormat.CompletionTerms) {
            final Completion090PostingsFormat.CompletionTerms lookupTerms = (Completion090PostingsFormat.CompletionTerms) terms;
            final Lookup lookup = lookupTerms.getLookup(suggestionContext.fieldType(), suggestionContext);
            if (lookup == null) {
                // we don't have a lookup for this segment.. this might be possible if a merge dropped all
                // docs from the segment that had a value in this segment.
                continue;
            }
            List<Lookup.LookupResult> lookupResults = lookup.lookup(spare.get(), false,
                    suggestionContext.getSize());
            for (Lookup.LookupResult res : lookupResults) {
                final String key = res.key.toString();
                final float score = res.value;
                final CompletionSuggestion.Entry.Option value = results.get(key);
                if (value == null) {
                    // First occurrence of this key: record a new option.
                    final CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option(
                            new StringText(key), score,
                            res.payload == null ? null : new BytesArray(res.payload));
                    results.put(key, option);
                } else if (value.getScore() < score) {
                    // Key already seen in an earlier segment with a lower score:
                    // overwrite score and payload in place with the better hit.
                    value.setScore(score);
                    value.setPayload(res.payload == null ? null : new BytesArray(res.payload));
                }
            }
        }
    }
    // Sort merged options by score and emit at most `size` of them.
    final List<CompletionSuggestion.Entry.Option> options = new ArrayList<>(results.values());
    CollectionUtil.introSort(options, scoreComparator);
    int optionCount = Math.min(suggestionContext.getSize(), options.size());
    for (int i = 0; i < optionCount; i++) {
        completionSuggestEntry.addOption(options.get(i));
    }
    return completionSuggestion;
}
From source file:org.n52.sos.config.sqlite.entities.MultilingualStringSettingValue.java
@Override public SettingValue<MultilingualString> setValue(MultilingualString value) { if (value == null) { this.value = null; } else {//from w w w .j a v a 2 s . c om this.value = Maps.newHashMapWithExpectedSize(value.size()); for (Locale locale : value.getLocales()) { this.value.put(locale.toString(), value.getLocalization(locale).get().getText()); } } return this; }