List of usage examples for com.google.common.collect.Maps#newLinkedHashMap()
public static <K, V> LinkedHashMap<K, V> newLinkedHashMap()
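Before the examples from real projects below, here is a minimal, self-contained sketch (not taken from any of the listed source files) of what the factory method does: it returns an empty java.util.LinkedHashMap with the type parameters inferred from the assignment target, so iteration follows insertion order. The class name and the sample keys/values are illustrative only.

import java.util.Map;

import com.google.common.collect.Maps;

public class NewLinkedHashMapExample {
    public static void main(String[] args) {
        // Equivalent to new LinkedHashMap<String, Integer>(), but without repeating the type arguments
        Map<String, Integer> counts = Maps.newLinkedHashMap();

        counts.put("first", 1);
        counts.put("second", 2);
        counts.put("third", 3);

        // A LinkedHashMap iterates in insertion order: first, second, third
        counts.forEach((key, value) -> System.out.println(key + " = " + value));
    }
}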
From source file:com.google.gerrit.server.change.ListReviewers.java
@Override
public Object apply(ChangeResource rsrc) throws BadRequestException, OrmException {
    Map<Account.Id, ReviewerResource> reviewers = Maps.newLinkedHashMap();
    ReviewDb db = dbProvider.get();
    Change.Id changeId = rsrc.getChange().getId();
    for (PatchSetApproval patchSetApproval : db.patchSetApprovals().byChange(changeId)) {
        Account.Id accountId = patchSetApproval.getAccountId();
        if (!reviewers.containsKey(accountId)) {
            reviewers.put(accountId, resourceFactory.create(rsrc, accountId));
        }
    }
    return json.format(reviewers.values());
}
From source file:tech.mcprison.prison.util.Scoreboard.java
public Scoreboard(String title) {
    this.scoreboard = Prison.get().getPlatform().getScoreboardManager().getNewScoreboard();
    this.title = title;
    this.scores = Maps.newLinkedHashMap();
    this.teams = Lists.newArrayList();
}
From source file:org.gradle.api.internal.tasks.compile.incremental.classpath.DefaultClasspathEntrySnapshotCache.java
@Override
public Map<File, ClasspathEntrySnapshot> getClasspathEntrySnapshots(final Map<File, HashCode> fileHashes) {
    Map<File, ClasspathEntrySnapshot> out = Maps.newLinkedHashMap();
    for (Map.Entry<File, HashCode> entry : fileHashes.entrySet()) {
        ClasspathEntrySnapshotData snapshotData = cache.get(entry.getValue());
        if (snapshotData != null) {
            ClasspathEntrySnapshot snapshot = new ClasspathEntrySnapshot(snapshotData);
            out.put(entry.getKey(), snapshot);
        }
    }
    return out;
}
From source file:com.cloudera.exhibit.core.PivotCalculator.java
public PivotCalculator(Calculator base, List<String> idColumns, List<Key> keys) {
    this.fc = base;
    this.ids = idColumns;
    this.keys = Maps.newLinkedHashMap();
    for (Key key : keys) {
        this.keys.put(key.name, key.levels);
    }
}
From source file:exec.csharp.statistics.UsageToMicroCommitRatioCalculator.java
public void run() throws IOException {
    Map<String, Double> usageToHistoryRatio = Maps.newLinkedHashMap();

    int numTypesTotal = 0;
    int numTuplesTotal = 0;
    int numUsagesTotal = 0;

    int numTypesDATEV = 0;
    int numTuplesDATEV = 0;
    int numUsagesDATEV = 0;

    int numTypesWith = 0;
    int numTuplesWith = 0;
    int numUsagesWith = 0;

    int numTypesWithout = 0;
    int numTuplesWithout = 0;
    int numUsagesWithout = 0;

    Set<ITypeName> keys = dirMicroCommits.findKeys();
    for (ITypeName t : keys) {
        System.out.printf("reading %s... ", t);
        List<MicroCommit> histories = dirMicroCommits.readAllZips(t, MicroCommit.class);
        List<Usage> usages = dirUsages.readAllZips(t, Usage.class);

        int numTuples = histories.size();
        int numUsages = usages.size();
        System.out.printf("%d tuples, %d usages\n", numTuples, numUsages);

        // if (numUsages > 0 && !isDatev(t)) {
        if (!isDatev(t)) {
            double ratio = (0.000001 + numUsages) / (1.0 * numTuples);
            String key = String.format("%s (%d/%d)", t, numUsages, numTuples);
            usageToHistoryRatio.put(key, ratio);
        }

        numTypesTotal++;
        numTuplesTotal += numTuples;
        numUsagesTotal += numUsages;

        if (numTuples > 0 && numUsages > 0) {
            numTypesWith++;
            numTuplesWith += numTuples;
            numUsagesWith += numUsages;
        } else {
            numTypesWithout++;
            numTuplesWithout += numTuples;
            numUsagesWithout += numUsages;

            if (isDatev(t)) {
                numTypesDATEV++;
                numTuplesDATEV += numTuples;
                numUsagesDATEV += numUsages;
            }
        }
    }

    System.out.printf("\n\nsummary:\n");
    System.out.printf("we have a total of %d start/end tuples and %d usages for %d different types\n",
            numTuplesTotal, numUsagesTotal, numTypesTotal);
    System.out.printf("currently, we have both tuples and usages for %d types (%d queries, %d usages)\n",
            numTypesWith, numTuplesWith, numUsagesWith);
    System.out.printf("we have tuples, but no usages for %d types (%d queries, %d usages)\n",
            numTypesWithout, numTuplesWithout, numUsagesWithout);
    System.out.printf("out of these, %d types (%d queries, %d usages) are related to DATEV\n",
            numTypesDATEV, numTuplesDATEV, numUsagesDATEV);

    System.out.printf("\n\nratios (usages/histories):\n");
    Map<String, Double> sortedRatios = MapSorter.sort(usageToHistoryRatio);
    for (String key : sortedRatios.keySet()) {
        double ratio = sortedRatios.get(key);
        System.out.printf("%3.2f - %s\n", ratio, key);
    }
}
From source file:com.lastcalc.parsers.web.GetFromElement.java
@Override
public ParseResult parse(final TokenList tokens, final int templatePos) {
    final String what = (String) tokens.get(templatePos + 1);
    final ElementWrapper ew = (ElementWrapper) tokens.get(templatePos + 3);
    Object ret;
    if (what.equals("text")) {
        ret = new Tokenizer.QuotedString(ew.el.text());
    } else if (what.equals("attributes")) {
        final Map<Object, Object> attributes = Maps.newLinkedHashMap();
        for (final Attribute x : ew.el.attributes()) {
            // TODO: Verify that these are single values when tokenized
            attributes.put(new Tokenizer.QuotedString(x.getKey()), new Tokenizer.QuotedString(x.getValue()));
        }
        ret = TokenList.createD(attributes);
    } else if (what.equals("tag")) {
        ret = TokenList.createD(ew.el.tagName());
    } else {
        return ParseResult.fail();
    }
    return ParseResult.success(tokens.replaceWithTokens(templatePos, templatePos + template.size(), ret));
}
From source file:de.iteratec.iteraplan.businesslogic.reports.query.options.TabularReporting.QSealStatus.java
private static Map<SealState, Boolean> initWithDefaultData() {
    Map<SealState, Boolean> resultMap = Maps.newLinkedHashMap();
    for (SealState status : SealState.values()) {
        resultMap.put(status, Boolean.FALSE);
    }
    return resultMap;
}
From source file:de.flapdoodle.mongoom.mapping.converter.reflection.ClassInformation.java
public static Map<String, Field> getFieldMap(List<Field> fields) {
    LinkedHashMap<String, Field> ret = Maps.newLinkedHashMap();
    for (Field f : fields) {
        ret.put(f.getName(), f);
    }
    return ret;
}
From source file:org.opendaylight.controller.netconf.confignetconfconnector.mapping.config.Config.java
public static Map<String, Map<String, Collection<ObjectName>>> getMappedInstances(Set<ObjectName> instancesToMap,
        Map<String, Map<String, ModuleConfig>> configs) {
    Multimap<String, ObjectName> moduleToInstances = mapInstancesToModules(instancesToMap);

    Map<String, Map<String, Collection<ObjectName>>> retVal = Maps.newLinkedHashMap();

    for (Entry<String, Map<String, ModuleConfig>> namespaceToModuleToConfigEntry : configs.entrySet()) {
        Map<String, Collection<ObjectName>> innerRetVal = Maps.newHashMap();

        for (Entry<String, ModuleConfig> mbeEntry : namespaceToModuleToConfigEntry.getValue().entrySet()) {
            String moduleName = mbeEntry.getKey();
            Collection<ObjectName> instances = moduleToInstances.get(moduleName);

            // TODO, this code does not support same module names from different namespaces
            // Namespace should be present in ObjectName
            if (instances == null) {
                continue;
            }

            innerRetVal.put(moduleName, instances);
        }

        retVal.put(namespaceToModuleToConfigEntry.getKey(), innerRetVal);
    }
    return retVal;
}
From source file:de.xaniox.heavyspleef.persistence.sql.StatisticAccessor.java
@Override
public Map<String, Field> defineSchema() {
    Map<String, Field> schema = Maps.newLinkedHashMap();
    schema.put(ColumnContract.ID, new Field(Type.INTEGER).primaryKey().autoIncrement());
    schema.put(ColumnContract.UUID, new Field(Type.CHAR).length(36).unique());
    schema.put(ColumnContract.LAST_NAME, new Field(Type.CHAR).length(16));
    schema.put(ColumnContract.WINS, new Field(Type.INTEGER));
    schema.put(ColumnContract.LOSSES, new Field(Type.INTEGER));
    schema.put(ColumnContract.KNOCKOUTS, new Field(Type.INTEGER));
    schema.put(ColumnContract.GAMES_PLAYED, new Field(Type.INTEGER));
    schema.put(ColumnContract.BLOCKS_BROKEN, new Field(Type.INTEGER));
    schema.put(ColumnContract.TIME_PLAYED, new Field(Type.BIGINT));
    schema.put(ColumnContract.RATING, new Field(Type.DOUBLE));
    return schema;
}