List of usage examples for com.google.common.collect Maps newLinkedHashMap
public static <K, V> LinkedHashMap<K, V> newLinkedHashMap()
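Before the real-world usages below, here is a minimal, self-contained sketch of the method itself (the class name and map contents are purely illustrative, not taken from any of the projects listed): Maps.newLinkedHashMap() returns a mutable LinkedHashMap whose iteration order follows insertion order, with the type arguments inferred from the assignment target.

import java.util.LinkedHashMap;
import com.google.common.collect.Maps;

public class NewLinkedHashMapDemo {
    public static void main(String[] args) {
        // Equivalent to new LinkedHashMap<String, Integer>(); mainly a pre-diamond-operator convenience.
        LinkedHashMap<String, Integer> counts = Maps.newLinkedHashMap();
        counts.put("first", 1);
        counts.put("second", 2);
        counts.put("third", 3);
        // Iteration order matches insertion order: first, second, third.
        counts.forEach((key, value) -> System.out.println(key + " -> " + value));
    }
}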
From source file:com.sk89q.guavabackport.cache.AbstractLoadingCache.java
@Override
public ImmutableMap<K, V> getAll(final Iterable<? extends K> keys) throws ExecutionException {
    final Map<K, V> result = Maps.newLinkedHashMap();
    for (final K key : keys) {
        if (!result.containsKey(key)) {
            result.put(key, this.get(key));
        }
    }
    return ImmutableMap.copyOf(result);
}
From source file:com.cloudera.oryx.computation.common.records.csv.CSVSpec.java
public CSVSpec(DataType dataType, String name, DataType type, Object... others) {
    this.dataType = dataType;
    this.fields = Maps.newLinkedHashMap();
    this.fields.put(name, new CSVFieldSpec(name, 0, type));
    for (int i = 0; i < others.length; i += 2) {
        String n = (String) others[i];
        DataType dt = (DataType) others[i + 1];
        fields.put(n, new CSVFieldSpec(n, i + 1, dt));
    }
}
From source file:com.ning.metrics.collector.processing.db.util.InClauseExpander.java
public InClauseExpander(Iterable<String> elements) {
    List<String> prefixed = Lists.newArrayList();
    Map<String, String> args = Maps.newLinkedHashMap();
    int i = 0;
    for (String element : elements) {
        String name = "__InClauseExpander_" + i++;
        args.put(name, element);
        prefixed.add(":" + name);
    }
    this.args = args;
    this.expansion = JOINER.join(prefixed);
}
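The constructor above turns an iterable of values into a comma-separated list of named placeholders plus a name-to-value binding map; the LinkedHashMap keeps the bindings in the same order as the placeholders. The following standalone sketch shows what that expansion produces (the separator and the sample SQL fragment are assumptions for illustration, not part of the Ning class):

import java.util.List;
import java.util.Map;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

public class InClauseSketch {
    public static void main(String[] args) {
        // Assumed separator; the original class keeps its Joiner in a private JOINER constant.
        Joiner joiner = Joiner.on(", ");
        List<String> prefixed = Lists.newArrayList();
        Map<String, String> bindings = Maps.newLinkedHashMap();
        int i = 0;
        for (String element : List.of("red", "green", "blue")) {
            String name = "__InClauseExpander_" + i++;
            bindings.put(name, element);
            prefixed.add(":" + name);
        }
        // Prints: WHERE name IN (:__InClauseExpander_0, :__InClauseExpander_1, :__InClauseExpander_2)
        System.out.println("WHERE name IN (" + joiner.join(prefixed) + ")");
        // Insertion order matches placeholder order, so bindings line up with the expansion.
        System.out.println(bindings);
    }
}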
From source file:org.nanoframework.core.component.aop.AfterMoreInterceptor.java
@Override
public Object invoke(final MethodInvocation invocation) throws Throwable {
    final AfterMore afterMore = invocation.getMethod().getAnnotation(AfterMore.class);
    final After[] afters = afterMore.value();
    final Map<Method, Object> map = Maps.newLinkedHashMap();
    for (After after : afters) {
        final Method method = after.value().getMethod(MethodNames.AFTER, MethodInvocation.class, Object.class);
        final Object instance = Globals.get(Injector.class).getInstance(after.value());
        map.put(method, instance);
    }

    Object obj = null;
    try {
        return obj = invocation.proceed();
    } catch (final Throwable e) {
        obj = e;
        throw e;
    } finally {
        for (final Iterator<Entry<Method, Object>> iter = map.entrySet().iterator(); iter.hasNext();) {
            final Entry<Method, Object> entry = iter.next();
            entry.getKey().invoke(entry.getValue(), invocation, obj);
        }
    }
}
From source file:cc.kave.episodes.postprocessor.EpisodesPostprocessor.java
public Map<Integer, Set<Episode>> postprocess(int numbRepos, int freqThresh, double bidirectThresh) {
    Map<Integer, Set<Episode>> patterns = Maps.newLinkedHashMap();
    Map<Integer, Set<Episode>> episodes = parser.parse(numbRepos);
    Logger.log("Finished parsing the episodes!");

    for (Map.Entry<Integer, Set<Episode>> entry : episodes.entrySet()) {
        if (entry.getKey() == 1) {
            continue;
        }
        Logger.log("Postprocessing %d-node episodes!", entry.getKey());
        Map<Set<Fact>, Episode> filtered = Maps.newLinkedHashMap();
        for (Episode ep : entry.getValue()) {
            int freq = ep.getFrequency();
            double bidirect = ep.getBidirectMeasure();
            if ((freq >= freqThresh) && (bidirect >= bidirectThresh)) {
                if (filtered.containsKey(ep.getEvents())) {
                    Set<Fact> events = ep.getEvents();
                    Episode filterEp = filtered.get(events);
                    Episode repEp = getRepresentative(filterEp, ep, freqThresh, bidirectThresh);
                    if (repEp.equals(ep)) {
                        filtered.put(events, repEp);
                    }
                } else {
                    filtered.put(ep.getEvents(), ep);
                }
            }
        }
        Set<Episode> repEpisodes = getfilteredEp(filtered);
        patterns.put(entry.getKey(), repEpisodes);
    }
    return patterns;
}
From source file:org.sonatype.nexus.ldap.internal.MockLdapConfigurationSource.java
public MockLdapConfigurationSource() {
    this.configuration = Maps.newLinkedHashMap();
}
From source file:clocker.mesos.location.framework.MesosFrameworkLocation.java
public MesosFrameworkLocation() {
    this(Maps.newLinkedHashMap());
}
From source file:exec.csharp.evaluation.impl.AnalysisOfNoise.java
@Override
public void run() {
    results = Maps.newLinkedHashMap();
    for (QueryMode qm : QueryMode.values()) {
        results.put(qm, Maps.newLinkedHashMap());
        for (NoiseMode nm : NoiseMode.values()) {
            results.get(qm).put(nm, new BoxplotData());
        }
    }
    eval.run(this);
}
From source file:org.apache.drill.exec.record.SchemaUtil.java
/**
 * Returns the merge of the given schemas. The merged schema will include the union of all columns. If there is a
 * type conflict between columns with the same SchemaPath but different types, the merged schema will contain a
 * Union type for that column.
 *
 * @param schemas the schemas to merge
 * @return the merged schema
 */
public static BatchSchema mergeSchemas(BatchSchema... schemas) {
    Map<SchemaPath, Set<MinorType>> typeSetMap = Maps.newLinkedHashMap();
    for (BatchSchema s : schemas) {
        for (MaterializedField field : s) {
            SchemaPath path = SchemaPath.getSimplePath(field.getPath());
            Set<MinorType> currentTypes = typeSetMap.get(path);
            if (currentTypes == null) {
                currentTypes = Sets.newHashSet();
                typeSetMap.put(path, currentTypes);
            }
            MinorType newType = field.getType().getMinorType();
            if (newType == MinorType.MAP || newType == MinorType.LIST) {
                throw new RuntimeException("Schema change not currently supported for schemas with complex types");
            }
            if (newType == MinorType.UNION) {
                for (MinorType subType : field.getType().getSubTypeList()) {
                    currentTypes.add(subType);
                }
            } else {
                currentTypes.add(newType);
            }
        }
    }
    List<MaterializedField> fields = Lists.newArrayList();
    for (SchemaPath path : typeSetMap.keySet()) {
        Set<MinorType> types = typeSetMap.get(path);
        if (types.size() > 1) {
            MajorType.Builder builder = MajorType.newBuilder().setMinorType(MinorType.UNION).setMode(DataMode.OPTIONAL);
            for (MinorType t : types) {
                builder.addSubType(t);
            }
            MaterializedField field = MaterializedField.create(path.getAsUnescapedPath(), builder.build());
            fields.add(field);
        } else {
            MaterializedField field = MaterializedField.create(path.getAsUnescapedPath(),
                    Types.optional(types.iterator().next()));
            fields.add(field);
        }
    }
    SchemaBuilder schemaBuilder = new SchemaBuilder();
    BatchSchema s = schemaBuilder.addFields(fields)
            .setSelectionVectorMode(schemas[0].getSelectionVectorMode())
            .build();
    return s;
}
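The Drill method above follows a common pattern: accumulate the set of observed types per column in an insertion-ordered map, then emit either the single type or a union. The following simplified sketch shows the same idea without Drill's classes; the column names and type strings are purely illustrative and not part of Drill's API.

import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

public class MergeSketch {
    // Merges column-to-type listings; a column seen with more than one type becomes "UNION[...]".
    static Map<String, String> mergeColumnTypes(List<Map<String, String>> schemas) {
        Map<String, Set<String>> typesPerColumn = Maps.newLinkedHashMap();
        for (Map<String, String> schema : schemas) {
            schema.forEach((column, type) ->
                    typesPerColumn.computeIfAbsent(column, c -> Sets.newLinkedHashSet()).add(type));
        }
        Map<String, String> merged = Maps.newLinkedHashMap();
        typesPerColumn.forEach((column, types) ->
                merged.put(column, types.size() == 1 ? types.iterator().next() : "UNION" + types));
        return merged;
    }

    public static void main(String[] args) {
        Map<String, String> a = Maps.newLinkedHashMap();
        a.put("id", "INT");
        a.put("name", "VARCHAR");
        Map<String, String> b = Maps.newLinkedHashMap();
        b.put("id", "BIGINT");
        b.put("name", "VARCHAR");
        // id appears with two different types, so it becomes a union; name stays VARCHAR.
        System.out.println(mergeColumnTypes(List.of(a, b)));
    }
}

Using insertion-ordered maps here keeps the merged columns in first-seen order, which is why the original uses Maps.newLinkedHashMap() rather than a plain HashMap.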
From source file:org.gradle.jvm.internal.resolve.DefaultVariantsMetaData.java
public static VariantsMetaData extractFrom(BinarySpec binarySpec, ModelSchema<?> binarySpecSchema) {
    Map<String, Object> variants = Maps.newLinkedHashMap();
    ImmutableMap.Builder<String, ModelType<?>> dimensionTypesBuilder = ImmutableMap.builder();
    if (binarySpecSchema instanceof StructSchema) {
        VariantAspect variantAspect = ((StructSchema<?>) binarySpecSchema).getAspect(VariantAspect.class);
        if (variantAspect != null) {
            for (ModelProperty<?> property : variantAspect.getDimensions()) {
                // Note: it is not the role of this class to validate that the annotation is properly used,
                // that is to say only on a getter returning a String or a Named instance, so we trust the
                // result of the call.
                Object value = property.getPropertyValue(binarySpec);
                variants.put(property.getName(), value);
                dimensionTypesBuilder.put(property.getName(), property.getType());
            }
        }
    }
    return new DefaultVariantsMetaData(Collections.unmodifiableMap(variants), dimensionTypesBuilder.build());
}