List of usage examples for com.google.common.collect Maps newLinkedHashMap
public static <K, V> LinkedHashMap<K, V> newLinkedHashMap()
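A minimal standalone sketch (not taken from any of the projects below) of what the factory method does: it returns a new, empty, mutable LinkedHashMap whose iteration order is insertion order, with the key and value types inferred from the assignment target.

import java.util.Map;

import com.google.common.collect.Maps;

public class NewLinkedHashMapExample {
    public static void main(String[] args) {
        // Type arguments <String, Integer> are inferred from the declared variable type.
        Map<String, Integer> scores = Maps.newLinkedHashMap();
        scores.put("first", 1);
        scores.put("second", 2);
        scores.put("third", 3);
        // A LinkedHashMap iterates in insertion order: first, second, third.
        for (Map.Entry<String, Integer> entry : scores.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
    }
}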
From source file:io.druid.cli.convert.PrefixRename.java
@Override
public Map<String, String> convert(Properties properties) {
    if (!ran.getAndSet(true)) {
        Map<String, String> retVal = Maps.newLinkedHashMap();
        for (String property : properties.stringPropertyNames()) {
            if (property.startsWith(prefix)) {
                retVal.put(property.replace(prefix, outputPrefix), properties.getProperty(property));
            }
        }
        return retVal;
    }
    return ImmutableMap.of();
}
From source file:org.apache.brooklyn.core.mgmt.persist.DeserializingClassRenamesProvider.java
private static Map<String, String> loadDeserializingClassRenamesCache() {
    InputStream resource = new ResourceUtils(DeserializingClassRenamesProvider.class)
            .getResourceFromUrl(DESERIALIZING_CLASS_RENAMES_PROPERTIES_PATH);
    if (resource != null) {
        try {
            Properties props = new Properties();
            props.load(resource);
            Map<String, String> result = Maps.newLinkedHashMap();
            for (Enumeration<?> iter = props.propertyNames(); iter.hasMoreElements();) {
                String key = (String) iter.nextElement();
                String value = props.getProperty(key);
                result.put(key, value);
            }
            return result;
        } catch (IOException e) {
            throw Exceptions.propagate(e);
        } finally {
            Streams.closeQuietly(resource);
        }
    } else {
        return ImmutableMap.<String, String>of();
    }
}
From source file:com.github.nethad.clustermeister.api.impl.ConfigurationUtil.java
/**
 * This helps reading lists from the configuration that consist of
 * 'named objects', e.g. (YAML):
 *
 * <pre>
 * list:
 *   - name1:
 *       key1: value
 *       key2: value2
 *   - name2:
 *       key1: value3
 *       key3: value4
 *   ...
 * </pre>
 *
 * More specifically it reduces a List<Map<String, Map<String, String>>>
 * as produced by the above example to a Map<String, Map<String, String>>
 * like this (Java syntax, referring to the above example):
 *
 * <pre>
 * [
 *   name1 => [
 *     key1 => value,
 *     key2 => value2
 *   ],
 *   name2 => [
 *     key1 => value3,
 *     key3 => value4
 *   ],
 *   ...
 * ]
 * </pre>
 *
 * @param list the list to reduce.
 * @param errorMessage custom error message added to the exception if the list cannot be converted.
 * @return a map reduced as described above.
 * @throws IllegalArgumentException if the list cannot be converted in this manner.
 */
public static Map<String, Map<String, String>> reduceObjectList(List<Object> list, String errorMessage) {
    try {
        Map<String, Map<String, String>> result = Maps.newLinkedHashMap();
        List<Map<String, Map<String, String>>> mapList = Lists.transform(list,
                new Function<Object, Map<String, Map<String, String>>>() {
                    @Override
                    public Map apply(Object input) {
                        return (Map<String, Map<String, String>>) input;
                    }
                });
        for (Map<String, Map<String, String>> map : mapList) {
            for (Map.Entry<String, Map<String, String>> entry : map.entrySet()) {
                String key = entry.getKey();
                Map<String, String> value = entry.getValue();
                for (Map.Entry<String, String> valueEntry : value.entrySet()) {
                    Object valueValue = valueEntry.getValue();
                    valueEntry.setValue(String.valueOf(valueValue));
                }
                result.put(key, value);
            }
        }
        return result;
    } catch (ClassCastException ex) {
        throw new IllegalArgumentException(errorMessage, ex);
    }
}
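A hypothetical caller sketch (variable names and the error message are invented for illustration, not from the Clustermeister sources): it builds the nested structure described by the Javadoc above and reduces it. Note that the inner maps must be mutable, because the method rewrites each value in place via String.valueOf before copying the entry into the result.

List<Object> nodes = Lists.newArrayList();

Map<String, String> node1Settings = Maps.newLinkedHashMap();
node1Settings.put("key1", "value");
node1Settings.put("key2", "value2");
Map<String, Map<String, String>> node1 = Maps.newLinkedHashMap();
node1.put("name1", node1Settings);

Map<String, String> node2Settings = Maps.newLinkedHashMap();
node2Settings.put("key1", "value3");
node2Settings.put("key3", "value4");
Map<String, Map<String, String>> node2 = Maps.newLinkedHashMap();
node2.put("name2", node2Settings);

nodes.add(node1);
nodes.add(node2);

Map<String, Map<String, String>> reduced =
        ConfigurationUtil.reduceObjectList(nodes, "node list is not in the expected format");
// reduced = {name1={key1=value, key2=value2}, name2={key1=value3, key3=value4}}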
From source file:org.terasology.persistence.typeHandling.mathTypes.BorderTypeHandler.java
@Override
public PersistedData serialize(Border value, SerializationContext context) {
    Map<String, PersistedData> map = Maps.newLinkedHashMap();
    map.put(LEFT_FIELD, context.create(value.getLeft()));
    map.put(RIGHT_FIELD, context.create(value.getRight()));
    map.put(TOP_FIELD, context.create(value.getTop()));
    map.put(BOTTOM_FIELD, context.create(value.getBottom()));
    return context.create(map);
}
From source file:elaborate.editor.resources.AboutResource.java
@GET
@APIDesc("Get version info")
@Produces(UTF8MediaType.APPLICATION_JSON)
public Map<String, String> getAbout() {
    Map<String, String> data = Maps.newLinkedHashMap();
    // data.put("version", Configuration.instance().getStringSetting("version", "[undefined]"));
    data.put("version", getProperty("version"));
    data.put("commitId", getProperty("commitId"));
    data.put("scmBranch", getProperty("scmBranch"));
    data.put("buildDate", getProperty("buildDate"));
    data.put("publicationBackendBuild", getPublicationProperty("commitId"));
    data.put("publicationBackendBuildDate", getPublicationProperty("buildDate"));
    data.put("startTime", System.getProperty("application.starttime"));
    return data;
}
From source file:com.google.gdata.model.AdaptationRegistryFactory.java
/**
 * Create a union of the base attribute map + any adaptor attribute maps.
 * The result is a map from attribute names to the most appropriate
 * adaptation and attribute key for that name. This map will contain only
 * attributes that are not defined in the base transform and have the same
 * datatype in all adaptations in which they appear.
 *
 * <p>If we find multiple incompatible attributes of the same name, we log a
 * warning and leave them out of the union map. This means that during
 * parsing the incompatible attributes will be parsed as if they were
 * undeclared, and later adapted to the correct datatype during resolution.
 */
private static Map<QName, AttributeKey<?>> unionAttributes(Schema schema, ElementTransform transform) {
    Map<QName, AttributeKey<?>> union = Maps.newLinkedHashMap();
    Set<QName> base = getAttributeNames(transform);
    Set<QName> invalid = Sets.newHashSet();
    for (ElementKey<?, ?> adaptorKey : transform.getAdaptations().values()) {
        // We get the adaptor transform so we can access its attributes.
        ElementTransform adaptor = schema.getTransform(null, adaptorKey, null);
        if (adaptor == null) {
            throw new IllegalStateException("Invalid adaptor key " + adaptorKey);
        }
        for (AttributeInfo info : adaptor.getAttributes().values()) {
            AttributeKey<?> key = info.key;
            QName id = key.getId();
            // Skip attributes contained in the base transform.
            if (base.contains(id)) {
                continue;
            }
            // Skip any attributes that we already know are invalid.
            if (invalid.contains(id)) {
                continue;
            }
            AttributeKey<?> existing = union.get(id);
            if (existing != null) {
                // Check that multiple attributes with the same ID are compatible.
                if (!checkCompatible(existing, key)) {
                    union.remove(id);
                    invalid.add(id);
                }
            } else {
                union.put(id, key);
            }
        }
    }
    // Return an immutable copy of the union of attributes. If this is empty
    // it will be a reference to the empty immutable map.
    return ImmutableMap.copyOf(union);
}
From source file:org.artifactory.descriptor.repo.jaxb.LocalRepositoriesMapAdapter.java
@Override
public Map<String, LocalRepoDescriptor> unmarshal(Wrappper wrapper) throws Exception {
    Map<String, LocalRepoDescriptor> localRepositoriesMap = Maps.newLinkedHashMap();
    for (LocalRepoDescriptor repository : wrapper.getList()) {
        String key = repository.getKey();
        LocalRepoDescriptor repo = localRepositoriesMap.put(key, repository);
        // Test for repositories with the same key
        if (repo != null) {
            // Throw an error since jaxb swallows exceptions
            throw new Error("Duplicate repository key in configuration: " + key + ".");
        }
    }
    return localRepositoriesMap;
}
From source file:org.apache.kylin.metadata.model.TableRef.java
TableRef(DataModelDesc model, String alias, TableDesc table) {
    this.model = model;
    this.modelName = model.getName();
    this.alias = alias;
    this.table = table;
    this.columns = Maps.newLinkedHashMap();
    for (ColumnDesc col : table.getColumns()) {
        columns.put(col.getName(), new TblColRef(this, col));
    }
}
From source file:brooklyn.location.cloud.CloudMachineLocation.java
public CloudMachineLocation() {
    this(Maps.newLinkedHashMap());
}
From source file:dagger.internal.codegen.writer.TypeWriter.java
TypeWriter(ClassName name) {
    this.name = name;
    this.implementedTypes = Lists.newArrayList();
    this.methodWriters = Lists.newArrayList();
    this.nestedTypeWriters = Lists.newArrayList();
    this.fieldWriters = Maps.newLinkedHashMap();
}