List of usage examples for com.google.common.collect.Maps.newHashMap(Map)
public static <K, V> HashMap<K, V> newHashMap(Map<? extends K, ? extends V> map)
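A minimal sketch of this overload in isolation (the class and variable names below are illustrative, not taken from the examples that follow): Maps.newHashMap(Map) returns a new mutable HashMap pre-populated with the entries of the argument, so the copy can be modified without affecting the original map.

import java.util.HashMap;
import java.util.Map;

import com.google.common.collect.Maps;

public class NewHashMapCopyExample {
    public static void main(String[] args) {
        Map<String, Integer> source = new HashMap<>();
        source.put("a", 1);
        source.put("b", 2);

        // copy the entries of source into a fresh, mutable HashMap
        HashMap<String, Integer> copy = Maps.newHashMap(source);

        // mutating the copy leaves the source untouched
        copy.put("c", 3);
        System.out.println(source.size()); // 2
        System.out.println(copy.size());   // 3
    }
}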
From source file: com.google.caliper.runner.CaliperRc.java

static CaliperRc create(Map<String, String> overrides, Map<String, String> defaults) {
    Map<String, String> map = Maps.newHashMap(defaults);
    map.putAll(overrides); // overwrite and augment
    Iterables.removeIf(map.values(), Predicates.equalTo(""));
    return new CaliperRc(map);
}
From source file: org.thiesen.collections.map.impl.MutableHashMap.java

public static <K, V> MutableHashMap<K, V> copyOf(final IMap<? extends K, ? extends V> entries) {
    return new MutableHashMap<K, V>(Maps.newHashMap(entries.asMapView()));
}
From source file: de.flapdoodle.logparser.collections.Collections.java

public static <K, V> Map<K, V> join(Map<K, V> a, Map<K, V> b) {
    Map<K, V> ret = Maps.newHashMap(a);
    ret.putAll(b);
    if (ret.size() != a.size() + b.size()) {
        throw new IllegalArgumentException(
                "Map contains same keys: " + Sets.intersection(a.keySet(), b.keySet()));
    }
    return ret;
}
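A small usage sketch for the join helper above, assuming it is called from within the same class (the test values are illustrative): disjoint key sets yield the union of both maps, while a shared key fails the size check and raises the IllegalArgumentException.

Map<String, Integer> left = Maps.newHashMap();
left.put("a", 1);
Map<String, Integer> right = Maps.newHashMap();
right.put("b", 2);

// disjoint keys: the result contains both a=1 and b=2
Map<String, Integer> joined = join(left, right);

// a key present in both maps trips the size check
right.put("a", 3);
try {
    join(left, right);
} catch (IllegalArgumentException expected) {
    // message lists the clashing keys, e.g. "Map contains same keys: [a]"
}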
From source file: de.tu_berlin.dima.oligos.stat.distribution.histogram.Histograms.java

public static <T> Histogram<T> combineHistograms(Histogram<T> hist, Map<T, Long> mostFrequent,
        Operator<T> operator) throws SQLException {
    Histogram<T> histogram = new CustomHistogram<T>(operator);
    // make a deep copy to keep the function free of side effects
    mostFrequent = Maps.newHashMap(mostFrequent);
    // generate a histogram with one-sized buckets
    if (hist.isEmpty()) {
        for (Entry<T, Long> e : mostFrequent.entrySet()) {
            T value = e.getKey();
            long count = e.getValue();
            histogram.add(value, value, count);
        }
    }
    for (Bucket<T> bucket : hist) {
        SortedSet<T> elemsInRange = collectElementsInRange(bucket, mostFrequent, operator);
        // sum the most frequent elements in range
        long sumInRange = 0L;
        for (T e : elemsInRange) {
            sumInRange += mostFrequent.get(e);
        }
        // adapt the frequency count of the current bucket,
        // i.e. subtract the number of most frequent elements
        bucket = new Bucket<T>(bucket.getLowerBound(), bucket.getUpperBound(),
                bucket.getFrequency() - sumInRange);
        // adapt the bucket, i.e. change the boundaries, introduce new buckets, ...
        for (T elem : elemsInRange) {
            T lBound = bucket.getLowerBound();
            T uBound = bucket.getUpperBound();
            long elemCnt = mostFrequent.get(elem);
            // bucket has exactly one element and it is the most frequent one
            if (lBound.equals(uBound) && lBound.equals(elem)) {
                histogram.add(lBound, uBound, elemCnt);
                mostFrequent.remove(elem);
            }
            // the most frequent element is the lower bound of the current bucket
            else if (lBound.equals(elem)) {
                histogram.add(lBound, elem, elemCnt);
                bucket = new Bucket<T>(operator.increment(lBound), uBound, bucket.getFrequency());
                mostFrequent.remove(elem);
            }
            // the most frequent element is the upper bound of the current bucket
            else if (uBound.equals(elem)) {
                histogram.add(lBound, operator.decrement(uBound), bucket.getFrequency());
                histogram.add(elem, elem, elemCnt);
                mostFrequent.remove(elem);
            }
            // common case: the most frequent value lies within the current bucket
            else {
                // shrink the current bucket, then add the shrunk bucket and the most frequent element
                long range = operator.range(lBound, uBound);
                long lowerSize = operator.range(lBound, elem);
                long lowerFreq = lowerSize * bucket.getFrequency() / range;
                histogram.add(lBound, operator.decrement(elem), lowerFreq);
                histogram.add(elem, elem, elemCnt);
                long upperSize = operator.range(operator.increment(elem), uBound);
                long upperFreq = upperSize * bucket.getFrequency() / range;
                bucket = new Bucket<T>(operator.increment(elem), uBound, upperFreq);
                mostFrequent.remove(elem);
            }
        }
        histogram.add(bucket.getLowerBound(), bucket.getUpperBound(), bucket.getFrequency());
    }
    return histogram;
}
From source file: com.github.cherimojava.data.mongo.io.EntityTypeMap.java

private static Map<BsonType, Class<?>> replacement(Class<? extends Entity> clazz) {
    Map<BsonType, Class<?>> repl = Maps.newHashMap(replacements);
    repl.put(BsonType.DOCUMENT, clazz);
    return repl;
}
From source file: tech.beshu.ror.mocks.RequestContextMock.java

public static __old_RequestContext mockedRequestContext(String user, String pass) {
    __old_RequestContext mock = mock(__old_RequestContext.class);
    when(mock.getHeaders()).thenReturn(Maps.newHashMap(ImmutableMap.<String, String>builder()
            .put("Authorization", "Basic " + Base64.getEncoder().encodeToString((user + ":" + pass).getBytes()))
            .build()));
    when(mock.getLoggedInUser()).thenReturn(Optional.of(new LoggedUser(user)));
    return mock;
}
From source file: org.elasticsearch.cache.recycler.MockPageCacheRecycler.java

public static void ensureAllPagesAreReleased() throws Exception {
    final Map<Object, Throwable> masterCopy = Maps.newHashMap(ACQUIRED_PAGES);
    if (!masterCopy.isEmpty()) {
        // not empty, we might be executing on a shared cluster that keeps on obtaining
        // and releasing pages, lets make sure that after a reasonable timeout, all master
        // copy (snapshot) have been released
        boolean success = ESTestCase.awaitBusy(new Predicate<Object>() {
            @Override
            public boolean apply(Object input) {
                return Sets.intersection(masterCopy.keySet(), ACQUIRED_PAGES.keySet()).isEmpty();
            }
        });
        if (!success) {
            masterCopy.keySet().retainAll(ACQUIRED_PAGES.keySet());
            ACQUIRED_PAGES.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on
            if (!masterCopy.isEmpty()) {
                final Throwable t = masterCopy.entrySet().iterator().next().getValue();
                throw new RuntimeException(masterCopy.size() + " pages have not been released", t);
            }
        }
    }
}
From source file: com.enonic.cms.core.portal.datasource.xml.DataSourceElement.java

public DataSourceElement(final DataSourceElement source) {
    this.name = source.name;
    this.cache = source.cache;
    this.resultElement = source.resultElement;
    this.condition = source.condition;
    this.parameters = Maps.newHashMap(source.parameters);
}
From source file: run.ejb.entite.util.runsense.UtilRsns.java

/**
 * Removes '-' and space characters from an address value.
 * @param v
 * @return
 */
public static String invDcdVille(String v) {
    Map<String, List<String>> mvreg = Maps.newHashMap(Variable.getRegion());
    String[] chrsup = new String[2];
    chrsup[0] = " ";
    chrsup[1] = "-";
    for (String chr : chrsup) {
        if (v.contains(chr)) {
            int iof = v.indexOf(chr);
            char c = Character.toUpperCase(v.charAt(iof + 1));
            v = v.substring(0, iof - 1) + c + v.substring(iof + 2);
        }
    }
    return v;
}
From source file: org.openengsb.core.services.internal.security.PermissionUtils.java

/**
 * Converts a {@link Permission} into {@link PermissionData} to allow easier storing of the permission.
 */
public static PermissionData convertPermissionToPermissionData(Permission permission) {
    PermissionData permissionData = new PermissionData();
    String type = permission.getClass().getName();
    permissionData.setType(type);
    Map<String, EntryValue> entryMap = EntryUtils.convertBeanToEntryMap(permission);
    // copy the map, because JPA does not like the transformed map for some reason
    entryMap = Maps.newHashMap(entryMap);
    permissionData.setAttributes(entryMap);
    return permissionData;
}