Example usage for com.google.common.collect Maps newLinkedHashMap

Introduction

On this page you can find example usage of com.google.common.collect.Maps#newLinkedHashMap, collected from open-source projects.

Prototype

public static <K, V> LinkedHashMap<K, V> newLinkedHashMap() 

Documentation

Creates a mutable, empty, insertion-ordered LinkedHashMap instance.
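
For orientation before the project excerpts below, here is a minimal, self-contained sketch of the factory in isolation; the map contents are made up for illustration. Note that since Java 7 the diamond operator makes new LinkedHashMap<>() equally concise, so the factory mainly survives in older Guava-era code.

import java.util.LinkedHashMap;

import com.google.common.collect.Maps;

public class NewLinkedHashMapExample {
    public static void main(String[] args) {
        // A mutable, empty map that iterates in insertion order.
        LinkedHashMap<String, Integer> scores = Maps.newLinkedHashMap();
        scores.put("first", 1);
        scores.put("second", 2);
        scores.put("third", 3);

        // Prints the entries in insertion order: first, second, third.
        scores.forEach((key, value) -> System.out.println(key + " -> " + value));
    }
}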

Usage

From source file:org.immutables.eventual.CompletedModule.java

private static LinkedHashMap<Key<?>, Key<?>> mapUnfutureKeys(Injector injector) {
    LinkedHashMap<Key<?>, Key<?>> keyMapping = Maps.newLinkedHashMap();

    for (Key<?> key : injector.getBindings().keySet()) {
        TypeLiteral<?> typeLiteral = key.getTypeLiteral();

        if (ListenableFuture.class.isAssignableFrom(typeLiteral.getRawType())) {
            // Map each Key<ListenableFuture<T>> binding to the corresponding Key<T>.
            ParameterizedType parametrizedType = ((ParameterizedType) typeLiteral.getType());
            TypeLiteral<?> dereferencedType = TypeLiteral.get(parametrizedType.getActualTypeArguments()[0]);
            keyMapping.put(key, key.ofType(dereferencedType));
        }
    }
    return keyMapping;
}

From source file:org.ldp4j.application.kernel.impl.InMemoryConstraintReportRepository.java

InMemoryConstraintReportRepository() {
    this.reports = Maps.newLinkedHashMap();
    this.failureIds = LinkedHashMultimap.create();
}

From source file:com.opengamma.financial.analytics.model.equity.varianceswap.EquityVarianceSwapDefaults.java

public EquityVarianceSwapDefaults(final String priority, final String... perEquityConfig) {
    super(FinancialSecurityTypes.EQUITY_VARIANCE_SWAP_SECURITY, true);
    ArgumentChecker.notNull(priority, "priority");
    ArgumentChecker.notNull(perEquityConfig, "per equity config");
    final int n = perEquityConfig.length;
    ArgumentChecker.isTrue(n % 7 == 0,
            "Must have a discounting curve name, forward curve name, forward curve calculation config, "
                    + "forward curve calculation method, currency and surface name per equity");
    _priority = PriorityClass.valueOf(priority);
    _discountingCurveNames = Maps.newLinkedHashMap();
    _forwardCurveNames = Maps.newLinkedHashMap();
    _forwardCurveConfigNames = Maps.newLinkedHashMap();
    _forwardCurveCalculationMethodNames = Maps.newLinkedHashMap();
    _curveCurrencyNames = Maps.newLinkedHashMap();
    _surfaceNames = Maps.newLinkedHashMap();
    for (int i = 0; i < n; i += 7) {
        final String equity = perEquityConfig[i];
        _discountingCurveNames.put(equity, perEquityConfig[i + 1]);
        _forwardCurveNames.put(equity, perEquityConfig[i + 2]);
        _forwardCurveConfigNames.put(equity, perEquityConfig[i + 3]);
        _forwardCurveCalculationMethodNames.put(equity, perEquityConfig[i + 4]);
        _curveCurrencyNames.put(equity, perEquityConfig[i + 5]);
        _surfaceNames.put(equity, perEquityConfig[i + 6]);
    }
}

From source file:org.splevo.diffing.util.NormalizationUtil.java

/**
 * Load the remove normalization pattern configuration from a provided configuration string.
 *
 * @param configString
 *            The configuration string containing the normalization definitions.
 * @param suffix
 *            The (file extension) suffix to be ignored and preserved by the normalization.
 * @return The prepared normalization pattern map.
 */
public static LinkedHashMap<Pattern, String> loadRemoveNormalizations(String configString, String suffix) {

    suffix = Strings.nullToEmpty(suffix);
    configString = Strings.nullToEmpty(configString);

    LinkedHashMap<Pattern, String> normalizations = Maps.newLinkedHashMap();
    Iterable<String> entries = Splitter.on(LINE_SEPARATOR).omitEmptyStrings().trimResults().split(configString);
    for (String entry : entries) {
        if (entry.startsWith("*")) {
            String patternString = "(.*)" + entry.substring(1) + suffix;
            Pattern pattern = Pattern.compile(patternString);
            normalizations.put(pattern, "$1" + suffix);
        } else if (entry.endsWith("*")) {
            String patternString = entry.substring(0, entry.length() - 1) + "(.*)" + suffix;
            Pattern pattern = Pattern.compile(patternString);
            normalizations.put(pattern, "$1" + suffix);
        } else {
            logger.warn("Classifier normalization config without * wildcard: " + entry);
            continue;
        }
    }
    return normalizations;
}
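
To make the wildcard handling concrete, here is a hypothetical invocation; the config entries, the ".java" suffix, and the assumption that LINE_SEPARATOR is "\n" are illustrative, not taken from the project.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.splevo.diffing.util.NormalizationUtil;

public class NormalizationExample {
    public static void main(String[] args) {
        // "*Test" yields pattern "(.*)Test.java"; "Impl*" yields "Impl(.*).java".
        // Both map to the replacement "$1.java".
        LinkedHashMap<Pattern, String> normalizations =
                NormalizationUtil.loadRemoveNormalizations("*Test\nImpl*", ".java");

        // Apply the first matching normalization: "FooTest.java" -> "Foo.java".
        String name = "FooTest.java";
        for (Map.Entry<Pattern, String> entry : normalizations.entrySet()) {
            Matcher matcher = entry.getKey().matcher(name);
            if (matcher.matches()) {
                name = matcher.replaceFirst(entry.getValue());
                break;
            }
        }
        System.out.println(name); // Foo.java
    }
}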

From source file:com.cask.twitter.RecoUpdater.java

@SuppressWarnings("unchecked")
private Map<String, Double> generateNewReco(Map<String, Double> current, String hashTag, Double score) {
    //Add new hashtag to the reco list (if it exists already, its score will get updated).
    current.put(hashTag, score);
    if (current.size() < MAX_RECOS) {
        //If size of reco is less than MAX_RECOS, return it.
        return current;
    } else {
        //If the map has reached MAX_RECOS entries, keep only the top MAX_RECOS records. This keeps the map size, per prefix, constant.
        Map<String, Double> sortedMap = MapSerdesUtil.sortByComparator(current);
        Map<String, Double> returnMap = Maps.newLinkedHashMap();
        for (Map.Entry<String, Double> entry : sortedMap.entrySet()) {
            if (returnMap.size() >= MAX_RECOS) {
                break;
            }
            returnMap.put(entry.getKey(), entry.getValue());
        }
        return returnMap;
    }
}

From source file:org.apache.tez.analyzer.plugins.VertexLevelCriticalPathAnalyzer.java

@Override
public void analyze(DagInfo dagInfo) throws TezException {
    Map<String, Long> result = Maps.newLinkedHashMap();
    getCriticalPath("", dagInfo.getVertices().get(dagInfo.getVertices().size() - 1), 0, result);

    Map<String, Long> sortedByValues = sortByValues(result);
    for (Map.Entry<String, Long> entry : sortedByValues.entrySet()) {
        List<String> record = Lists.newLinkedList();
        record.add(entry.getKey());
        record.add(String.valueOf(entry.getValue()));
        csvResult.addRecord(record.toArray(new String[record.size()]));
    }

    String dotFile = dotFileLocation + File.separator + dagInfo.getDagId() + ".dot";
    try {
        List<String> criticalVertices = null;
        if (!sortedByValues.isEmpty()) {
            String criticalPath = sortedByValues.keySet().iterator().next();
            criticalVertices = getVertexNames(criticalPath);
        } else {
            criticalVertices = Lists.newLinkedList();
        }
        Utils.generateDAGVizFile(dagInfo, dotFile, criticalVertices);
    } catch (IOException e) {
        throw new TezException(e);
    }
}

From source file:brooklyn.location.waratek.WaratekContainerLocation.java

public WaratekContainerLocation() {
    this(Maps.newLinkedHashMap());
}

From source file:org.cinchapi.concourse.server.storage.db.BrowsableRecord.java

/**
 * Return a view of all the data that was contained in this record at
 * {@code timestamp}.
 * 
 * @param timestamp
 * @return the data
 */
public Map<K, Set<V>> browse(long timestamp) {
    read.lock();
    try {
        Map<K, Set<V>> data = Maps.newLinkedHashMap();
        for (K key : describe(timestamp)) {
            data.put(key, get(key, timestamp));
        }
        return data;
    } finally {
        read.unlock();
    }
}

From source file:io.airlift.jmx.MBeanRepresentation.java

public MBeanRepresentation(MBeanServer mbeanServer, ObjectName objectName, ObjectMapper objectMapper)
        throws JMException {
    this.objectName = objectName;

    MBeanInfo mbeanInfo = mbeanServer.getMBeanInfo(objectName);

    className = mbeanInfo.getClassName();
    description = mbeanInfo.getDescription();
    descriptor = toMap(mbeanInfo.getDescriptor());

    //
    // Attributes
    //
    LinkedHashMap<String, MBeanAttributeInfo> attributeInfos = Maps.newLinkedHashMap();
    for (MBeanAttributeInfo attributeInfo : mbeanInfo.getAttributes()) {
        attributeInfos.put(attributeInfo.getName(), attributeInfo);
    }

    String[] attributeNames = attributeInfos.keySet().toArray(new String[attributeInfos.size()]);
    ImmutableList.Builder<AttributeRepresentation> attributes = ImmutableList.builder();
    for (Attribute attribute : mbeanServer.getAttributes(objectName, attributeNames).asList()) {
        String attributeName = attribute.getName();

        // use remove so we only include one value for each attribute
        MBeanAttributeInfo attributeInfo = attributeInfos.remove(attributeName);
        if (attributeInfo == null) {
            // unknown extra attribute, could have been added after MBeanInfo was fetched
            continue;
        }

        Object attributeValue = attribute.getValue();
        AttributeRepresentation attributeRepresentation = new AttributeRepresentation(attributeInfo,
                attributeValue, objectMapper);
        attributes.add(attributeRepresentation);
    }
    this.attributes = attributes.build();

    //
    // Operations
    //
    ImmutableList.Builder<OperationRepresentation> operations = ImmutableList.builder();
    for (MBeanOperationInfo operationInfo : mbeanInfo.getOperations()) {
        operations.add(new OperationRepresentation(operationInfo));
    }
    this.operations = operations.build();
}

From source file:cc.recommenders.evaluation.evaluators.NMF1Evaluator.java

public Map<NM, double[]> getRawResults() {
    Map<NM, double[]> out = Maps.newLinkedHashMap();
    for (NM nm : results.keySet()) {
        double[] values = results.get(nm).getRawValues();
        out.put(nm, values);
    }
    return out;
}