Example usage for java.util LinkedHashMap putAll

List of usage examples for java.util LinkedHashMap putAll

Introduction

On this page you can find usage examples for java.util.LinkedHashMap.putAll.

Prototype

void putAll(Map<? extends K, ? extends V> m);

Document

Copies all of the mappings from the specified map to this map (optional operation).
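
Before the project snippets below, here is a minimal self-contained sketch (class and key names invented for illustration) of what putAll does on a LinkedHashMap: values of existing keys are replaced but keep their original iteration position, and new keys are appended in the order they are copied from the source map.

import java.util.LinkedHashMap;
import java.util.Map;

public class PutAllDemo {
    public static void main(String[] args) {
        Map<String, Integer> base = new LinkedHashMap<>();
        base.put("alpha", 1);
        base.put("beta", 2);

        Map<String, Integer> extra = new LinkedHashMap<>();
        extra.put("beta", 20);  // existing key: value replaced, position kept
        extra.put("gamma", 3);  // new key: appended at the end

        base.putAll(extra);

        System.out.println(base); // {alpha=1, beta=20, gamma=3}
    }
}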

Usage

From source file:com.opengamma.analytics.financial.interestrate.capletstripping.SABRTermStructureModelProvider.java

protected LinkedHashMap<String, InterpolatedDoublesCurve> getCurves(final DoubleMatrix1D x) {
    final LinkedHashMap<String, InterpolatedDoublesCurve> curves = _curveBuilder.evaluate(x);

    // set any known (i.e. fixed) curves
    if (_knownParameterTermStructures != null) {
        curves.putAll(_knownParameterTermStructures);
    }
    return curves;
}

From source file:com.kixeye.chassis.support.eureka.MetadataCollector.java

/**
 * Get metadata published by the application that can dynamically change.
 *
 * @return Map of dynamic metadata
 */
public Map<String, String> getDynamicMetadataMap() {
    if (publishers == null || publishers.size() <= 0) {
        return null;
    }
    LinkedHashMap<String, String> metadata = new LinkedHashMap<>();
    for (MetadataPublisher publisher : publishers) {
        metadata.putAll(publisher.getMetadataMap());
    }
    return metadata;
}

From source file:com.logsniffer.reader.grok.GrokTextReader.java

@Override
public LinkedHashMap<String, FieldBaseTypes> getFieldTypes() throws FormatException {
    init();
    final LinkedHashMap<String, FieldBaseTypes> fields = super.getFieldTypes();
    fields.putAll(grokBean.getGrok(groksRegistry).getFieldTypes());
    if (overflowAttribute != null && !fields.containsKey(overflowAttribute)) {
        fields.put(overflowAttribute, FieldBaseTypes.STRING);
    }
    return fields;
}

From source file:org.springframework.cloud.netflix.zuul.filters.discovery.DiscoveryClientRouteLocator.java

@Override
protected LinkedHashMap<String, ZuulRoute> locateRoutes() {
    LinkedHashMap<String, ZuulRoute> routesMap = new LinkedHashMap<String, ZuulRoute>();
    routesMap.putAll(super.locateRoutes());
    if (this.discovery != null) {
        Map<String, ZuulRoute> staticServices = new LinkedHashMap<String, ZuulRoute>();
        for (ZuulRoute route : routesMap.values()) {
            String serviceId = route.getServiceId();
            if (serviceId == null) {
                serviceId = route.getId();
            }
            if (serviceId != null) {
                staticServices.put(serviceId, route);
            }
        }
        // Add routes for discovery services by default
        List<String> services = this.discovery.getServices();
        String[] ignored = this.properties.getIgnoredServices().toArray(new String[0]);
        for (String serviceId : services) {
            // Ignore specifically ignored services and those that were manually
            // configured
            String key = "/" + mapRouteToService(serviceId) + "/**";
            if (staticServices.containsKey(serviceId) && staticServices.get(serviceId).getUrl() == null) {
                // Explicitly configured with no URL, cannot be ignored
                // all static routes are already in routesMap
                // Update location using serviceId if location is null
                ZuulRoute staticRoute = staticServices.get(serviceId);
                if (!StringUtils.hasText(staticRoute.getLocation())) {
                    staticRoute.setLocation(serviceId);
                }
            }
            if (!PatternMatchUtils.simpleMatch(ignored, serviceId) && !routesMap.containsKey(key)) {
                // Not ignored
                routesMap.put(key, new ZuulRoute(key, serviceId));
            }
        }
    }
    if (routesMap.get(DEFAULT_ROUTE) != null) {
        ZuulRoute defaultRoute = routesMap.get(DEFAULT_ROUTE);
        // Move the defaultServiceId to the end
        routesMap.remove(DEFAULT_ROUTE);
        routesMap.put(DEFAULT_ROUTE, defaultRoute);
    }
    LinkedHashMap<String, ZuulRoute> values = new LinkedHashMap<>();
    for (Entry<String, ZuulRoute> entry : routesMap.entrySet()) {
        String path = entry.getKey();
        // Prepend with slash if not already present.
        if (!path.startsWith("/")) {
            path = "/" + path;
        }
        if (StringUtils.hasText(this.properties.getPrefix())) {
            path = this.properties.getPrefix() + path;
            if (!path.startsWith("/")) {
                path = "/" + path;
            }
        }
        values.put(path, entry.getValue());
    }
    return values;
}
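
The default-route handling above relies on LinkedHashMap insertion order: removing a key and putting it back pushes that entry to the end of iteration (a plain put of an existing key would not move it). A standalone sketch of that move-to-end idiom, with made-up route keys:

import java.util.LinkedHashMap;
import java.util.Map;

public class MoveToEndDemo {
    public static void main(String[] args) {
        Map<String, String> routes = new LinkedHashMap<>();
        routes.put("/**", "default-service");      // catch-all registered first
        routes.put("/users/**", "user-service");
        routes.put("/orders/**", "order-service");

        // Move the catch-all route to the end so more specific routes come first
        String defaultLocation = routes.remove("/**");
        if (defaultLocation != null) {
            routes.put("/**", defaultLocation);
        }

        System.out.println(routes);
        // {/users/**=user-service, /orders/**=order-service, /**=default-service}
    }
}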

From source file:org.elasticsearch.storm.EsSpout.java

@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    this.collector = collector;

    LinkedHashMap copy = new LinkedHashMap(conf);
    copy.putAll(spoutConfig);

    StormSettings settings = new StormSettings(copy);

    InitializationUtils.setValueReaderIfNotSet(settings, JdkValueReader.class, log);

    ackReads = settings.getStormSpoutReliable();

    if (ackReads) {
        inTransitQueue = new LinkedHashMap<Object, Object>();
        replayQueue = new LinkedList<Object[]>();
        retries = new HashMap<Object, Integer>();
        queueSize = settings.getStormSpoutReliableQueueSize();
        tupleRetries = settings.getStormSpoutReliableRetriesPerTuple();
        tupleFailure = settings.getStormSpoutReliableTupleFailureHandling();
    }

    int totalTasks = context.getComponentTasks(context.getThisComponentId()).size();
    int currentTask = context.getThisTaskIndex();

    // match the partitions based on the current topology
    List<PartitionDefinition> partitions = RestService.findPartitions(settings, log);
    List<PartitionDefinition> assigned = RestService.assignPartitions(partitions, currentTask, totalTasks);
    iterator = RestService.multiReader(settings, assigned, log);
}
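
The spout builds its settings by copying the topology configuration first and then calling putAll with the spout-specific map, so the spout's entries win on key collisions. A standalone sketch of that defaults-then-overrides pattern (the configuration keys and values here are invented):

import java.util.LinkedHashMap;
import java.util.Map;

public class OverrideDemo {
    public static void main(String[] args) {
        Map<String, Object> topologyConf = new LinkedHashMap<>();
        topologyConf.put("es.nodes", "localhost");
        topologyConf.put("es.port", 9200);

        Map<String, Object> spoutConf = new LinkedHashMap<>();
        spoutConf.put("es.port", 9300);             // override
        spoutConf.put("es.resource", "logs/doc");   // addition

        Map<String, Object> merged = new LinkedHashMap<>(topologyConf);
        merged.putAll(spoutConf); // later map wins on duplicate keys

        System.out.println(merged); // {es.nodes=localhost, es.port=9300, es.resource=logs/doc}
    }
}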

From source file:com.opengamma.analytics.financial.interestrate.capletstripping.CapletStrippingFunction.java

@Override
public DoubleMatrix1D evaluate(final DoubleMatrix1D x) {

    final LinkedHashMap<String, InterpolatedDoublesCurve> curves = _curveBuilder.evaluate(x);

    // set any known (i.e. fixed) curves
    if (_knownParameterTermStructures != null) {
        curves.putAll(_knownParameterTermStructures);
    }

    //TODO for now this is tied to SABRTermStructureParameters - want to be able to drop in any volatility model that has a term structure of
    //parameters
    final VolatilityModel1D volModel = new SABRTermStructureParameters(curves.get(ALPHA), curves.get(BETA),
            curves.get(RHO), curves.get(NU));

    final double[] res = new double[_capPricers.size()];
    for (int i = 0; i < _capPricers.size(); i++) {
        res[i] = _capPricers.get(i).impliedVol(volModel);
    }

    return new DoubleMatrix1D(res);
}

From source file:org.fusesource.meshkeeper.distribution.MopPluginResolver.java

private synchronized MOPRepository getMopRepository() {
    if (MOP_REPO == null) {
        MOP_REPO = new MOPRepository();

        if (System.getProperty(MOPRepository.MOP_BASE) == null
                && System.getProperty(MOPRepository.MOP_REPO_CONFIG_PROP) == null) {
            LOG.warn("Neither: " + MOPRepository.MOP_BASE + " or " + MOPRepository.MOP_REPO_CONFIG_PROP
                    + " are set. Will use default repos");
        }
        // The plexus container is created on demand with the context classloader.
        // Let's load it now, so we can properly set its classloader.
        ClassLoader original = Thread.currentThread().getContextClassLoader();
        try {
            Thread.currentThread().setContextClassLoader(MOPRepository.class.getClassLoader());
            MOP_REPO.getContainer();
        } finally {
            Thread.currentThread().setContextClassLoader(original);
        }

        LinkedHashMap<String, String> repositories = MOP_REPO.getRemoteRepositories();
        repositories.clear();
        //Add in configured repos:
        repositories.putAll(MOP_REPO.getConfiguredRepositories());
        //Add in meshkeeper repos:
        repositories.put("fusesource-nexus-releases",
                "http://repo.fusesource.com/nexus/content/repositories/releases");
        repositories.put("fusesource-nexus-snapshots",
                "http://repo.fusesource.com/nexus/content/repositories/snapshots");
        MOP_REPO.setIncludeOptional(true);

    }
    return MOP_REPO;
}

From source file:org.kuali.rice.core.impl.config.property.ConfigParserImpl.java

/**
 * @see org.kuali.rice.core.api.config.ConfigParser#parse(java.lang.String[])
 */
public void parse(Map props, String[] locations) throws IOException {
    LinkedHashMap params = new LinkedHashMap();
    params.putAll(props);
    parse(params, locations);
    props.putAll(params);
}
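
The parser works on a LinkedHashMap copy of the caller's properties and only writes the merged result back with putAll at the end. A typed sketch of that copy-then-write-back pattern (the parsing step is a stand-in, not the real ConfigParserImpl logic):

import java.util.LinkedHashMap;
import java.util.Map;

public class CopyWriteBackDemo {
    static void parse(Map<String, String> props, String[] locations) {
        // Work on a copy so the caller's map is untouched until parsing finishes
        Map<String, String> params = new LinkedHashMap<>(props);
        for (String location : locations) {
            params.put("loaded." + location, "true"); // stand-in for real parsing
        }
        props.putAll(params); // publish the merged result back to the caller
    }

    public static void main(String[] args) {
        Map<String, String> props = new LinkedHashMap<>();
        props.put("env", "dev");
        parse(props, new String[] { "config.xml" });
        System.out.println(props); // {env=dev, loaded.config.xml=true}
    }
}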

From source file:com.opengamma.analytics.financial.curve.sensitivity.ParameterSensitivity.java

/**
 * Create a copy of the sensitivity and add a given named sensitivity to it. If the name / currency pair is in the map, the two sensitivity matrices are added.
 * Otherwise, a new entry is put into the map.
 * @param nameCcy The name and the currency, not null
 * @param sensitivity The sensitivity to add, not null
 * @return The total sensitivity.
 */
public ParameterSensitivity plus(final Pair<String, Currency> nameCcy, final DoubleMatrix1D sensitivity) {
    ArgumentChecker.notNull(nameCcy, "Name/currency");
    ArgumentChecker.notNull(sensitivity, "Matrix");
    final MatrixAlgebra algebra = MatrixAlgebraFactory.COMMONS_ALGEBRA;
    final LinkedHashMap<Pair<String, Currency>, DoubleMatrix1D> result = new LinkedHashMap<Pair<String, Currency>, DoubleMatrix1D>();
    result.putAll(_sensitivity);
    if (result.containsKey(nameCcy)) {
        result.put(nameCcy, (DoubleMatrix1D) algebra.add(result.get(nameCcy), sensitivity));
    } else {
        result.put(nameCcy, sensitivity);
    }
    return new ParameterSensitivity(result);
}
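
The method above copies the existing sensitivities with putAll and then either adds to the matrix for an existing name/currency pair or inserts a new one. On Java 8+ the same add-or-insert step can be expressed with Map.merge; a simplified sketch using plain doubles in place of DoubleMatrix1D and string keys in place of name/currency pairs:

import java.util.LinkedHashMap;
import java.util.Map;

public class AccumulateDemo {
    public static void main(String[] args) {
        Map<String, Double> sensitivity = new LinkedHashMap<>();
        sensitivity.put("EUR-DSC", 1.5);

        Map<String, Double> result = new LinkedHashMap<>();
        result.putAll(sensitivity); // copy, preserving insertion order

        // Add to an existing entry, or insert it if absent
        result.merge("EUR-DSC", 0.5, Double::sum); // 1.5 + 0.5 = 2.0
        result.merge("USD-DSC", 3.0, Double::sum); // new entry

        System.out.println(result); // {EUR-DSC=2.0, USD-DSC=3.0}
    }
}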

From source file:com.opengamma.analytics.financial.curve.sensitivity.ParameterSensitivity.java

/**
 * Create a copy of the sensitivity and add a given sensitivity to it.
 * @param other The sensitivity to add.
 * @return The total sensitivity.
 */
public ParameterSensitivity plus(final ParameterSensitivity other) {
    ArgumentChecker.notNull(other, "Sensitivity to add");
    final MatrixAlgebra algebra = MatrixAlgebraFactory.COMMONS_ALGEBRA;
    final LinkedHashMap<Pair<String, Currency>, DoubleMatrix1D> result = new LinkedHashMap<Pair<String, Currency>, DoubleMatrix1D>();
    result.putAll(_sensitivity);
    for (final Map.Entry<Pair<String, Currency>, DoubleMatrix1D> entry : other.getSensitivities().entrySet()) {
        final Pair<String, Currency> nameCcy = entry.getKey();
        if (result.containsKey(nameCcy)) {
            result.put(nameCcy, (DoubleMatrix1D) algebra.add(result.get(nameCcy), entry.getValue()));
        } else {
            result.put(nameCcy, entry.getValue());
        }
    }
    return new ParameterSensitivity(result);
}