Example usage for java.util LinkedHashMap put

List of usage examples for java.util LinkedHashMap put

Introduction

On this page you can find example usage for java.util LinkedHashMap put.

Prototype

V put(K key, V value);

Document

Associates the specified value with the specified key in this map (optional operation).
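
As a quick, self-contained illustration (not taken from any of the projects below; the class and key names are made up), the following sketch shows that put appends new keys in insertion order and returns the previous value, if any, when a key is re-put:

import java.util.LinkedHashMap;
import java.util.Map;

public class LinkedHashMapPutDemo {
    public static void main(String[] args) {
        // Entries are iterated in the order in which they were put.
        Map<String, Integer> map = new LinkedHashMap<>();
        map.put("first", 1);
        map.put("second", 2);
        map.put("third", 3);

        // put returns the previous value for the key, or null if the key was absent.
        Integer previous = map.put("second", 22);
        System.out.println("previous value for 'second': " + previous); // 2

        // Re-putting "second" replaced its value but kept its original position.
        for (Map.Entry<String, Integer> entry : map.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
        // Prints: first = 1, second = 22, third = 3
    }
}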

Usage

From source file:com.redhat.rcm.version.Cli.java

private static void printVersionInfo() {
    final StringBuilder sb = new StringBuilder();
    sb.append(APP_NAME).append("\n\n").append(APP_DESCRIPTION).append("\n\n");

    final LinkedHashMap<String, Object> map = new LinkedHashMap<String, Object>();
    map.put("Built By:", APP_BUILDER);
    map.put("Commit ID:", APP_COMMIT_ID);
    map.put("Built On:", APP_TIMESTAMP);
    map.put("Version:", APP_VERSION);

    sb.append(formatHelpMap(map, "\n"));
    sb.append("\n\n");

    System.out.println(sb.toString());
}

From source file:com.opengamma.analytics.financial.curve.sensitivity.ParameterUnderlyingSensitivityCalculator.java

/**
 * Computes the sensitivity with respect to the parameters from the point sensitivities to the continuously compounded rate.
 * The sensitivity is computed only with respect to the curves not in the fixedCurves set. When a curve depends on another underlying curve and that underlying curve is a fixed curve,
 * its sensitivity is not reported.
 * @param sensitivity The point sensitivity.
 * @param fixedCurves The fixed curve names (for which the parameter sensitivities are not computed even if they are necessary for the instrument pricing).
 * The curves in the list may or may not be in the bundle. Not null.
 * @param bundle The curve bundle with all the curves with respect to which the sensitivity should be computed. Not null.
 * @return The sensitivity (as a DoubleMatrix1D).
 */
@Override
public DoubleMatrix1D pointToParameterSensitivity(final InterestRateCurveSensitivity sensitivity,
        final Set<String> fixedCurves, final YieldCurveBundle bundle) {
    Set<String> curveNamesSet = bundle.getAllNames();
    int nbCurve = curveNamesSet.size();
    String[] curveNamesArray = new String[nbCurve];
    int loopname = 0;
    LinkedHashMap<String, Integer> curveNum = new LinkedHashMap<String, Integer>();
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        curveNamesArray[loopname] = name;
        curveNum.put(name, loopname++);
    }
    int[] nbNewParameters = new int[nbCurve];
    // Implementation note: nbNewParameters - number of new parameters in the curve, parameters not from an underlying curve which is another curve of the bundle.
    int[][] indexOther = new int[nbCurve][];
    // Implementation note: indexOther - the index of the underlying curves, if any.
    loopname = 0;
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        final YieldAndDiscountCurve curve = bundle.getCurve(name);
        List<String> underlyingCurveNames = curve.getUnderlyingCurvesNames();
        nbNewParameters[loopname] = curve.getNumberOfParameters();
        List<Integer> indexOtherList = new ArrayList<Integer>();
        for (String u : underlyingCurveNames) {
            Integer i = curveNum.get(u);
            if (i != null) {
                indexOtherList.add(i);
                nbNewParameters[loopname] -= nbNewParameters[i];
            }
        }
        indexOther[loopname] = ArrayUtils.toPrimitive(indexOtherList.toArray(new Integer[0]));
        loopname++;
    }
    int nbSensiCurve = 0;
    for (final String name : bundle.getAllNames()) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            nbSensiCurve++;
        }
    }
    int[] nbNewParamSensiCurve = new int[nbSensiCurve];
    // Implementation note: nbNewParamSensiCurve - for each curve for which the sensitivity should be computed, the number of new parameters (parameters not coming from an underlying curve of the bundle).
    int[][] indexOtherSensiCurve = new int[nbSensiCurve][];
    // Implementation note: indexOtherSensiCurve - for each curve for which the sensitivity should be computed, the indexes of its underlying curves, if any.
    int[] startCleanParameter = new int[nbSensiCurve];
    // Implementation note: startCleanParameter - for each curve for which the sensitivity should be computed, the index in the total sensitivity vector at which that curve start.
    int[][] startDirtyParameter = new int[nbSensiCurve][];
    // Implementation note: startDirtyParameter - for each curve for which the sensitivity should be computed, the indexes of the underlying curves.
    int nbSensitivityCurve = 0;
    int nbCleanParameters = 0;
    int currentDirtyStart = 0;
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            int num = curveNum.get(name);
            final YieldAndDiscountCurve curve = bundle.getCurve(name);
            List<Integer> startDirtyParameterList = new ArrayList<Integer>();
            List<String> underlyingCurveNames = curve.getUnderlyingCurvesNames();
            for (String u : underlyingCurveNames) {
                Integer i = curveNum.get(u);
                if (i != null) {
                    startDirtyParameterList.add(currentDirtyStart);
                    currentDirtyStart += nbNewParameters[i];
                }
            }
            startDirtyParameterList.add(currentDirtyStart);
            currentDirtyStart += nbNewParameters[nbSensitivityCurve];
            startDirtyParameter[nbSensitivityCurve] = ArrayUtils
                    .toPrimitive(startDirtyParameterList.toArray(new Integer[0]));
            nbNewParamSensiCurve[nbSensitivityCurve] = nbNewParameters[num];
            indexOtherSensiCurve[nbSensitivityCurve] = indexOther[num];
            startCleanParameter[nbSensitivityCurve] = nbCleanParameters;
            nbCleanParameters += nbNewParamSensiCurve[nbSensitivityCurve];
            nbSensitivityCurve++;
        }
    }
    final List<Double> sensiDirtyList = new ArrayList<Double>();
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            final YieldAndDiscountCurve curve = bundle.getCurve(name);
            List<Double> oneCurveSensitivity = pointToParameterSensitivity(
                    sensitivity.getSensitivities().get(name), curve);
            sensiDirtyList.addAll(oneCurveSensitivity);
        }
    }
    double[] sensiDirty = ArrayUtils.toPrimitive(sensiDirtyList.toArray(new Double[0]));
    double[] sensiClean = new double[nbCleanParameters];
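    // Aggregate the dirty sensitivities into the clean vector: the part attributable to an underlying curve is added to that curve's parameters, the remainder to the curve's own parameters.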
    for (int loopcurve = 0; loopcurve < nbSensiCurve; loopcurve++) {
        for (int loopo = 0; loopo < indexOtherSensiCurve[loopcurve].length; loopo++) {
            if (!fixedCurves.contains(curveNamesArray[indexOtherSensiCurve[loopcurve][loopo]])) {
                for (int loops = 0; loops < nbNewParamSensiCurve[indexOtherSensiCurve[loopcurve][loopo]]; loops++) {
                    sensiClean[startCleanParameter[indexOtherSensiCurve[loopcurve][loopo]]
                            + loops] += sensiDirty[startDirtyParameter[loopcurve][loopo] + loops];
                }
            }
        }
        for (int loops = 0; loops < nbNewParamSensiCurve[loopcurve]; loops++) {
            sensiClean[startCleanParameter[loopcurve]
                    + loops] += sensiDirty[startDirtyParameter[loopcurve][indexOtherSensiCurve[loopcurve].length]
                            + loops];
        }
    }
    return new DoubleMatrix1D(sensiClean);
}

From source file:info.magnolia.cms.util.ExtendingContentWrapper.java

@Override
public Collection<Content> getChildren(ContentFilter filter, String namePattern,
        Comparator<Content> orderCriteria) {
    Collection<Content> directChildren = ((AbstractContent) getWrappedContent()).getChildren(filter,
            namePattern, orderCriteria);
    if (extending) {
        Collection<Content> inheritedChildren = ((AbstractContent) extendedContent).getChildren(filter,
                namePattern, orderCriteria);
        // keep order, add new elements at the end of the collection
        LinkedHashMap<String, Content> merged = new LinkedHashMap<String, Content>();
        for (Content content : inheritedChildren) {
            merged.put(content.getName(), content);
        }
        for (Content content : directChildren) {
            merged.put(content.getName(), content);
        }
        return wrapContentNodes(merged.values());
    }
    return wrapContentNodes(directChildren);
}
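
The merge above relies on two properties of LinkedHashMap.put: re-putting an existing key replaces its value without moving the key, and genuinely new keys are appended at the end. A simplified sketch of the same pattern with plain strings (the names are illustrative, not part of the Magnolia API):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class MergeByNameDemo {
    public static void main(String[] args) {
        List<String> inherited = Arrays.asList("header", "body", "footer");
        List<String> direct = Arrays.asList("body", "sidebar");

        // Inherited entries establish the base ordering ...
        Map<String, String> merged = new LinkedHashMap<>();
        for (String name : inherited) {
            merged.put(name, "inherited:" + name);
        }
        // ... direct entries override by name; genuinely new names go to the end.
        for (String name : direct) {
            merged.put(name, "direct:" + name);
        }

        System.out.println(merged.values());
        // Prints: [inherited:header, direct:body, inherited:footer, direct:sidebar]
    }
}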

From source file:com.epam.dlab.backendapi.dao.ComputationalDAO.java

private LinkedHashMap<String, String> toUrlDocument(ResourceURL url) {
    LinkedHashMap<String, String> map = new LinkedHashMap<>();
    map.put(COMPUTATIONAL_URL_DESC, url.getDescription());
    map.put(COMPUTATIONAL_URL_URL, url.getUrl());
    return map;
}

From source file:org.openmeetings.app.rss.LoadAtomRssFeed.java

public LinkedHashMap<String, LinkedHashMap<String, LinkedHashMap<String, LinkedHashMap<String, Object>>>> getRssFeeds(
        Long user_level) {
    try {
        if (authLevelmanagement.checkUserLevel(user_level)) {
            LinkedHashMap<String, LinkedHashMap<String, LinkedHashMap<String, LinkedHashMap<String, Object>>>> returnMap = new LinkedHashMap<String, LinkedHashMap<String, LinkedHashMap<String, LinkedHashMap<String, Object>>>>();

            String url1 = cfgManagement.getConfKey(3, "rss_feed1").getConf_value();
            returnMap.put("feed1", this.parseRssFeed(url1));

            String url2 = cfgManagement.getConfKey(3, "rss_feed2").getConf_value();
            returnMap.put("feed2", this.parseRssFeed(url2));

            return returnMap;
        } else {
            log.error("[getRssFeeds] authorization required");
        }

    } catch (Exception ex) {
        log.error("[getRssFeeds]", ex);
    }
    return null;
}

From source file:com.streamsets.pipeline.lib.parser.excel.TestWorkbookParser.java

@Test
public void testParseCorrectlyReturnsCachedValueOfFormula()
        throws IOException, InvalidFormatException, DataParserException {
    Workbook workbook = createWorkbook("/excel/TestFormulas.xlsx");

    WorkbookParser parser = new WorkbookParser(settingsNoHeader, getContext(), workbook, "Sheet1::0");

    Record recordFirstRow = parser.parse();
    Record recordSecondRow = parser.parse();

    LinkedHashMap<String, Field> firstMap = new LinkedHashMap<>();
    firstMap.put("0", Field.create("Addition"));
    firstMap.put("1", Field.create("Division"));
    firstMap.put("2", Field.create("Neighbor Multiplication"));
    Field expectedFirstRow = Field.createListMap(firstMap);

    LinkedHashMap<String, Field> secondMap = new LinkedHashMap<>();
    secondMap.put("0", Field.create(new BigDecimal(8.0).setScale(1)));
    secondMap.put("1", Field.create(new BigDecimal(9.0).setScale(1)));
    secondMap.put("2", Field.create(new BigDecimal(72.0).setScale(1)));
    Field expectedSecondRow = Field.createListMap(secondMap);

    assertEquals(expectedFirstRow, recordFirstRow.get());
    assertEquals(expectedSecondRow, recordSecondRow.get());
}

From source file:com.streamsets.pipeline.lib.parser.excel.TestWorkbookParser.java

@Test
public void testParseHandlesBlanksCells() throws IOException, InvalidFormatException, DataParserException {
    Workbook workbook = createWorkbook("/excel/TestBlankCells.xlsx");

    WorkbookParser parser = new WorkbookParser(settingsWithHeader, getContext(), workbook, "Sheet1::0");

    Record recordFirstRow = parser.parse();

    LinkedHashMap<String, Field> firstContentMap = new LinkedHashMap<>();
    firstContentMap.put("column1", Field.create(BigDecimal.valueOf(11)));
    firstContentMap.put("column2", Field.create(""));
    firstContentMap.put("column3", Field.create(""));
    firstContentMap.put("column4", Field.create(BigDecimal.valueOf(44)));

    Field expectedFirstRow = Field.createListMap(firstContentMap);

    assertEquals(expectedFirstRow, recordFirstRow.get());
}

From source file:com.intel.iotkitlib.RuleManagement.java

/**
 * Get specific rule details for the account
 *
 * @param ruleId the identifier for the rule to retrieve info for.
 * @return For async model, return CloudResponse which wraps true if the request of REST
 * call is valid; otherwise false. The actual result from
 * the REST call is return asynchronously as part {@link RequestStatusHandler#readResponse}.
 * For synch model, return CloudResponse which wraps HTTP return code and response.
 */
public CloudResponse getInformationOnRule(String ruleId) {
    if (ruleId == null) {
        Log.d(TAG, ERR_INVALID_ID);
        return new CloudResponse(false, ERR_INVALID_ID);
    }
    //initiating get for rule info
    HttpGetTask infoOnRule = new HttpGetTask();
    infoOnRule.setHeaders(basicHeaderList);
    LinkedHashMap<String, String> linkedHashMap = new LinkedHashMap<String, String>();
    linkedHashMap.put("rule_id", ruleId);
    String url = objIotKit.prepareUrl(objIotKit.getInfoOfRule, linkedHashMap);
    return super.invokeHttpExecuteOnURL(url, infoOnRule);
}

From source file:com.intel.iotkitlib.RuleManagement.java

/**
 * Delete a specific draft rule for account.
 *
 * @param ruleId the identifier for the rule to delete.
 * @return For async model, return CloudResponse which wraps true if the request of REST
 * call is valid; otherwise false. The actual result from
 * the REST call is return asynchronously as part {@link RequestStatusHandler#readResponse}.
 * For synch model, return CloudResponse which wraps HTTP return code and response.
 */
public CloudResponse deleteADraftRule(String ruleId) {
    if (ruleId == null) {
        Log.d(TAG, ERR_INVALID_ID);
        return new CloudResponse(false, ERR_INVALID_ID);
    }
    //initiating delete for draft rule
    HttpDeleteTask deleteDraftRule = new HttpDeleteTask();
    deleteDraftRule.setHeaders(basicHeaderList);
    LinkedHashMap<String, String> linkedHashMap = new LinkedHashMap<String, String>();
    linkedHashMap.put("rule_id", ruleId);
    String url = objIotKit.prepareUrl(objIotKit.deleteDraftRule, linkedHashMap);
    return super.invokeHttpExecuteOnURL(url, deleteDraftRule);
}

From source file:org.karndo.graphs.CustomChartFactory.java

/**
 * Creates a chart of the selected PiracyEvent data graphed by event 
 * status. Presently uses a very basic method of graphing this data by 
 * using the static createBarChart3D method available in class
 * org.jfree.chart.ChartFactory.
 * 
 * @param data the selected PiracyEvent data to graph.
 * @return A JFreeChart object representing a graph of the selected 
 * PiracyEvent data against event status.
 */
public JFreeChart createHistogramStatus(LinkedList<PiracyEvent> data) {
    //the data to plot
    DefaultCategoryDataset dataset = new DefaultCategoryDataset();
    LinkedHashMap<String, MutableInt> freqs_cats = new LinkedHashMap<String, MutableInt>();

    for (PiracyEvent ev : data) {
        if (!freqs_cats.containsKey(ev.getStatus())) {
            freqs_cats.put(ev.getStatus(), new MutableInt(1));
        } else {
            freqs_cats.get(ev.getStatus()).increment();
        }
    }

    for (String category : freqs_cats.keySet()) {
        Integer frequency = freqs_cats.get(category).getValue();
        dataset.addValue(frequency, "Piracy Incidents", category);
    }

    JFreeChart chart = ChartFactory.createBarChart3D("Piracy Incidents " + "by event status", "Event Status",
            "Frequency", dataset, PlotOrientation.VERTICAL, false, true, false);
    return chart;
}
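
The containsKey/put counting idiom above can also be expressed with Map.merge (Java 8+) while keeping the same first-seen iteration order. A minimal sketch using plain Integer counts instead of MutableInt (the sample status strings are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;

public class FrequencyCountDemo {
    public static void main(String[] args) {
        String[] statuses = {"Attacked", "Boarded", "Attacked", "Hijacked", "Attacked"};

        // Counts keyed by status, iterated in first-seen order.
        Map<String, Integer> freqs = new LinkedHashMap<>();
        for (String status : statuses) {
            // merge() puts 1 for a new key, otherwise adds 1 to the existing count.
            freqs.merge(status, 1, Integer::sum);
        }

        freqs.forEach((status, count) -> System.out.println(status + " -> " + count));
        // Prints: Attacked -> 3, Boarded -> 1, Hijacked -> 1
    }
}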