Example usage for java.util LinkedHashMap get

List of usage examples for java.util LinkedHashMap get

Introduction

On this page you can find example usages of java.util.LinkedHashMap.get.

Prototype

public V get(Object key) 

Source Link

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.

Usage

From source file:net.sf.jasperreports.engine.fill.DelayedFillActions.java

public boolean hasDelayedActions(JRPrintPage page) {
    // Check every registered action map for pending evaluation-bound actions on this page.
    FillPageKey pageKey = new FillPageKey(page);
    for (LinkedHashMap<FillPageKey, LinkedMap<Object, EvaluationBoundAction>> actions : actionsMap.values()) {
        fillContext.lockVirtualizationContext();
        try {
            synchronized (actions) {
                LinkedMap<Object, EvaluationBoundAction> pageActions = actions.get(pageKey);
                boolean pending = pageActions != null && !pageActions.isEmpty();
                if (pending) {
                    return true;
                }
            }
        } finally {
            // Always release the virtualization context lock, even on early return.
            fillContext.unlockVirtualizationContext();
        }
    }

    return false;
}

From source file:aldenjava.opticalmapping.data.mappingresult.OptMapResultNode.java

/**
 * Computes the reference regions where the given results could potentially map:
 * each result's mapped region is widened by its parent fragment size on both
 * sides and clamped to the reference boundaries, then overlapping regions are
 * merged.
 *
 * @param optrefmap  reference map, keyed by reference name; each result's
 *                   {@code mappedRegion.ref} is expected to be present
 * @param resultList mapped results; an empty list or an unused first result
 *                   yields an empty region list
 * @return the merged list of potentially mapped regions
 */
public static List<GenomicPosNode> getPotentiallyMappedRegion(LinkedHashMap<String, DataNode> optrefmap,
        List<OptMapResultNode> resultList) {
    List<GenomicPosNode> targetRegionList = new ArrayList<GenomicPosNode>();
    if (resultList.isEmpty())
        return targetRegionList;
    if (!resultList.get(0).isUsed())
        return targetRegionList;

    for (OptMapResultNode result : resultList) {
        String ref = result.mappedRegion.ref;
        // Look the reference up once instead of twice per result.
        DataNode refData = optrefmap.get(ref);
        // Widen by the parent fragment size, clamped to [1, reference size].
        long start = Math.max(1L, result.mappedRegion.start - result.parentFrag.size);
        long stop = Math.min(refData.size, result.mappedRegion.stop + result.parentFrag.size);
        targetRegionList.add(new GenomicPosNode(ref, start, stop));
    }
    return GenomicPosNode.merge(targetRegionList);
}

From source file:edu.jhuapl.openessence.web.util.ControllerUtils.java

/**
 * Trims a chart-data map down to at most {@code limit} entries while keeping
 * the original insertion order of the surviving keys.
 * <p>
 * The map is first sorted and limited by value (overflow is combined under
 * {@code limitLabel}); entries with a null/NaN/zero count are held back and
 * only used to fill remaining slots. The combined entry, when present, is
 * always appended last (a non-null value indicates it was used, even at 0).
 *
 * @param map        the original, insertion-ordered chart data
 * @param limit      maximum number of entries; non-positive or >= map size
 *                   returns the map unchanged
 * @param limitLabel label under which the combined overflow entry is stored
 * @return the limited map in original key order (plus the combined entry)
 */
public static LinkedHashMap<String, ChartData> getSortedAndLimitedChartDataMap(
        LinkedHashMap<String, ChartData> map, Integer limit, String limitLabel) {
    // Nothing to trim when the limit is non-positive or already covers the whole map.
    if (limit <= 0 || limit >= map.size()) {
        return map;
    }

    // Sort by value, then limit and combine the overflow under limitLabel.
    Map<String, ChartData> sortedMap = ControllerUtils.getSortedByChartDataMap(map);
    Map<String, ChartData> sortedLimitedMap = ControllerUtils.getLimitedChartDataMap(sortedMap, limit,
            limitLabel);

    // Put the original sort order back (minus the values combined).
    LinkedHashMap<String, ChartData> originalSortResultMap = new LinkedHashMap<String, ChartData>(limit);
    LinkedHashMap<String, ChartData> passedValuesMap = new LinkedHashMap<String, ChartData>(map.size());
    int added = 0;
    for (String key : map.keySet()) {
        if (added >= limit) {
            break; // result is full; no point scanning the remaining keys
        }
        if (sortedLimitedMap.containsKey(key)) {
            ChartData value = sortedLimitedMap.get(key);
            // Only non-null, non-NaN, positive counts take a slot immediately.
            if (value != null && value.getCount() != null && !Double.isNaN(value.getCount())
                    && value.getCount() > 0) {
                originalSortResultMap.put(key, value);
                added++;
            } else {
                // Held back for inclusion at the end if room remains.
                passedValuesMap.put(key, value);
            }
        }
    }
    // If we still have room after adding all non-zero values, fill with passed values.
    for (Map.Entry<String, ChartData> passed : passedValuesMap.entrySet()) {
        if (added >= limit) {
            break;
        }
        originalSortResultMap.put(passed.getKey(), passed.getValue());
        added++;
    }
    // Add the combined field if it is not null (indicates it was used even if the value is 0).
    ChartData combined = sortedLimitedMap.get(limitLabel);
    if (combined != null && combined.getCount() != null && !Double.isNaN(combined.getCount())) {
        originalSortResultMap.put(limitLabel, combined);
    }
    return originalSortResultMap;
}

From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java

public void testAddWatch() {
    // Put a single entry and verify it can be read back by its key.
    LinkedHashMap<String, String> map = new LinkedHashMap<String, String>();
    String key = "watch";
    map.put(key, key);
    assertEquals(map.get(key), key);
}

From source file:com.vmware.photon.controller.cloudstore.dcp.monitors.CloudStoreCache.java

/**
 * Creates a cache over the given CloudStore document paths.
 * <p>
 * The {@code paths} argument is deliberately a {@link LinkedHashMap}: the
 * cache updates itself according to the order of the supplied paths, because
 * some resources reference other documents that need to be retrieved prior to
 * triggering the event.
 *
 * @param dcpRestClient client used to talk to CloudStore
 * @param paths         ordered mapping of path prefix to its document type
 */
public CloudStoreCache(DcpRestClient dcpRestClient, LinkedHashMap<String, Class> paths) {
    this.dcpRestClient = dcpRestClient;

    this.currentData = new LinkedHashMap<>();

    // Remember the document type associated with each path prefix.
    this.pathTypes = new HashMap<>();
    this.pathTypes.putAll(paths);

    /*
     * Since the CloudStoreCache can cache multiple paths at the same
     * time, a prefix mapping is needed to map a prefix with all its
     * suffixes (i.e. its children). For example, if the paths variable is
     * {A,B}, and A has a1 and a2 as children, and B has b1 as a child, then
     * the prefix map would look like this:
     *    A -> map1
     *    B -> map2
     *
     *    where map1 is :
     *    a1 -> a1 Document
     *    a2 -> a2 Document
     *
     *    and map2 is:
     *    b1 -> b1 Document
     */
    for (String prefixPath : paths.keySet()) {
        this.currentData.put(prefixPath, new HashMap<>());
    }
}

From source file:com.aliyun.odps.graph.local.LocalGraphJobRunner.java

/**
 * Resolves one input table of a local graph job into input splits.
 * <p>
 * Ensures the table exists in the local warehouse (downloading schema and data
 * from the remote service when missing, or when the download mode forces a
 * refresh), copies the matching data files into the job's input directory, and
 * registers one {@code InputSplit} per data file in {@code inputs}.
 *
 * @param tableInfo the input table (project, table name, columns, partition spec)
 * @throws IOException   if a partition spec is given for a non-partitioned
 *                       table, or on local file errors
 * @throws OdpsException if the table cannot be downloaded from the remote host
 */
private void processInput(TableInfo tableInfo) throws IOException, OdpsException {
    LOG.info("Processing input: " + tableInfo);

    String projName = tableInfo.getProjectName();
    if (projName == null) {
        // Fall back to the session's default project when none is specified.
        projName = SessionState.get().getOdps().getDefaultProject();
    }
    String tblName = tableInfo.getTableName();
    String[] readCols = tableInfo.getCols();

    // Build the expected partition spec ("k1=v1,k2=v2") from the table info, if any.
    LinkedHashMap<String, String> expectPartsHashMap = tableInfo.getPartSpec();
    PartitionSpec expectParts = null;
    if (expectPartsHashMap != null && expectPartsHashMap.size() > 0) {
        StringBuffer sb = new StringBuffer();
        for (String key : expectPartsHashMap.keySet()) {
            if (sb.length() > 0) {
                sb.append(",");
            }
            sb.append(key + "=" + expectPartsHashMap.get(key));
        }
        expectParts = new PartitionSpec(sb.toString());
    }

    // Download the table scheme and data when absent locally, or when downloads are forced.
    if (!wareHouse.existsTable(projName, tblName) || wareHouse.getDownloadMode() == DownloadMode.ALWAYS) {

        DownloadUtils.downloadTableSchemeAndData(odps, tableInfo, wareHouse.getLimitDownloadRecordCount(),
                wareHouse.getInputColumnSeperator());

        if (!wareHouse.existsTable(projName, tblName)) {
            throw new OdpsException("download table from remote host failure");
        }
    }

    // Read table meta, requested fields and known partitions from the local warehouse.
    TableMeta whTblMeta = wareHouse.getTableMeta(projName, tblName);
    Column[] whReadFields = LocalRunUtils.getInputTableFields(whTblMeta, readCols);
    List<PartitionSpec> whParts = wareHouse.getPartitions(projName, tblName);

    if (whParts.size() > 0) {
        // partitioned table
        for (PartitionSpec partSpec : whParts) {
            // Skip partitions that do not match the expected partition spec.
            if (!match(expectParts, partSpec)) {
                continue;
            }
            File whSrcDir = wareHouse.getPartitionDir(whTblMeta.getProjName(), whTblMeta.getTableName(),
                    partSpec);
            // add input split only when src dir has data file
            if (LocalRunUtils.listDataFiles(whSrcDir).size() > 0) {

                // Copy this partition's data (and the table scheme) from the
                // warehouse into the job's input directory.
                File tempDataDir = jobDirecotry.getInputDir(
                        wareHouse.getRelativePath(whTblMeta.getProjName(), whTblMeta.getTableName(), partSpec));
                File tempSchemeDir = jobDirecotry.getInputDir(
                        wareHouse.getRelativePath(whTblMeta.getProjName(), whTblMeta.getTableName(), null));
                wareHouse.copyTable(whTblMeta.getProjName(), whTblMeta.getTableName(), partSpec, readCols,
                        tempSchemeDir, wareHouse.getLimitDownloadRecordCount(),
                        wareHouse.getInputColumnSeperator());
                for (File file : LocalRunUtils.listDataFiles(tempDataDir)) {
                    // One input split per data file, spanning the whole file.
                    inputs.add(new InputSplit(file, whReadFields, 0L, file.length(), tableInfo));
                }
            }
        }
    } else {
        // not partitioned table
        if (tableInfo.getPartSpec() != null && tableInfo.getPartSpec().size() > 0) {
            throw new IOException(ExceptionCode.ODPS_0720121 + "table " + projName + "." + tblName
                    + " is not partitioned table");
        }

        File whSrcDir = wareHouse.getTableDir(whTblMeta.getProjName(), whTblMeta.getTableName());
        if (LocalRunUtils.listDataFiles(whSrcDir).size() > 0) {
            // Copy the table data from the warehouse into the job's input directory.
            File tempDataDir = jobDirecotry.getInputDir(
                    wareHouse.getRelativePath(whTblMeta.getProjName(), whTblMeta.getTableName(), null));
            File tempSchemeDir = tempDataDir;
            wareHouse.copyTable(whTblMeta.getProjName(), whTblMeta.getTableName(), null, readCols,
                    tempSchemeDir, wareHouse.getLimitDownloadRecordCount(),
                    wareHouse.getInputColumnSeperator());
            for (File file : LocalRunUtils.listDataFiles(tempDataDir)) {
                inputs.add(new InputSplit(file, whReadFields, 0L, file.length(), tableInfo));
            }
        }
    }

}

From source file:jp.primecloud.auto.api.ApiFilter.java

/**
 * PCC-API request filter.
 * <p>
 * Decodes the BASE64-encoded query parameters of an incoming API request,
 * validates the access id, signature and timestamp, resolves the effective
 * user (and, for most APIs, the target farm), then rewrites the request's
 * query parameters with the decoded values. APIs listed in
 * {@code notUseFarmApies} are exempt from the farm-number requirement.
 * On any failure the request is rerouted to the "Error" API with a message
 * parameter instead of propagating the exception.
 * {@inheritDoc}
 */
public ContainerRequest filter(ContainerRequest request) {

    try {
        // Requested URI.
        URI uri = request.getRequestUri();

        // Decode the BASE64-encoded query parameters into an ordered map.
        LinkedHashMap<String, String> decodeParamMap = getDecodedParamMap(uri);

        String apiName = uri.getPath().substring(request.getBaseUri().getPath().length());
        if (StringUtils.isEmpty(apiName)) {
            // No API name present in the URL.
            throw new AutoApplicationException("EAPI-000008", "URL", uri.toString());
        }

        //String userName = decodeParamMap.get(PARAM_NAME_KEY);
        String accessId = decodeParamMap.get(PARAM_NAME_ACCESS_ID);
        String signature = decodeParamMap.get(PARAM_NAME_SIGNATURE);
        String timestamp = decodeParamMap.get(PARAM_NAME_TIMESTAMP);
        String farmNo = decodeParamMap.get(PARAM_NAME_FARM_NO);
        String userName = null;
        Long userNo = null;
        Farm farm = null;
        User accessUser = null;
        User autoScaleUser = null;
        User masterUser = null;

        // Basic parameter validation.
        // Access id
        ApiValidate.validateAccessId(accessId);
        // Signature
        ApiValidate.validateSignature(signature);
        // Timestamp (yyyy/MM/dd HH:mm:ss)
        ApiValidate.validateTimestamp(timestamp);

        // Look up the PCC-API certificate for this access id.
        ApiCertificate apiCertificate = apiCertificateDao.readByApiAccessId(accessId);
        if (apiCertificate == null) {
            // Unknown access id: delay the response to hinder brute-force probing.
            Thread.sleep(SECURE_WAIT_TIME.intValue() * 1000);
            throw new AutoApplicationException("EAPI-000008", PARAM_NAME_ACCESS_ID, accessId);
        }

        // Resolve the (API) access user behind the certificate.
        accessUser = userDao.read(apiCertificate.getUserNo());
        if (accessUser == null) {
            // Certificate references a non-existent user.
            throw new AutoApplicationException("EAPI-100000", "User", "UserNo", apiCertificate.getUserNo());
        }

        //TODO additional user checks may belong here — the original comment was
        //     garbled by a broken encoding and its intent could not be recovered;
        //     verify against the project's upstream sources.
        //if (?) {
        //    ?
        //}

        // Verify the signature: rebuild the canonical URI query string and compare
        // its SHA-256-based digest (keyed with the user's API secret key, see
        // encodeSHA256) against the supplied signature.
        String uriText = createUriQueryParams(decodeParamMap, uri);
        String encodeUriText = encodeSHA256(uriText, apiCertificate.getApiSecretKey());
        if (BooleanUtils.isFalse(encodeUriText.equals(signature))) {
            // Signature mismatch: delay the response to hinder brute-force probing.
            Thread.sleep(SECURE_WAIT_TIME.intValue() * 1000);
            throw new AutoApplicationException("EAPI-000008", "URL", uri.toString());
        }

        if (Arrays.asList(notUseFarmApies).contains(apiName) == false) {
            // This API requires a farm number; validate and resolve the farm.
            ApiValidate.validateFarmNo(farmNo);
            farm = farmDao.read(Long.parseLong(farmNo));
            if (farm == null) {
                // No such farm.
                throw new AutoApplicationException("EAPI-100000", "Farm", PARAM_NAME_FARM_NO, farmNo);
            }
        }

        // Resolve the effective user for this request.
        if (farm != null && (StringUtils.isNotEmpty(AUTO_SCALING_USER)
                && AUTO_SCALING_USER.equals(accessUser.getUsername()) || accessUser.getPowerUser())) {
            // Auto-scaling user or power user: act as the farm's owning user.
            //TODO revisit — the original (garbled) comment hinted at PCC-API-specific
            //     caveats for this branch; confirm with upstream documentation.
            autoScaleUser = userDao.read(farm.getUserNo());
            userNo = autoScaleUser.getUserNo();
            userName = autoScaleUser.getUsername();
        } else if (!accessUser.getPowerUser()
                && accessUser.getUserNo().equals(accessUser.getMasterUser()) == false) {
            // Regular user with a distinct master user: act as the master user.
            if (accessUser.getMasterUser() != null) {
                userNo = accessUser.getMasterUser();
                masterUser = userDao.read(accessUser.getMasterUser());
                userName = masterUser.getUsername();
            }
        } else {
            // Otherwise act as the access user itself.
            userNo = accessUser.getUserNo();
            userName = accessUser.getUsername();
        }

        if (farm != null && farm.getUserNo().equals(userNo) == false) {
            // The farm does not belong to the effective user.
            throw new AutoApplicationException("EAPI-100026", farmNo, accessUser.getUsername());
        }

        // Propagate the resolved user name as a URL parameter (except for Login).
        if (!apiName.equals("Login")) {
            decodeParamMap.put(PARAM_NAME_USER, userName);
        }

        // Store request context for logging.
        LoggingUtils.setUserNo(userNo);
        LoggingUtils.setLoginUserNo(accessUser.getUserNo());
        LoggingUtils.setUserName(accessUser.getUsername());
        if (farm != null) {
            LoggingUtils.setFarmNo(farm.getFarmNo());
            LoggingUtils.setFarmName(farm.getFarmName());
        }

        // Write the decoded parameters back onto the request's query parameters.
        for (String key : decodeParamMap.keySet()) {
            request.getQueryParameters().putSingle(key, decodeParamMap.get(key));
        }

        // Access log.
        log.info(MessageUtils.getMessage("IAPI-000001", accessUser.getUsername(), apiName));
    } catch (Throwable e) {
        String message = "";
        if (e instanceof AutoException || e instanceof AutoApplicationException) {
            message = e.getMessage();
        } else {
            message = MessageUtils.getMessage("EAPI-000000");
        }
        // Error log.
        log.error(message, e);

        // Reroute the request to the Error API, carrying the message parameter.
        URI errorUri = URI.create(request.getBaseUri() + "Error");
        request.setUris(request.getBaseUri(), errorUri);
        request.getQueryParameters().putSingle("Message", message);
    }

    return request;
}

From source file:com.opengamma.analytics.financial.model.volatility.surface.VolatilitySurfaceFitter.java

/**
 * @return a function that takes the fitting parameters (node values in the
 *         transformed fitting space) and returns the set of (model) volatilities
 */
protected Function1D<DoubleMatrix1D, DoubleMatrix1D> getModelValueFunction() {

    return new Function1D<DoubleMatrix1D, DoubleMatrix1D>() {
        @SuppressWarnings("synthetic-access")
        @Override
        public DoubleMatrix1D evaluate(final DoubleMatrix1D x) {
            // Build the smile-parameter curves from the knot values in x.
            final LinkedHashMap<String, InterpolatedDoublesCurve> curves = _curveBuilder.evaluate(x);

            Validate.isTrue(x.getNumberOfElements() == _nKnotPoints); //TODO remove when working properly

            // Flat vector of model volatilities across all expiries/options.
            final double[] res = new double[_nOptions];
            int index = 0;

            // For each expiry: sample every parameter curve at t, convert the
            // sampled parameters into model data, and evaluate that expiry's
            // volatility function.
            for (int i = 0; i < _nExpiries; i++) {
                final double t = _expiries[i];
                final double[] theta = new double[_nSmileModelParameters];
                int p = 0;
                for (final String name : _parameterNames) {
                    final Curve<Double, Double> curve = curves.get(name);
                    theta[p++] = curve.getYValue(t);
                }
                final T data = toSmileModelData(theta);
                final double[] temp = _volFuncs.get(i).evaluate(data);
                final int l = temp.length;
                // Append this expiry's volatilities to the flat result vector.
                System.arraycopy(temp, 0, res, index, l);
                index += l;
            }
            return new DoubleMatrix1D(res);
        }
    };
}

From source file:com.github.nmorel.gwtjackson.shared.advanced.jsontype.TypeNamesTester.java

public void testRoundTripMap(ObjectMapperTester<LinkedHashMap<String, Animal>> mapper) {
    // Build a map holding two differently-typed animals to exercise per-type naming.
    LinkedHashMap<String, Animal> input = new LinkedHashMap<String, Animal>();
    input.put("venla", new MaineCoon("Venla", true));
    input.put("ama", new Dog("Amadeus", 13));

    // Serialize and pin the exact expected JSON.
    String json = mapper.write(input);
    assertEquals("{\"venla\":{\"TypeNamesTester$MaineCoon\":{\"name\":\"Venla\",\"purrs\":true}}," + ""
            + "\"ama\":{\"doggy\":{\"ageInYears\":13,\"name\":\"Amadeus\"}}}", json);

    // Deserialize and verify the round trip preserved every entry.
    LinkedHashMap<String, Animal> output = mapper.read(json);
    assertNotNull(output);
    assertEquals(input.size(), output.size());

    // Straight map comparison is unreliable here, so compare entry by entry.
    for (String name : input.keySet()) {
        Animal expected = input.get(name);
        Animal roundTripped = output.get(name);
        if (!expected.equals(roundTripped)) {
            fail("Animal in input was [" + expected + "]; output not matching: [" + roundTripped + "]");
        }
    }
}

From source file:net.sf.maltcms.chromaui.normalization.spi.charts.PeakGroupBoxPlot.java

/**
 * Creates one box-and-whisker chart per peak group, with one box per
 * treatment group showing the distribution of normalized peak areas.
 *
 * @return the list of charts, one per peak group descriptor in {@code pgdl}
 */
public List<JFreeChart> createChart() {
    List<JFreeChart> charts = new ArrayList<>();
    LinkedHashSet<ITreatmentGroupDescriptor> treatmentGroups = new LinkedHashSet<>(
            project.getTreatmentGroups());
    List<CategoryPlot> plots = new LinkedList<>();
    for (IPeakGroupDescriptor pgd : pgdl) {
        // Pre-populate the map so every treatment group appears (even when empty)
        // and the treatment-group iteration order is preserved.
        LinkedHashMap<ITreatmentGroupDescriptor, HashSet<IPeakAnnotationDescriptor>> map = new LinkedHashMap<>();
        for (ITreatmentGroupDescriptor itgd : treatmentGroups) {
            map.put(itgd, new LinkedHashSet<IPeakAnnotationDescriptor>());
        }
        List<IPeakAnnotationDescriptor> descriptors = pgd.getPeakAnnotationDescriptors();

        DefaultBoxAndWhiskerCategoryDataset baw = new DefaultBoxAndWhiskerCategoryDataset();
        // Group the peak annotations by the treatment group of their chromatogram.
        for (IPeakAnnotationDescriptor ipad : descriptors) {
            ITreatmentGroupDescriptor treatmentGroup = ipad.getChromatogramDescriptor().getTreatmentGroup();
            HashSet<IPeakAnnotationDescriptor> descr = map.get(treatmentGroup);
            if (descr == null) {
                // Defensive: a treatment group not present in the pre-populated set.
                descr = new HashSet<>();
                map.put(treatmentGroup, descr);
            }
            descr.add(ipad);
        }
        List<Color> colors = new LinkedList<>();
        // One box-and-whisker item per treatment group, labelled with its size.
        for (ITreatmentGroupDescriptor tgd : map.keySet()) {
            String name = getPeakName(pgd);
            baw.add(createBoxAndWhiskerItem(map.get(tgd)), tgd.getName() + " (" + map.get(tgd).size() + ")",
                    name);
            colors.add(tgd.getColor());
        }
        // Renderer: filled boxes, median only (no mean), semi-transparent artifacts.
        BoxAndWhiskerRenderer renderer = new BoxAndWhiskerRenderer();
        renderer.setFillBox(true);
        renderer.setMeanVisible(false);
        renderer.setMedianVisible(true);
        renderer.setArtifactPaint(new Color(0, 0, 0, 128));
        renderer.setMaximumBarWidth(0.1);
        renderer.setUseOutlinePaintForWhiskers(false);
        //            renderer.setAutoPopulateSeriesFillPaint(true);
        //            renderer.setAutoPopulateSeriesPaint(true);
        //            renderer.setAutoPopulateSeriesOutlinePaint(true);
        CategoryPlot cp = new CategoryPlot(baw, new CategoryAxis("Treatment Groups"),
                new NumberAxis("Normalized Peak Area"), renderer);
        Logger.getLogger(getClass().getName()).log(Level.INFO, "Setting {0} colors!", colors.size());
        ChartCustomizer.setSeriesColors(cp, 0.6f, colors);
        //            ChartCustomizer.setSeriesColors(cp, 0.9f,colors);
        plots.add(cp);
        JFreeChart chart = new JFreeChart(cp);
        chart.setTitle(
                "Peak group " + pgd.getDisplayName() + " size: " + pgd.getPeakAnnotationDescriptors().size());
        charts.add(chart);
    }
    //        CategoryAxis ca = new CategoryAxis("Treatment Groups");
    //        NumberAxis va = new NumberAxis("Normalized Peak Area");
    //        CombinedDomainCategoryPlot cdcp = new CombinedDomainCategoryPlot(ca);
    //        for (CategoryPlot cp : plots) {
    //            cp.setRangeAxis(va);
    //            cdcp.add(cp);
    //            break;
    //        }
    //        return new JFreeChart(cdcp);
    return charts;
}