Example usage for java.util LinkedHashMap keySet

List of usage examples for java.util LinkedHashMap keySet

Introduction

In this page you can find the example usage for java.util LinkedHashMap keySet.

Prototype

public Set<K> keySet() 

Source Link

Document

Returns a Set view of the keys contained in this map.

Usage

From source file:org.kaaproject.kaa.client.transport.DesktopHttpClient.java

@Override
public byte[] executeHttpRequest(String uri, LinkedHashMap<String, byte[]> entity, boolean verifyResponse)
        throws Exception { //NOSONAR
    byte[] responseDataRaw = null;
    method = new HttpPost(url + uri);
    MultipartEntityBuilder builder = MultipartEntityBuilder.create();
    for (String key : entity.keySet()) {
        builder.addBinaryBody(key, entity.get(key));
    }/*  w w  w. java  2s .co m*/
    HttpEntity requestEntity = builder.build();
    method.setEntity(requestEntity);
    if (!Thread.currentThread().isInterrupted()) {
        LOG.debug("Executing request {}", method.getRequestLine());
        CloseableHttpResponse response = httpClient.execute(method);
        try {
            LOG.debug("Received {}", response.getStatusLine());
            int status = response.getStatusLine().getStatusCode();
            if (status >= 200 && status < 300) {
                responseDataRaw = getResponseBody(response, verifyResponse);
            } else {
                throw new TransportException(status);
            }
        } finally {
            response.close();
            method = null;
        }
    } else {
        method = null;
        throw new InterruptedException();
    }

    return responseDataRaw;
}

From source file:org.envirocar.wps.util.EnviroCarFeatureParser.java

/**
 * utility method for gathering properties of EnviroCar track features 
 * //  ww w .  j a  v a  2 s.  c om
 * @param features
 *          ArrayList containing parsed JSON features 
 * @return 
 *          set of strings representing the phenomena (e.g. speed, MAF, etc.)
 */
private Set<String> gatherPropertiesForFeatureTypeBuilder(ArrayList<?> features) {
    Set<String> distinctPhenomenonNames = new HashSet<String>();

    for (Object object : features) {

        if (object instanceof LinkedHashMap<?, ?>) {
            LinkedHashMap<?, ?> featureMap = (LinkedHashMap<?, ?>) object;

            Object propertiesObject = featureMap.get("properties");

            if (propertiesObject instanceof LinkedHashMap<?, ?>) {
                LinkedHashMap<?, ?> propertiesMap = (LinkedHashMap<?, ?>) propertiesObject;

                Object phenomenonsObject = propertiesMap.get("phenomenons");

                if (phenomenonsObject instanceof LinkedHashMap<?, ?>) {
                    LinkedHashMap<?, ?> phenomenonsMap = (LinkedHashMap<?, ?>) phenomenonsObject;

                    for (Object phenomenonKey : phenomenonsMap.keySet()) {

                        Object phenomenonValue = phenomenonsMap.get(phenomenonKey);

                        if (phenomenonValue instanceof LinkedHashMap<?, ?>) {
                            LinkedHashMap<?, ?> phenomenonValueMap = (LinkedHashMap<?, ?>) phenomenonValue;

                            String unit = phenomenonValueMap.get("unit").toString();

                            distinctPhenomenonNames.add(phenomenonKey.toString() + " (" + unit + ")");
                        }

                    }
                }

            }
        }
    }
    return distinctPhenomenonNames;
}

From source file:org.netflux.core.RecordMetadata.java

/**
 * Removes from this metadata all the field metadata with names included in the supplied collection.
 * //w  w  w  . j  av a2 s.  c  om
 * @param fieldNames the names of the field metadata to remove.
 * @throws NullPointerException if the specified collection is <code>null</code>.
 */
public void remove(Collection<String> fieldNames) {
    LinkedHashMap<String, Integer> fieldsToRemove = (LinkedHashMap<String, Integer>) this.fieldIndexes.clone();
    fieldsToRemove.keySet().retainAll(fieldNames);

    List<FieldMetadata> newFieldMetadata = (List<FieldMetadata>) this.fieldMetadata.clone();

    ListIterator<Integer> fieldIndexIterator = new ArrayList<Integer>(fieldsToRemove.values())
            .listIterator(fieldsToRemove.size());
    while (fieldIndexIterator.hasPrevious()) {
        newFieldMetadata.remove(fieldIndexIterator.previous());
    }

    this.setFieldMetadata(newFieldMetadata);
}

From source file:uk.ac.diamond.scisoft.ncd.calibration.CalibrationMethods.java

private LinkedHashMap<IPeak, HKL> indexPeaks(LinkedHashMap<HKL, Amount<Angle>> twoTheta) {
    LinkedHashMap<IPeak, HKL> indexedPeaks = new LinkedHashMap<IPeak, HKL>(peaks.size());
    CombinationGenerator<HKL> combinations = new CombinationGenerator<HKL>(twoTheta.keySet(), peaks.size());
    Double minVar = Double.MAX_VALUE;
    for (List<HKL> comb : combinations) {
        ArrayList<Double> distance = new ArrayList<Double>();
        LinkedHashMap<IPeak, HKL> tmpResult = new LinkedHashMap<IPeak, HKL>();
        for (int i = 0; i < comb.size(); i++) {
            IPeak peak = peaks.get(i);// w  w  w  .  jav a2  s  . c o m
            HKL tmpHKL = comb.get(i);
            distance.add(peak.getPosition() / Math.tan(twoTheta.get(tmpHKL).doubleValue(SI.RADIAN)));
            tmpResult.put(peak, tmpHKL);
        }
        double var = fitFunctionToData(tmpResult, false);
        if (var > minVar)
            continue;
        indexedPeaks = tmpResult;
        minVar = var;
    }

    indexedPeakList = new ArrayList<CalibrationPeak>();
    for (Entry<IPeak, HKL> peak : indexedPeaks.entrySet()) {
        double position = peak.getKey().getPosition();
        HKL idx = peak.getValue();
        Amount<Angle> angle = twoTheta.get(idx);
        indexedPeakList.add(new CalibrationPeak(position, angle, idx));
    }

    return indexedPeaks;
}

From source file:com.kunalkene1797.blackboxkit.utils.database.ProfileDB.java

public void putProfile(String name, LinkedHashMap<String, String> commands) {
    try {/*from   ww  w. ja  v a2  s . c  o m*/
        JSONObject items = new JSONObject();
        items.put("name", name);

        JSONArray commandArray = new JSONArray();
        for (int i = 0; i < commands.size(); i++) {
            JSONObject item = new JSONObject();
            item.put("path", commands.keySet().toArray()[i]);
            item.put("command", commands.values().toArray()[i]);
            commandArray.put(item);
        }

        items.put("commands", commandArray);

        putItem(items);
    } catch (JSONException e) {
        e.printStackTrace();
    }
}

From source file:gov.nih.nci.logging.api.persistence.LogMessageDAOImpl.java

/**
 * Based on SearchCriteria object populate the Hibernates Criteria Object with Expression and sort order details.
 * @param searchCriteria//from   ww w.  ja  va2  s. c o m
 * @param criteria
 */
private void populateCriteria(SearchCriteria searchCriteria, Criteria criteria) {

    criteria.add(createExpressionForDate(searchCriteria));

    if (!StringUtils.isBlank(searchCriteria.getApplication())) {
        criteria.add(Expression.eq(_APPLICATION, searchCriteria.getApplication().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getLogLevel())) {
        criteria.add(Expression.eq(_LOG_LEVEL, searchCriteria.getLogLevel().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getMessage())) {
        criteria.add(Expression.like(_MESSAGE, "%" + searchCriteria.getMessage() + "%"));
    }
    if (!StringUtils.isBlank(searchCriteria.getNdc())) {
        criteria.add(Expression.like(_NDC, "%" + searchCriteria.getNdc() + "%"));
    }
    if (!StringUtils.isBlank(searchCriteria.getObjectID())) {
        criteria.add(Expression.eq(_OBJECT_ID, searchCriteria.getObjectID().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getObjectName())) {
        criteria.add(Expression.eq(_OBJECT_NAME, searchCriteria.getObjectName().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getOperation())) {
        criteria.add(Expression.eq(_OPERATION, searchCriteria.getOperation().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getOrganization())) {
        criteria.add(Expression.like(_ORGANIZATION, "%" + searchCriteria.getOrganization() + "%"));
    }
    if (!StringUtils.isBlank(searchCriteria.getServer())) {
        criteria.add(Expression.eq(_SERVER, searchCriteria.getServer().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getSessionID())) {
        criteria.add(Expression.eq(_SESSION_ID, searchCriteria.getSessionID().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getThrowable())) {
        criteria.add(Expression.eq(_THROWABLE, searchCriteria.getThrowable().trim()));
    }
    if (!StringUtils.isBlank(searchCriteria.getThreadName())) {
        criteria.add(Expression.like(_THREAD, "%" + searchCriteria.getThreadName() + "%"));
    }
    if (!StringUtils.isBlank(searchCriteria.getUserName())) {
        criteria.add(Expression.eq(_USERNAME, searchCriteria.getUserName().trim()));
    }

    // Sort By criteria.
    LinkedHashMap lhp = (LinkedHashMap) searchCriteria.getSortByOrderSequence();
    if (lhp != null) {
        Iterator iter = lhp.keySet().iterator();
        while (iter.hasNext()) {
            String key = (String) iter.next();
            String value = (String) lhp.get(key);
            if (SORT_ORDER_ASCENDING.equalsIgnoreCase(value)) {
                criteria.addOrder(Order.asc(key));
            }
            if (SORT_ORDER_DESCENDING.equalsIgnoreCase(value)) {
                criteria.addOrder(Order.desc(key));
            }
        }
    }
}

From source file:com.allinfinance.dwr.system.SelectOptionsDWR.java

/**
 *@ ? // w  w w .  j av a  2 s  . c om
 **/
public String loadCupBrhIdOptData(String brhId) {
    String jsonData = "{data:[{'valueField':'','displayField':'?'}]}";
    try {
        log.info("brhId=" + brhId);
        Object[] params = new Object[1];
        params[0] = StringUtils.substring(brhId, brhId.length() - 4, brhId.length());
        LinkedHashMap<String, String> dataMap = SelectMethod.getCupBrh(params);
        Iterator<String> iter = dataMap.keySet().iterator();
        if (iter.hasNext()) {
            Map<String, Object> jsonDataMap = new HashMap<String, Object>();
            LinkedList<Object> jsonDataList = new LinkedList<Object>();
            Map<String, String> tmpMap = null;
            String key = null;
            while (iter.hasNext()) {
                tmpMap = new LinkedHashMap<String, String>();
                key = iter.next();
                tmpMap.put("valueField", key);
                tmpMap.put("displayField", dataMap.get(key));
                jsonDataList.add(tmpMap);
            }
            jsonDataMap.put("data", jsonDataList);
            jsonData = JSONBean.genMapToJSON(jsonDataMap);
        }
    } catch (Exception e) {
        e.printStackTrace();
        log.error(e.getMessage());
    }
    return jsonData;
}

From source file:org.kaaproject.kaa.client.transport.AndroidHttpClient.java

@Override
public byte[] executeHttpRequest(String uri, LinkedHashMap<String, byte[]> entity, boolean verifyResponse)
        throws Exception { //NOSONAR

    byte[] responseDataRaw = null;
    method = new HttpPost(url + uri);
    MultipartEntity requestEntity = new MultipartEntity();
    for (String key : entity.keySet()) {
        requestEntity.addPart(key, new ByteArrayBody(entity.get(key), null));
    }/*  w  ww  .java2 s  .  c o m*/
    method.setEntity(requestEntity);
    if (!Thread.currentThread().isInterrupted()) {
        LOG.debug("Executing request {}", method.getRequestLine());
        HttpResponse response = httpClient.execute(method);
        try {
            LOG.debug("Received {}", response.getStatusLine());
            int status = response.getStatusLine().getStatusCode();
            if (status >= 200 && status < 300) {
                responseDataRaw = getResponseBody(response, verifyResponse);
            } else {
                throw new TransportException(status);
            }
        } finally {
            method = null;
        }
    } else {
        method = null;
        throw new InterruptedException();
    }
    return responseDataRaw;
}

From source file:org.wso2.carbon.reporting.template.core.client.ReportingClient.java

private DataHandler generateCompositeReport(String reportName, String reportType) throws ReportingException {
    LinkedHashMap<String, String> report = new CompositeReportMetaDataHandler().getCompositeReport(reportName);
    ArrayList<ReportParamMap> mapList = new ArrayList<ReportParamMap>();

    int i = 0;//from w w w  . j  ava2s  . com
    for (String aReportName : report.keySet()) {
        String aReportType = MetadataFinder.findReportType(aReportName);
        Map[] data = null;
        if (aReportType.equalsIgnoreCase(ReportConstants.TABLE_TYPE)) {
            TableReportDTO tableReport = new TableReportMetaDataHandler().getTableReportMetaData(aReportName);
            data = new DataSourceHandler().createMapDataSource(tableReport);
        } else {
            ChartReportDTO chartReport = new ChartMetaDataHandler().getChartReportMetaData(aReportName);
            data = new DataSourceHandler().createMapDataSource(chartReport);
        }

        ReportDataSource dataSource = getReportDataSource(data);
        ReportParamMap map = new ReportParamMap();
        map.setParamKey(report.get(aReportName));
        map.setDataSource(dataSource);
        mapList.add(map);

        i++;
    }

    ReportParamMap[] maps = new ReportParamMap[mapList.size()];
    maps = mapList.toArray(maps);
    DataHandler dataHandler = null;
    try {
        byte[] data = dbReportingService.getJRDataSourceReport(null, reportName, maps, reportType);
        dataHandler = new DataHandler(data, "application/octet-stream");
        return dataHandler;
    } catch (JRException e) {
        log.error(e.getMessage(), e);
        throw new ReportingException(e.getMessage(), e);
    }

}

From source file:be.ugent.maf.cellmissy.gui.controller.analysis.doseresponse.area.AreaDRNormalizedController.java

/**
 * Prepare data for fitting starting from the analysis group.
 *
 * @param dRAnalysisGroup//from  w  w w  . j  a v  a  2 s.  c o  m
 * @return LinkedHashMap That maps the concentration (log-transformed!) to
 * the normalized replicate velocites
 */
private List<DoseResponsePair> prepareFittingData(AreaDoseResponseAnalysisGroup dRAnalysisGroup) {
    List<DoseResponsePair> result = new ArrayList<>();

    //!! control concentrations (10 * lower than lowest treatment conc) also need to be added
    List<List<Double>> allVelocities = new ArrayList<>();
    List<Double> allLogConcentrations = new ArrayList<>();

    //put concentrations of treatment to analyze (control not included!) in list
    LinkedHashMap<Double, String> nestedMap = dRAnalysisGroup.getConcentrationsMap()
            .get(dRAnalysisGroup.getTreatmentToAnalyse());
    for (Double concentration : nestedMap.keySet()) {
        String unit = nestedMap.get(concentration);

        Double logConcentration = AnalysisUtils.logTransform(concentration, unit);
        allLogConcentrations.add(logConcentration);
    }

    Double lowestLogConc = Collections.min(allLogConcentrations);
    //iterate through conditions
    int x = 0;
    for (PlateCondition plateCondition : dRAnalysisGroup.getVelocitiesMap().keySet()) {
        List<Double> replicateVelocities = dRAnalysisGroup.getVelocitiesMap().get(plateCondition);

        //normalize each value
        List<Double> normalizedVelocities = new ArrayList<>();
        for (Double value : replicateVelocities) {
            normalizedVelocities.add(normalize(value));
        }
        //check if this platecondition is the control
        for (Treatment treatment : plateCondition.getTreatmentList()) {
            if (treatment.getTreatmentType().getName().contains("ontrol")) {
                allLogConcentrations.add(x, lowestLogConc - 1.0);
            }
        }

        allVelocities.add(normalizedVelocities);
        x++;
    }
    for (int i = 0; i < allLogConcentrations.size(); i++) {
        result.add(new DoseResponsePair(allLogConcentrations.get(i), allVelocities.get(i)));
    }

    return result;
}