Example usage for java.util LinkedHashMap containsKey

Introduction

This page lists usage examples of java.util.LinkedHashMap.containsKey.

Prototype

boolean containsKey(Object key);

Document

Returns true if this map contains a mapping for the specified key.
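
Before the project examples, here is a minimal, self-contained sketch of the call; the class and key names are illustrative only:

import java.util.LinkedHashMap;

public class ContainsKeyExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> counts = new LinkedHashMap<>();
        counts.put("apples", 3);

        System.out.println(counts.containsKey("apples")); // true: a mapping exists
        System.out.println(counts.containsKey("pears"));  // false: no mapping

        // containsKey distinguishes an absent key from a key mapped to null.
        counts.put("plums", null);
        System.out.println(counts.containsKey("plums"));  // true, even though the value is null
        System.out.println(counts.get("plums"));          // null
    }
}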

Usage

From source file:pt.lsts.neptus.plugins.sunfish.awareness.SituationAwareness.java

private LinkedHashMap<String, Vector<AssetPosition>> positionsByType() {
    LinkedHashMap<String, Vector<AssetPosition>> ret = new LinkedHashMap<String, Vector<AssetPosition>>();

    for (AssetTrack t : assets.values()) {
        AssetPosition last = t.getLatest();
        if (!ret.containsKey(last.getType())) {
            ret.put(last.getType(), new Vector<AssetPosition>());
        }
        ret.get(last.getType()).add(last);
    }
    return ret;
}
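
Several of the examples on this page use the same grouping idiom: test containsKey, insert an empty collection when the key is missing, then append to it. On Java 8 and later, Map.computeIfAbsent expresses the same logic in a single call. A sketch of the loop above rewritten that way, reusing the types from the example (a behavioural equivalent, not the project's actual code):

    for (AssetTrack t : assets.values()) {
        AssetPosition last = t.getLatest();
        // Creates the Vector only when the key is missing, then appends to it.
        ret.computeIfAbsent(last.getType(), k -> new Vector<AssetPosition>()).add(last);
    }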

From source file:hydrograph.ui.graph.controller.ComponentEditPart.java

/**
 * Updates the status of a component.
 */
public void updateComponentStatus() {
    Component component = this.getCastedModel();
    LinkedHashMap<String, Object> properties = component.getProperties();
    String statusName = Component.Props.VALIDITY_STATUS.getValue();
    if (properties.containsKey(statusName)) {
        ((ComponentFigure) this.getFigure()).setPropertyStatus((String) properties.get(statusName));
        this.getFigure().repaint();
    }
}
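
Here containsKey guards a subsequent get on the same key, which costs two lookups. Assuming the status property is never mapped to null, a single get with a null check is equivalent; a minimal sketch under that assumption:

    Object status = properties.get(statusName);
    if (status != null) {
        ((ComponentFigure) this.getFigure()).setPropertyStatus((String) status);
        this.getFigure().repaint();
    }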

From source file:org.bimserver.charting.SupportFunctions.java

public static ArrayList<LinkedHashMap<String, Object>> getIfcMaterialsByClassWithTreeStructure(
        String structureKeyword, IfcModelInterface model, Chart chart, MutableInt subChartCount) {
    // Derive the column name.
    String leafColumnName = structureKeyword;
    // Update the chart configuration.
    chart.setDimensionLookupKey(structureKeyword, leafColumnName);
    chart.setDimensionLookupKey("date", "date");
    chart.setDimensionLookupKey("size", "size");
    // Prepare to iterate the relationships.
    LinkedHashMap<String, ArrayList<Double>> materialNameWithSizes = new LinkedHashMap<>();
    // Iterate only the relationships.
    for (IfcRelAssociatesMaterial ifcRelAssociatesMaterial : model
            .getAllWithSubTypes(IfcRelAssociatesMaterial.class)) {
        // IfcMaterialSelect: IfcMaterial, IfcMaterialList, IfcMaterialLayerSetUsage, IfcMaterialLayerSet, IfcMaterialLayer.
        IfcMaterialSelect materialLike = ifcRelAssociatesMaterial.getRelatingMaterial();
        // If there was a material-like object, sum it across X.
        if (materialLike != null) {
            // Get material name, like: Brick (000000), Air (000001); or, Concrete (0000000).
            String materialName = getNameOfMaterialsFromMaterialLike(materialLike, true, true);
            // Use material name if available. Otherwise, use OID of top-level material-like object.
            String name = (materialName != null) ? materialName : String.format("%d", materialLike.getOid());
            // Add entry if it doesn't exist.
            if (!materialNameWithSizes.containsKey(name))
                materialNameWithSizes.put(name, new ArrayList<Double>());
            // Get existing size data.
            ArrayList<Double> sizes = materialNameWithSizes.get(name);
            // Iterate objects.
            EList<IfcRoot> ifcRoots = ifcRelAssociatesMaterial.getRelatedObjects();
            for (IfcRoot ifcRoot : ifcRoots) {
                Double size = 0.0;
                if (ifcRoot instanceof IfcObjectDefinition) {
                    IfcObjectDefinition ifcObjectDefinition = (IfcObjectDefinition) ifcRoot;
                    if (ifcObjectDefinition instanceof IfcObject) {
                        IfcObject ifcObject = (IfcObject) ifcObjectDefinition;
                        if (ifcObject instanceof IfcProduct) {
                            IfcProduct ifcProduct = (IfcProduct) ifcObject;
                            Double volume = getRoughVolumeEstimateFromIfcProduct(ifcProduct);
                            size = volume;
                        }
                    }
                }
                if (size != null && size > 0)
                    sizes.add(size);
            }
        }
    }
    //
    subChartCount.setValue(materialNameWithSizes.size());
    //
    ArrayList<LinkedHashMap<String, Object>> rawData = new ArrayList<>();
    //
    for (Entry<String, ArrayList<Double>> entry : materialNameWithSizes.entrySet()) {
        String name = entry.getKey();
        // Get existing size data.
        ArrayList<Double> sizes = materialNameWithSizes.get(name);
        // Sort, value ascending.
        Collections.sort(sizes, sortSmallerValuesToFront);
        sizes.add(0, 0.0);
        if (sizes.size() == 1)
            sizes.add(0, 0.0);
        // Count including empty first entry.
        double count = Math.max(1, sizes.size() - 1);
        double step = 10000.0 / count;
        double runningSize = 0.0;
        // Add sum of zero at entry zero.
        int i = 0;
        // Iterate objects, summing them across 0 to 10000 (an arbitrary range, a way to relate to other sums along X).
        for (Double size : sizes) {
            double someMeasurement = (size != null) ? size : 0.0;
            runningSize += someMeasurement;
            // Prepare to store this raw data entry.
            LinkedHashMap<String, Object> dataEntry = new LinkedHashMap<>();
            // Name the group.
            dataEntry.put(leafColumnName, name);
            dataEntry.put("date", i * step);
            dataEntry.put("size", runningSize);
            // Push the entry into the data pool.
            rawData.add(dataEntry);
            //
            i += 1;
        }
    }
    // Send it all back.
    return rawData;
}

From source file:org.rapidcontext.app.web.WebDavRequest.java

/**
 * Adds a resource to the result with the specified dates and size.
 *
 * @param href           the root-relative resource link
 * @param created        the resource creation date
 * @param modified       the resource modification date
 * @param size           the resource size (in bytes)
 */
public void addResource(String href, Date created, Date modified, long size) {
    LinkedHashMap props = new LinkedHashMap();
    String name;
    String str;

    props.putAll(properties);
    name = StringUtils.removeEnd(href, "/");
    name = StringUtils.substringAfterLast(name, "/"); // use the trimmed name so collection hrefs keep their last segment
    if (props.containsKey(PROP_DISPLAY_NAME)) {
        props.put(PROP_DISPLAY_NAME, name);
    }
    if (props.containsKey(PROP_CREATION_DATE)) {
        str = CREATION_DATE_FORMAT.format(created);
        props.put(PROP_CREATION_DATE, str);
    }
    if (props.containsKey(PROP_LAST_MODIFIED)) {
        str = LAST_MODIFIED_DATE_FORMAT.format(modified);
        props.put(PROP_LAST_MODIFIED, str);
    }
    if (props.containsKey(PROP_CONTENT_TYPE)) {
        props.put(PROP_CONTENT_TYPE, href.endsWith("/") ? null : Mime.type(name));
    }
    if (href.endsWith("/")) {
        if (props.containsKey(PROP_RESOURCE_TYPE)) {
            props.put(PROP_RESOURCE_TYPE, "<D:collection/>");
        }
        if (props.containsKey(PROP_CONTENT_LENGTH)) {
            props.put(PROP_CONTENT_LENGTH, "0");
        }
        if (props.containsKey(PROP_ETAG)) {
            props.put(PROP_ETAG, null);
        }
    } else {
        if (props.containsKey(PROP_CONTENT_LENGTH)) {
            props.put(PROP_CONTENT_LENGTH, String.valueOf(size));
        }
        if (props.containsKey(PROP_ETAG)) {
            str = "W/\"" + size + "-" + modified.getTime() + "\"";
            props.put(PROP_ETAG, str);
        }
    }
    // Fake quota properties to enable read-write access
    if (props.containsKey(PROP_QUOTA_USED_BYTES)) {
        props.put(PROP_QUOTA_USED_BYTES, "0");
    }
    if (props.containsKey(PROP_QUOTA_AVAIL_BYTES)) {
        props.put(PROP_QUOTA_AVAIL_BYTES, "1000000000");
    }
    addResource(href, props);
}
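
Note the pattern here: props is first seeded with the client-requested property names (props.putAll(properties)), and each containsKey check then fills in a value only for properties that were actually requested, so the WebDAV response never reports unrequested properties.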

From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java

public void testContainsKey() {
    LinkedHashMap<String, Integer> hashMap = new LinkedHashMap<String, Integer>();
    checkEmptyLinkedHashMapAssumptions(hashMap);

    assertFalse(hashMap.containsKey(KEY_TEST_CONTAINS_KEY));
    hashMap.put(KEY_TEST_CONTAINS_KEY, VALUE_TEST_CONTAINS_KEY);
    assertTrue(hashMap.containsKey(KEY_TEST_CONTAINS_KEY));
    assertFalse(hashMap.containsKey(VALUE_TEST_CONTAINS_DOES_NOT_EXIST));

    assertFalse(hashMap.containsKey(null));
    hashMap.put(null, VALUE_TEST_CONTAINS_KEY);
    assertTrue(hashMap.containsKey(null));
}
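
The last three assertions rely on the fact that LinkedHashMap, like HashMap, permits a single null key, so containsKey(null) is a legal query that simply reports whether that mapping exists.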

From source file:com.gbcom.system.controller.SysInfoController.java

/**
 * Restores the system from an uploaded database backup file.
 * 
 * @param request
 *            HttpServletRequest
 * @param uploadFile
 *            CommonsMultipartFile
 * @param response
 *            HttpServletResponse
 */
@RequestMapping
public void recovery(HttpServletRequest request,
        @RequestParam(value = "upFile", required = true) CommonsMultipartFile uploadFile,
        HttpServletResponse response) {
    String fileName = uploadFile.getOriginalFilename();
    try {
        if (fileName == null || fileName.trim().equals("")) {
            sendFailureJSON(response, "The uploaded file name is empty.");
            return;
        }
        String fileIp = fileName.substring(fileName.indexOf("-") + 1, fileName.lastIndexOf("-"));
        LinkedHashMap<String, String> ipMap = CmUtil.getLocalAddress();
        if (!ipMap.containsKey(fileIp)) {
            sendFailureJSON(response, "The backup file does not match this server's address!");
            return;
        }
    } catch (Exception e1) {
        sendFailureJSON(response, "Failed to parse the backup file name!");
        return;
    }

    String realPath = request.getSession().getServletContext().getRealPath("/upload");
    boolean isSuccess = false;
    File file = new File(realPath, fileName);
    try {
        FileUtils.copyInputStreamToFile(uploadFile.getInputStream(), file);
        String filePath = realPath + File.separator + uploadFile.getOriginalFilename();
        // Import the uploaded SQL backup
        final SqlImportManager sqlService = new SqlImportManager();
        isSuccess = sqlService.importSql(filePath);
    } catch (Exception e) {
        super.processException(response, e);
    } finally {
        try {
            file.delete();
        } catch (Exception e) {
            // Ignore failures when deleting the temporary upload file
        }
    }
    if (!isSuccess) {
        sendFailureJSON(response, "Recovery failed.");
    } else {
        sendSuccessJSON(response, "Recovery succeeded.");
    }
}

From source file:ubic.gemma.core.datastructure.matrix.ExpressionDataMatrixColumnSort.java

/**
 * Divide the biomaterials into chunks based on the given experimental factor, keeping them in their current order.
 * If the factor is continuous, there is just one chunk.
 *
 * @return an ordered map from each FactorValue of the given ExperimentalFactor to its BioMaterials, or null if the
 *         chunking could not be done properly.
 */
private static LinkedHashMap<FactorValue, List<BioMaterial>> chunkOnFactor(ExperimentalFactor ef,
        List<BioMaterial> bms) {

    if (bms == null) {
        return null;
    }

    LinkedHashMap<FactorValue, List<BioMaterial>> chunks = new LinkedHashMap<>();

    /*
     * Get the factor values in the order we have things right now
     */
    for (BioMaterial bm : bms) {
        for (FactorValue fv : bm.getFactorValues()) {
            if (!ef.getFactorValues().contains(fv)) {
                continue;
            }
            if (chunks.containsKey(fv)) {
                continue;
            }
            chunks.put(fv, new ArrayList<BioMaterial>());
        }
    }

    /*
     * What if bm doesn't have a value for the factorvalue. Need a dummy value.
     */
    FactorValue dummy = FactorValue.Factory.newInstance(ef);
    dummy.setValue("");
    dummy.setId(-1L);
    chunks.put(dummy, new ArrayList<BioMaterial>());

    for (BioMaterial bm : bms) {
        boolean found = false;
        for (FactorValue fv : bm.getFactorValues()) {
            if (ef.getFactorValues().contains(fv)) {
                found = true;
                assert chunks.containsKey(fv);
                chunks.get(fv).add(bm);
            }
        }

        if (!found) {
            if (ExpressionDataMatrixColumnSort.log.isDebugEnabled())
                ExpressionDataMatrixColumnSort.log
                        .debug(bm + " has no value for factor=" + ef + "; using dummy value");
            chunks.get(dummy).add(bm);
        }

    }

    if (chunks.get(dummy).size() == 0) {
        if (ExpressionDataMatrixColumnSort.log.isDebugEnabled())
            ExpressionDataMatrixColumnSort.log.debug("removing dummy");
        chunks.remove(dummy);
    }

    ExpressionDataMatrixColumnSort.log
            .debug(chunks.size() + " chunks for " + ef + ", from current chunk of size " + bms.size());

    /*
     * Sanity check
     */
    int total = 0;
    for (FactorValue fv : chunks.keySet()) {
        List<BioMaterial> chunk = chunks.get(fv);
        total += chunk.size();
    }

    assert total == bms.size() : "expected " + bms.size() + ", got " + total;

    return chunks;
}

From source file:de.ipk_gatersleben.ag_pbi.mmd.visualisations.gradient.GradientDataChartComponent.java

private IntervalXYDataset createDataSet(SubstanceInterface xmldata, ChartOptions co) {

    YIntervalSeriesCollection dataset = new YIntervalSeriesCollection();

    LinkedHashMap<String, ArrayList<NumericMeasurementInterface>> name2measurement = new LinkedHashMap<String, ArrayList<NumericMeasurementInterface>>();

    for (NumericMeasurementInterface m : Substance3D.getAllFiles(new Experiment(xmldata))) {
        SampleInterface s = m.getParentSample();
        String name = s.getParentCondition().getExpAndConditionName() + ", " + ((Sample3D) s).getName();
        if (!name2measurement.containsKey(name))
            name2measurement.put(name, new ArrayList<NumericMeasurementInterface>());
        name2measurement.get(name).add(m);
        co.rangeAxis = (co.rangeAxis != null && co.rangeAxis.equals("[unit]")) ? m.getUnit() : co.rangeAxis;
        co.domainAxis = co.domainAxis != null && co.domainAxis.equals("[unit]")
                ? ((NumericMeasurement3D) m).getPositionUnit()
                : co.domainAxis;
    }

    for (String name : name2measurement.keySet()) {
        YIntervalSeries gradientvalues = new YIntervalSeries(name);
        ArrayList<NumericMeasurementInterface> measurements = name2measurement.get(name);
        if (measurements != null && measurements.size() > 0) {
            // calculate on the fly the mean value by putting together
            // measurements with the same position but different replicateID
            HashMap<Double, ArrayList<NumericMeasurementInterface>> position2measurement = new HashMap<Double, ArrayList<NumericMeasurementInterface>>();

            for (NumericMeasurementInterface m : measurements) {
                Double position = ((NumericMeasurement3D) m).getPosition();
                if (position != null) {
                    if (!position2measurement.containsKey(position))
                        position2measurement.put(position, new ArrayList<NumericMeasurementInterface>());
                    position2measurement.get(position).add(m);
                }
            }
            for (Double pos : position2measurement.keySet()) {
                double sum = 0;
                int cnt = 0;
                for (NumericMeasurementInterface m : position2measurement.get(pos)) {
                    sum += m.getValue();
                    cnt++;
                }
                if (cnt != 0) {
                    double mean = (1d * sum) / (1d * cnt);
                    double stddev = 0d;
                    for (NumericMeasurementInterface m : position2measurement.get(pos))
                        stddev += Math.pow(m.getValue() - mean, 2);
                    stddev = Math.sqrt(stddev / cnt); // divide by the sample count to get the standard deviation
                    if (stddev < 0)
                        stddev = 0;
                    gradientvalues.add(pos * 1d, mean, mean - stddev, mean + stddev);
                }
            }

        }

        dataset.addSeries(gradientvalues);
    }

    return dataset;
}

From source file:org.cateproject.features.FeatureStoreSpringJDBC.java

/** {@inheritDoc} */
@Override
public Map<String, Feature> readAll() {
    LinkedHashMap<String, Feature> mapFP = new LinkedHashMap<String, Feature>();
    List<Feature> lFp = getJdbcTemplate().query(SQLQUERY_ALLFEATURES, MAPPER);
    for (Feature flipPoint : lFp) {
        mapFP.put(flipPoint.getUid(), flipPoint);
    }
    // Populating Roles
    RoleRowMapper rrm = new RoleRowMapper();
    getJdbcTemplate().query(SQL_GET_ALLROLES, rrm);
    Map<String, Set<String>> roles = rrm.getRoles();
    for (String featId : roles.keySet()) {
        if (mapFP.containsKey(featId)) {
            mapFP.get(featId).getPermissions().addAll(roles.get(featId));
        }
    }
    return mapFP;
}