Example usage for java.util HashMap containsKey

List of usage examples for java.util HashMap containsKey

Introduction

On this page you can find example usages of java.util.HashMap.containsKey.

Prototype

public boolean containsKey(Object key) 

Source Link

Document

Returns true if this map contains a mapping for the specified key.

Usage

From source file:MSUmpire.LCMSPeakStructure.LCMSPeakMS1.java

/**
 * Removes peak clusters whose m/z values elute over more than the given
 * proportion of the total chromatography time (likely contaminants).
 *
 * @param proportion fraction (0..1) of the total RT range above which an
 *                   m/z bin is considered a contaminant
 */
public void RemoveContaminantPeaks(float proportion) {
    Logger.getRootLogger().info("Removing peak clusters whose m/z appear more than " + proportion * 100
            + "% chromatography. No. of peak clusters : " + PeakClusters.size());
    if (PeakClusters.isEmpty()) {
        // Guard: with no clusters the bin-array size below would be negative.
        return;
    }
    // Running min/max of m/z and retention time over all clusters.
    // NOTE: -Float.MAX_VALUE is the correct initial value for a running
    // maximum; Float.MIN_VALUE (used originally) is the smallest POSITIVE
    // float and would never be replaced by a negative measurement.
    float minmz = Float.MAX_VALUE;
    float maxmz = -Float.MAX_VALUE;
    float minrt = Float.MAX_VALUE;
    float maxrt = -Float.MAX_VALUE;
    for (PeakCluster peak : PeakClusters) {
        maxmz = Math.max(maxmz, peak.TargetMz());
        minmz = Math.min(minmz, peak.TargetMz());
        maxrt = Math.max(maxrt, peak.endRT);
        minrt = Math.min(minrt, peak.startRT);
    }

    // Bin clusters into 0.1-m/z-wide bins; MzBin accumulates the total
    // elution (RT) span observed in each bin.
    HashMap<Integer, ArrayList<PeakCluster>> map = new HashMap<>();
    float[] MzBin = new float[(int) Math.ceil((maxmz - minmz) * 10) + 1];
    for (PeakCluster peak : PeakClusters) {
        int binkey = (int) Math.ceil((peak.TargetMz() - minmz) * 10);
        MzBin[binkey] += peak.endRT - peak.startRT;
        map.computeIfAbsent(binkey, k -> new ArrayList<PeakCluster>()).add(peak);
    }

    // A bin whose accumulated elution time exceeds the given proportion of
    // the full RT range is treated as a contaminant; drop all its clusters.
    float threshold = proportion * (maxrt - minrt);
    for (int i = 0; i < MzBin.length; i++) {
        if (MzBin[i] > threshold) {
            PeakClusters.removeAll(map.get(i));
        }
    }
    Logger.getRootLogger().info("Remaining peak clusters : " + PeakClusters.size());
}

From source file:com.searchcode.app.jobs.repository.IndexGitRepoJob.java

/**
 * Computes per-author code ownership for a file by running git blame via JGit.
 * For each blamed line the author's line count and most recent commit
 * timestamp are aggregated into {@link CodeOwner} entries.
 *
 * Returns an empty list when the repository has no HEAD or blame fails.
 *
 * TODO this method appears to leak memory like crazy... need to investigate
 * TODO lots of hairy bits in here need tests to capture issues
 */
public List<CodeOwner> getBlameInfo(int codeLinesSize, String repoName, String repoLocations, String fileName) {
    List<CodeOwner> codeOwners = new ArrayList<>(codeLinesSize);
    try {
        // The / part is required due to centos bug for version 1.1.1
        // This appears to be correct
        String repoLoc = repoLocations + "/" + repoName + "/.git";

        Repository localRepository = new FileRepository(new File(repoLoc));
        BlameCommand blamer = new BlameCommand(localRepository);

        ObjectId commitID = localRepository.resolve("HEAD");

        // No HEAD (e.g. empty repository): nothing to blame.
        if (commitID == null) {
            Singleton.getLogger().info("getBlameInfo commitID is null for " + repoLoc + " " + fileName);
            return codeOwners;
        }

        BlameResult blame;

        // Somewhere in here appears to be wrong...
        blamer.setStartCommit(commitID);
        blamer.setFilePath(fileName);
        blame = blamer.call();

        // Hail mary attempt to solve issue on CentOS Attempt to set at all costs
        // Fallback 1: retry with the first path segment stripped.
        if (blame == null) { // This one appears to solve the issue so don't remove it
            String[] split = fileName.split("/");
            blamer.setStartCommit(commitID);
            if (split.length != 1) {
                blamer.setFilePath(String.join("/", Arrays.asList(split).subList(1, split.length)));
            }
            blame = blamer.call();
        }
        // Fallback 2: same as above but with a leading slash.
        if (blame == null) {
            String[] split = fileName.split("/");
            blamer.setStartCommit(commitID);
            if (split.length != 1) {
                blamer.setFilePath("/" + String.join("/", Arrays.asList(split).subList(1, split.length)));
            }
            blame = blamer.call();
        }

        if (blame == null) {
            Singleton.getLogger().info("getBlameInfo blame is null for " + repoLoc + " " + fileName);
        }

        if (blame != null) {
            // Get all the owners their number of commits and most recent commit
            HashMap<String, CodeOwner> owners = new HashMap<>();
            RevCommit commit;
            PersonIdent authorIdent;

            try {
                // Aggregate per-author line counts and newest commit time
                // over the first codeLinesSize blamed lines.
                for (int i = 0; i < codeLinesSize; i++) {
                    commit = blame.getSourceCommit(i);
                    authorIdent = commit.getAuthorIdent();

                    if (owners.containsKey(authorIdent.getName())) {
                        CodeOwner codeOwner = owners.get(authorIdent.getName());
                        codeOwner.incrementLines();

                        int timestamp = codeOwner.getMostRecentUnixCommitTimestamp();

                        if (commit.getCommitTime() > timestamp) {
                            codeOwner.setMostRecentUnixCommitTimestamp(commit.getCommitTime());
                        }
                        owners.put(authorIdent.getName(), codeOwner);
                    } else {
                        owners.put(authorIdent.getName(),
                                new CodeOwner(authorIdent.getName(), 1, commit.getCommitTime()));
                    }
                }
            } catch (IndexOutOfBoundsException ex) {
                // Ignore this as its not really a problem or is it?
                // Thrown when codeLinesSize exceeds the number of blamed lines;
                // the owners accumulated so far are still used.
                Singleton.getLogger().info(
                        "IndexOutOfBoundsException when trying to get blame for " + repoName + " " + fileName);
            }

            codeOwners = new ArrayList<>(owners.values());
        }

    } catch (IOException ex) {
        Singleton.getLogger().info("IOException getBlameInfo when trying to get blame for " + repoName + " "
                + fileName + " " + ex.toString());
    } catch (GitAPIException ex) {
        Singleton.getLogger().info("GitAPIException getBlameInfo when trying to get blame for " + repoName + " "
                + fileName + " " + ex.toString());
    } catch (IllegalArgumentException ex) {
        Singleton.getLogger().info("IllegalArgumentException getBlameInfo when trying to get blame for "
                + repoName + " " + fileName + " " + ex.toString());
    }

    System.gc(); // Try to clean up
    return codeOwners;
}

From source file:com.mycsense.carbondb.domain.DimensionSet.java

/**
 * Computes the union of this dimension set with {@code dimSet}.
 *
 * Keywords shared by both sides are collected into {@code r.commonKeywords};
 * for each local dimension that shares at least one keyword with the
 * right-hand side, only the shared keywords are kept (and {@code r.alpha}
 * counts such dimensions). Right-hand-side dimensions with no shared
 * keywords are appended unchanged.
 *
 * @param dimSet the right-hand-side dimension set
 * @return the union result (merged dimension set, overlap count, common keywords)
 */
public UnionResult union(DimensionSet dimSet) {
    UnionResult r = new UnionResult();

    r.dimSet = new DimensionSet();
    r.alpha = 0;
    r.commonKeywords = new Dimension();

    // Index every keyword of the right-hand side by its owning dimension so
    // the membership tests below are O(1) instead of scanning dimSet.
    HashMap<Keyword, Dimension> hashTableRhs = new HashMap<>();
    DimensionSet unusedDimsInRhs = new DimensionSet();

    for (Dimension dimension : dimSet.dimensions) {
        unusedDimsInRhs.add(dimension);
        for (Keyword keyword : dimension.keywords) {
            hashTableRhs.put(keyword, dimension);
        }
    }
    for (Dimension dimension : dimensions) {
        Dimension dimResultTemp = new Dimension();
        for (Keyword keyword : dimension.keywords) {
            // Single lookup replaces the original containsKey() + get() pair;
            // stored values are never null, so a null result means "absent".
            Dimension rhsDimension = hashTableRhs.get(keyword);
            if (rhsDimension != null) {
                unusedDimsInRhs.remove(rhsDimension);
                dimResultTemp.addKeyword(keyword);
                r.commonKeywords.addKeyword(keyword);
            }
        }
        if (dimResultTemp.isEmpty()) {
            // No overlap: keep the local dimension as-is.
            r.dimSet.add(dimension);
        } else {
            // Overlap: keep only the shared keywords and count the dimension.
            r.dimSet.add(dimResultTemp);
            r.alpha++;
        }
    }
    // Right-hand-side dimensions never touched by a shared keyword.
    for (Dimension dimension : unusedDimsInRhs.dimensions) {
        r.dimSet.add(dimension);
    }

    return r;
}

From source file:it.intecs.pisa.openCatalogue.solr.SolrHandler.java

/**
 * Builds a Solr select URL from an OpenSearch-style request parameter map.
 * Known parameters are mapped to Solr query/filter-query syntax; unknown
 * parameters are delegated to {@code parse(name, value)}.
 *
 * @param request map of OpenSearch parameter name to raw value
 * @return the fully assembled Solr request URL
 * @throws UnsupportedEncodingException if ISO-8859-1 de/encoding fails
 * @throws Exception if the bbox parameter does not have exactly 4 components
 */
private String prepareUrl(HashMap<String, String> request) throws UnsupportedEncodingException, Exception {
    String[] params = request.keySet().toArray(new String[0]);
    String fq = "";
    // Default query: match everything ("*:*", URL-encoded as *%3A*).
    String q = this.solrHost + "/select?q=*%3A*&wt=xml&indent=true";

    // NOTE(review): the literal "*.*" looks suspicious -- the Solr
    // match-all query is "*:*"; confirm whether "*.*" is intended here.
    if (request.containsKey("q") && (request.get("q").equals("*.*") == false)) {
        String newQ = request.get("q");
        if (null == newQ || newQ.isEmpty())
            newQ = "*:*";
        q = this.solrHost + "/select?q=" + URLDecoder.decode(newQ, "ISO-8859-1") + "&wt=xml&indent=true";
    }

    // lat/lon/radius are collected across the loop and combined at the end.
    String lat = null;
    String lon = null;
    String radius = null;

    for (String name : params) {
        String value = request.get(name);

        if (value != null && value.equals("") == false) {
            if (name.equals("count")) {
                q += "&rows=" + value;
            } else if (name.equals("startPage")) {
                // Intentionally ignored.
            } else if (name.equals("startIndex")) {
                // OpenSearch startIndex is 1-based, Solr "start" is 0-based.
                q += "&start=" + (Integer.parseInt(value) - 1);
            } else if (name.equals("uid")) {
                // Intentionally ignored.
            } else if (name.equals("bbox")) {
                // bbox arrives as "west,south,east,north"; Solr range syntax
                // needs "[south,west north,east]".
                String[] values = value.split(",");
                if (values.length != 4) {
                    throw new Exception();
                }
                value = "[" + values[1] + "," + values[0] + " " + values[3] + "," + values[2] + "]";
                Log.debug("BBOX " + value);
                fq += " AND posList:" + URLDecoder.decode(value, "ISO-8859-1");
            } else if (name.equals("geom")) {
                fq += " AND posList :\"Intersects(" + (URLDecoder.decode(value, "ISO-8859-1")) + ")\"";
            } else if (name.equals("id")) {
                fq += " AND id:\"" + URLDecoder.decode(value, "ISO-8859-1") + "\"";
            } else if (name.equals("lat")) {
                lat = URLDecoder.decode(value, "ISO-8859-1");
            } else if (name.equals("lon")) {
                lon = URLDecoder.decode(value, "ISO-8859-1");
            } else if (name.equals("radius")) {
                radius = URLDecoder.decode(value, "ISO-8859-1");
            } else if (name.equals("startdate")) {
                //value = value.endsWith("Z") ? value : value.indexOf("T") == -1 ? value : value + "Z";
                fq += " AND beginPosition:[" + URLDecoder.decode(getDate(value), "ISO-8859-1") + " TO *]";
            } else if (name.equals("stopdate")) {
                //                    value = value.endsWith("Z") ? value : value.indexOf("T") == -1 ? value : value + "Z";
                fq += " AND endPosition:[* TO " + URLDecoder.decode(getDate(value), "ISO-8859-1") + "]";
            } else if (name.equals("q") || name.equals("recordSchema")) {
                // Already handled above / intentionally ignored.
            }
            //Table 3 - OpenSearch Parameters for Collection Search
            else {
                fq += parse(name, value);
            }
        }
    }

    // Point-plus-radius search: only emitted when all three parts are given.
    if ((lat != null) && (lon != null) && (radius != null)) {
        fq += " AND posList :\"Intersects(Circle(" + lon + "," + lat + " d=" + radius + "))\"";
    }

    String url = q;
    if (fq.length() > 1) {
        // Strip the leading " AND " (5 chars) before encoding the filter query.
        url += "&fq=" + URLEncoder.encode(fq.substring(5), "ISO-8859-1");
    }
    return url;
}

From source file:de.hbz.lobid.helper.CompareJsonMaps.java

/**
 * Construct a map with json paths as keys with aggregated values from json
 * nodes. Values of distinct leaves that share the same path (e.g. array
 * elements) are joined with commas under one key.
 *
 * @param jnode the JsonNode which should be transformed into a map
 * @param map the map constructed out of the JsonNode
 */
public void extractFlatMapFromJsonNode(final JsonNode jnode, final HashMap<String, String> map) {
    if (jnode.getNodeType().equals(JsonNodeType.OBJECT)) {
        // Descend into every field, tracking the current path on the stack.
        final Iterator<Map.Entry<String, JsonNode>> it = jnode.fields();
        while (it.hasNext()) {
            final Map.Entry<String, JsonNode> entry = it.next();
            stack.push(entry.getKey());
            extractFlatMapFromJsonNode(entry.getValue(), map);
            stack.pop();
        }
    } else if (jnode.isArray()) {
        // Array elements share their parent's path.
        for (final JsonNode element : jnode) {
            extractFlatMapFromJsonNode(element, map);
        }
    } else if (jnode.isValueNode()) {
        // Aggregate values sharing the same path into a comma-joined string;
        // merge() replaces the original containsKey()/get()/put() triple.
        map.merge(stack.toString(), jnode.toString(),
                (oldValue, newValue) -> oldValue.concat("," + newValue));
        // Fixed: the original called trace(String, Object) without a "{}"
        // placeholder, so the value argument was silently dropped.
        CompareJsonMaps.logger.trace("Stored this path as key into map: {} -> {}", stack.toString(),
                map.get(stack.toString()));
    }
}

From source file:de.ipk_gatersleben.ag_pbi.mmd.visualisations.gradient.GradientDataChartComponent.java

/**
 * Builds a JFreeChart interval dataset from the given substance data: one
 * series per "<condition>, <sample>" group, where measurements at the same
 * position are averaged and plotted with a +/- deviation interval.
 */
private IntervalXYDataset createDataSet(SubstanceInterface xmldata, ChartOptions co) {

    YIntervalSeriesCollection dataset = new YIntervalSeriesCollection();

    // Group measurements by "<condition name>, <sample name>"; LinkedHashMap
    // preserves the encounter order of the series.
    LinkedHashMap<String, ArrayList<NumericMeasurementInterface>> name2measurement = new LinkedHashMap<String, ArrayList<NumericMeasurementInterface>>();

    for (NumericMeasurementInterface m : Substance3D.getAllFiles(new Experiment(xmldata))) {
        SampleInterface s = m.getParentSample();
        String name = s.getParentCondition().getExpAndConditionName() + ", " + ((Sample3D) s).getName();
        if (!name2measurement.containsKey(name))
            name2measurement.put(name, new ArrayList<NumericMeasurementInterface>());
        name2measurement.get(name).add(m);
        // Replace the "[unit]" axis placeholders with the actual units taken
        // from the measurements.
        co.rangeAxis = (co.rangeAxis != null && co.rangeAxis.equals("[unit]")) ? m.getUnit() : co.rangeAxis;
        co.domainAxis = co.domainAxis != null && co.domainAxis.equals("[unit]")
                ? ((NumericMeasurement3D) m).getPositionUnit()
                : co.domainAxis;
    }

    for (String name : name2measurement.keySet()) {
        YIntervalSeries gradientvalues = new YIntervalSeries(name);
        ArrayList<NumericMeasurementInterface> measurements = name2measurement.get(name);
        if (measurements != null && measurements.size() > 0) {
            // calculate on the fly the mean value by putting together
            // measurements with the same position but different replicateID
            HashMap<Double, ArrayList<NumericMeasurementInterface>> position2measurement = new HashMap<Double, ArrayList<NumericMeasurementInterface>>();

            for (NumericMeasurementInterface m : measurements) {
                Double position = ((NumericMeasurement3D) m).getPosition();
                if (position != null) {
                    // Measurements without a position are silently skipped.
                    if (!position2measurement.containsKey(position))
                        position2measurement.put(position, new ArrayList<NumericMeasurementInterface>());
                    position2measurement.get(position).add(m);
                }
            }
            for (Double pos : position2measurement.keySet()) {
                double sum = 0;
                int cnt = 0;
                for (NumericMeasurementInterface m : position2measurement.get(pos)) {
                    sum += m.getValue();
                    cnt++;
                }
                if (cnt != 0) {
                    double mean = (1d * sum) / (1d * cnt);
                    double stddev = 0d;
                    for (NumericMeasurementInterface m : position2measurement.get(pos))
                        stddev += Math.pow(m.getValue() - mean, 2);
                    // NOTE(review): this is sqrt(sum of squared deviations)
                    // WITHOUT dividing by n (or n-1), so it is not a standard
                    // deviation in the usual sense -- confirm intent.
                    stddev = Math.sqrt(stddev);
                    // NOTE(review): Math.sqrt never returns a negative value,
                    // so this guard is effectively dead code (does not catch NaN).
                    if (stddev < 0)
                        stddev = 0;
                    gradientvalues.add(pos * 1d, mean, mean - stddev, mean + stddev);
                }
            }

        }

        dataset.addSeries(gradientvalues);
    }

    return dataset;
}

From source file:ANNFileDetect.detectFile.java

/**
 * GUI action handler: runs every stored neural network against the selected
 * file, blends the per-network scores into a weighted per-filetype score,
 * shows the most likely filetype, and renders the outcome chart.
 */
private void DetectionActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_DetectionActionPerformed
    ResField.setText("");
    EncogTestClass ec = new EncogTestClass();
    String[] nets = sql.GetNetworkNames();
    // NOTE(review): raw HashMap/ArrayList types throughout; presumably
    // resulthm maps net name -> (file -> "score,weight") and inventory maps
    // file -> blended score -- verify against sql.* before adding generics.
    HashMap resulthm = new HashMap();
    scores = new ArrayList();
    for (String net : nets) {
        ec.removeStdDirs();
        String netfile = sql.GetNetworkFile(net);
        String FPnetfile = sql.GetNetworkFPFile(net);
        // Run the network over the selected file, then score its fingerprint.
        ec.runNet(FileBox.getText(), nndir + "/" + netfile, false, 0);
        double out = ec.testSingleFPfile(nndir + "/" + FPnetfile);
        HashMap hm = sql.GetValuescore(net, out);
        resulthm.put(net, hm);
        scores.add("Net: " + net + " Score: " + out);
    }
    //make an inventory of all possible files
    Iterator it = resulthm.entrySet().iterator();
    HashMap inventory = new HashMap();
    while (it.hasNext()) {
        Map.Entry entries = (Map.Entry) it.next();
        Iterator itb = ((HashMap) entries.getValue()).entrySet().iterator();
        while (itb.hasNext()) {
            Map.Entry pair = (Map.Entry) itb.next();
            String file = (String) pair.getKey();
            // Value format is "score,weight".
            String[] tmpvw = ((String) pair.getValue()).split(",");
            double score = Double.parseDouble(tmpvw[0]);
            double weight = Double.parseDouble(tmpvw[1]);

            if (inventory.containsKey(file)) {
                // Blend with the score accumulated from previous networks.
                String caz = inventory.get(file).toString();
                double curscore = Double.parseDouble(caz);
                double out = 0.0;
                // NOTE(review): scores strictly between 0 and 1 match neither
                // branch, leaving out == 0.0 -- confirm this is intended.
                if (score > 1)
                    out = ((curscore + ((score * weight) / 100)) / 2);
                if (score == 0)
                    out = (curscore / 2);
                inventory.put(file, out);
            } else {
                inventory.put(file, (score * weight) / 100);
            }

        }
    }
    String file = sql.GetFinalResultsMetrics(inventory);
    if (file.length() > 0)
        ResField.setText("File is likely to be a " + file);
    else
        ResField.setText("No file detected");
    // Render the scoring outcome into the chart panel.
    JFreeChart jf = new GraphingClass().chartOutcome(inventory);
    ChartPanel cp = new ChartPanel(jf);
    cp.setSize(new Dimension(GPanel.getWidth(), GPanel.getHeight()));
    cp.setVisible(true);
    GPanel.removeAll();
    GPanel.add(cp);
    GPanel.repaint();
    GPanel.setVisible(true);
    System.out.println();
    // Keep the raw per-network results for later inspection.
    invt = resulthm;

}

From source file:org.hfoss.posit.android.web.Communicator.java

/**
 * Registers a new user against the POSIT server REST API.
 *
 * @param server    base server URL (the "/api/registerUser" path is appended)
 * @param firstname user's first name
 * @param lastname  user's last name
 * @param email     login email
 * @param password  chosen password
 * @param check     password confirmation
 * @param imei      device IMEI (logged only; not sent in the POST body)
 * @return an error/status string on failure or explicit server response;
 *         NOTE(review): returns null both when the server sends a
 *         MESSAGE_CODE other than AUTHN_OK and at the fall-through end --
 *         confirm callers treat null as success.
 */
public String registerUser(String server, String firstname, String lastname, String email, String password,
        String check, String imei) {
    String url = server + "/api/registerUser";
    Log.i(TAG, "registerUser URL=" + url + "&imei=" + imei);
    HashMap<String, String> sendMap = new HashMap<String, String>();
    sendMap.put("email", email);
    sendMap.put("password1", password);
    sendMap.put("password2", check);
    sendMap.put("firstname", firstname);
    sendMap.put("lastname", lastname);
    try {
        responseString = doHTTPPost(url, sendMap);
        Log.i(TAG, "registerUser Httpost responseString = " + responseString);
        // Plain-text error marker takes precedence over the parsed response.
        if (responseString.contains("[ERROR]")) {
            Utils.showToast(mContext, Constants.AUTHN_FAILED + ":" + responseString);
            return Constants.AUTHN_FAILED + ":" + responseString;
        }
        ResponseParser parser = new ResponseParser(responseString);
        HashMap<String, Object> responseMap = parser.parseObject();
        if (responseMap.containsKey(ERROR_CODE))
            return responseMap.get(ERROR_CODE) + ":" + responseMap.get(ERROR_MESSAGE);
        else if (responseMap.containsKey(MESSAGE_CODE)) {
            if (responseMap.get(MESSAGE_CODE).equals(Constants.AUTHN_OK)) {
                return Constants.AUTHN_OK + ":" + responseMap.get(MESSAGE);
            }
        } else {
            return Constants.AUTHN_FAILED + ":" + "Malformed message from the server.";
        }
    } catch (Exception e) {
        // Broad catch: any parsing/network failure is reported as AUTHN_FAILED.
        Log.e(TAG, "registerUser " + e.getMessage() + " ");
        return Constants.AUTHN_FAILED + ":" + e.getMessage();
    }
    return null;
}

From source file:com.uber.hoodie.common.model.HoodieTableMetadata.java

/**
 * Takes a bunch of file versions, and returns a map keyed by fileId, with the necessary
 * version safety checking. Returns a map of commitTime and Sorted list of FileStats
 * ( by reverse commit time )/*www  .  j  a v  a 2s .  co  m*/
 *
 * @param maxCommitTime maximum permissible commit time
 *
 * @return
 */
private Map<String, List<FileStatus>> groupFilesByFileId(FileStatus[] files, String maxCommitTime)
        throws IOException {
    HashMap<String, List<FileStatus>> fileIdtoVersions = new HashMap<>();
    for (FileStatus file : files) {
        String filename = file.getPath().getName();
        String fileId = FSUtils.getFileId(filename);
        String commitTime = FSUtils.getCommitTime(filename);
        if (isCommitTsSafe(commitTime) && HoodieCommits.isCommit1BeforeOrOn(commitTime, maxCommitTime)) {
            if (!fileIdtoVersions.containsKey(fileId)) {
                fileIdtoVersions.put(fileId, new ArrayList<FileStatus>());
            }
            fileIdtoVersions.get(fileId).add(file);
        }
    }
    for (Map.Entry<String, List<FileStatus>> entry : fileIdtoVersions.entrySet()) {
        Collections.sort(fileIdtoVersions.get(entry.getKey()), new Comparator<FileStatus>() {
            @Override
            public int compare(FileStatus o1, FileStatus o2) {
                String o1CommitTime = FSUtils.getCommitTime(o1.getPath().getName());
                String o2CommitTime = FSUtils.getCommitTime(o2.getPath().getName());
                // Reverse the order
                return o2CommitTime.compareTo(o1CommitTime);
            }
        });
    }
    return fileIdtoVersions;
}

From source file:com.ibm.xsp.webdav.WebDavServlet.java

/**
 * Resolves the repository addressed by the first path segment of the
 * servlet-relative path. "/" (or null) resolves to the repository metadata
 * object itself; otherwise the named repository is loaded.
 *
 * @param req                the HTTP Request
 * @param curPathFromServlet path below the servlet, e.g. "/repoName/..."
 * @param servletPath        the servlet path (NOTE(review): currently unused)
 * @return the resolved repository, or the repository metadata for "/"
 */
private IDAVRepository getRepository(HttpServletRequest req, String curPathFromServlet, String servletPath) {

    // The session where the Repository might be stored
    IDAVRepository result = null;
    DAVRepositoryMETA meta = null;
    String curRepositoryName = null;

    if (curPathFromServlet == null || curPathFromServlet.equals("/")) {
        curRepositoryName = "/"; // We make sure we have a legitimate value
    } else {
        // Find the name of the repository. First in the chain
        // [1] = second part since the path starts with / so [0] = ""
        // [1] = our value
        curRepositoryName = curPathFromServlet.split("/")[1];
    }

    // The HTTP Session to cache repositories
    @SuppressWarnings("unused")
    HttpSession hs = req.getSession();

    meta = this.getManager().getRepositoryMeta();

    if (curRepositoryName.equals("/")) {
        result = meta;
    } else {

        // TODO: Is that a good idea to save the session,
        // hs.get/setAttribute commented out for now
        HashMap<String, IDAVRepository> sessionRepositories = null;
        // Object rlObject = null;

        // Object rlObject = hs.getAttribute("repositoryList");

        // if (rlObject != null) {
        // sessionRepositories = (HashMap<String, IDAVRepository>) rlObject;
        // } else {
        sessionRepositories = new HashMap<String, IDAVRepository>();
        // hs.setAttribute("repositoryList", sessionRepositories);
        // }

        // NOTE(review): with session caching disabled above, this map is
        // rebuilt empty on every call, so containsKey() is always false and
        // loadRepository() always runs -- the caching branch is dead code
        // until the commented-out session storage is re-enabled.
        if (sessionRepositories.containsKey(curRepositoryName)) {
            result = sessionRepositories.get(curRepositoryName);
        } else {
            result = meta.loadRepository(curRepositoryName);
            // sessionRepositories.put(curRepositoryName, result);
        }

    }
    return result;

}