Example usage for java.util TreeMap keySet

List of usage examples for java.util TreeMap keySet

Introduction

On this page you can find example usages of java.util.TreeMap.keySet().

Prototype

public Set<K> keySet() 

Source Link

Document

Returns a Set view of the keys contained in this map.

Usage

From source file:com.sangupta.jerry.oauth.OAuthUtils.java

/**
 * Given a list of parameters (including the OAuth parameters) build the
 * unique parameter string that is used to generate the signable string.
 * /*from  ww  w. j av a2 s  .  co  m*/
 * @param params
 *            the request parameters if any
 * 
 * @param oauthParams
 *            the OAuth params
 * 
 * @return the parameters string to be used to generate the signable string
 */
public static String buildParamString(TreeMap<String, String> params, WebForm oauthParams) {
    StringBuilder builder = new StringBuilder(1024);

    // add all to the list of params
    for (NameValuePair pair : oauthParams.build()) {
        if (pair.getName().startsWith("oauth_")) {
            params.put(pair.getName(), pair.getValue());
        }
    }

    // build the string
    boolean first = true;
    for (String key : params.keySet()) {
        if (!first) {
            builder.append('&');
        } else {
            first = false;
        }

        builder.append(key);
        builder.append('=');
        builder.append(UriUtils.encodeURIComponent(params.get(key)));
    }

    return builder.toString();
}

From source file:org.rhwlab.ace3d.SegmentationLinePlot.java

/**
 * Plots the given BHC tree as an XY line chart: for each tree cut (x = cut
 * index) the log-posterior of the cut's first node is converted back to a
 * probability (y = exp(lnP)).
 *
 * @param tree the clustering tree to render; 500 cuts are requested
 */
public void setTree(BHCTree tree) {
    XYSeriesCollection collect = new XYSeriesCollection();
    XYSeries series = new XYSeries("");
    collect.addSeries(series);

    TreeMap<Integer, TreeSet<NucleusLogNode>> map = tree.allTreeCuts(500);

    // Iterate entries directly instead of keySet() + get() per key.
    for (java.util.Map.Entry<Integer, TreeSet<NucleusLogNode>> entry : map.entrySet()) {
        // Convert the log posterior of the cut's first node to a probability.
        double lnP = entry.getValue().first().getLogPosterior();
        series.add((double) entry.getKey(), Math.exp(lnP));
    }

    // (The original computed unused locals t = tree.getTime(), nu = tree.getNu()
    // and an unused XYPlot; they have been removed.)
    JFreeChart chart = ChartFactory.createXYLineChart(
            String.format("Time=%d,nu=%d,alpha=%e", tree.getTime(), tree.getNu(), tree.getAlpha()), "Index",
            "Probability", collect, PlotOrientation.VERTICAL, false, true, true);

    ChartPanel panel = new ChartPanel(chart);
    this.add(panel);
}

From source file:com.chatwingsdk.adapters.EmoticonsAdapter.java

/**
 * Creates a paged adapter over the supplied emoticons.
 *
 * @param fm              fragment manager forwarded to the superclass pager adapter
 * @param emoticonMap     source map of emoticons; a TreeMap, so the derived key list
 *                        is in sorted key order (presumably emoticon code to image
 *                        reference — confirm against callers)
 * @param emoticonPerPage number of emoticons to show on each page
 */
public EmoticonsAdapter(FragmentManager fm, TreeMap<String, String> emoticonMap, int emoticonPerPage) {
    super(fm);
    mEmoticonMap = emoticonMap;
    // Snapshot the keys into a list for positional (per-page) access.
    mEmoticons = new ArrayList<String>(emoticonMap.keySet());
    mEmoticonsPerPage = emoticonPerPage;
}

From source file:org.starfishrespect.myconsumption.server.business.repositories.repositoriesimpl.ValuesRepositoryImpl.java

/**
 * Inserts the dataset if no document with its timestamp exists yet; otherwise
 * merges the dataset's per-minute values into the existing document.
 *
 * @param value the sensor dataset to persist
 * @throws DaoException declared by the interface; not thrown directly here
 */
@Override
public void insertOrUpdate(SensorDataset value) throws DaoException {
    Update update = new Update();
    Query existingQuery = new Query(new Criteria("timestamp").is(value.getTimestamp()));

    if (mongoOperation.exists(existingQuery, SensorDataset.class, collectionName)) {
        TreeMap<Integer, MinuteValues> minuteValues = value.getValues();
        // Iterate entries directly instead of keySet() plus repeated get() lookups.
        for (java.util.Map.Entry<Integer, MinuteValues> entry : minuteValues.entrySet()) {
            Integer minuteTs = entry.getKey();
            Query existingMinute = new Query(
                    new Criteria().andOperator(Criteria.where("timestamp").is(value.getTimestamp()),
                            Criteria.where("values." + minuteTs)));
            MinuteValues minute;
            if (mongoOperation.exists(existingMinute, MinuteValues.class, collectionName)) {
                // Minute already stored: merge the new readings into it.
                // NOTE(review): findOne could return null if the document vanishes
                // between exists() and findOne() — confirm this race is acceptable.
                minute = mongoOperation.findOne(existingMinute, MinuteValues.class, collectionName);
                minute.merge(entry.getValue());
            } else {
                minute = entry.getValue();
            }
            update.set("values." + minuteTs, minute);
        }
        mongoOperation.updateFirst(existingQuery, update, collectionName);
    } else {
        // No document for this timestamp yet: store the dataset as-is.
        mongoOperation.save(value, collectionName);
    }
}

From source file:com.eucalyptus.objectstorage.pipeline.handlers.ObjectStorageAuthenticationHandler.java

/**
 * Ensure that only one header for each name exists (i.e. not 2 Authorization headers)
 * Accomplish this by comma-delimited concatenating any duplicates found as per HTTP 1.1 RFC 2616 section 4.2
 * <p/>/*from w  w w  . jav  a 2  s. c o  m*/
 * TODO: Also, should convert all headers to lower-case for consistent processing later. This is okay since headers are case-insensitive.
 * <p/>
 * in HTTP
 *
 * @param httpRequest
 */
private static void canonicalizeHeaders(MappingHttpRequest httpRequest) {
    //Iterate through headers and find duplicates, concatenate their values together and remove from
    // request as we find them.
    TreeMap<String, String> headerMap = new TreeMap<String, String>();
    String value = null;

    //Construct a map of the normalized headers, cannot modify in-place since
    // conconcurrent-modify exception may result
    for (String header : httpRequest.getHeaderNames()) {
        //TODO: zhill, put in the map in lower-case form.
        headerMap.put(header, Joiner.on(',').join(httpRequest.getHeaders(header)));
    }

    //Remove *all* headers
    httpRequest.clearHeaders();

    //Add the normalized headers back into the request
    for (String foundHeader : headerMap.keySet()) {
        httpRequest.addHeader(foundHeader, headerMap.get(foundHeader).toString());
    }
}

From source file:org.unc.hive.services.rs.ConceptsResource.java

/**
 * Utility method to transform a map of String keys and QName values into XML.
 *
 * @param    treeMap    the map of String keys and QName values; TreeMap ordering
 *                      makes the concepts appear sorted by prefLabel
 * @return   xmlString  the XML string
 */
public static String conceptTreeMapToXML(TreeMap<String, QName> treeMap) {
    // StringBuilder instead of StringBuffer: a method-local buffer needs no
    // synchronization.
    StringBuilder xmlStringBuffer = new StringBuilder(XML_DECLARATION);

    xmlStringBuffer.append("<concepts>\n");

    // Iterate entries directly instead of keySet() + get() per key.
    for (java.util.Map.Entry<String, QName> entry : treeMap.entrySet()) {
        String s = entry.getKey();
        QName qName = entry.getValue();
        if (qName != null) {
            // NOTE(review): values are inserted without XML escaping; if a
            // prefLabel or QName component can contain '<', '>' or '&' this
            // produces malformed XML — confirm inputs are safe or escape here.
            xmlStringBuffer.append("<concept>\n");
            xmlStringBuffer.append("  <prefLabel>" + s + "</prefLabel>\n");
            xmlStringBuffer.append("  <QName>\n");
            xmlStringBuffer.append("    <namespaceURI>" + qName.getNamespaceURI() + "</namespaceURI>\n");
            xmlStringBuffer.append("    <prefix>" + qName.getPrefix() + "</prefix>\n");
            xmlStringBuffer.append("    <localPart>" + qName.getLocalPart() + "</localPart>\n");
            xmlStringBuffer.append("    <string>" + qName.toString() + "</string>\n");
            xmlStringBuffer.append("  </QName>\n");
            xmlStringBuffer.append("</concept>\n");
        }
    }

    xmlStringBuffer.append("</concepts>");
    return xmlStringBuffer.toString();
}

From source file:net.triptech.buildulator.model.DataGrid.java

/**
 * Gets the row fields for the supplied index.
 *
 * @param rowNumber the row number/*from  w  w w . j  a  va 2 s . c  o  m*/
 * @return the row fields
 */
public List<String> getRowFields(final int rowNumber) {
    List<String> data = new ArrayList<String>();

    if (body.containsKey(rowNumber)) {
        TreeMap<Integer, String> row = body.get(rowNumber);
        for (int index : row.keySet()) {
            data.add(row.get(index));
        }
    }
    return data;
}

From source file:com.romeikat.datamessie.core.base.dao.impl.AbstractEntityWithIdAndVersionDaoTest.java

/**
 * Verifies that getIdsWithVersion returns exactly the requested ids as the keys
 * of the resulting TreeMap, both for an empty request and for a concrete id list.
 */
@Test
public void getIdsWithVersion_ids() {
    // Empty input must yield an empty result.
    Collection<Long> ids = Lists.newArrayList();
    TreeMap<Long, Long> idsWithVersion = dao.getIdsWithVersion(sessionProvider.getStatelessSession(), ids);
    assertEquals(0, idsWithVersion.size());
    assertTrue(CollectionUtils.isEqualCollection(ids, idsWithVersion.keySet()));

    // Uppercase 'L' suffix — lowercase 'l' is easily misread as the digit '1'.
    ids = Lists.newArrayList(1L, 2L, 3L);
    idsWithVersion = dao.getIdsWithVersion(sessionProvider.getStatelessSession(), ids);
    assertEquals(3, idsWithVersion.size());
    // TreeMap keys iterate in sorted order, which matches the input order here.
    assertEquals(ids, Lists.newArrayList(idsWithVersion.keySet()));

    // Read-only test: no data modified, so the next DB setup launch can be skipped.
    dbSetupTracker.skipNextLaunch();
}

From source file:au.org.ala.ecodata.IniReader.java

/**
 * Writes the supplied key/value pairs to {@code filename} in INI format.
 * Keys are expected in the form {@code section\key}; entries are sorted so all
 * keys of one section are emitted together under a single [section] header.
 *
 * @param doc      map of "section\key" entries to values
 * @param filename target file path (overwritten)
 */
public void write(Map<String, String> doc, String filename) {
    // try-with-resources guarantees the writer is closed even on failure;
    // the original leaked the PrintWriter when an exception was thrown.
    try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(filename)))) {
        // Sort entries so keys belonging to the same section are adjacent.
        TreeMap<String, String> pmap = new TreeMap<String, String>(doc);

        String currentSection = "";
        for (java.util.Map.Entry<String, String> entry : pmap.entrySet()) {
            // key is "section\key"; split on the literal backslash.
            String[] sectionkey = entry.getKey().split("\\\\");
            if (!currentSection.equals(sectionkey[0])) {
                // Entering a new section: emit a blank gap and its header.
                currentSection = sectionkey[0];
                out.println("\n");
                out.println("[" + sectionkey[0] + "]");
            }
            out.println(sectionkey[1] + "=" + entry.getValue());
        }

    } catch (Exception e) {
        logger.error("Unable to write ini to " + filename);
        e.printStackTrace(System.out);
    }
}

From source file:com.act.lcms.db.analysis.WaveformAnalysis.java

/**
 * This function picks the best retention time among the best peaks from the standard wells. The algorithm is
 * looking for the following heuristics for standard well peak detection: a) a great peak profile
 * b) magnitude of peak is high c) the well is not from MeOH media. It implements this by picking the global
 * best peaks from ALL the standard wells which are not in MeOH media using a peak feature detector. It then
 * compares overlaps between these peaks against the local best peaks of the negative controls and positive samples.
 * If there is an overlap, we have detected a positive signal.
 * @param standardWells The list of standard wells to benchmark from
 * @param representativeMetlinIon This is the metlin ion that is used for the analysis, usually it is the best
 *                                metlin ion picked up an algorithm among the standard well scans.
 * @param positiveAndNegativeWells These are positive and negative wells against which the retention times are
 *                                 compared to see for overlaps.
 * @return A map of ScanData to XZ values for those signals where peaks match between the standard and pos/neg runs.
 */
public static Map<ScanData<LCMSWell>, XZ> pickBestRepresentativeRetentionTimeFromStandardWells(
        List<ScanData<StandardWell>> standardWells, String representativeMetlinIon,
        List<ScanData<LCMSWell>> positiveAndNegativeWells) {

    List<XZ> bestStandardPeaks = new ArrayList<>();
    for (ScanData<StandardWell> well : standardWells) {
        if (well.getWell() != null) {
            // For retention times, select standard runs where the media is not MeOH since
            // MeOH has a lot more skew in retention time than other media. Moreover, none
            // of the feeding runs have their media as MeOH.
            if (well.getWell().getMedia() == null || !well.getWell().getMedia().equals("MeOH")) {
                bestStandardPeaks.addAll(detectPeaksInIntensityTimeWaveform(
                        well.getMs1ScanResults().getIonsToSpectra().get(representativeMetlinIon),
                        PEAK_DETECTION_THRESHOLD));
            }
        }
    }

    // Sort in descending order of intensity
    Collections.sort(bestStandardPeaks, new Comparator<XZ>() {
        @Override
        public int compare(XZ o1, XZ o2) {
            return o2.getIntensity().compareTo(o1.getIntensity());
        }
    });

    Map<ScanData<LCMSWell>, XZ> result = new HashMap<>();

    // Bound the slice of top standard peaks by the list size: the original
    // subList(0, N-1) threw IndexOutOfBoundsException when fewer than N-1
    // standard peaks were detected.
    int standardLimit = Math.min(bestStandardPeaks.size(), NUMBER_OF_BEST_PEAKS_TO_SELECTED_FROM - 1);
    List<XZ> topStandardPeaks = bestStandardPeaks.subList(0, standardLimit);

    // Select from the top peaks in the standards run
    for (ScanData<LCMSWell> well : positiveAndNegativeWells) {
        List<XZ> topPeaksOfSample = detectPeaksInIntensityTimeWaveform(
                well.getMs1ScanResults().getIonsToSpectra().get(representativeMetlinIon),
                PEAK_DETECTION_THRESHOLD);

        // Number of sample peaks to consider; loop-invariant, so hoisted out of
        // the standard-peak loop (the original recomputed it per standard peak).
        int count = topPeaksOfSample.size() >= NUMBER_OF_BEST_PEAKS_TO_SELECTED_FROM
                ? NUMBER_OF_BEST_PEAKS_TO_SELECTED_FROM - 1
                : topPeaksOfSample.size();

        for (XZ topPeak : topStandardPeaks) {
            // Collisions do not matter here since we are just going to pick the highest
            // intensity peak match, so ties are arbitrarily broken based on the order of
            // access in the for loop below.
            TreeMap<Double, XZ> intensityToIntensityTimeValue = new TreeMap<>(Collections.reverseOrder());

            for (int i = 0; i < count; i++) {
                if (topPeaksOfSample.get(i).getTime() > topPeak.getTime() - TIME_SKEW_CORRECTION
                        && topPeaksOfSample.get(i).getTime() < topPeak.getTime() + TIME_SKEW_CORRECTION) {
                    // There has been significant overlap in peaks between standard and sample.
                    intensityToIntensityTimeValue.put(topPeaksOfSample.get(i).getIntensity(),
                            topPeaksOfSample.get(i));
                }
            }

            if (!intensityToIntensityTimeValue.isEmpty()) {
                // Get the best peak overlap based on the largest magnitude intensity
                // (the map is reverse-ordered, so firstEntry() is the max intensity).
                result.put(well, intensityToIntensityTimeValue.firstEntry().getValue());
            }
        }
    }

    return result;
}