Example usage for java.util LinkedHashMap put

List of usage examples for java.util LinkedHashMap put

Introduction

On this page you can find usage examples for java.util.LinkedHashMap.put.

Prototype

V put(K key, V value);

Document

Associates the specified value with the specified key in this map (optional operation).
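put returns the previous value associated with the key, or null if there was none, and re-inserting an existing key does not change its position in the iteration order of a default (insertion-ordered) LinkedHashMap. A minimal sketch; the map name and values are illustrative only:

import java.util.LinkedHashMap;

public class PutExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> counts = new LinkedHashMap<>();
        counts.put("alpha", 1);                     // returns null: no previous mapping
        counts.put("beta", 2);
        Integer previous = counts.put("alpha", 3);  // returns 1, the replaced value
        System.out.println(previous);               // 1
        System.out.println(counts);                 // {alpha=3, beta=2} -- insertion order preserved
    }
}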

Usage

From source file:edu.jhuapl.openessence.web.util.ControllerUtils.java

/**
 * Applies a limit to the map.  All values after limit will be summed and placed in the return map with key {@code
 * limitLabel}. Zero or NaN values do not count towards the limit.
 *
 * @param map        The map to limit. Assumed use case is that the map is already value-sorted descending.
 * @param limit      Maximum number of non-zero entries to keep before summing the remainder.
 * @param limitLabel Key under which the summed overflow values are stored.
 * @return the limited map.
 */
public static LinkedHashMap<String, Double> getLimitedMap(Map<String, Double> map, Integer limit,
        String limitLabel) {
    LinkedHashMap<String, Double> mapValueSort = new LinkedHashMap<String, Double>(limit);
    //add limit by default if the record size is greater than the limit.
    if (limit < map.size()) {
        mapValueSort.put(limitLabel, 0.0);
    }
    //combine any value after limit into a summed bucket
    int i = 0;
    for (String key : map.keySet()) {
        Double value = map.get(key);
        if (i < limit) {
            mapValueSort.put(key, value);
        } else {
            //we've hit the limit, now accumulate
            Double val = mapValueSort.get(limitLabel);
            if (value != null && !Double.isNaN(value)) {
                mapValueSort.put(limitLabel, val + value);
            }
        }
        //if it is not zero/null count it towards limit
        if (value != null && !Double.isNaN(value) && value > 0) {
            i++;
        }
    }
    return mapValueSort;
}
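Because LinkedHashMap iterates in insertion order, putting the limitLabel bucket first pins the overflow entry at the front of the result, while the later puts keep the input map's descending-value order. A hedged usage sketch, assuming the method above is on the classpath; the data and the limit are invented for illustration:

import java.util.LinkedHashMap;
import java.util.Map;

static LinkedHashMap<String, Double> demoLimit() {
    // Hypothetical input, already value-sorted descending as the Javadoc assumes.
    Map<String, Double> byCount = new LinkedHashMap<>();
    byCount.put("flu", 120.0);
    byCount.put("cold", 80.0);
    byCount.put("cough", 30.0);
    byCount.put("rash", 10.0);
    // Keep the top 2 entries; the rest is summed under the "Other" bucket.
    // Expected result, in insertion order: {Other=40.0, flu=120.0, cold=80.0}
    return ControllerUtils.getLimitedMap(byCount, 2, "Other");
}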

From source file:com.geewhiz.pacify.utils.ArchiveUtils.java

private static LinkedHashMap<PArchive, List<PArchive>> getParentArchives(List<PFile> replacePFiles) {
    LinkedHashMap<PArchive, List<PArchive>> parentArchives = new LinkedHashMap<PArchive, List<PArchive>>();
    // for performance get first all archives in an archive
    for (PFile pFile : replacePFiles) {
        if (!pFile.isArchiveFile()) {
            continue;
        }
        PArchive pArchive = pFile.getPArchive();
        if (!pArchive.isArchiveFile()) {
            continue;
        }
        if (!parentArchives.containsKey(pArchive.getParentArchive())) {
            parentArchives.put(pArchive.getParentArchive(), new ArrayList<PArchive>());
        }
        List<PArchive> pArchivesToReplace = parentArchives.get(pArchive.getParentArchive());
        if (!pArchivesToReplace.contains(pArchive)) {
            pArchivesToReplace.add(pArchive);
        }
    }
    return parentArchives;
}
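The containsKey/put/get sequence above is the classic group-into-lists idiom; on Java 8+ the same grouping can be written with computeIfAbsent, which creates the list on first sight with a single lookup. A minimal standalone sketch using plain strings instead of the project's PArchive type (the names are illustrative):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

static LinkedHashMap<String, List<String>> groupByParent(Map<String, String> childToParent) {
    LinkedHashMap<String, List<String>> byParent = new LinkedHashMap<>();
    for (Map.Entry<String, String> e : childToParent.entrySet()) {
        // computeIfAbsent replaces the containsKey/put/get dance with a single lookup.
        List<String> children = byParent.computeIfAbsent(e.getValue(), parent -> new ArrayList<>());
        if (!children.contains(e.getKey())) {
            children.add(e.getKey());
        }
    }
    return byParent;
}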

From source file:com.act.lcms.db.analysis.WaveformAnalysis.java

/**
 * This function takes in a standard molecule's intensity vs. time data and a collection of negative-control data
 * and plots the SNR value at each time period, assuming the time jitter effects are negligible (more info on this
 * is here: https://github.com/20n/act/issues/136). Based on the SNR values, it rank-orders the Metlin ions of the
 * molecule.
 * @param ionToIntensityData A map of chemical to intensity/time data
 * @param standardChemical The chemical that is the standard of analysis
 * @param restrictedTimeWindows Optional map of ion to retention-time windows; when present, only peaks inside a window may update the SNR
 * @return A sorted linked hash map of Metlin ion to (time, SNR) pairs, ordered from highest SNR to lowest
 */
public static LinkedHashMap<String, XZ> performSNRAnalysisAndReturnMetlinIonsRankOrderedBySNR(
        ChemicalToMapOfMetlinIonsToIntensityTimeValues ionToIntensityData, String standardChemical,
        Map<String, List<Double>> restrictedTimeWindows) {

    TreeMap<Double, List<String>> sortedIntensityToIon = new TreeMap<>(Collections.reverseOrder());
    Map<String, XZ> ionToSNR = new HashMap<>();

    for (String ion : ionToIntensityData.getMetlinIonsOfChemical(standardChemical).keySet()) {

        // We first compress the ion spectra by 5 seconds (this number was gotten from trial and error on labelled
        // spectra). Then, we do feature detection of peaks in the compressed data.
        List<XZ> standardIntensityTime = detectPeaksInIntensityTimeWaveform(
                compressIntensityAndTimeGraphsAndFindMaxIntensityInEveryTimeWindow(
                        ionToIntensityData.getMetlinIonsOfChemical(standardChemical).get(ion),
                        COMPRESSION_CONSTANT).getLeft(),
                PEAK_DETECTION_THRESHOLD);

        List<List<XZ>> negativeIntensityTimes = new ArrayList<>();
        for (String chemical : ionToIntensityData.getIonList()) {
            if (!chemical.equals(standardChemical)) {
                negativeIntensityTimes.add(compressIntensityAndTimeGraphsAndFindMaxIntensityInEveryTimeWindow(
                        ionToIntensityData.getMetlinIonsOfChemical(chemical).get(ion), COMPRESSION_CONSTANT)
                                .getLeft());
            }
        }

        List<XZ> rmsOfNegativeValues = rmsOfIntensityTimeGraphs(negativeIntensityTimes);

        List<Double> listOfTimeWindows = new ArrayList<>();
        if (restrictedTimeWindows != null && restrictedTimeWindows.get(ion) != null) {
            listOfTimeWindows.addAll(restrictedTimeWindows.get(ion));
        }

        Boolean canUpdateMaxSNRAndTime = true;
        Boolean useRestrictedTimeWindowAnalysis = false;

        // If there are restricted time windows, set the default to not update SNR until certain conditions are met.
        if (listOfTimeWindows.size() > 0) {
            useRestrictedTimeWindowAnalysis = true;
            canUpdateMaxSNRAndTime = false;
        }

        Double maxSNR = 0.0;
        Double maxTime = 0.0;

        // For each of the peaks detected in the positive control, find the spectral intensity values from the negative
        // controls and calculate SNR based on that.
        for (XZ positivePosition : standardIntensityTime) {

            Double time = positivePosition.getTime();

            XZ negativeControlPosition = null;
            for (XZ position : rmsOfNegativeValues) {
                if (position.getTime() > time - POSITION_TIME_WINDOW_IN_SECONDS
                        && position.getTime() < time + POSITION_TIME_WINDOW_IN_SECONDS) {
                    negativeControlPosition = position;
                    break;
                }
            }

            if (negativeControlPosition == null) {
                // No negative-control reading fell inside the window; skip this peak rather than dereference null.
                continue;
            }

            Double snr = Math.pow(positivePosition.getIntensity() / negativeControlPosition.getIntensity(), 2);

            // If the given time point overlaps with one of the restricted time windows, we can update the snr calculations.
            for (Double restrictedTimeWindow : listOfTimeWindows) {
                if ((time > restrictedTimeWindow - RESTRICTED_RETENTION_TIME_WINDOW_IN_SECONDS)
                        && (time < restrictedTimeWindow + RESTRICTED_RETENTION_TIME_WINDOW_IN_SECONDS)) {
                    canUpdateMaxSNRAndTime = true;
                    break;
                }
            }

            if (canUpdateMaxSNRAndTime) {
                maxSNR = Math.max(maxSNR, snr);
                maxTime = Math.max(maxTime, time);
            }

            if (useRestrictedTimeWindowAnalysis) {
                canUpdateMaxSNRAndTime = false;
            }
        }

        ionToSNR.put(ion, new XZ(maxTime, maxSNR));

        List<String> ionValues = sortedIntensityToIon.get(maxSNR);
        if (ionValues == null) {
            ionValues = new ArrayList<>();
            sortedIntensityToIon.put(maxSNR, ionValues);
        }

        ionValues.add(ion);
    }

    LinkedHashMap<String, XZ> result = new LinkedHashMap<>(sortedIntensityToIon.size());
    for (Map.Entry<Double, List<String>> entry : sortedIntensityToIon.entrySet()) {
        List<String> ions = entry.getValue();
        for (String ion : ions) {
            result.put(ion, ionToSNR.get(ion));
        }
    }

    return result;
}
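The ordering of the returned map comes from collecting scores into a TreeMap built with Collections.reverseOrder() and then copying into a LinkedHashMap, whose put calls preserve that descending order for later iteration. A minimal standalone sketch of the same idiom with plain strings and doubles (the data and names are illustrative):

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

static LinkedHashMap<String, Double> rankDescending(Map<String, Double> scoreByName) {
    // TreeMap keeps keys sorted; reverseOrder() makes that highest-score-first.
    TreeMap<Double, List<String>> byScore = new TreeMap<>(Collections.reverseOrder());
    for (Map.Entry<String, Double> e : scoreByName.entrySet()) {
        byScore.computeIfAbsent(e.getValue(), k -> new ArrayList<>()).add(e.getKey());
    }
    // Copying into a LinkedHashMap freezes the descending order as insertion order.
    LinkedHashMap<String, Double> ranked = new LinkedHashMap<>();
    for (Map.Entry<Double, List<String>> e : byScore.entrySet()) {
        for (String name : e.getValue()) {
            ranked.put(name, e.getKey());
        }
    }
    return ranked;
}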

From source file:com.frostwire.search.youtube.jd.Request.java

/**
 * Returns a HashMap with all key:value pairs in the query.
 *
 * @param query
 *            can be a bare query string (&key=value) or a URL containing a query
 * @return the parsed key/value pairs, or null if {@code query} is null
 * @throws MalformedURLException
 */

public static LinkedHashMap<String, String> parseQuery(String query) throws MalformedURLException {
    if (query == null) {
        return null;
    }
    final LinkedHashMap<String, String> ret = new LinkedHashMap<String, String>();
    if (query.toLowerCase().trim().startsWith("http")) {
        query = new URL(query).getQuery();
    }

    if (query == null) {
        return ret;
    }
    final String[][] split = new Regex(query.trim(), "&?(.*?)=(.*?)($|&(?=.*?=.+))").getMatches();
    if (split != null) {
        for (String[] aSplit : split) {
            ret.put(aSplit[0], aSplit[1]);
        }
    }
    return ret;
}
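A hedged usage sketch, assuming the Request class above is on the classpath; the URL is invented for illustration. Because the result is a LinkedHashMap, iteration follows the order of the parameters in the query string:

import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical URL, for illustration only.
LinkedHashMap<String, String> params = Request.parseQuery("https://example.com/watch?v=abc123&t=42s");
for (Map.Entry<String, String> e : params.entrySet()) {
    System.out.println(e.getKey() + " = " + e.getValue());   // expected: v = abc123, then t = 42s
}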

From source file:Main.java

public static LinkedHashMap<String, String> convertBeans(Object bean) {
    if (bean == null)
        return null;
    try {
        LinkedHashMap<String, String> returnMap = new LinkedHashMap<String, String>();

        Class<? extends Object> clazz = bean.getClass();
        Class<? extends Object> clazz = bean.getClass();
        List<Field> fields = new ArrayList<Field>();
        for (Class<?> c = clazz; c != Object.class; c = c.getSuperclass()) {
            fields.addAll(Arrays.asList(c.getDeclaredFields()));
        }

        for (Field field : fields) {
            String value = "";
            field.setAccessible(true);
            try {
                Object result = field.get(bean);
                if (result == null)
                    continue;
                value = result.toString();
            } catch (IllegalAccessException e) {
                e.printStackTrace();
            }
            //            MLogUtil.e("field.getName() "+field.getName());
            //            MLogUtil.e("value "+value);
            returnMap.put(field.getName(), value);
            field.setAccessible(false);
        }
        return returnMap;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
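A hedged usage sketch, assuming the convertBeans method above is on the classpath; the User bean is hypothetical. The LinkedHashMap target means the entries come out in the order the fields were visited, which is typically declaration order (subclass fields before superclass fields), although getDeclaredFields() makes no ordering guarantee:

// Hypothetical bean, for illustration only.
class User {
    private String name = "ada";
    private int age = 36;
}

LinkedHashMap<String, String> asMap = convertBeans(new User());
// Typical result: {name=ada, age=36}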

From source file:AndroidUninstallStock.java

public static LinkedHashMap<String, String> getLibFromPatternInclude(String adb, LinkedList<String> liblist,
        LinkedHashMap<String, String> apklist, LinkedList<AusInfo> section, String sectionname,
        boolean scanlibs) throws IOException {
    LinkedHashMap<String, String> libinclude = new LinkedHashMap<String, String>();
    if (scanlibs) {
        System.out.println();
        System.out.println("Include libraries from packages (" + sectionname + "):");
        libinclude = getLibFromPackage(adb, liblist, apklist);
    }

    System.out.println();
    System.out.println("Include libraries from section (" + sectionname + "):");
    LinkedHashMap<String, String> maplist = new LinkedHashMap<String, String>();
    for (String path : liblist) {
        maplist.put(path, "");
    }
    for (AusInfo info : section) {
        System.out.println("* " + info.apk.get("name"));
        LinkedHashMap<String, String> inc = new LinkedHashMap<String, String>();
        for (HashMap<String, String> pattern : info.include) {
            inc.putAll(_getListFromPattern(maplist, pattern, info, "include: ", true));
        }
        for (HashMap<String, String> pattern : info.exclude) {
            if (getBoolean(pattern.get("global"))) {
                continue;
            }
            for (Map.Entry<String, String> exc : _getListFromPattern(inc, pattern, info, "exclude: ", true)
                    .entrySet()) {
                inc.remove(exc.getKey());
            }
        }
        libinclude.putAll(inc);
    }

    return libinclude;
}
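Here maplist acts as an insertion-ordered set: every library path maps to an empty string, and put is used only to record key order, presumably because the project's _getListFromPattern helper expects a map. Where no such constraint applies, a LinkedHashSet states the same intent more directly; a minimal sketch with hypothetical paths:

import java.util.LinkedHashSet;
import java.util.Set;

// Insertion-ordered collection of library paths, replacing the map-with-empty-values trick.
Set<String> libPaths = new LinkedHashSet<>();
libPaths.add("/system/lib/libfoo.so");   // hypothetical paths, for illustration only
libPaths.add("/system/lib/libbar.so");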

From source file:AndroidUninstallStock.java

public static LinkedHashMap<String, String> sortByValues(Map<String, String> unsortMap) {
    // from http://www.mkyong.com/java/how-to-sort-a-map-in-java/
    LinkedList<Map.Entry<String, String>> list = new LinkedList<Map.Entry<String, String>>(
            unsortMap.entrySet());

    Collections.sort(list, new Comparator<Map.Entry<String, String>>() {
        @Override
        public int compare(Map.Entry<String, String> o1, Map.Entry<String, String> o2) {
            return o1.getValue().compareTo(o2.getValue());
        }
    });

    LinkedHashMap<String, String> sortedMap = new LinkedHashMap<String, String>();
    for (Map.Entry<String, String> entry : list) {
        sortedMap.put(entry.getKey(), entry.getValue());
    }
    return sortedMap;
}
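On Java 8+ the same sort-then-copy can be expressed with streams; collecting into a LinkedHashMap via the four-argument toMap collector is what preserves the sorted order. A minimal sketch with equivalent behavior for maps with comparable, non-null values (an alternative to, not the project's own, implementation):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

static LinkedHashMap<String, String> sortByValuesStream(Map<String, String> unsortMap) {
    return unsortMap.entrySet().stream()
            .sorted(Map.Entry.comparingByValue())
            .collect(Collectors.toMap(
                    Map.Entry::getKey,
                    Map.Entry::getValue,
                    (a, b) -> a,                 // merge function; map keys are already unique
                    LinkedHashMap::new));        // LinkedHashMap preserves the sorted order
}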

From source file:net.openid.appauth.JsonUtil.java

@NonNull
public static Map<String, String> getStringMap(JSONObject json, String field) throws JSONException {
    LinkedHashMap<String, String> stringMap = new LinkedHashMap<>();
    checkNotNull(json, "json must not be null");
    checkNotNull(field, "field must not be null");
    if (!json.has(field)) {
        return stringMap;
    }

    JSONObject mapJson = json.getJSONObject(field);
    Iterator<String> mapKeys = mapJson.keys();
    while (mapKeys.hasNext()) {
        String key = mapKeys.next();
        String value = checkNotNull(mapJson.getString(key), "additional parameter values must not be null");
        stringMap.put(key, value);
    }
    return stringMap;
}
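A hedged usage sketch, assuming the JsonUtil class above and the org.json library on the classpath; the field name and payload are illustrative. A missing field yields an empty map, and the call declares JSONException, so it must be handled or propagated:

import org.json.JSONObject;
import java.util.Map;

// Hypothetical payload; "additionalParameters" is an illustrative field name.
JSONObject json = new JSONObject("{\"additionalParameters\":{\"prompt\":\"consent\",\"ui_locales\":\"en\"}}");
Map<String, String> extras = JsonUtil.getStringMap(json, "additionalParameters");
// extras contains prompt=consent and ui_locales=en.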

From source file:net.sf.maltcms.chromaui.project.spi.DBProjectFactory.java

private static void initGroups(LinkedHashSet<String> groups, Map<File, String> fileToGroup,
        LinkedHashMap<String, Set<File>> groupToFile) {
    groups.addAll(fileToGroup.values());
    for (String group : groups) {
        for (File key : fileToGroup.keySet()) {
            if (fileToGroup.get(key).equals(group)) {
                if (groupToFile.containsKey(group)) {
                    Set<File> s = groupToFile.get(group);
                    s.add(key);
                } else {
                    Set<File> s = new LinkedHashSet<>();
                    s.add(key);
                    groupToFile.put(group, s);
                }

            }
        }
    }
}
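The nested loops above rescan fileToGroup once per group; the same inversion can be done in a single pass over the entries, with computeIfAbsent creating each group's set on first sight. A minimal standalone sketch of that inversion step, independent of the project's types (names are illustrative):

import java.io.File;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

static LinkedHashMap<String, Set<File>> invert(Map<File, String> fileToGroup) {
    LinkedHashMap<String, Set<File>> groupToFile = new LinkedHashMap<>();
    for (Map.Entry<File, String> e : fileToGroup.entrySet()) {
        // Create the group's set on first encounter, then add the file to it.
        groupToFile.computeIfAbsent(e.getValue(), g -> new LinkedHashSet<>()).add(e.getKey());
    }
    return groupToFile;
}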

From source file:edu.jhuapl.openessence.web.util.ControllerUtils.java

/**
 * Same overflow-bucket limiting as {@link #getLimitedMap}, but for {@link ChartData} values: entries past
 * {@code limit} are summed into a single {@code limitLabel} entry.
 */
public static LinkedHashMap<String, ChartData> getLimitedChartDataMap(Map<String, ChartData> map, Integer limit,
        String limitLabel) {
    LinkedHashMap<String, ChartData> mapValueSort = new LinkedHashMap<String, ChartData>(limit);
    //add limit by default if the record size is greater than the limit.
    ChartData lastItem = new ChartData(limitLabel, limitLabel, 0.0);

    if (limit < map.size()) {
        mapValueSort.put(limitLabel, lastItem);
    }
    //combine any value after limit into a summed bucket
    int i = 0;
    for (String key : map.keySet()) {
        ChartData value = map.get(key);
        if (i < limit) {
            mapValueSort.put(key, value);
        } else {
            //we've hit the limit, now accumulate
            Double val = lastItem.getCount();
            if (value != null && value.getCount() != null && !Double.isNaN(value.getCount())) {
                lastItem.setCount(val + value.getCount());
            }
        }
        //if it is not zero/null count it towards limit
        if (value != null && value.getCount() != null && !Double.isNaN(value.getCount())
                && value.getCount() > 0) {
            i++;
        }
    }
    //        if(limit < map.size()){
    //            mapValueSort.put("", lastItem);
    //        }
    return mapValueSort;
}