Example usage for java.util TreeMap put

List of usage examples for java.util TreeMap put

Introduction

On this page you can find usage examples for java.util.TreeMap.put.

Prototype

public V put(K key, V value) 

Document

Associates the specified value with the specified key in this map.
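Before the project examples below, a minimal self-contained sketch of the put contract: keys are kept in sorted order, and put returns the previous value mapped to the key (or null if there was none).

import java.util.TreeMap;

public class TreeMapPutDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> map = new TreeMap<>();
        map.put("banana", 2);
        map.put("apple", 1);
        map.put("cherry", 3);

        // Keys are iterated in natural (sorted) order
        System.out.println(map); // {apple=1, banana=2, cherry=3}

        // put returns the previous value for the key, or null if there was none
        Integer previous = map.put("apple", 10);
        System.out.println(previous);          // 1
        System.out.println(map.get("apple"));  // 10
    }
}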

Usage

From source file:eu.europa.esig.dss.DSSASN1Utils.java

/**
 * This method can be removed once the simple IssuerSerial verification can be
 * performed. In fact the hash verification is sufficient.
 *
 * @param generalNames the {@link GeneralNames} to canonicalize
 * @return the canonicalized name
 */
public static String getCanonicalizedName(final GeneralNames generalNames) {
    GeneralName[] names = generalNames.getNames();
    TreeMap<String, String> treeMap = new TreeMap<String, String>();
    for (GeneralName name : names) {
        String ldapString = String.valueOf(name.getName());
        LOG.debug("ldapString to canonicalize: {} ", ldapString);
        try {
            LdapName ldapName = new LdapName(ldapString);
            List<Rdn> rdns = ldapName.getRdns();
            for (final Rdn rdn : rdns) {
                treeMap.put(rdn.getType().toLowerCase(), String.valueOf(rdn.getValue()).toLowerCase());
            }
        } catch (InvalidNameException e) {
            throw new DSSException(e);
        }
    }
    StringBuilder stringBuilder = new StringBuilder();
    for (Entry<String, String> entry : treeMap.entrySet()) {
        stringBuilder.append(entry.getKey()).append('=').append(entry.getValue()).append('|');
    }
    final String canonicalizedName = stringBuilder.toString();
    LOG.debug("canonicalizedName: {} ", canonicalizedName);
    return canonicalizedName;
}
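The TreeMap here gives the canonical form a deterministic, case-normalized ordering of RDN types, so two logically equal names compare equal regardless of the order in which their RDNs appear. A simplified sketch of the same idea, using hypothetical RDN values and without the BouncyCastle types:

import java.util.Map;
import java.util.TreeMap;

public class CanonicalNameSketch {
    public static void main(String[] args) {
        // Hypothetical RDNs in arbitrary input order; the TreeMap sorts them by type
        TreeMap<String, String> rdns = new TreeMap<>();
        rdns.put("o", "Example Org".toLowerCase());
        rdns.put("cn", "Example CA".toLowerCase());
        rdns.put("c", "LU".toLowerCase());

        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, String> entry : rdns.entrySet()) {
            sb.append(entry.getKey()).append('=').append(entry.getValue()).append('|');
        }
        System.out.println(sb); // c=lu|cn=example ca|o=example org|
    }
}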

From source file:com.sangupta.jerry.oauth.OAuthUtils.java

/**
 * Extract all the query parameters from the URI.
 *
 * @param uri
 *            the {@link URI} from which the params need to be extracted
 * 
 * @return a {@link TreeMap} containing all query parameters. Never returns
 *         a <code>null</code>
 * 
 * @throws NullPointerException
 *             if {@link URI} presented is <code>null</code>
 */
public static TreeMap<String, String> extractURIParameters(URI uri) {
    final TreeMap<String, String> params = new TreeMap<String, String>();

    String query = uri.getQuery();
    if (AssertUtils.isEmpty(query)) {
        return params;
    }

    String[] pairs = query.split("&");
    for (String pair : pairs) {
        String[] tokens = pair.split("=");
        // assumes each pair is of the form name=value
        params.put(tokens[0], tokens[1]);
    }

    return params;
}
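Because the parameters land in a TreeMap, they come back sorted by name, which is what OAuth signature base strings require. A self-contained sketch of the same extraction (without the project's AssertUtils helper):

import java.net.URI;
import java.util.TreeMap;

public class QueryParamsSketch {
    public static void main(String[] args) throws Exception {
        URI uri = new URI("https://example.com/api?b=2&a=1&c=3");

        // Same idea as extractURIParameters above: the TreeMap keeps
        // the parameters sorted lexicographically by name
        TreeMap<String, String> params = new TreeMap<>();
        for (String pair : uri.getQuery().split("&")) {
            String[] tokens = pair.split("=", 2);
            params.put(tokens[0], tokens.length > 1 ? tokens[1] : "");
        }
        System.out.println(params); // {a=1, b=2, c=3}
    }
}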

From source file:it.polimi.diceH2020.plugin.control.FileManager.java

private static void setMapJobProfile(InstanceDataMultiProvider data, Configuration conf) {
    // Set MapJobProfile
    JobProfilesMap classdesc = JobProfilesMapGenerator.build();

    Map<String, Map<String, Map<String, JobProfile>>> classMap = new HashMap<>();

    for (ClassDesc c : conf.getClasses()) {
        Map<String, Map<String, JobProfile>> alternative = new HashMap<>();

        for (String alt : c.getAltDtsm().keySet()) {
            TreeMap<String, Double> profile = new TreeMap<>();
            String split[] = alt.split("-");

            JobProfile jp;
            if (conf.getTechnology().equals("Hadoop Map-reduce") || conf.getTechnology().equals("Spark")) {
                jp = JobProfileGenerator.build(c.getAltDtsmHadoop().get(alt).keySet().size() - 1);

                for (String par : c.getAltDtsmHadoop().get(alt).keySet()) {
                    if (!par.equals("file")) {
                        profile.put(par, Double.parseDouble(c.getAltDtsmHadoop().get(alt).get(par)));
                    }
                }
            } else {
                jp = JobProfileGenerator.build(3); // TODO: how many parameters do we need?
                profile.put("datasize", 66.6);
                profile.put("mavg", 666.6);
                profile.put("mmax", 666.6);
            }

            jp.setProfileMap(profile);

            final String provider = Configuration.getCurrent().getIsPrivate() ? "inHouse" : split[0];
            final String vmType = Configuration.getCurrent().getIsPrivate() ? split[0] : split[1];

            Map<String, JobProfile> profilemap = new HashMap<>();
            profilemap.put(vmType, jp);

            alternative.merge(provider, profilemap, (oldValue, newValue) -> {
                oldValue.putAll(newValue);
                return oldValue;
            });
        }

        classMap.put(String.valueOf(c.getId()), alternative);
    }

    classdesc.setMapJobProfile(classMap);
    data.setMapJobProfiles(classdesc);
}

From source file:com.sun.faces.generate.GeneratorUtil.java

public static Map<String, ArrayList<RendererBean>> getComponentFamilyRendererMap(FacesConfigBean configBean,
        String renderKitId) {

    RenderKitBean renderKit = configBean.getRenderKit(renderKitId);
    if (renderKit == null) {
        throw new IllegalArgumentException("No RenderKit for id '" + renderKitId + '\'');
    }

    RendererBean[] renderers = renderKit.getRenderers();
    if (renderers == null) {
        throw new IllegalStateException("No Renderers for RenderKit id" + '"' + renderKitId + '"');
    }

    TreeMap<String, ArrayList<RendererBean>> result = new TreeMap<String, ArrayList<RendererBean>>();

    for (int i = 0, len = renderers.length; i < len; i++) {
        RendererBean renderer = renderers[i];

        if (renderer == null) {
            throw new IllegalStateException("no Renderer");
        }

        // if this is the first time we've encountered this componentFamily
        String componentFamily = renderer.getComponentFamily();
        ArrayList<RendererBean> list = result.get(componentFamily);
        if (list == null) {
            // create a list for it
            list = new ArrayList<RendererBean>();
            list.add(renderer);
            result.put(componentFamily, list);
        } else {
            list.add(renderer);
        }
    }

    return result;

}
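The get-then-put pattern above groups renderers by component family while keeping the families sorted. A minimal sketch of the same grouping written with computeIfAbsent, using hypothetical data rather than the Faces config beans:

import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;

public class GroupBySketch {
    public static void main(String[] args) {
        String[][] renderers = { {"Input", "Text"}, {"Output", "Label"}, {"Input", "Secret"} };

        // computeIfAbsent creates the list on first use; the TreeMap keeps families sorted
        TreeMap<String, List<String>> byFamily = new TreeMap<>();
        for (String[] renderer : renderers) {
            byFamily.computeIfAbsent(renderer[0], k -> new ArrayList<>()).add(renderer[1]);
        }
        System.out.println(byFamily); // {Input=[Text, Secret], Output=[Label]}
    }
}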

From source file:com.sshtools.common.vomanagementtool.common.VOHelper.java

private static TreeMap readVomsesFile(File file) {
    TreeMap vosInfo = new TreeMap<String, List>();

    BufferedReader br;
    try {
        br = new BufferedReader(new FileReader(file));

        String line;
        //int counter=0;
        while ((line = br.readLine()) != null) {
            if (!line.trim().equals("")) {
                String[] info = line.split("\" \"");
                TreeMap temp = null;

                String voname = "";
                for (int i = 0; i < info.length; i++) {
                    if (i == 0) {
                        temp = new TreeMap<String, String>();
                        voname = info[i].substring(1);
                    } else if (i == 4) {
                        temp.put("servervoname", info[i].substring(0, info[i].length() - 1));
                        //Find if the same voname already exists
                        if (vosInfo.containsKey(voname)) {
                            List multiValue = (List) vosInfo.get(voname);
                            multiValue.add(temp);
                            vosInfo.put(voname, multiValue);
                        } else {
                            List singleValue = new ArrayList();
                            singleValue.add(temp);
                            vosInfo.put(voname, singleValue);
                        }
                    } else {
                        if (i == 1) {
                            temp.put("server", info[i]);
                        } else if (i == 2) {
                            temp.put("port", info[i]);
                        } else if (i == 3) {
                            temp.put("dn", info[i]);
                        }
                    }
                }
                //counter++;
            }
        }
        br.close();

    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }

    return vosInfo;
}

From source file:org.socialhistoryservices.pid.controllers.AccessConfirmationController.java

@RequestMapping("/oauth/confirm_access")
public ModelAndView getAccessConfirmation(UnconfirmedAuthorizationCodeClientToken clientAuth) throws Exception {
    ClientDetails client = clientDetailsService.loadClientByClientId(clientAuth.getClientId());
    TreeMap<String, Object> model = new TreeMap<String, Object>();
    model.put("auth_request", clientAuth);
    model.put("client", client);
    return new ModelAndView("access_confirmation", model);
}

From source file:com.rizki.mufrizal.belajar.spring.data.mongodb.service.impl.CategoryServiceImpl.java

@Override
public TreeMap<String, Object> getCategories(Pageable pageable) {
    Page<Category> categories = categoryRepository.findAll(pageable);

    List<Category> categorys = new ArrayList<>();

    for (Category category : categories) {
        category.setDepartment(departmentRepository.findOne(category.getDepartmentId()));
        categorys.add(category);
    }

    TreeMap<String, Object> map = new TreeMap<>();
    map.put("content", categorys);
    map.put("last", categories.isLast());
    map.put("totalPages", categories.getTotalPages());
    map.put("totalElements", categories.getTotalElements());
    map.put("size", categories.getSize());
    map.put("number", categories.getNumber());
    map.put("sort", categories.getSort());
    map.put("first", categories.isFirst());
    map.put("numberOfElements", categories.getNumberOfElements());

    return map;
}

From source file:com.act.lcms.db.analysis.WaveformAnalysis.java

/**
 * This function picks the best retention time among the best peaks from the standard wells. The algorithm is
 * looking for the following heuristics for standard well peak detection: a) a great peak profile
 * b) magnitude of peak is high c) the well is not from MeOH media. It implements this by picking the global
 * 3 best peaks from ALL the standard wells which are not in MeOH media using a peak feature detector. It then
 * compares overlaps between these peaks against the local 3 best peaks of the negative controls and positive samples.
 * If there is an overlap, we have detected a positive signal.
 * @param standardWells The list of standard wells to benchmark from
 * @param representativeMetlinIon This is the metlin ion that is used for the analysis; usually it is the best
 *                                metlin ion picked by an algorithm among the standard well scans.
 * @param positiveAndNegativeWells These are positive and negative wells against which the retention times are
 *                                 compared to see for overlaps.
 * @return A map of Scandata to XZ values for those signals where peaks match between the standard and pos/neg runs.
 */
public static Map<ScanData<LCMSWell>, XZ> pickBestRepresentativeRetentionTimeFromStandardWells(
        List<ScanData<StandardWell>> standardWells, String representativeMetlinIon,
        List<ScanData<LCMSWell>> positiveAndNegativeWells) {

    List<XZ> bestStandardPeaks = new ArrayList<>();
    for (ScanData<StandardWell> well : standardWells) {
        if (well.getWell() != null) {
            // For retention times, select standard runs where the media is not MeOH since
            // MeOH has a lot more skew in retention time than other media. Moreover, none
            // of the feeding runs have their media as MeOH.
            if (well.getWell().getMedia() == null || !well.getWell().getMedia().equals("MeOH")) {
                bestStandardPeaks.addAll(detectPeaksInIntensityTimeWaveform(
                        well.getMs1ScanResults().getIonsToSpectra().get(representativeMetlinIon),
                        PEAK_DETECTION_THRESHOLD));
            }
        }
    }

    // Sort in descending order of intensity
    Collections.sort(bestStandardPeaks, new Comparator<XZ>() {
        @Override
        public int compare(XZ o1, XZ o2) {
            return o2.getIntensity().compareTo(o1.getIntensity());
        }
    });

    Map<ScanData<LCMSWell>, XZ> result = new HashMap<>();

    // Select from the top peaks in the standards run
    for (ScanData<LCMSWell> well : positiveAndNegativeWells) {
        List<XZ> topPeaksOfSample = detectPeaksInIntensityTimeWaveform(
                well.getMs1ScanResults().getIonsToSpectra().get(representativeMetlinIon),
                PEAK_DETECTION_THRESHOLD);

        for (XZ topPeak : bestStandardPeaks.subList(0, NUMBER_OF_BEST_PEAKS_TO_SELECTED_FROM - 1)) {
            int count = topPeaksOfSample.size() >= NUMBER_OF_BEST_PEAKS_TO_SELECTED_FROM
                    ? NUMBER_OF_BEST_PEAKS_TO_SELECTED_FROM - 1
                    : topPeaksOfSample.size();

            // Collisions do not matter here since we are just going to pick the highest intensity peak match, so ties
            // are arbitrarily broken based on the access order in the for loop below.
            TreeMap<Double, XZ> intensityToIntensityTimeValue = new TreeMap<>(Collections.reverseOrder());

            for (int i = 0; i < count; i++) {
                if (topPeaksOfSample.get(i).getTime() > topPeak.getTime() - TIME_SKEW_CORRECTION
                        && topPeaksOfSample.get(i).getTime() < topPeak.getTime() + TIME_SKEW_CORRECTION) {
                    // There has been significant overlap in peaks between standard and sample.
                    intensityToIntensityTimeValue.put(topPeaksOfSample.get(i).getIntensity(),
                            topPeaksOfSample.get(i));
                }
            }

            if (intensityToIntensityTimeValue.keySet().size() > 0) {
                // Get the best peak overlap based on the largest magnitude intensity
                result.put(well, intensityToIntensityTimeValue.firstEntry().getValue());
            }
        }
    }

    return result;
}
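The reverse-ordered TreeMap is the key trick here: constructed with Collections.reverseOrder(), its firstEntry() is the mapping with the largest key, i.e. the highest-intensity peak. A minimal sketch with made-up intensities:

import java.util.Collections;
import java.util.TreeMap;

public class ReverseOrderSketch {
    public static void main(String[] args) {
        // Descending key order: firstEntry() holds the largest key
        TreeMap<Double, String> byIntensity = new TreeMap<>(Collections.reverseOrder());
        byIntensity.put(10.5, "peak A");
        byIntensity.put(99.9, "peak B");
        byIntensity.put(42.0, "peak C");

        System.out.println(byIntensity.firstEntry()); // 99.9=peak B
    }
}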

From source file:com.acc.oauth2.controller.OAuth2AccessController.java

@RequestMapping(value = "/oauth/confirm_access", method = RequestMethod.GET)
public ModelAndView getAccessConfirmation(@ModelAttribute final AuthorizationRequest clientAuth)
        throws Exception {
    final ClientDetails client = clientDetailsService.loadClientByClientId(clientAuth.getClientId());
    final TreeMap<String, Object> model = new TreeMap<String, Object>();
    model.put("auth_request", clientAuth);
    model.put("client", client);
    return new ModelAndView("access_confirmation", model);
}

From source file:de.rahn.finances.commons.metrics.MetricsExporterServiceTest.java

private <V> SortedMap<String, V> singletonMap(String name, V value) {
    TreeMap<String, V> map = new TreeMap<>();
    map.put(name, value);

    return map;
}