Usage examples for java.util.TreeMap#keySet()
public Set<K> keySet()
From source file:cit360.sandbox.BackEndMenu.java
public static void ticketPrices() { TreeMap ageGroup = new TreeMap(); // Add some ageGroup. ageGroup.put("Adult", 8.75); ageGroup.put("Child", 5.50); ageGroup.put("Senior Citizen", 5.25); ageGroup.put("Military Veteran", 5.00); // Iterate over all ageGroup, using the keySet method. for (Object key : ageGroup.keySet()) System.out.println(key + " - $" + ageGroup.get(key)); System.out.println();//from ww w.j a va 2 s. c om System.out.println("Highest key: " + ageGroup.lastKey()); System.out.println("Lowest key: " + ageGroup.firstKey()); System.out.println("\nPrinting all values: "); for (Object val : ageGroup.values()) System.out.println("$" + val); System.out.println(); // Clear all values. ageGroup.clear(); // Equals to zero. System.out.println("After clear operation, size: " + ageGroup.size()); }
From source file:org.apache.hadoop.hive.ql.exec.ToolBox.java
static double calDistincValue(TreeMap<String, Integer> para, int num_sampled_rows) { int num_multiple = 0; int num_distinct = para.keySet().size(); LOG.debug("num_distinct: " + num_distinct); double stat_distinct_values; for (String s : para.keySet()) { if (para.get(s) > 1) { num_multiple++;// w w w.ja v a 2 s. c o m } } LOG.debug("num_multiple: " + num_multiple); if (num_multiple == 0) { stat_distinct_values = -1; return stat_distinct_values; } else if (num_multiple == num_distinct) { stat_distinct_values = num_distinct; return stat_distinct_values; } int totalrows = num_sampled_rows; int f1 = num_distinct - num_multiple; int d = num_distinct; int numer = num_sampled_rows * d; int denom = (num_sampled_rows - f1) + f1 * num_sampled_rows / totalrows; LOG.debug("numer: " + numer); LOG.debug("denom: " + denom); int distinct_values = numer / denom; if (distinct_values < d) { distinct_values = d; } else if (distinct_values > totalrows) { distinct_values = totalrows; } LOG.debug("distinct_values: " + distinct_values); LOG.debug("totalrows: " + totalrows); stat_distinct_values = Math.floor(distinct_values + 0.5); if (stat_distinct_values > 0.1 * totalrows) { stat_distinct_values = -(stat_distinct_values / totalrows); } LOG.debug("stat_distinct_values: " + stat_distinct_values); return stat_distinct_values; }
From source file:org.apache.hadoop.raid.CorruptFileCounterServlet.java
/**
 * Renders an HTML table summarising corrupt-file counters per monitored root
 * directory: one icon-labelled header row, then per path a row of counters,
 * detection lag and first-window recovery stats, followed by one extra row per
 * remaining recovery window.
 *
 * @param corruptFilesCounterMap root path -> (corrupt-file status -> count)
 * @param infoAddr               host:port used to build drill-down links
 * @param numDetectionsPerSec    detection throughput used to estimate lag
 * @param raidNode               source of images and per-path block-fix status
 * @return the assembled HTML table markup
 * @throws UnsupportedEncodingException from URL-encoding path/status params
 * @throws IOException                  from the JspUtils/raidNode helpers
 */
public static String generateTable(Map<String, Map<CorruptFileStatus, Long>> corruptFilesCounterMap,
        String infoAddr, double numDetectionsPerSec, RaidNode raidNode)
        throws UnsupportedEncodingException, IOException {
    StringBuilder htmlSb = new StringBuilder();
    int imageSize = 30;
    // Header row: one icon (with tooltip text) per counter column.
    htmlSb.append(JspUtils.tr(JspUtils.td("Root Directory")
            + JspUtils.td(JspUtils.image(raidNode, "RURF.jpg", imageSize, imageSize), "Raid Unrecoverable Files")
            + JspUtils.td(JspUtils.image(raidNode, "NRURF.jpg", imageSize, imageSize),
                    "Not-Raid Unrecoverable Files")
            + JspUtils.td(JspUtils.image(raidNode, "PURF.jpg", imageSize, imageSize),
                    "Potential Unrecoverable Files")
            + JspUtils.td(JspUtils.image(raidNode, "RF.jpg", imageSize, imageSize), "Recoverable Files")
            + JspUtils.td(JspUtils.image(raidNode, "DL.jpg", imageSize, imageSize), "Detection Lag(s)")
            + JspUtils.td(
                    JspUtils.image(raidNode, "RL.jpg", imageSize, imageSize) + " " + getPercentHeader(raidNode),
                    "Recovery Lag(s)")
            + JspUtils.td(JspUtils.image(raidNode, "RFF.jpg", imageSize, imageSize), "Recovery Failed Files")));
    for (String path : corruptFilesCounterMap.keySet()) {
        Map<CorruptFileStatus, Long> counters = corruptFilesCounterMap.get(path);
        StringBuilder oneRow = new StringBuilder();
        // Per-window fix status for this path; windowSize drives header-cell rowspans.
        TreeMap<Long, BlockFixStatus> countersMap = raidNode.getBlockIntegrityMonitor().getBlockFixStatus(path,
                System.currentTimeMillis());
        int windowSize = countersMap.keySet().size();
        oneRow.append(JspUtils.th(windowSize, path));
        // Append corrupt file counters
        for (CorruptFileStatus cfs : columns) {
            Long count = counters.get(cfs);
            String counterDisplay = "";
            if (count == null || count <= 0) {
                counterDisplay = "0";
            } else {
                // Non-zero counters become drill-down links to the counter servlet.
                StringBuffer url = new StringBuffer("http://" + infoAddr + "/corruptfilecounter");
                url.append("?root=");
                url.append(URLEncoder.encode(path, "UTF-8"));
                url.append("&status=");
                url.append(URLEncoder.encode(cfs.name(), "UTF-8"));
                counterDisplay = getHTMLLinksText(url.toString(), String.valueOf(count));
            }
            oneRow.append(JspUtils.th(windowSize, counterDisplay));
        }
        // Append detection lag
        Long potentialCorruptFiles = counters.get(CorruptFileStatus.POTENTIALLY_CORRUPT);
        String detectionLag = "";
        if (potentialCorruptFiles == null || potentialCorruptFiles <= 0) {
            detectionLag = "0";
        } else if (numDetectionsPerSec < 1e-6) {
            // Effectively-zero throughput: lag is unknown, shown as "-".
            detectionLag = "-";
        } else {
            long costTime = (long) Math.ceil((double) potentialCorruptFiles / numDetectionsPerSec);
            detectionLag = Long.toString(costTime);
        }
        oneRow.append(JspUtils.th(windowSize, detectionLag));
        // First window's recovery lag and failed files go on the main row.
        oneRow.append(JspUtils.tdWithClass(getRecoveryLag(countersMap.firstKey(), countersMap, path, infoAddr),
                JspUtils.SMALL_CELL));
        oneRow.append(JspUtils.tdWithClass(getFailedFiles(countersMap.firstKey(), countersMap, path, infoAddr),
                JspUtils.SMALL_CELL));
        htmlSb.append(JspUtils.tr(oneRow.toString()));
        // Append recovery lags
        boolean head = true;
        for (Long window : countersMap.keySet()) {
            if (head) {
                // Skip the first window — already rendered on the main row above.
                head = false;
                continue;
            }
            StringBuilder sb = new StringBuilder();
            sb.append(JspUtils.tdWithClass(getRecoveryLag(window, countersMap, path, infoAddr),
                    JspUtils.SMALL_CELL));
            sb.append(JspUtils.tdWithClass(getFailedFiles(window, countersMap, path, infoAddr),
                    JspUtils.SMALL_CELL));
            htmlSb.append(JspUtils.tr(sb.toString()));
        }
    }
    return JspUtils.smallTable(htmlSb.toString());
}
From source file:com.sfs.whichdoctor.xml.writer.PersonXmlWriter.java
/** * Gets the mentors xml./*from w w w . ja v a 2 s.c o m*/ * * @param mentors the mentors * * @return the mentors xml */ private static String getMentorsXml(final TreeMap<String, ItemBean> mentors) { final XmlWriter xmlwriter = new XmlWriter(); xmlwriter.writeEntity("mentors"); for (String orderIndex : mentors.keySet()) { ItemBean item = mentors.get(orderIndex); xmlwriter.writeEntity("mentor").writeAttribute("personId", item.getObject2GUID()); xmlwriter.writeEntity("name").writeText(item.getName()).endEntity(); if (item.getStartDate() != null) { xmlwriter.writeEntity("startDate").writeText(Formatter.convertDate(item.getStartDate())) .endEntity(); } if (item.getEndDate() != null) { xmlwriter.writeEntity("endDate").writeText(Formatter.convertDate(item.getEndDate())).endEntity(); } if (item.getTitle() != null) { xmlwriter.writeEntity("description").writeText(item.getTitle()).endEntity(); } xmlwriter.endEntity(); } xmlwriter.endEntity(); return xmlwriter.getXml(); }
From source file:com.sfs.whichdoctor.xml.writer.PersonXmlWriter.java
/** * Gets the employers xml./* ww w .j a v a2 s . co m*/ * * @param employers the employers * * @return the employers xml */ private static String getEmployersXml(final TreeMap<String, ItemBean> employers) { final XmlWriter xmlwriter = new XmlWriter(); xmlwriter.writeEntity("employers"); for (String orderIndex : employers.keySet()) { ItemBean item = employers.get(orderIndex); xmlwriter.writeEntity("employer").writeAttribute("GUID", item.getObject1GUID()); xmlwriter.writeEntity("name").writeText(item.getName()).endEntity(); if (item.getStartDate() != null) { xmlwriter.writeEntity("startDate").writeText(Formatter.convertDate(item.getStartDate())) .endEntity(); } if (item.getEndDate() != null) { xmlwriter.writeEntity("endDate").writeText(Formatter.convertDate(item.getEndDate())).endEntity(); } if (item.getTitle() != null) { xmlwriter.writeEntity("description").writeText(item.getTitle()).endEntity(); } xmlwriter.endEntity(); } xmlwriter.endEntity(); return xmlwriter.getXml(); }
From source file:cooccurrence.Omer_Levy.java
/** * Method that will extract top D singular values from CoVariance Matrix * It will then identify the corresponding columns from U and V and add it to new matrices * @param U/*w w w . java 2 s . c o m*/ * @param V * @param coVariance * @param matrixUd * @param matrixVd * @param coVarD * @param indicesD */ private static void getTopD(RealMatrix U, RealMatrix V, RealMatrix coVariance, RealMatrix matrixUd, RealMatrix matrixVd, RealMatrix coVarD, ArrayList<Integer> indicesD) { TreeMap<Double, Set<Integer>> tmap = new TreeMap<>(); for (int i = 0; i < coVariance.getRowDimension(); i++) { double val = coVariance.getEntry(i, i); if (tmap.containsKey(val)) { Set<Integer> temp = tmap.get(val); temp.add(i); } else { Set<Integer> temp = new HashSet<>(); temp.add(i); tmap.put(val, temp); } } Iterator iter = tmap.keySet().iterator(); while (iter.hasNext()) { Double val = (Double) iter.next(); Set<Integer> indices = tmap.get(val); for (int i = 0; i < indices.size(); i++) { Iterator iterIndices = indices.iterator(); while (iterIndices.hasNext()) { int index = (Integer) iterIndices.next(); indicesD.add(index); coVarD.addToEntry(index, index, val); matrixUd.setColumnVector(index, U.getColumnVector(index)); matrixVd.setColumnVector(index, V.getColumnVector(index)); } } } }
From source file:org.apache.hadoop.hive.ql.exec.ToolBox.java
static <T> ArrayList<TreeMap<String, T>> aggregateKey(TreeMap<String, T> para, String delimiter, int idx) { ArrayList<TreeMap<String, T>> a = new ArrayList<TreeMap<String, T>>(); String prekey = null;//from w w w. j av a 2 s . co m TreeMap<String, T> h = null; for (String s : para.keySet()) { if (prekey == null) { prekey = retrieveComponent(s, delimiter, idx); h = new TreeMap<String, T>(); h.put(s, para.get(s)); } else if (prekey.equals(s)) { h.put(s, para.get(s)); } else { prekey = retrieveComponent(s, delimiter, idx); ; a.add(h); h = new TreeMap<String, T>(); h.put(s, para.get(s)); } } a.add(h); return a; }
From source file:com.sfs.whichdoctor.xml.writer.helper.AccreditationXmlHelper.java
/**
 * Output the training summary as an XML string.
 *
 * Groups the per-key AccreditationBean pairs (index 0 = core, index 1 =
 * non-core) by specialty type, then emits a {@code <trainingSummary>} element
 * with per-specialty subtotals. An empty input yields an empty document.
 *
 * @param trainingSummary the training summary: key -> [core, nonCore] pair
 * @param type the type attribute written on the root element
 *
 * @return the xml string
 */
public static String getSummaryXml(final TreeMap<String, AccreditationBean[]> trainingSummary,
        final String type) {
    final XmlWriter xmlwriter = new XmlWriter();
    if (trainingSummary.size() > 0) {
        int totalCore = 0;
        int totalNonCore = 0;
        // Re-key the summaries by specialty type, accumulating overall totals.
        TreeMap<String, ArrayList<AccreditationBean[]>> summaryTreemap =
                new TreeMap<String, ArrayList<AccreditationBean[]>>();
        for (String summaryKey : trainingSummary.keySet()) {
            AccreditationBean[] details = trainingSummary.get(summaryKey);
            AccreditationBean core = details[0];
            AccreditationBean nonCore = details[1];
            totalCore += core.getWeeksCertified();
            totalNonCore += nonCore.getWeeksCertified();
            if (StringUtils.isNotBlank(core.getSpecialtyType())) {
                ArrayList<AccreditationBean[]> summaries = new ArrayList<AccreditationBean[]>();
                if (!summaryTreemap.containsKey(core.getSpecialtyType())) {
                    /* New type of specialty */
                    summaries.add(details);
                } else {
                    /* Existing specialty */
                    summaries = summaryTreemap.get(core.getSpecialtyType());
                    summaries.add(details);
                }
                summaryTreemap.put(core.getSpecialtyType(), summaries);
            }
        }
        xmlwriter.writeEntity("trainingSummary");
        xmlwriter.writeAttribute("type", type);
        xmlwriter.writeAttribute("totalCore", Formatter.getWholeMonths(totalCore));
        xmlwriter.writeAttribute("totalNonCore", Formatter.getWholeMonths(totalNonCore));
        xmlwriter.writeEntity("specialtyTraining");
        for (String specialtyType : summaryTreemap.keySet()) {
            ArrayList<AccreditationBean[]> summaries = summaryTreemap.get(specialtyType);
            int typeCoreWeeks = 0;
            int typeNCWeeks = 0;
            if (summaries != null) {
                // For each accredited specialty create an element
                xmlwriter.writeEntity("specialty");
                xmlwriter.writeEntity("subtypes");
                // These carry the last-seen values out of the loop below for the
                // specialty-level elements written after it.
                String division = "";
                String abbreviation = "";
                String specialtytype = "";
                String typeAbbreviation = "";
                for (Object[] summary : summaries) {
                    boolean blSubType = false;
                    AccreditationBean core = (AccreditationBean) summary[0];
                    AccreditationBean nonCore = (AccreditationBean) summary[1];
                    division = core.getAccreditationClass();
                    abbreviation = core.getAbbreviation();
                    specialtytype = core.getSpecialtyType();
                    typeAbbreviation = core.getSpecialtyTypeAbbreviation();
                    if (StringUtils.isNotBlank(core.getSpecialtySubType())) {
                        // Named subtype: emit a <subtype> including its name.
                        blSubType = true;
                        xmlwriter.writeEntity("subtype");
                        xmlwriter.writeEntity("name").writeText(core.getSpecialtySubType()).endEntity();
                        xmlwriter.writeEntity("coreMonths")
                                .writeText(Formatter.getWholeMonths(core.getWeeksCertified())).endEntity();
                        xmlwriter.writeEntity("nonCoreMonths")
                                .writeText(Formatter.getWholeMonths(nonCore.getWeeksCertified())).endEntity();
                        xmlwriter.endEntity();
                        typeCoreWeeks += core.getWeeksCertified();
                        typeNCWeeks += nonCore.getWeeksCertified();
                    }
                    if (!blSubType) {
                        // No subtype name: emit an anonymous <subtype> element.
                        xmlwriter.writeEntity("subtype");
                        xmlwriter.writeEntity("coreMonths")
                                .writeText(Formatter.getWholeMonths(core.getWeeksCertified())).endEntity();
                        xmlwriter.writeEntity("nonCoreMonths")
                                .writeText(Formatter.getWholeMonths(nonCore.getWeeksCertified())).endEntity();
                        xmlwriter.endEntity();
                        typeCoreWeeks += core.getWeeksCertified();
                        typeNCWeeks += nonCore.getWeeksCertified();
                    }
                }
                xmlwriter.endEntity();
                xmlwriter.writeEntity("division").writeText(division).endEntity();
                xmlwriter.writeEntity("abbreviation").writeText(abbreviation).endEntity();
                xmlwriter.writeEntity("type").writeText(specialtytype).endEntity();
                xmlwriter.writeEntity("typeAbbreviation").writeText(typeAbbreviation).endEntity();
                xmlwriter.writeEntity("coreMonths").writeText(Formatter.getWholeMonths(typeCoreWeeks))
                        .endEntity();
                xmlwriter.writeEntity("nonCoreMonths").writeText(Formatter.getWholeMonths(typeNCWeeks))
                        .endEntity();
                xmlwriter.endEntity();
            }
        }
        xmlwriter.endEntity();
        xmlwriter.endEntity();
    }
    return xmlwriter.getXml();
}
From source file:org.apache.hadoop.hive.ql.exec.ToolBox.java
/**
 * Splits {@code para} into runs of consecutive keys, intended to group by the
 * key component at position {@code idx}.
 *
 * @deprecated superseded by the generic aggregateKey(TreeMap, String, int).
 */
@Deprecated
static ArrayList<TreeMap<String, String>> aggregateKey_string(TreeMap<String, String> para, String delimiter,
        int idx) {
    ArrayList<TreeMap<String, String>> a = new ArrayList<TreeMap<String, String>>();
    String prekey = null;
    TreeMap<String, String> h = null;
    for (String s : para.keySet()) {
        if (prekey == null) {
            // First key: open the first group.
            prekey = retrieveComponent(s, delimiter, idx);
            h = new TreeMap<String, String>();
            h.put(s, para.get(s));
        } else if (prekey.equals(s)) {
            // NOTE(review): this compares the extracted component against the FULL
            // key, so this branch fires only when they happen to be equal — likely
            // should be prekey.equals(retrieveComponent(s, delimiter, idx)).
            h.put(s, para.get(s));
        } else {
            prekey = retrieveComponent(s, delimiter, idx);
            ; // stray empty statement retained from the original
            a.add(h);
            h = new TreeMap<String, String>();
            h.put(s, para.get(s));
        }
    }
    // NOTE(review): for an empty input this adds null to the result — callers beware.
    a.add(h);
    return a;
}
From source file:org.apache.hadoop.hive.ql.exec.ToolBox.java
/**
 * Splits {@code para} into runs of consecutive keys, intended to group by the
 * key component at position {@code idx}.
 *
 * @deprecated superseded by the generic aggregateKey(TreeMap, String, int).
 */
@Deprecated
static ArrayList<TreeMap<String, Integer>> aggregateKey_Integer(TreeMap<String, Integer> para,
        String delimiter, int idx) {
    ArrayList<TreeMap<String, Integer>> a = new ArrayList<TreeMap<String, Integer>>();
    String prekey = null;
    TreeMap<String, Integer> h = null;
    for (String s : para.keySet()) {
        if (prekey == null) {
            // First key: open the first group.
            prekey = retrieveComponent(s, delimiter, idx);
            h = new TreeMap<String, Integer>();
            h.put(s, para.get(s));
        } else if (prekey.equals(s)) {
            // NOTE(review): this compares the extracted component against the FULL
            // key, so this branch fires only when they happen to be equal — likely
            // should be prekey.equals(retrieveComponent(s, delimiter, idx)).
            h.put(s, para.get(s));
        } else {
            prekey = retrieveComponent(s, delimiter, idx);
            ; // stray empty statement retained from the original
            a.add(h);
            h = new TreeMap<String, Integer>();
            h.put(s, para.get(s));
        }
    }
    // NOTE(review): for an empty input this adds null to the result — callers beware.
    a.add(h);
    return a;
}