List of usage examples for java.util SortedMap entrySet
Set<Map.Entry<K, V>> entrySet();
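As a quick, self-contained illustration (not taken from any of the projects below), the following sketch shows entrySet() on a TreeMap-backed SortedMap: the returned set is a view of the map, and its iterator walks entries in ascending key order, which is what all of the examples below rely on. The class name and map contents are made up for the demo.

import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class SortedMapEntrySetDemo {
    public static void main(String[] args) {
        // TreeMap keeps keys sorted by their natural ordering (here, String)
        SortedMap<String, Integer> scores = new TreeMap<>();
        scores.put("charlie", 3);
        scores.put("alpha", 1);
        scores.put("bravo", 2);

        // entrySet() returns a view backed by the map; iteration follows the key ordering
        for (Map.Entry<String, Integer> entry : scores.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
        // prints: alpha -> 1, bravo -> 2, charlie -> 3
    }
}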
From source file:de.appsolve.padelcampus.controller.events.EventsController.java
private ModelAndView getGroupGameView(Long eventId, Integer roundNumber) {
    Event event = eventDAO.findByIdFetchWithGames(eventId);
    if (event == null) {
        throw new ResourceNotFoundException();
    }
    ModelAndView mav = new ModelAndView("events/groupknockout/groupgames", "Model", event);
    event = eventDAO.findByIdFetchWithGames(eventId);
    SortedMap<Integer, List<Game>> groupGameMap = eventsUtil.getGroupGameMap(event, roundNumber);
    SortedMap<Integer, List<Game>> roundGameMap;
    if (roundNumber == null) {
        roundGameMap = eventsUtil.getRoundGameMap(event);
    } else {
        roundGameMap = eventsUtil.getRoundGameMap(event, roundNumber + 1);
    }

    // Group -> Participant -> Game -> GameResult
    SortedMap<Integer, Map<Participant, Map<Game, String>>> groupParticipantGameResultMap = new TreeMap<>();
    Iterator<Map.Entry<Integer, List<Game>>> iterator = groupGameMap.entrySet().iterator();
    while (iterator.hasNext()) {
        Map.Entry<Integer, List<Game>> entry = iterator.next();
        Map<Participant, Map<Game, String>> participantGameResultMap = gameUtil
                .getParticipantGameResultMap(entry.getValue(), false);
        Integer group = entry.getKey();
        groupParticipantGameResultMap.put(group, participantGameResultMap);
    }

    mav.addObject("GroupParticipantGameResultMap", groupParticipantGameResultMap);
    mav.addObject("RoundGameMap", roundGameMap);
    mav.addObject("Round", roundNumber);
    mav.addObject("GameResultMap", gameUtil.getGameResultMap(event.getGames()));
    return mav;
}
From source file:testful.coverage.behavior.AbstractorNumber.java
@Override
public Abstraction get(Map<String, Object> ctx) {
    // evaluate the expression and retrieve the result (obj)
    Object obj = evaluateExpression(ctx);

    if (obj == null)
        return new AbstractionObjectReference(expression, true);

    if (!(obj instanceof Number)) {
        logger.warning("Expected a number in AbstractorNumber!");
        return new Abstraction.AbstractionError(expression);
    }

    // if there are no intervals, then use the "default" abstraction
    if (intervalsString == null)
        return get(expression, obj);

    double elem = ((Number) obj).doubleValue();

    // ensure that this.intervals contains parsed expressions
    getIntervals();

    // retrieve the value of intervals. it may contain null values
    Double[] values = evaluateIntervals(ctx);

    // if the value is NaN, calculate the return value
    if (Double.isNaN(elem)) {
        String label = null;
        for (int i = 0; i < values.length; i++)
            if (values[i] != null && Double.isNaN(values[i])) {
                if (label == null)
                    label = intervalsString[i];
                else
                    label += "," + intervalsString[i];
            }

        if (label == null)
            return new AbstractionNumber(expression, expression + " is " + AbstractionNumber.NaN);
        return new AbstractionNumber(expression, expression + " = " + label);
    }

    // build the sortedValues map
    SortedMap<Double, String> sortedValues = new TreeMap<Double, String>();
    for (int i = 0; i < values.length; i++)
        if (values[i] != null && !Double.isNaN(values[i])) {
            String label = sortedValues.get(values[i]);
            if (label == null)
                label = intervalsString[i];
            else
                label += "," + intervalsString[i];
            sortedValues.put(values[i], label);
        }
    if (!sortedValues.containsKey(Double.POSITIVE_INFINITY))
        sortedValues.put(Double.POSITIVE_INFINITY, AbstractionNumber.P_INF);
    if (!sortedValues.containsKey(Double.NEGATIVE_INFINITY))
        sortedValues.put(Double.NEGATIVE_INFINITY, AbstractionNumber.N_INF);

    // calculate the interval
    String prev = null;
    for (Entry<Double, String> entry : sortedValues.entrySet()) {
        if (prev != null && elem < entry.getKey())
            return new AbstractionNumber(expression, prev + " < " + expression + " < " + entry.getValue());
        if (elem == entry.getKey())
            return new AbstractionNumber(expression, expression + " = " + entry.getValue());
        prev = entry.getValue();
    }

    if (TestFul.DEBUG)
        TestFul.debug("This point was not supposed to be reachable.");

    return new AbstractionNumber(expression, expression + " = " + AbstractionNumber.P_INF);
}
From source file:com.cloudera.flume.reporter.ReportEvent.java
/**
 * Defaults to printing data out as a bunch of HTML table cells.
 */
public void toHtml(Writer o) throws IOException {
    PrintWriter pw = new PrintWriter(o);
    pw.print("<table>");
    pw.print("<tr>");
    pw.print("<th>host</th>");
    pw.print("<td>");
    pw.print(getHost());
    pw.println("</td>");
    pw.print("</tr>");

    // get the attributes, filter, sort and output them
    SortedMap<String, String> reportAttrs = new TreeMap<String, String>();
    for (String attr : getAttrs().keySet()) {
        reportAttrs.put(attr, Attributes.toString(this, attr));
    }
    for (Entry<String, Long> e : getAllLongMetrics().entrySet()) {
        reportAttrs.put(e.getKey(), e.getValue().toString());
    }
    for (Entry<String, Double> e : getAllDoubleMetrics().entrySet()) {
        reportAttrs.put(e.getKey(), e.getValue().toString());
    }
    for (Entry<String, String> e : getAllStringMetrics().entrySet()) {
        reportAttrs.put(e.getKey(), e.getValue());
    }

    for (Entry<String, String> a : reportAttrs.entrySet()) {
        pw.println("<tr><th>" + a.getKey() + "</th>");
        pw.print("<td>");
        pw.print("<div class=\"" + a.getKey() + "\">");
        pw.print(a.getValue());
        pw.print("</div>");
        pw.println("</td>");
        pw.println("</tr>");
    }
    pw.print("</table>");
}
From source file:org.rm3l.ddwrt.tiles.status.wan.WANMonthlyTrafficTile.java
private Intent renderTraffDateForMonth(@NotNull final String monthFormatted) {
    Log.d(LOG_TAG, "renderTraffDateForMonth: " + monthFormatted);
    if (traffData == null) {
        return null;
    }
    final ImmutableMap<Integer, ArrayList<Double>> row = traffData.row(monthFormatted);
    if (row == null) {
        return null;
    }
    final SortedMap<Integer, ArrayList<Double>> dailyTraffMap = new TreeMap<>(row);
    if (dailyTraffMap.isEmpty()) {
        return null;
    }
    Log.d(LOG_TAG, "renderTraffDateForMonth: " + monthFormatted + " / dailyTraffMap=" + dailyTraffMap);

    final int size = dailyTraffMap.size();
    final int[] days = new int[size];
    final double[] inData = new double[size];
    final double[] outData = new double[size];

    int i = 0;
    for (final Map.Entry<Integer, ArrayList<Double>> dailyTraffMapEntry : dailyTraffMap.entrySet()) {
        final ArrayList<Double> dailyTraffMapEntryValue = dailyTraffMapEntry.getValue();
        if (dailyTraffMapEntryValue == null || dailyTraffMapEntryValue.size() < 2) {
            continue;
        }
        final Double in = dailyTraffMapEntryValue.get(0);
        final Double out = dailyTraffMapEntryValue.get(1);
        if (in == null || out == null) {
            continue;
        }
        days[i] = dailyTraffMapEntry.getKey();
        inData[i] = in;
        outData[i] = out;
        i++;
    }

    // Creating an XYSeries for Inbound
    final XYSeries inboundSeries = new XYSeries("Inbound");
    // Creating an XYSeries for Outbound
    final XYSeries outboundSeries = new XYSeries("Outbound");

    // Adding data to In and Out Series
    for (int j = 0; j < i; j++) {
        inboundSeries.add(j, inData[j]);
        outboundSeries.add(j, outData[j]);
    }

    // Creating a dataset to hold each series
    final XYMultipleSeriesDataset dataset = new XYMultipleSeriesDataset();
    // Adding inbound Series to the dataset
    dataset.addSeries(inboundSeries);
    // Adding outbound Series to dataset
    dataset.addSeries(outboundSeries);

    // Creating XYSeriesRenderer to customize inboundSeries
    final XYSeriesRenderer inboundRenderer = new XYSeriesRenderer();
    inboundRenderer.setColor(Color.rgb(130, 130, 230));
    inboundRenderer.setFillPoints(true);
    inboundRenderer.setLineWidth(2);
    inboundRenderer.setDisplayChartValues(true);

    // Creating XYSeriesRenderer to customize outboundSeries
    final XYSeriesRenderer outboundRenderer = new XYSeriesRenderer();
    outboundRenderer.setColor(Color.rgb(220, 80, 80));
    outboundRenderer.setFillPoints(true);
    outboundRenderer.setLineWidth(2);
    outboundRenderer.setDisplayChartValues(true);

    // Creating a XYMultipleSeriesRenderer to customize the whole chart
    final XYMultipleSeriesRenderer multiRenderer = new XYMultipleSeriesRenderer();
    multiRenderer.setXLabels(i);
    multiRenderer.setChartTitle("Traffic Data for '" + monthFormatted + "'");
    multiRenderer.setXTitle("Days");
    multiRenderer.setYTitle("Traffic (MB)");
    multiRenderer.setZoomButtonsVisible(true);
    for (int k = 0; k < i; k++) {
        multiRenderer.addXTextLabel(k, String.valueOf(days[k]));
    }
    // for (int k = 0; k < days.length; k++) {
    //     multiRenderer.addXTextLabel(k, String.valueOf(k));
    // }

    // Adding inboundRenderer and outboundRenderer to multipleRenderer
    // Note: The order of adding dataseries to dataset and renderers to multipleRenderer
    // should be same
    multiRenderer.addSeriesRenderer(inboundRenderer);
    multiRenderer.addSeriesRenderer(outboundRenderer);

    return ChartFactory.getBarChartIntent(this.mParentFragmentActivity, dataset, multiRenderer,
            BarChart.Type.DEFAULT);
}
From source file:org.alfresco.repo.domain.node.NodePropertyHelper.java
private Serializable collapsePropertiesWithSameQName(PropertyDefinition propertyDef,
        SortedMap<NodePropertyKey, NodePropertyValue> sortedPropertyValues) {
    Serializable result = null;
    Collection<Serializable> collectionResult = null;
    // A working map. Ordering is not important for this map.
    Map<NodePropertyKey, NodePropertyValue> scratch = new HashMap<NodePropertyKey, NodePropertyValue>(3);
    // Iterate (sorted) over the map entries and extract values with the same list index
    Integer currentListIndex = Integer.MIN_VALUE;
    Iterator<Map.Entry<NodePropertyKey, NodePropertyValue>> iterator = sortedPropertyValues.entrySet()
            .iterator();
    while (true) {
        Integer nextListIndex = null;
        NodePropertyKey nextPropertyKey = null;
        NodePropertyValue nextPropertyValue = null;
        // Record the next entry's values
        if (iterator.hasNext()) {
            Map.Entry<NodePropertyKey, NodePropertyValue> entry = iterator.next();
            nextPropertyKey = entry.getKey();
            nextPropertyValue = entry.getValue();
            nextListIndex = nextPropertyKey.getListIndex();
        }
        // If the list index is going to change, and we have some entries to process, then process them.
        if (scratch.size() > 0 && (nextListIndex == null || !nextListIndex.equals(currentListIndex))) {
            // We have added something to the scratch properties but the index has just changed
            Serializable collapsedValue = collapsePropertiesWithSameQNameAndListIndex(propertyDef, scratch);
            // Store. If there is a value already, then we must build a collection.
            if (result == null) {
                result = collapsedValue;
            } else if (collectionResult != null) {
                // We have started a collection, so just add the value to it.
                collectionResult.add(collapsedValue);
            } else {
                // We already had a result, and now have another. A collection has not been
                // started. We start a collection and explicitly keep track of it so that
                // we don't get mixed up with collections of collections (ETHREEOH-2064).
                collectionResult = new ArrayList<Serializable>(20);
                collectionResult.add(result); // Add the first result
                collectionResult.add(collapsedValue); // Add the new value
                result = (Serializable) collectionResult;
            }
            // Reset
            scratch.clear();
        }
        if (nextListIndex != null) {
            // Add to the current entries
            scratch.put(nextPropertyKey, nextPropertyValue);
            currentListIndex = nextListIndex;
        } else {
            // There is no next value to process
            break;
        }
    }
    // Make sure that multi-valued properties are returned as a collection
    if (propertyDef != null && propertyDef.isMultiValued() && result != null
            && !(result instanceof Collection<?>)) {
        // Can't use Collections.singletonList: ETHREEOH-1172
        ArrayList<Serializable> collection = new ArrayList<Serializable>(1);
        collection.add(result);
        result = collection;
    }
    // Done
    return result;
}
From source file:org.eurocarbdb.application.glycoworkbench.plugin.PeakListChartPanel.java
private Peak findNearestPeak(double mz, double intensity, double mz_toll, double int_toll) {
    java.util.SortedMap<Double, Double> submap = visibleData.subMap(mz - mz_toll, mz + mz_toll);

    Peak found = null;
    double min_diff = 0.;
    for (Map.Entry<Double, Double> e : submap.entrySet()) {
        double diff = Math.abs(e.getValue() - intensity);
        if (diff <= int_toll) {
            if (found == null || diff < min_diff) {
                found = new Peak(e.getKey(), e.getValue());
                min_diff = diff; // remember the best (smallest) difference seen so far
            }
        }
    }
    return found;
}
From source file:de.appsolve.padelcampus.admin.controller.events.AdminEventsController.java
private List<Participant> getRankedGroupParticipants(SortedMap<Integer, List<Game>> groupGames,
        Integer numberOfGroups, Integer maxWinnersPerGroup) throws Exception {
    Map<Integer, List<Participant>> rankedGroupParticipants = new TreeMap<>();
    Iterator<Map.Entry<Integer, List<Game>>> iterator = groupGames.entrySet().iterator();
    while (iterator.hasNext()) {
        Map.Entry<Integer, List<Game>> entry = iterator.next();
        Integer groupNumber = entry.getKey();
        List<Game> games = entry.getValue();

        // determine participants based on games to filter out participants who did not play
        Set<Participant> participants = new HashSet<>();
        List<Game> playedGames = new ArrayList<>();
        for (Game game : games) {
            if (!game.getGameSets().isEmpty()) {
                participants.addAll(game.getParticipants());
                playedGames.add(game);
            }
        }
        if (participants.isEmpty() || playedGames.isEmpty()) {
            throw new Exception(msg.get("CannotEndGroupGames"));
        }

        // get list of score entries sorted by rank
        List<ScoreEntry> scoreEntries = rankingUtil.getScores(participants, playedGames);
        for (int groupPos = 0; groupPos < maxWinnersPerGroup; groupPos++) {
            List<Participant> rankedParticipants = rankedGroupParticipants.get(groupNumber);
            if (rankedParticipants == null) {
                rankedParticipants = new ArrayList<>();
            }
            Participant p = null;
            try {
                p = scoreEntries.get(groupPos).getParticipant();
            } catch (IndexOutOfBoundsException e) {
                // could happen when not enough games were played in one group
            }
            rankedParticipants.add(p);
            rankedGroupParticipants.put(groupNumber, rankedParticipants);
        }
    }

    // sort participants so that group winners are first
    List<Participant> rankedParticipants = new ArrayList<>();
    for (int groupPos = 0; groupPos < maxWinnersPerGroup; groupPos++) {
        for (int group = 0; group < numberOfGroups; group++) {
            rankedParticipants.add(rankedGroupParticipants.get(group).get(groupPos));
        }
    }
    return rankedParticipants;
}
From source file:lisong_mechlab.view.graphs.DpsGraph.java
private TableXYDataset getSeries() {
    final Collection<Modifier> modifiers = loadout.getModifiers();
    SortedMap<Weapon, Integer> multiplicity = new TreeMap<Weapon, Integer>(new Comparator<Weapon>() {
        @Override
        public int compare(Weapon aO1, Weapon aO2) {
            int comp = Double.compare(aO2.getRangeMax(modifiers), aO1.getRangeMax(modifiers));
            if (comp == 0)
                return aO1.compareTo(aO2);
            return comp;
        }
    });

    for (Weapon weapon : loadout.items(Weapon.class)) {
        if (!weapon.isOffensive())
            continue;
        if (!multiplicity.containsKey(weapon)) {
            multiplicity.put(weapon, 0);
        }
        int v = multiplicity.get(weapon);
        multiplicity.put(weapon, v + 1);
    }

    List<Weapon> orderedWeapons = new ArrayList<>();
    Double[] ranges = WeaponRanges.getRanges(multiplicity.keySet(), modifiers);
    DefaultTableXYDataset dataset = new DefaultTableXYDataset();
    for (Map.Entry<Weapon, Integer> e : multiplicity.entrySet()) {
        Weapon weapon = e.getKey();
        int mult = e.getValue();
        XYSeries series = new XYSeries(weapon.getName(), true, false);
        for (double range : ranges) {
            final double dps = weapon.getStat("d/s", modifiers);
            final double rangeEff = weapon.getRangeEffectivity(range, modifiers);
            series.add(range, dps * rangeEff * mult);
        }
        dataset.addSeries(series);
        orderedWeapons.add(e.getKey());
    }
    Collections.reverse(orderedWeapons);

    colours.updateColoursToMatch(orderedWeapons);
    return dataset;
}
From source file:com.aurel.track.report.dashboard.BurnDownChart.java
private String convertResultMapToJSONData(SortedMap<Date, EarnedValueTimeSlice> earnedValueTimeSliceMap,
        Double maxValPlanned, Double maxEarned, int reportingInterval, int effortType, Integer timeInterval) {
    StringBuilder local = new StringBuilder();
    local.append("[");
    boolean last = false;
    for (Map.Entry<Date, EarnedValueTimeSlice> entry : earnedValueTimeSliceMap.entrySet()) {
        local.append("{");
        JSONUtility.appendStringValue(local, "date",
                getDateBasedOnReportInterval(entry.getKey(), reportingInterval));
        Double plannedVal = entry.getValue().getPlannedValue();
        if (effortType == EFFORT_TYPE.TIME) {
            plannedVal = maxValPlanned - entry.getValue().getPlannedValue();
        }
        JSONUtility.appendDoubleValue(local, "plannedValue", plannedVal, true);
        Double earnValue = 0.0;
        if (entry.getValue().getEarnedvalue() != null) {
            earnValue = entry.getValue().getEarnedvalue();
        }
        if (effortType == EFFORT_TYPE.TIME) {
            earnValue = maxEarned - earnValue;
        }
        int calendarInterval = EarnedValueDatasource.getCalendarInterval(timeInterval);
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(new Date());
        calendar.add(calendarInterval, 1);
        if (DateTimeUtils.lessOrEqual(entry.getKey(), calendar.getTime())) {
            local.append(",");
            JSONUtility.appendDoubleValue(local, "earnedValue", earnValue, true);
        }
        local.append("},");
    }
    if (local != null && local.length() > 0) {
        if (local.toString().endsWith(",")) {
            local = new StringBuilder(local.substring(0, local.length() - 1));
        }
    }
    local.append("]");
    return local.toString();
}
From source file:org.apache.hadoop.hdfs.server.namenode.TestFSEditLogLoader.java
@Test
public void testValidateEditLogWithCorruptBody() throws IOException {
    File testDir = new File(TEST_DIR, "testValidateEditLogWithCorruptBody");
    SortedMap<Long, Long> offsetToTxId = Maps.newTreeMap();
    final int NUM_TXNS = 20;
    File logFile = prepareUnfinalizedTestEditLog(testDir, NUM_TXNS, offsetToTxId);
    // Back up the uncorrupted log
    File logFileBak = new File(testDir, logFile.getName() + ".bak");
    Files.copy(logFile, logFileBak);
    EditLogValidation validation = EditLogFileInputStream.validateEditLog(logFile);
    assertTrue(!validation.hasCorruptHeader());
    // We expect that there will be an OP_START_LOG_SEGMENT, followed by
    // NUM_TXNS opcodes, followed by an OP_END_LOG_SEGMENT.
    assertEquals(NUM_TXNS + 1, validation.getEndTxId());

    // Corrupt each edit and verify that validation continues to work
    for (Map.Entry<Long, Long> entry : offsetToTxId.entrySet()) {
        long txOffset = entry.getKey();
        long txId = entry.getValue();
        // Restore backup, corrupt the txn opcode
        Files.copy(logFileBak, logFile);
        corruptByteInFile(logFile, txOffset);
        validation = EditLogFileInputStream.validateEditLog(logFile);
        long expectedEndTxId = (txId == (NUM_TXNS + 1)) ? NUM_TXNS : (NUM_TXNS + 1);
        assertEquals("Failed when corrupting txn opcode at " + txOffset, expectedEndTxId,
                validation.getEndTxId());
        assertTrue(!validation.hasCorruptHeader());
    }

    // Truncate right before each edit and verify that validation continues
    // to work
    for (Map.Entry<Long, Long> entry : offsetToTxId.entrySet()) {
        long txOffset = entry.getKey();
        long txId = entry.getValue();
        // Restore backup, truncate right before the txn opcode
        Files.copy(logFileBak, logFile);
        truncateFile(logFile, txOffset);
        validation = EditLogFileInputStream.validateEditLog(logFile);
        long expectedEndTxId = (txId == 0) ? HdfsServerConstants.INVALID_TXID : (txId - 1);
        assertEquals("Failed when corrupting txid " + txId + " txn opcode " + "at " + txOffset,
                expectedEndTxId, validation.getEndTxId());
        assertTrue(!validation.hasCorruptHeader());
    }
}