List of usage examples for java.util.Collections.min
public static <T extends Object & Comparable<? super T>> T min(Collection<? extends T> coll)
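Before the project examples below, here is a minimal stand-alone sketch of the two common calling patterns (natural ordering and an explicit Comparator). The values and class name are made up for illustration; note that Collections.min throws NoSuchElementException when the collection is empty.

import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class CollectionsMinSketch {
    public static void main(String[] args) {
        // Natural ordering: smallest element of a Comparable collection.
        List<Integer> numbers = Arrays.asList(42, 7, 19);
        Integer smallest = Collections.min(numbers); // 7

        // Comparator overload: shortest string by length.
        List<String> words = Arrays.asList("delta", "ab", "code");
        String shortest = Collections.min(words, Comparator.comparingInt(String::length)); // "ab"

        System.out.println(smallest + " " + shortest);

        // Collections.min on an empty collection throws NoSuchElementException.
    }
}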
From source file:org.apache.hadoop.yarn.util.Log4jWarningErrorMetricsAppender.java
private List<Integer> getCounts(SortedMap<Long, Integer> map, List<Long> cutoffs) {
    List<Integer> ret = new ArrayList<>();
    Long largestCutoff = Collections.min(cutoffs);
    for (int i = 0; i < cutoffs.size(); ++i) {
        ret.add(0);
    }
    synchronized (lock) {
        Map<Long, Integer> submap = map.tailMap(largestCutoff);
        for (Map.Entry<Long, Integer> entry : submap.entrySet()) {
            for (int i = 0; i < cutoffs.size(); ++i) {
                if (entry.getKey() >= cutoffs.get(i)) {
                    int tmp = ret.get(i);
                    ret.set(i, tmp + entry.getValue());
                }
            }
        }
    }
    return ret;
}
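A detail worth noting in the example above: SortedMap.tailMap(fromKey) is inclusive of fromKey, so taking Collections.min of the cutoffs first restricts the scan to the smallest relevant window. A tiny stand-alone sketch of that combination, with invented map contents:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class TailMapMinSketch {
    public static void main(String[] args) {
        SortedMap<Long, Integer> counts = new TreeMap<>();
        counts.put(10L, 1);
        counts.put(20L, 2);
        counts.put(30L, 3);

        List<Long> cutoffs = Arrays.asList(25L, 15L);
        Long smallestCutoff = Collections.min(cutoffs); // 15

        // tailMap is inclusive of the fromKey, so this view holds keys >= 15: {20=2, 30=3}
        Map<Long, Integer> window = counts.tailMap(smallestCutoff);
        System.out.println(window);
    }
}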
From source file:org.fenixedu.academic.service.services.resourceAllocationManager.SearchExecutionCourses.java
private List<InfoExecutionCourse> fillInfoExecutionCourses(final AcademicInterval academicInterval,
        List<ExecutionCourse> executionCourses) {
    List<InfoExecutionCourse> result;
    result = (List<InfoExecutionCourse>) CollectionUtils.collect(executionCourses, new Transformer() {

        @Override
        public Object transform(Object arg0) {
            InfoExecutionCourse infoExecutionCourse = null;
            infoExecutionCourse = getOccupancyLevels(arg0);
            return infoExecutionCourse;
        }

        private InfoExecutionCourse getOccupancyLevels(Object arg0) {
            InfoExecutionCourse infoExecutionCourse;

            ExecutionCourse executionCourse = (ExecutionCourse) arg0;

            Integer theoreticalCapacity = Integer.valueOf(0);
            Integer theoPraticalCapacity = Integer.valueOf(0);
            Integer praticalCapacity = Integer.valueOf(0);
            Integer labCapacity = Integer.valueOf(0);
            Integer doubtsCapacity = Integer.valueOf(0);
            Integer reserveCapacity = Integer.valueOf(0);
            Integer semCapacity = Integer.valueOf(0);
            Integer probCapacity = Integer.valueOf(0);
            Integer fieldCapacity = Integer.valueOf(0);
            Integer trainCapacity = Integer.valueOf(0);
            Integer tutCapacity = Integer.valueOf(0);

            Set<Shift> shifts = executionCourse.getAssociatedShifts();
            Iterator<Shift> iterator = shifts.iterator();
            while (iterator.hasNext()) {
                Shift shift = iterator.next();
                if (shift.containsType(ShiftType.TEORICA)) {
                    theoreticalCapacity = Integer
                            .valueOf(theoreticalCapacity.intValue() + shift.getLotacao().intValue());
                } else if (shift.containsType(ShiftType.TEORICO_PRATICA)) {
                    theoPraticalCapacity = Integer
                            .valueOf(theoPraticalCapacity.intValue() + shift.getLotacao().intValue());
                } else if (shift.containsType(ShiftType.DUVIDAS)) {
                    doubtsCapacity = Integer.valueOf(doubtsCapacity.intValue() + shift.getLotacao().intValue());
                } else if (shift.containsType(ShiftType.LABORATORIAL)) {
                    labCapacity = Integer.valueOf(labCapacity.intValue() + shift.getLotacao().intValue());
                } else if (shift.containsType(ShiftType.PRATICA)) {
                    praticalCapacity = Integer
                            .valueOf(praticalCapacity.intValue() + shift.getLotacao().intValue());
                } else if (shift.containsType(ShiftType.RESERVA)) {
                    reserveCapacity = Integer
                            .valueOf(reserveCapacity.intValue() + shift.getLotacao().intValue());
                } else if (shift.containsType(ShiftType.SEMINARY)) {
                    semCapacity = Integer.valueOf(semCapacity.intValue() + shift.getLotacao().intValue());
                } else if (shift.containsType(ShiftType.PROBLEMS)) {
                    probCapacity = Integer.valueOf(probCapacity.intValue() + shift.getLotacao().intValue());
                } else if (shift.containsType(ShiftType.FIELD_WORK)) {
                    fieldCapacity = Integer.valueOf(fieldCapacity.intValue() + shift.getLotacao().intValue());
                } else if (shift.containsType(ShiftType.TRAINING_PERIOD)) {
                    trainCapacity = Integer.valueOf(trainCapacity.intValue() + shift.getLotacao().intValue());
                } else if (shift.containsType(ShiftType.TUTORIAL_ORIENTATION)) {
                    tutCapacity = Integer.valueOf(tutCapacity.intValue() + shift.getLotacao().intValue());
                }
            }

            infoExecutionCourse = InfoExecutionCourse.newInfoFromDomain(executionCourse);

            List<Integer> capacities = new ArrayList<Integer>();
            if (theoreticalCapacity.intValue() != 0) {
                capacities.add(theoreticalCapacity);
            }
            if (theoPraticalCapacity.intValue() != 0) {
                capacities.add(theoPraticalCapacity);
            }
            if (doubtsCapacity.intValue() != 0) {
                capacities.add(doubtsCapacity);
            }
            if (labCapacity.intValue() != 0) {
                capacities.add(labCapacity);
            }
            if (praticalCapacity.intValue() != 0) {
                capacities.add(praticalCapacity);
            }
            if (reserveCapacity.intValue() != 0) {
                capacities.add(reserveCapacity);
            }
            if (semCapacity.intValue() != 0) {
                capacities.add(semCapacity);
            }
            if (probCapacity.intValue() != 0) {
                capacities.add(probCapacity);
            }
            if (fieldCapacity.intValue() != 0) {
                capacities.add(fieldCapacity);
            }
            if (trainCapacity.intValue() != 0) {
                capacities.add(trainCapacity);
            }
            if (tutCapacity.intValue() != 0) {
                capacities.add(tutCapacity);
            }

            int total = 0;
            if (!capacities.isEmpty()) {
                total = (Collections.min(capacities)).intValue();
            }
            if (total == 0) {
                infoExecutionCourse.setOccupancy(Double.valueOf(-1));
            } else {
                infoExecutionCourse.setOccupancy(NumberUtils.formatNumber(Double.valueOf(
                        (Double.valueOf(executionCourse.getAttendsSet().size()).floatValue() * 100 / total)), 1));
            }
            return infoExecutionCourse;
        }
    });
    return result;
}
From source file:org.hawkular.client.android.fragment.AlertDetailFragment.java
private long getAlertStartTimestamp(Alert alert) {
    List<Long> alertStartTimestamps = new ArrayList<>();

    for (List<AlertEvaluation> alertEvaluations : alert.getEvaluations()) {
        for (AlertEvaluation alertEvaluation : alertEvaluations) {
            alertStartTimestamps.add(alertEvaluation.getDataTimestamp());
        }
    }

    return Collections.min(alertStartTimestamps);
}
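For comparison, the same lookup can be written without the intermediate list by streaming over the nested evaluations. This is only a sketch, not code from the Hawkular client; the method name is hypothetical and it assumes Alert.getEvaluations() yields List<List<AlertEvaluation>> as the loop above implies.

// Stream-based sketch of the same lookup (an assumption, not the Hawkular client code).
private long getEarliestEvaluationTimestamp(Alert alert) {
    return alert.getEvaluations().stream()
            .flatMap(List::stream)                        // flatten the nested evaluation lists
            .mapToLong(AlertEvaluation::getDataTimestamp) // pull out each data timestamp
            .min()                                        // OptionalLong over all timestamps
            .getAsLong();                                 // NoSuchElementException if empty, like Collections.min
}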
From source file:Simulator.PerformanceCalculation.java
JPanel minmaxwaitTime1(boolean minCheck) {
    LinkedHashSet no = new LinkedHashSet();
    LinkedHashMap<Integer, ArrayList<Double>> wait1 = new LinkedHashMap<>();

    for (Map.Entry<Integer, TraceObject> entry : l.getLocalTrace().entrySet()) {
        TraceObject traceObject = entry.getValue();

        if (wait1.get(traceObject.getSurgeonId()) == null) {
            ArrayList details = new ArrayList();
            details.add(traceObject.getWaitTime1());
            wait1.put(traceObject.getSurgeonId(), details);
        } else {
            wait1.get(traceObject.getSurgeonId()).add(traceObject.getWaitTime1());
        }
        no.add(traceObject.getSurgeonId());
    }

    XYSeriesCollection dataset = new XYSeriesCollection();
    LinkedHashMap<Integer, Double> average = new LinkedHashMap<>();
    for (Map.Entry<Integer, ArrayList<Double>> entry : wait1.entrySet()) {
        Integer integer = entry.getKey();
        ArrayList<Double> arrayList = entry.getValue();
        double value = 0;
        if (minCheck) {
            value = Collections.min(arrayList);
            value = value / 600;
        } else {
            value = Collections.max(arrayList);
            value = value / 600;
        }
        average.put(integer, value);
    }

    XYSeries series = new XYSeries("Surgeon Minimum Wait Time 1");
    for (int i = 1; i <= average.size(); i++) {
        series.add(i, average.get(i));
    }
    dataset.addSeries(series);

    String name;
    if (minCheck) {
        name = "Minimum";
    } else {
        name = "Maximum";
    }

    // Generate the graph
    JFreeChart chart = ChartFactory.createXYLineChart(name + " Wait Time 1 For Patients", // Title
            "Surgeon ID", // x-axis Label
            "Time (Days)", // y-axis Label
            dataset, // Dataset
            PlotOrientation.VERTICAL, // Plot Orientation
            true, // Show Legend
            true, // Use tooltips
            false // Configure chart to generate URLs?
    );
    XYPlot xyPlot = (XYPlot) chart.getPlot();
    XYLineAndShapeRenderer renderer = (XYLineAndShapeRenderer) xyPlot.getRenderer();
    renderer.setBaseShapesVisible(true);
    NumberAxis domain = (NumberAxis) xyPlot.getDomainAxis();
    domain.setVerticalTickLabels(true);

    return new ChartPanel(chart);
}
From source file:de.tud.kom.p2psim.impl.network.gnp.topology.GnpSpace.java
/**
 * @param noOfDimensions
 *            number of Dimensions must be smaller than number of Monitors
 * @param monitorResheduling
 *            number of rescheduling the downhill simplex
 * @param mapRef
 *            reference to HostMap
 * @return optimized positions for Monitors
 */
private static GnpSpace getGnpWithDownhillSimplex(int noOfDimensions, int monitorResheduling, HostMap mapRef) {
    GnpSpace.calculationStepStatus = 1;
    GnpSpace.calculationInProgress = true;

    double alpha = 1.0;
    double beta = 0.5;
    double gamma = 2;
    double maxDiversity = 0.5;

    // N + 1 initial random Solutions
    int dhs_N = mapRef.getNoOfMonitors();
    ArrayList<GnpSpace> solutions = new ArrayList<GnpSpace>(dhs_N + 1);
    for (int c = 0; c < dhs_N + 1; c++)
        solutions.add(new GnpSpace(noOfDimensions, mapRef));

    // best and worst solution
    GnpSpace bestSolution = Collections.min(solutions);
    GnpSpace worstSolution = Collections.max(solutions);
    double bestError = bestSolution.getObjectiveValueMonitor();
    double worstError = worstSolution.getObjectiveValueMonitor();

    for (int z = 0; z < monitorResheduling; z++) {
        GnpSpace.calculationProgressStatus = z;

        // resheduling
        int count = 0;
        for (GnpSpace gnp : solutions) {
            if (gnp != bestSolution) {
                GnpPosition monitor = gnp.getMonitorPosition(count);
                monitor.diversify(gnp.getDimension(), maxDiversity);
                count++;
            }
        }

        // best and worst solution
        bestSolution = Collections.min(solutions);
        worstSolution = Collections.max(solutions);
        bestError = bestSolution.getObjectiveValueMonitor();
        worstError = worstSolution.getObjectiveValueMonitor();

        // stop criterion
        while (worstError - bestError > 0.00001 && calculationInProgress) {
            // move to center ...
            GnpSpace center = GnpSpace.getCenterSolution(solutions);
            GnpSpace newSolution1 = GnpSpace.getMovedSolution(worstSolution, center, 1 + alpha);
            double newError1 = newSolution1.getObjectiveValueMonitor();
            if (newError1 <= bestError) {
                int IndexOfWorstSolution = solutions.indexOf(worstSolution);
                GnpSpace newSolution2 = GnpSpace.getMovedSolution(worstSolution, center, 1 + alpha + gamma);
                double newError2 = newSolution2.getObjectiveValueMonitor();
                if (newError2 <= newError1) {
                    solutions.set(IndexOfWorstSolution, newSolution2);
                    bestError = newError2;
                } else {
                    solutions.set(IndexOfWorstSolution, newSolution1);
                    bestError = newError1;
                }
                bestSolution = solutions.get(IndexOfWorstSolution);
            } else if (newError1 < worstError) {
                int IndexOfWorstSolution = solutions.indexOf(worstSolution);
                solutions.set(IndexOfWorstSolution, newSolution1);
            } else {
                // ... or contract around best solution
                for (int c = 0; c < solutions.size(); c++) {
                    if (solutions.get(c) != bestSolution)
                        solutions.set(c, GnpSpace.getMovedSolution(solutions.get(c), bestSolution, beta));
                }
                bestSolution = Collections.min(solutions);
                bestError = bestSolution.getObjectiveValueMonitor();
            }
            worstSolution = Collections.max(solutions);
            worstError = worstSolution.getObjectiveValueMonitor();
        }

    }

    // Set the Coordinate Reference to the Peer
    for (int c = 0; c < bestSolution.getNumberOfMonitors(); c++) {
        bestSolution.getMonitorPosition(c).getHostRef()
                .setPositionReference(bestSolution.getMonitorPosition(c));
    }

    // GnpSpace.calculationStepStatus = 0;
    // GnpSpace.calculationInProgress = false;
    return bestSolution;
}
From source file:be.ugent.maf.cellmissy.gui.controller.analysis.doseresponse.area.AreaDRNormalizedController.java
/**
 * Prepare data for fitting starting from the analysis group.
 *
 * @param dRAnalysisGroup
 * @return LinkedHashMap That maps the concentration (log-transformed!) to
 *         the normalized replicate velocities
 */
private List<DoseResponsePair> prepareFittingData(AreaDoseResponseAnalysisGroup dRAnalysisGroup) {
    List<DoseResponsePair> result = new ArrayList<>();

    // !! control concentrations (10 * lower than lowest treatment conc) also need to be added
    List<List<Double>> allVelocities = new ArrayList<>();
    List<Double> allLogConcentrations = new ArrayList<>();

    // put concentrations of treatment to analyze (control not included!) in list
    LinkedHashMap<Double, String> nestedMap = dRAnalysisGroup.getConcentrationsMap()
            .get(dRAnalysisGroup.getTreatmentToAnalyse());
    for (Double concentration : nestedMap.keySet()) {
        String unit = nestedMap.get(concentration);
        Double logConcentration = AnalysisUtils.logTransform(concentration, unit);
        allLogConcentrations.add(logConcentration);
    }
    Double lowestLogConc = Collections.min(allLogConcentrations);

    // iterate through conditions
    int x = 0;
    for (PlateCondition plateCondition : dRAnalysisGroup.getVelocitiesMap().keySet()) {
        List<Double> replicateVelocities = dRAnalysisGroup.getVelocitiesMap().get(plateCondition);
        // normalize each value
        List<Double> normalizedVelocities = new ArrayList<>();
        for (Double value : replicateVelocities) {
            normalizedVelocities.add(normalize(value));
        }
        // check if this platecondition is the control
        for (Treatment treatment : plateCondition.getTreatmentList()) {
            if (treatment.getTreatmentType().getName().contains("ontrol")) {
                allLogConcentrations.add(x, lowestLogConc - 1.0);
            }
        }
        allVelocities.add(normalizedVelocities);
        x++;
    }

    for (int i = 0; i < allLogConcentrations.size(); i++) {
        result.add(new DoseResponsePair(allLogConcentrations.get(i), allVelocities.get(i)));
    }
    return result;
}
From source file:org.libreplan.business.workingday.EffortDuration.java
public static EffortDuration min(EffortDuration... durations) {
    return Collections.min(Arrays.asList(durations));
}
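The LibrePlan helper above shows a compact idiom: wrap the varargs array with Arrays.asList so Collections.min can pick the smallest element. A generic sketch of the same pattern follows; the MinOf class and minOf name are illustrative, not part of LibrePlan.

import java.util.Arrays;
import java.util.Collections;

final class MinOf {
    // Generic varargs minimum built on Collections.min; like EffortDuration.min above,
    // it throws NoSuchElementException when called with no arguments.
    @SafeVarargs
    static <T extends Comparable<? super T>> T minOf(T... values) {
        return Collections.min(Arrays.asList(values));
    }
}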
From source file:org.peerfact.impl.network.gnp.topology.GnpSpace.java
/**
 * @param noOfDimensions
 *            number of Dimensions must be smaller than number of Monitors
 * @param monitorResheduling
 *            number of rescheduling the downhill simplex
 * @param mapRef
 *            reference to HostMap
 * @return optimized positions for Monitors
 */
private static GnpSpace getGnpWithDownhillSimplex(int noOfDimensions, int monitorResheduling, HostMap mapRef) {
    GnpSpace.calculationStepStatus = 1;
    GnpSpace.calculationInProgress = true;

    double alpha = 1.0;
    double beta = 0.5;
    double gamma = 2;
    double maxDiversity = 0.5;

    // N + 1 initial random Solutions
    int dhs_N = mapRef.getNoOfMonitors();
    ArrayList<GnpSpace> solutions = new ArrayList<GnpSpace>(dhs_N + 1);
    for (int c = 0; c < dhs_N + 1; c++) {
        solutions.add(new GnpSpace(noOfDimensions, mapRef));
    }

    // best and worst solution
    GnpSpace bestSolution = Collections.min(solutions);
    GnpSpace worstSolution = Collections.max(solutions);
    double bestError = bestSolution.getObjectiveValueMonitor();
    double worstError = worstSolution.getObjectiveValueMonitor();

    for (int z = 0; z < monitorResheduling; z++) {
        GnpSpace.calculationProgressStatus = z;

        // resheduling
        int count = 0;
        for (GnpSpace gnp : solutions) {
            if (gnp != bestSolution) {
                GnpPosition monitor = gnp.getMonitorPosition(count);
                monitor.diversify(gnp.getDimension(), maxDiversity);
                count++;
            }
        }

        // best and worst solution
        bestSolution = Collections.min(solutions);
        worstSolution = Collections.max(solutions);
        bestError = bestSolution.getObjectiveValueMonitor();
        worstError = worstSolution.getObjectiveValueMonitor();

        // stop criterion
        while (worstError - bestError > 0.00001 && calculationInProgress) {
            // move to center ...
            GnpSpace center = GnpSpace.getCenterSolution(solutions);
            GnpSpace newSolution1 = GnpSpace.getMovedSolution(worstSolution, center, 1 + alpha);
            double newError1 = newSolution1.getObjectiveValueMonitor();
            if (newError1 <= bestError) {
                int IndexOfWorstSolution = solutions.indexOf(worstSolution);
                GnpSpace newSolution2 = GnpSpace.getMovedSolution(worstSolution, center, 1 + alpha + gamma);
                double newError2 = newSolution2.getObjectiveValueMonitor();
                if (newError2 <= newError1) {
                    solutions.set(IndexOfWorstSolution, newSolution2);
                    bestError = newError2;
                } else {
                    solutions.set(IndexOfWorstSolution, newSolution1);
                    bestError = newError1;
                }
                bestSolution = solutions.get(IndexOfWorstSolution);
            } else if (newError1 < worstError) {
                int IndexOfWorstSolution = solutions.indexOf(worstSolution);
                solutions.set(IndexOfWorstSolution, newSolution1);
            } else {
                // ... or contract around best solution
                for (int c = 0; c < solutions.size(); c++) {
                    if (solutions.get(c) != bestSolution) {
                        solutions.set(c, GnpSpace.getMovedSolution(solutions.get(c), bestSolution, beta));
                    }
                }
                bestSolution = Collections.min(solutions);
                bestError = bestSolution.getObjectiveValueMonitor();
            }
            worstSolution = Collections.max(solutions);
            worstError = worstSolution.getObjectiveValueMonitor();
        }

    }

    // Set the Coordinate Reference to the Peer
    for (int c = 0; c < bestSolution.getNumberOfMonitors(); c++) {
        bestSolution.getMonitorPosition(c).getHostRef()
                .setPositionReference(bestSolution.getMonitorPosition(c));
    }

    // GnpSpace.calculationStepStatus = 0;
    // GnpSpace.calculationInProgress = false;
    return bestSolution;
}
From source file:io.hummer.util.test.GenericTestResult.java
public List<Double> getValues(String valueNameOrPattern, boolean addZeroes, boolean treatNameAsPattern) {
    List<Double> result = new LinkedList<Double>();
    for (IterationResult r : iterations) {
        for (Entry e : r.getEntries()) {
            if ((!treatNameAsPattern && e.getName().equals(valueNameOrPattern))
                    || (treatNameAsPattern && e.getName().matches(valueNameOrPattern))) {
                double val = e.getValue();
                if (addZeroes || val != 0.0)
                    result.add(val);
            }
        }
    }
    if (result.size() > eliminateXhighestValues) {
        for (int i = 0; i < eliminateXhighestValues; i++) {
            Double max = Collections.max(result);
            result.remove(max);
        }
    }
    if (result.size() > eliminateXlowestValues) {
        for (int i = 0; i < eliminateXlowestValues; i++) {
            Double min = Collections.min(result);
            result.remove(min);
        }
    }
    return result;
}
From source file:org.apache.lens.cube.parse.StorageCandidate.java
/**
 * Sets the Storage candidate's start and end time based on the underlying storage tables.
 *
 * CASE 1
 * If the Storage has a single storage table:
 * Storage start time = max(storage start time, fact start time)
 * Storage end time = min(storage end time, fact end time)
 *
 * CASE 2
 * If the Storage has multiple storage tables (one per update period):
 * update period start time = max(update start time, fact start time)
 * update period end time = min(update end time, fact end time)
 * Storage start and end time are derived from the underlying update period start and end times:
 * Storage start time = min(update1 start time, ..., updateN start time)
 * Storage end time = max(update1 end time, ..., updateN end time)
 *
 * Note that in CASE 2 it is assumed that the time ranges supported by the different update periods are
 * either overlapping (Example 2) or form a non-overlapping but continuous chain (Example 1), as
 * illustrated in the examples below.
 *
 * Example 1
 * A Storage has 2 non-overlapping but continuous update periods:
 * MONTHLY with start time now.month -13 months and end time now.month -2 months, and
 * DAILY with start time now.month -2 months and end time now.day.
 * This Storage then has an implied start time of now.month -13 months and end time of now.day.
 *
 * Example 2
 * A Storage has 2 overlapping update periods:
 * MONTHLY with start time now.month -13 months and end time now.month -1 month, and
 * DAILY with start time now.month -2 months and end time now.day.
 * This Storage then has an implied start time of now.month -13 months and end time of now.day.
 *
 * @throws LensException
 */
void setStorageStartAndEndDate() throws LensException {
    if (this.startTime != null && !this.isStorageTblsAtUpdatePeriodLevel) {
        // If the times are already set and do not depend on the update period, no point setting them again.
        return;
    }
    List<Date> startDates = new ArrayList<>();
    List<Date> endDates = new ArrayList<>();
    for (String storageTablePrefix : getValidStorageTableNames()) {
        startDates.add(getCubeMetastoreClient().getStorageTableStartDate(storageTablePrefix,
                fact.getSourceFactName()));
        endDates.add(getCubeMetastoreClient().getStorageTableEndDate(storageTablePrefix,
                fact.getSourceFactName()));
    }
    this.startTime = Collections.min(startDates);
    this.endTime = Collections.max(endDates);
}