Usage examples for java.util.Collections.min
public static <T extends Object & Comparable<? super T>> T min(Collection<? extends T> coll)
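Before the project examples, a minimal self-contained sketch (the values are illustrative, not taken from any project below): Collections.min returns the smallest element by natural ordering, throws NoSuchElementException on an empty collection, and has an overload that takes an explicit Comparator.

import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class CollectionsMinDemo {
    public static void main(String[] args) {
        List<Integer> numbers = Arrays.asList(42, 7, 19);

        // Natural ordering: returns 7.
        Integer smallest = Collections.min(numbers);

        // Comparator overload: with a reversed comparator, "min" is the largest value, 42.
        Integer largest = Collections.min(numbers, Comparator.reverseOrder());

        System.out.println(smallest + " " + largest);
        // Collections.min(Collections.<Integer>emptyList()) would throw NoSuchElementException.
    }
}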
From source file:org.openbaton.autoscaling.core.detection.DetectionEngine.java
public double calculateMeasurementResult(ScalingAlarm alarm, List<Item> measurementResults) {
    log.debug("Calculating final measurement result ...");
    double result;
    List<Double> consideredResults = new ArrayList<>();
    for (Item measurementResult : measurementResults) {
        consideredResults.add(Double.parseDouble(measurementResult.getValue()));
    }
    switch (alarm.getStatistic()) {
    case "avg":
        double sum = 0;
        for (Double consideredResult : consideredResults) {
            sum += consideredResult;
        }
        result = sum / measurementResults.size();
        break;
    case "min":
        result = Collections.min(consideredResults);
        break;
    case "max":
        result = Collections.max(consideredResults);
        break;
    default:
        result = -1;
        break;
    }
    return result;
}
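One caveat this snippet relies on: Collections.min and Collections.max throw NoSuchElementException on an empty collection, so callers are presumably expected to supply at least one measurement. A defensive variant of the "min" branch (a sketch, not part of the OpenBaton source):

    case "min":
        // Guard against an empty measurement list before delegating to Collections.min.
        result = consideredResults.isEmpty() ? -1 : Collections.min(consideredResults);
        break;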
From source file:net.librec.recommender.AbstractRecommender.java
/**
 * setup
 *
 * @throws LibrecException if error occurs during setup
 */
protected void setup() throws LibrecException {
    conf = context.getConf();
    isRanking = conf.getBoolean("rec.recommender.isranking");
    if (isRanking) {
        topN = conf.getInt("rec.recommender.ranking.topn", 10);
        if (this.topN <= 0) {
            throw new IndexOutOfBoundsException("rec.recommender.ranking.topn should be more than 0!");
        }
    }
    earlyStop = conf.getBoolean("rec.recommender.earlystop", false);
    verbose = conf.getBoolean("rec.recommender.verbose", true);

    trainMatrix = (SparseMatrix) getDataModel().getTrainDataSet();
    testMatrix = (SparseMatrix) getDataModel().getTestDataSet();
    validMatrix = (SparseMatrix) getDataModel().getValidDataSet();
    userMappingData = getDataModel().getUserMappingData();
    itemMappingData = getDataModel().getItemMappingData();

    numUsers = trainMatrix.numRows();
    numItems = trainMatrix.numColumns();
    numRates = trainMatrix.size();
    ratingScale = new ArrayList<>(trainMatrix.getValueSet());
    Collections.sort(ratingScale);
    maxRate = Collections.max(trainMatrix.getValueSet());
    minRate = Collections.min(trainMatrix.getValueSet());
    globalMean = trainMatrix.mean();

    int[] numDroppedItemsArray = new int[numUsers]; // for AUCEvaluator
    int maxNumTestItemsByUser = 0; // for idcg
    for (int userIdx = 0; userIdx < numUsers; ++userIdx) {
        numDroppedItemsArray[userIdx] = numItems - trainMatrix.rowSize(userIdx);
        int numTestItemsByUser = testMatrix.rowSize(userIdx);
        maxNumTestItemsByUser = maxNumTestItemsByUser < numTestItemsByUser ? numTestItemsByUser
                : maxNumTestItemsByUser;
    }
    conf.setInts("rec.eval.auc.dropped.num", numDroppedItemsArray);
    conf.setInt("rec.eval.item.test.maxnum", maxNumTestItemsByUser);
}
From source file:org.libreplan.business.calendars.entities.CombinedWorkHours.java
@Override
protected EffortDuration updateCapacity(EffortDuration current, EffortDuration each) {
    return Collections.min(asList(current, each));
}
From source file:gr.iti.mklab.reveal.forensics.maps.dq.DQExtractor.java
public void detectDQDiscontinuities() {
    int imWidth = dcts.length;
    int imHeight = dcts[0].length;

    int[] p_h_avg = new int[maxCoeffs];
    int[] p_h_fft = new int[maxCoeffs];
    int[] p_final = new int[maxCoeffs];

    double[][] pTampered = new double[maxCoeffs][];
    double[][] pUntampered = new double[maxCoeffs][];

    for (int coeffIndex = 0; coeffIndex < maxCoeffs; coeffIndex++) {
        int coe = coeff[coeffIndex];
        int startY = coe % 8 - 1;
        if (startY == -1) {
            startY = 8;
        }
        int startX = (int) Math.floor((coe - 1) / 8);

        List<Integer> selectedCoeffs = new ArrayList<Integer>();
        for (int ii = startX; ii < imWidth; ii += 8) {
            for (int jj = startY; jj < imHeight; jj += 8) {
                selectedCoeffs.add(dcts[ii][jj]);
            }
        }

        int minCoeffValue = Collections.min(selectedCoeffs);
        int maxCoeffValue = Collections.max(selectedCoeffs);
        int s_0;
        Double[] coeffHist = new Double[0];

        if (maxCoeffValue - minCoeffValue > 0) {
            // Histogram length is rounded up to a power of 2 to allow for FFT (zero padded).
            int trueHistRange = maxCoeffValue - minCoeffValue + 1;
            int histLength = (int) Math.pow(2, Math.ceil(Math.log(trueHistRange) / Math.log(2)));

            coeffHist = new Double[histLength];
            for (int ii = 0; ii < coeffHist.length; ii++) {
                coeffHist[ii] = 0.0;
            }
            for (Integer selectedCoeff : selectedCoeffs) {
                coeffHist[selectedCoeff - minCoeffValue] += 1;
            }

            List<Double> coeffHistList = Arrays.asList(coeffHist);
            s_0 = coeffHistList.indexOf(Collections.max(coeffHistList));

            List<Double> h = new ArrayList<>();
            DescriptiveStatistics vals;
            for (int coeffInd = 1; coeffInd < coeffHistList.size(); coeffInd++) {
                vals = new DescriptiveStatistics();
                for (int leapInd = s_0; leapInd < coeffHistList.size(); leapInd += coeffInd) {
                    vals.addValue(coeffHistList.get(leapInd));
                }
                for (int leapInd = s_0 - coeffInd; leapInd >= 0; leapInd -= coeffInd) {
                    vals.addValue(coeffHistList.get(leapInd));
                }
                h.add(vals.getMean());
            }
            p_h_avg[coeffIndex] = h.indexOf(Collections.max(h));

            FastFourierTransformer fastFourierTransformer =
                    new FastFourierTransformer(DftNormalization.STANDARD);
            Complex[] fft = fastFourierTransformer.transform(ArrayUtils.toPrimitive(coeffHist),
                    TransformType.FORWARD);

            double[] power = new double[fft.length];
            for (int ii = 0; ii < power.length; ii++) {
                power[ii] = fft[ii].abs();
            }

            // Find first local minimum, to bypass DC peak.
            // (&& replaces the original non-short-circuit &, which evaluated
            // power[FreqValley + 1] even when FreqValley had reached the last index.)
            double DC = power[0];
            int FreqValley = 1;
            while (FreqValley < power.length - 1 && power[FreqValley] >= power[FreqValley + 1]) {
                FreqValley++;
            }

            int maxFFTInd = 0;
            double maxFFTVal = 0;
            double minFFTVal = Double.MAX_VALUE;
            for (int ii = FreqValley; ii < power.length / 2; ii++) {
                if (power[ii] > maxFFTVal) {
                    maxFFTInd = ii;
                    maxFFTVal = power[ii];
                }
                if (power[ii] < minFFTVal) {
                    minFFTVal = power[ii];
                }
            }
            if (maxFFTInd == 0 || maxFFTVal < (DC / 5) || minFFTVal / maxFFTVal > 0.9) {
                p_h_fft[coeffIndex] = 1;
            } else {
                p_h_fft[coeffIndex] = Math.round(coeffHist.length / maxFFTInd);
            }
        } else {
            p_h_avg[coeffIndex] = 1;
            p_h_fft[coeffIndex] = 1;
            s_0 = 0;
        }

        if (p_h_avg[coeffIndex] < p_h_fft[coeffIndex]) {
            p_final[coeffIndex] = p_h_avg[coeffIndex];
        } else {
            p_final[coeffIndex] = p_h_fft[coeffIndex];
        }

        pTampered[coeffIndex] = new double[selectedCoeffs.size()];
        pUntampered[coeffIndex] = new double[selectedCoeffs.size()];

        int[] adjustedCoeffs = new int[selectedCoeffs.size()];
        int[] period_start = new int[selectedCoeffs.size()];
        int[] period;
        int[] num = new int[selectedCoeffs.size()];
        int[] denom = new int[selectedCoeffs.size()];
        double[] P_u = new double[selectedCoeffs.size()];
        double[] P_t = new double[selectedCoeffs.size()];

        if (p_final[coeffIndex] != 1) {
            for (int ii = 0; ii < adjustedCoeffs.length; ii++) {
                adjustedCoeffs[ii] = selectedCoeffs.get(ii) - minCoeffValue;
                period_start[ii] = adjustedCoeffs[ii] - rem(adjustedCoeffs[ii] - s_0, p_final[coeffIndex]);
            }
            for (int kk = 0; kk < selectedCoeffs.size(); kk++) {
                if (period_start[kk] > s_0) {
                    period = new int[p_final[coeffIndex]];
                    for (int ii = 0; ii < p_final[coeffIndex]; ii++) {
                        period[ii] = period_start[kk] + ii;
                        if (period[ii] >= coeffHist.length) {
                            period[ii] = period[ii] - p_final[coeffIndex];
                        }
                    }
                    num[kk] = (int) coeffHist[adjustedCoeffs[kk]].doubleValue();
                    denom[kk] = 0;
                    for (int ll = 0; ll < period.length; ll++) {
                        denom[kk] = denom[kk] + (int) coeffHist[period[ll]].doubleValue();
                    }
                } else {
                    period = new int[p_final[coeffIndex]];
                    for (int ii = 0; ii < p_final[coeffIndex]; ii++) {
                        period[ii] = period_start[kk] - ii;
                        if (period_start[kk] - p_final[coeffIndex] + 1 <= 0) {
                            if (period[ii] <= 0) {
                                period[ii] = period[ii] + p_final[coeffIndex];
                            }
                        }
                    }
                    num[kk] = (int) coeffHist[adjustedCoeffs[kk]].doubleValue();
                    denom[kk] = 0;
                    for (int ll = 0; ll < period.length; ll++) {
                        denom[kk] = denom[kk] + (int) coeffHist[period[ll]].doubleValue();
                    }
                }
                P_u[kk] = ((double) num[kk] / denom[kk]);
                P_t[kk] = (1.0 / p_final[coeffIndex]);
                if (P_u[kk] + P_t[kk] != 0) {
                    pTampered[coeffIndex][kk] = P_t[kk] / (P_u[kk] + P_t[kk]);
                    pUntampered[coeffIndex][kk] = P_u[kk] / (P_u[kk] + P_t[kk]);
                } else {
                    pTampered[coeffIndex][kk] = 0.5;
                    pUntampered[coeffIndex][kk] = 0.5;
                }
            }
        } else {
            for (int kk = 0; kk < selectedCoeffs.size(); kk++) {
                pTampered[coeffIndex][kk] = 0.5;
                pUntampered[coeffIndex][kk] = 0.5;
            }
        }
    }

    double[] pTamperedOverall = new double[pTampered[0].length];
    double pTamperedProd;
    double pUntamperedProd;
    for (int locationIndex = 0; locationIndex < pTampered[0].length; locationIndex++) {
        pTamperedProd = 1;
        pUntamperedProd = 1;
        for (int coeffIndex = 0; coeffIndex < pTampered.length; coeffIndex++) {
            pTamperedProd = pTamperedProd * pTampered[coeffIndex][locationIndex];
            pUntamperedProd = pUntamperedProd * pUntampered[coeffIndex][locationIndex];
        }
        if (pTamperedProd + pUntamperedProd != 0) {
            pTamperedOverall[locationIndex] = pTamperedProd / (pTamperedProd + pUntamperedProd);
        } else {
            pTamperedOverall[locationIndex] = 0;
        }
    }

    int blocksH = imWidth / 8;
    int blocksV = imHeight / 8;
    double[][] outputMap = new double[blocksV][blocksH];
    for (int kk = 0; kk < pTamperedOverall.length; kk++) {
        outputMap[kk % blocksV][(int) Math.floor(kk / blocksV)] = pTamperedOverall[kk];
        if (pTamperedOverall[kk] > maxProbValue) {
            maxProbValue = pTamperedOverall[kk];
        }
        if (pTamperedOverall[kk] < minProbValue) {
            minProbValue = pTamperedOverall[kk];
        }
    }

    probabilityMap = outputMap;
    BufferedImage outputIm = visualizeWithJet(outputMap); // output
    displaySurface = outputIm;
}
From source file:com.tiendd.uet.predicting.AbstractRecommender.java
/**
 * setup
 *
 * @throws LibrecException
 *             if error occurs during setup
 */
protected void setup() throws LibrecException {
    conf = context.getConf();
    // isRanking = conf.getBoolean("rec.recommender.isranking");
    // if (isRanking) {
    //     topN = conf.getInt("rec.recommender.ranking.topn", 10);
    //     if (this.topN <= 0) {
    //         throw new IndexOutOfBoundsException("rec.recommender.ranking.topn should be more than 0!");
    //     }
    // }
    earlyStop = conf.getBoolean("rec.recommender.earlystop", false);
    verbose = conf.getBoolean("rec.recommender.verbose", true);

    trainMatrix = (SparseMatrix) getDataModel().getTrainDataSet();
    testMatrix = (SparseMatrix) getDataModel().getTestDataSet();
    validMatrix = (SparseMatrix) getDataModel().getValidDataSet();
    userMappingData = getDataModel().getUserMappingData();
    itemMappingData = getDataModel().getItemMappingData();

    numUsers = trainMatrix.numRows();
    numItems = trainMatrix.numColumns();
    numRates = trainMatrix.size();
    ratingScale = new ArrayList<>(trainMatrix.getValueSet());
    Collections.sort(ratingScale);
    maxRate = Collections.max(trainMatrix.getValueSet());
    minRate = Collections.min(trainMatrix.getValueSet());
    globalMean = trainMatrix.mean();

    int[] numDroppedItemsArray = new int[numUsers]; // for AUCEvaluator
    int maxNumTestItemsByUser = 0; // for idcg
    for (int userIdx = 0; userIdx < numUsers; ++userIdx) {
        numDroppedItemsArray[userIdx] = numItems - trainMatrix.rowSize(userIdx);
        int numTestItemsByUser = testMatrix.rowSize(userIdx);
        maxNumTestItemsByUser = maxNumTestItemsByUser < numTestItemsByUser ? numTestItemsByUser
                : maxNumTestItemsByUser;
    }
    conf.setInts("rec.eval.auc.dropped.num", numDroppedItemsArray);
    conf.setInt("rec.eval.item.test.maxnum", maxNumTestItemsByUser);
}
From source file:be.ugent.maf.cellmissy.analysis.singlecell.preprocessing.impl.SingleCellWellPreProcessorImpl.java
@Override
public void generateRawCoordinatesRanges(SingleCellWellDataHolder singleCellWellDataHolder) {
    Double[][] transposedMatrix = AnalysisUtils
            .transpose2DArray(singleCellWellDataHolder.getRawTrackCoordinatesMatrix());
    // compute the min and the max coordinates
    Double xMin = Collections.min(Arrays.asList(transposedMatrix[0]));
    Double xMax = Collections.max(Arrays.asList(transposedMatrix[0]));
    Double yMin = Collections.min(Arrays.asList(transposedMatrix[1]));
    Double yMax = Collections.max(Arrays.asList(transposedMatrix[1]));
    Double[][] rawCoordinatesRanges = new Double[2][2];
    rawCoordinatesRanges[0] = new Double[] { xMin, xMax };
    rawCoordinatesRanges[1] = new Double[] { yMin, yMax };
    singleCellWellDataHolder.setRawCoordinatesRanges(rawCoordinatesRanges);
}
From source file:org.encuestame.core.cron.CalculateHashTagSize.java
/**
 * Calculate all hashtag sizes.
 */
// @Scheduled(cron = "${cron.calculateReindex}")
public void calculate() {
    if (EnMePlaceHolderConfigurer.getSystemInitialized()) {
        log.info("calculate hashtag rating ...");
        double average = 0;
        int total = 0;
        double score = 0;
        double scoreRank = 0;
        double averageHashTagRanking = 0;
        Date currentDate = DateUtil.getCurrentCalendarDate();
        // store the max and min values
        final List<Long> maxMinTotal = new ArrayList<Long>();
        final List<HashTag> tags = getHashTagDao().getHashTags(null, 0, "");
        log.debug("HashTag to process " + tags.size());
        total = tags.size();
        final List<Object[]> maxMin = getHashTagDao().getMaxMinTagFrecuency();
        long maxFrecuency = 0;
        long minFrecuency = 0;
        if (maxMin.get(0) != null) {
            maxFrecuency = (Long) maxMin.get(0)[0]; // Max
            minFrecuency = (Long) maxMin.get(0)[1]; // Min
        }
        List<HashTagRanking> hashTagRankingList = getHashTagDao().getHashTagRankStats(currentDate);
        for (HashTag hashTag : tags) {
            final HashTagRanking tagRanking;
            log.debug("Calculate for: " + hashTag.getHashTag() + " size after calculate: " + hashTag.getSize());
            long tagFrecuency = getHashTagFrecuency(hashTag.getHashTag(), this.INIT_RESULTS, this.MAX_RESULTS);
            log.debug("-------- tag frecuency: " + tagFrecuency);
            long relevance = (tagFrecuency + (hashTag.getHits() == null ? 0 : hashTag.getHits()));
            long logFrecuency = Math.round(EnMeUtils.calculateSizeTag(relevance, maxFrecuency, minFrecuency));
            score += logFrecuency;
            scoreRank = Math.round((double) relevance / (double) total);
            averageHashTagRanking = scoreRank < 1 ? 1 : Math.round(scoreRank);
            maxMinTotal.add(logFrecuency);
            hashTag.setSize(Long.valueOf(logFrecuency));
            log.debug("Calculate for: " + hashTag.getHashTag() + " size before calculate: " + logFrecuency);
            hashTag.setCreatedAt(Calendar.getInstance().getTime());
            getHashTagDao().saveOrUpdate(hashTag);
            // Save table
            if (hashTagRankingList.size() == 0) {
                tagRanking = this.createHashTagRanking(averageHashTagRanking, hashTag, currentDate);
                getHashTagDao().saveOrUpdate(tagRanking);
            } else {
                log.debug("Process has been executed today`s date");
            }
        }
        average = (double) score / (double) total;
        log.info("*******************************");
        log.info("******* Resume of Process *****");
        log.info("-------------------------------");
        log.info("| Max Frec : " + maxFrecuency + " |");
        log.info("| Min Frec : " + minFrecuency + " |");
        log.info("| Total : " + total + " |");
        log.info("| Score : " + Math.round(score) + " |");
        log.info("| Average : " + Math.round(average) + " |");
        log.info("| Max : " + Collections.max(maxMinTotal) + " |");
        log.info("| Min : " + Collections.min(maxMinTotal) + " |");
        log.info("-------------------------------");
        log.info("*******************************");
        log.info("************ Finished Start hashtag calculate job **************");
    }
}
From source file:org.nuclos.common2.LangUtils.java
/**
 * @param ac one or more <code>Comparable</code>s, none of which may be <code>null</code>.
 * @return the minimum of the given <code>Comparable</code>s.
 * @precondition ac != null
 */
public static <C extends Comparable<? super C>> C min(C... ac) {
    return Collections.min(Arrays.asList(ac));
}
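A brief usage sketch for this helper (hypothetical call sites, not from the Nuclos source):

// The varargs array is wrapped by Arrays.asList, then delegated to Collections.min.
Integer smallest = LangUtils.min(3, 1, 2);       // returns 1
String first = LangUtils.min("pear", "apple");   // returns "apple"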
From source file:com.marvelution.jira.plugins.hudson.charts.BuildTestResultsRatioChartGenerator.java
/**
 * {@inheritDoc}
 */
@Override
public ChartHelper generateChart() {
    buildMap = new HashMap<Integer, Build>();
    final CategoryTableXYDataset dataset = new CategoryTableXYDataset();
    for (Build build : builds) {
        final TestResult results = build.getTestResult();
        double percentagePass = 0.0D, percentageFail = 0.0D, percentageSkipped = 0.0D;
        if (results != null && results.getTotal() > 0) {
            percentagePass = Double.valueOf(results.getPassed()) / Double.valueOf(results.getTotal()) * 100.0D;
            percentageFail = Double.valueOf(results.getFailed()) / Double.valueOf(results.getTotal()) * 100.0D;
            percentageSkipped = Double.valueOf(results.getSkipped()) / Double.valueOf(results.getTotal())
                    * 100.0D;
        }
        dataset.add(Double.valueOf(build.getBuildNumber()), percentagePass, seriesNames[0]);
        dataset.add(Double.valueOf(build.getBuildNumber()), percentageFail, seriesNames[1]);
        dataset.add(Double.valueOf(build.getBuildNumber()), percentageSkipped, seriesNames[2]);
        buildMap.put(Integer.valueOf(build.getBuildNumber()), build);
    }
    final JFreeChart chart = ChartFactory.createStackedXYAreaChart("", "",
            getI18n().getText("hudson.charts.tests"), dataset, PlotOrientation.VERTICAL, false, false, false);
    chart.setBackgroundPaint(Color.WHITE);
    chart.setBorderVisible(false);
    XYPlot xyPlot = chart.getXYPlot();
    xyPlot.setDataset(1, dataset);
    if (dataset.getItemCount() > 0) {
        XYLineAndShapeRenderer shapeRenderer = new XYLineAndShapeRenderer(false, true);
        shapeRenderer.setSeriesShapesVisible(1, false);
        shapeRenderer.setSeriesLinesVisible(1, false);
        shapeRenderer.setSeriesShapesVisible(2, false);
        shapeRenderer.setSeriesLinesVisible(2, false);
        shapeRenderer.setSeriesShape(0, new Ellipse2D.Double(-3.0D, -3.0D, 6.0D, 6.0D));
        shapeRenderer.setSeriesPaint(0, GREEN_PAINT);
        shapeRenderer.setSeriesShapesFilled(0, true);
        shapeRenderer.setBaseToolTipGenerator(this);
        shapeRenderer.setBaseItemLabelFont(ChartDefaults.defaultFont);
        shapeRenderer.setBaseItemLabelsVisible(false);
        xyPlot.setRenderer(0, shapeRenderer);
        StackedXYAreaRenderer2 renderer = new StackedXYAreaRenderer2();
        renderer.setSeriesPaint(0, GREEN_PAINT);
        renderer.setSeriesPaint(1, RED_PAINT);
        renderer.setSeriesPaint(2, YELLOW_PAINT);
        renderer.setBaseItemLabelFont(ChartDefaults.defaultFont);
        renderer.setBaseItemLabelsVisible(false);
        xyPlot.setRenderer(1, renderer);
        renderer.setBaseToolTipGenerator(this);
    }
    ValueAxis rangeAxis = xyPlot.getRangeAxis();
    rangeAxis.setLowerBound(0.0D);
    rangeAxis.setUpperBound(100.0D);
    final NumberAxis domainAxis = new NumberAxis();
    domainAxis.setLowerBound(Collections.min(buildMap.keySet()));
    domainAxis.setUpperBound(Collections.max(buildMap.keySet()));
    final TickUnitSource ticks = NumberAxis.createIntegerTickUnits();
    domainAxis.setStandardTickUnits(ticks);
    xyPlot.setDomainAxis(domainAxis);
    ChartUtil.setupPlot(xyPlot);
    return new ChartHelper(chart);
}
From source file:com.gargoylesoftware.htmlunit.Cache.java
/**
 * Truncates the cache to the maximal number of entries.
 */
protected void deleteOverflow() {
    synchronized (entries_) {
        while (entries_.size() > maxSize_) {
            final Entry oldestEntry = Collections.min(entries_.values());
            entries_.remove(oldestEntry.key_);
            if (oldestEntry.response_ != null) {
                oldestEntry.response_.cleanUp();
            }
        }
    }
}
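This eviction works because Cache.Entry implements Comparable, so Collections.min yields the least-recently-used entry. A self-contained sketch of the same idea with a hypothetical entry type (the class and field names are assumptions, not HtmlUnit's actual ones):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

// Hypothetical LRU-style entry: the oldest entry compares as the minimum.
class CacheEntry implements Comparable<CacheEntry> {
    final String key;
    final long lastAccess;

    CacheEntry(String key, long lastAccess) {
        this.key = key;
        this.lastAccess = lastAccess;
    }

    @Override
    public int compareTo(CacheEntry other) {
        return Long.compare(lastAccess, other.lastAccess);
    }
}

public class EvictionSketch {
    public static void main(String[] args) {
        Map<String, CacheEntry> entries = new HashMap<>();
        entries.put("a", new CacheEntry("a", 100L));
        entries.put("b", new CacheEntry("b", 50L));

        // Collections.min picks the least-recently-used entry to evict.
        CacheEntry oldest = Collections.min(entries.values());
        entries.remove(oldest.key); // evicts "b"
    }
}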