List of usage examples for java.util.Collections.max(Collection)
public static <T extends Object & Comparable<? super T>> T max(Collection<? extends T> coll)
From source file:face4j.model.Face.java
/**
 * Returns the {@link Guess} that compares highest under the guesses'
 * natural ordering.
 *
 * @return the maximal guess, or {@code null} when there are no guesses
 */
public Guess getGuess() {
    // An empty collection would make Collections.max throw
    // NoSuchElementException; guard instead of catching.
    return guesses.isEmpty() ? null : Collections.max(guesses);
}
From source file:org.openbaton.autoscaling.core.detection.DetectionEngine.java
public double calculateMeasurementResult(ScalingAlarm alarm, List<Item> measurementResults) { log.debug("Calculating final measurement result ..."); double result; List<Double> consideredResults = new ArrayList<>(); for (Item measurementResult : measurementResults) { consideredResults.add(Double.parseDouble(measurementResult.getValue())); }// ww w. java 2s. c o m switch (alarm.getStatistic()) { case "avg": double sum = 0; for (Double consideredResult : consideredResults) { sum += consideredResult; } result = sum / measurementResults.size(); break; case "min": result = Collections.min(consideredResults); break; case "max": result = Collections.max(consideredResults); break; default: result = -1; break; } return result; }
From source file:com.tiendd.uet.predicting.AbstractRecommender.java
/**
 * Initializes the recommender from the shared context: reads configuration
 * flags, snapshots the train/test/validation matrices, derives basic dataset
 * statistics (counts, rating scale, global mean) and precomputes per-user
 * values used later by the evaluators.
 *
 * @throws LibrecException if an error occurs during setup
 */
protected void setup() throws LibrecException {
    conf = context.getConf();
    // Ranking-mode configuration, currently disabled:
    // isRanking = conf.getBoolean("rec.recommender.isranking");
    // if (isRanking) {
    //     topN = conf.getInt("rec.recommender.ranking.topn", 10);
    //     if (this.topN <= 0) {
    //         throw new IndexOutOfBoundsException("rec.recommender.ranking.topn should be more than 0!");
    //     }
    // }
    earlyStop = conf.getBoolean("rec.recommender.earlystop", false);
    verbose = conf.getBoolean("rec.recommender.verbose", true);

    // Snapshot the three data splits from the data model.
    trainMatrix = (SparseMatrix) getDataModel().getTrainDataSet();
    testMatrix = (SparseMatrix) getDataModel().getTestDataSet();
    validMatrix = (SparseMatrix) getDataModel().getValidDataSet();
    userMappingData = getDataModel().getUserMappingData();
    itemMappingData = getDataModel().getItemMappingData();

    // Basic dataset statistics, all derived from the training matrix.
    numUsers = trainMatrix.numRows();
    numItems = trainMatrix.numColumns();
    numRates = trainMatrix.size();
    // Distinct rating values, sorted ascending.
    ratingScale = new ArrayList<>(trainMatrix.getValueSet());
    Collections.sort(ratingScale);
    maxRate = Collections.max(trainMatrix.getValueSet());
    minRate = Collections.min(trainMatrix.getValueSet());
    globalMean = trainMatrix.mean();

    // Per-user counts consumed by evaluation.
    int[] numDroppedItemsArray = new int[numUsers]; // for AUCEvaluator
    int maxNumTestItemsByUser = 0; // for idcg
    for (int userIdx = 0; userIdx < numUsers; ++userIdx) {
        // Items the user has NOT rated in training (candidate set size).
        numDroppedItemsArray[userIdx] = numItems - trainMatrix.rowSize(userIdx);
        int numTestItemsByUser = testMatrix.rowSize(userIdx);
        // Track the largest per-user test-set size.
        maxNumTestItemsByUser = maxNumTestItemsByUser < numTestItemsByUser ? numTestItemsByUser
                : maxNumTestItemsByUser;
    }
    // Publish the precomputed values back into the configuration for evaluators.
    conf.setInts("rec.eval.auc.dropped.num", numDroppedItemsArray);
    conf.setInt("rec.eval.item.test.maxnum", maxNumTestItemsByUser);
}
From source file:gr.iti.mklab.reveal.forensics.maps.dq.DQExtractor.java
/**
 * Builds a per-block tampering-probability map from the image's DCT
 * coefficients ({@code dcts}). For each of the first {@code maxCoeffs} DCT
 * modes it estimates a quantization period from the coefficient histogram
 * (two estimators: mean-leap and FFT), derives per-location tampered /
 * untampered probabilities, combines them across modes, and stores the
 * result in {@code probabilityMap} / {@code displaySurface}.
 *
 * NOTE(review): this appears to implement a double-quantization (DQ)
 * forensic detector — confirm against the cited paper before relying on the
 * terminology in this comment.
 */
public void detectDQDiscontinuities() {
    int imWidth = dcts.length;
    int imHeight = dcts[0].length;
    // Per-mode period estimates: mean-leap, FFT-based, and the chosen minimum.
    int[] p_h_avg = new int[maxCoeffs];
    int[] p_h_fft = new int[maxCoeffs];
    int[] p_final = new int[maxCoeffs];
    // Per-mode, per-location probabilities.
    double[][] pTampered = new double[maxCoeffs][];
    double[][] pUntampered = new double[maxCoeffs][];
    for (int coeffIndex = 0; coeffIndex < maxCoeffs; coeffIndex++) {
        int coe = coeff[coeffIndex];
        // Translate the 1-based mode number into its (x, y) offset inside
        // each 8x8 DCT block.
        int startY = coe % 8 - 1;
        if (startY == -1) {
            startY = 8;
        }
        int startX = (int) Math.floor((coe - 1) / 8);
        // Collect this mode's coefficient from every 8x8 block.
        List<Integer> selectedCoeffs = new ArrayList<Integer>();
        for (int ii = startX; ii < imWidth; ii += 8) {
            for (int jj = startY; jj < imHeight; jj += 8) {
                selectedCoeffs.add(dcts[ii][jj]);
            }
        }
        int minCoeffValue = Collections.min(selectedCoeffs);
        int maxCoeffValue = Collections.max(selectedCoeffs);
        int s_0; // histogram bin of the most frequent coefficient value
        Double[] coeffHist = new Double[0];
        if (maxCoeffValue - minCoeffValue > 0) {
            // Histogram length is rounded up to a power of 2 to allow for
            // FFT (zero padded).
            int trueHistRange = maxCoeffValue - minCoeffValue + 1;
            //int histLength = trueHistRange;
            int histLength = (int) Math.pow(2, Math.ceil(Math.log(trueHistRange) / Math.log(2)));
            coeffHist = new Double[histLength];
            for (int ii = 0; ii < coeffHist.length; ii++) {
                coeffHist[ii] = 0.0;
            }
            // Shift values so the minimum maps to bin 0.
            for (Integer selectedCoeff : selectedCoeffs) {
                coeffHist[selectedCoeff - minCoeffValue] += 1;
            }
            List<Double> coeffHistList = Arrays.asList(coeffHist);
            s_0 = coeffHistList.indexOf(Collections.max(coeffHistList));
            // Estimator 1: for each candidate period, average the histogram
            // sampled at that stride outward from the peak; the candidate
            // with the highest mean wins.
            List<Double> h = new ArrayList<>();
            DescriptiveStatistics vals;
            for (int coeffInd = 1; coeffInd < coeffHistList.size(); coeffInd++) {
                vals = new DescriptiveStatistics();
                for (int leapInd = s_0; leapInd < coeffHistList.size(); leapInd += coeffInd) {
                    vals.addValue(coeffHistList.get(leapInd));
                }
                for (int leapInd = s_0 - coeffInd; leapInd >= 0; leapInd -= coeffInd) {
                    vals.addValue(coeffHistList.get(leapInd));
                }
                h.add(vals.getMean());
            }
            p_h_avg[coeffIndex] = (h.indexOf(Collections.max(h)));
            // Estimator 2: locate the dominant periodicity in the
            // histogram's FFT power spectrum.
            FastFourierTransformer fastFourierTransformer = new FastFourierTransformer(
                    DftNormalization.STANDARD);
            Complex[] fft = fastFourierTransformer.transform(ArrayUtils.toPrimitive(coeffHist),
                    TransformType.FORWARD);
            double[] power = new double[fft.length];
            for (int ii = 0; ii < power.length; ii++) {
                power[ii] = fft[ii].abs();
            }
            // Find first local minimum, to bypass DC peak.
            double DC = power[0];
            int FreqValley = 1;
            while (FreqValley < power.length - 1 & power[FreqValley] >= power[FreqValley + 1]) {
                FreqValley++;
            }
            // Scan the first half of the spectrum past the valley for the
            // strongest (and weakest) component.
            int maxFFTInd = 0;
            double maxFFTVal = 0;
            double minFFTVal = Double.MAX_VALUE;
            for (int ii = FreqValley; ii < power.length / 2; ii++) {
                if (power[ii] > maxFFTVal) {
                    maxFFTInd = ii;
                    maxFFTVal = power[ii];
                }
                if (power[ii] < minFFTVal) {
                    minFFTVal = power[ii];
                }
            }
            // Reject weak or flat spectra (no clear periodicity): period 1.
            if (maxFFTInd == 0 | maxFFTVal < (DC / 5) | minFFTVal / maxFFTVal > 0.9) {
                p_h_fft[coeffIndex] = 1;
            } else {
                p_h_fft[coeffIndex] = Math.round(coeffHist.length / maxFFTInd);
            }
        } else {
            // Constant coefficient: no histogram structure to analyze.
            p_h_avg[coeffIndex] = 1;
            p_h_fft[coeffIndex] = 1;
            s_0 = 0;
        }
        // Keep the smaller of the two period estimates.
        if (p_h_avg[coeffIndex] < p_h_fft[coeffIndex]) {
            p_final[coeffIndex] = p_h_avg[coeffIndex];
        } else {
            p_final[coeffIndex] = p_h_fft[coeffIndex];
        }
        pTampered[coeffIndex] = new double[selectedCoeffs.size()];
        pUntampered[coeffIndex] = new double[selectedCoeffs.size()];
        int[] adjustedCoeffs = new int[selectedCoeffs.size()];
        int[] period_start = new int[selectedCoeffs.size()];
        int[] period;
        int[] num = new int[selectedCoeffs.size()];
        int[] denom = new int[selectedCoeffs.size()];
        double[] P_u = new double[selectedCoeffs.size()];
        double[] P_t = new double[selectedCoeffs.size()];
        if (p_final[coeffIndex] != 1) {
            // Align each coefficient to the start of its quantization period
            // relative to the histogram peak s_0.
            for (int ii = 0; ii < adjustedCoeffs.length; ii++) {
                adjustedCoeffs[ii] = selectedCoeffs.get(ii) - minCoeffValue;
                period_start[ii] = adjustedCoeffs[ii] - rem(adjustedCoeffs[ii] - s_0, p_final[coeffIndex]);
            }
            for (int kk = 0; kk < selectedCoeffs.size(); kk++) {
                if (period_start[kk] > s_0) {
                    // Period lies to the right of the peak; walk forward,
                    // wrapping indices that overrun the histogram.
                    period = new int[p_final[coeffIndex]];
                    for (int ii = 0; ii < p_final[coeffIndex]; ii++) {
                        period[ii] = period_start[kk] + ii;
                        if (period[ii] >= coeffHist.length) {
                            period[ii] = period[ii] - p_final[coeffIndex];
                        }
                    }
                    num[kk] = (int) coeffHist[adjustedCoeffs[kk]].doubleValue();
                    denom[kk] = 0;
                    for (int ll = 0; ll < period.length; ll++) {
                        denom[kk] = denom[kk] + (int) coeffHist[period[ll]].doubleValue();
                    }
                } else {
                    // Period lies at or left of the peak; walk backward,
                    // wrapping indices that underrun bin 0.
                    period = new int[p_final[coeffIndex]];
                    for (int ii = 0; ii < p_final[coeffIndex]; ii++) {
                        period[ii] = period_start[kk] - ii;
                        if (period_start[kk] - p_final[coeffIndex] + 1 <= 0) {
                            if (period[ii] <= 0) {
                                period[ii] = period[ii] + p_final[coeffIndex];
                            }
                        }
                    }
                    num[kk] = (int) coeffHist[adjustedCoeffs[kk]].doubleValue();
                    denom[kk] = 0;
                    for (int ll = 0; ll < period.length; ll++) {
                        denom[kk] = denom[kk] + (int) coeffHist[period[ll]].doubleValue();
                    }
                }
                // P_u: share of the period's mass at this coefficient's bin;
                // P_t: uniform expectation 1/period.
                P_u[kk] = ((double) num[kk] / denom[kk]);
                P_t[kk] = (1.0 / p_final[coeffIndex]);
                if (P_u[kk] + P_t[kk] != 0) {
                    pTampered[coeffIndex][kk] = P_t[kk] / (P_u[kk] + P_t[kk]);
                    pUntampered[coeffIndex][kk] = P_u[kk] / (P_u[kk] + P_t[kk]);
                } else {
                    pTampered[coeffIndex][kk] = 0.5;
                    pUntampered[coeffIndex][kk] = 0.5;
                }
            }
        } else {
            // Period 1 carries no information: both outcomes equally likely.
            for (int kk = 0; kk < selectedCoeffs.size(); kk++) {
                pTampered[coeffIndex][kk] = 0.5;
                pUntampered[coeffIndex][kk] = 0.5;
            }
        }
    }
    // Fuse the per-mode probabilities per location by taking the product of
    // each hypothesis and renormalizing.
    double[] pTamperedOverall = new double[pTampered[0].length];
    double pTamperedProd;
    double pUntamperedProd;
    for (int locationIndex = 0; locationIndex < pTampered[0].length; locationIndex++) {
        pTamperedProd = 1;
        pUntamperedProd = 1;
        for (int coeffIndex = 0; coeffIndex < pTampered.length; coeffIndex++) {
            pTamperedProd = pTamperedProd * pTampered[coeffIndex][locationIndex];
            pUntamperedProd = pUntamperedProd * pUntampered[coeffIndex][locationIndex];
        }
        if (pTamperedProd + pUntamperedProd != 0) {
            pTamperedOverall[locationIndex] = pTamperedProd / (pTamperedProd + pUntamperedProd);
        } else {
            pTamperedOverall[locationIndex] = 0;
        }
    }
    // Reshape the flat per-location vector into a blocksV x blocksH map
    // (column-major order) and track the observed probability range.
    int blocksH = imWidth / 8;
    int blocksV = imHeight / 8;
    double[][] outputMap = new double[blocksV][blocksH];
    for (int kk = 0; kk < pTamperedOverall.length; kk++) {
        outputMap[kk % blocksV][(int) Math.floor(kk / blocksV)] = pTamperedOverall[kk];
        if (pTamperedOverall[kk] > maxProbValue) {
            maxProbValue = pTamperedOverall[kk];
        }
        if (pTamperedOverall[kk] < minProbValue) {
            minProbValue = pTamperedOverall[kk];
        }
    }
    probabilityMap = outputMap;
    // Render the map with a jet colormap for display.
    BufferedImage outputIm = visualizeWithJet(outputMap);
    displaySurface = outputIm;
}
From source file:br.ufpe.soot.instrument.bitrep.BitConfigRep.java
@Override public IConfigRep union(IConfigRep rep) { if (rep instanceof BitConfigRep) { BitConfigRep bitRep = (BitConfigRep) rep; return BitVectorConfigRep.union(this, bitRep, Collections.max(((Collection<Integer>) atoms.values())), atoms);//ww w. j a v a 2 s . c o m } else { throw new UnsupportedOperationException(); } }
From source file:org.kuali.kra.questionnaire.QuestionnaireLookupableHelperServiceImpl.java
/**
 * Looks up the questionnaire with the given id and, when several versions
 * match, returns the one that compares highest under natural ordering.
 *
 * @param questionnaireId the questionnaire id to match; may be {@code null}
 * @return the maximal matching questionnaire, or {@code null} when the id is
 *         {@code null} or nothing matches
 */
protected Questionnaire getQuestionnaireById(String questionnaireId) {
    if (questionnaireId == null) {
        return null;
    }
    Map<String, Object> criteria = new HashMap<String, Object>();
    criteria.put(QUESTIONNAIRE_ID, questionnaireId);
    Collection<Questionnaire> matches = getBusinessObjectService().findMatching(Questionnaire.class,
            criteria);
    if (matches.isEmpty()) {
        return null;
    }
    return (Questionnaire) Collections.max(matches);
}
From source file:com.github.mhendred.face4j.model.Face.java
/**
 * Returns the {@link Guess} with the highest confidence for this face.
 *
 * @return the maximal guess under natural ordering, or an empty
 *         {@code Guess} when there are no guesses
 */
public Guess getGuess() {
    // Collections.max would throw NoSuchElementException for an empty
    // collection; check up front instead of catching.
    if (guesses.isEmpty()) {
        return new Guess();
    }
    return Collections.max(guesses);
}
From source file:br.ufpe.cin.emergo.instrument.bitrep.BitConfigRep.java
@Override public IConfigRep union(IConfigRep rep) { if (rep instanceof BitConfigRep) { BitConfigRep bitRep = (BitConfigRep) rep; int size = 1; if (!atoms.values().isEmpty()) { size = Collections.max(((Collection<Integer>) atoms.values())); }//from w ww. j a v a2s. c om return BitVectorConfigRep.union(this, bitRep, size, atoms); } else { throw new UnsupportedOperationException(); } }
From source file:org.apache.jackrabbit.oak.plugins.segment.file.TarReader.java
/**
 * Opens a tar file read-only, picking only the latest generation for the
 * given base name. If no valid index is found and {@code recover} is set,
 * the entries are salvaged into a fresh '.ro.bak' tar file and that file is
 * opened instead.
 *
 * @param files         generation marker -> tar file; the max key is the latest
 * @param memoryMapping whether to memory-map the file
 * @param recover       whether to attempt read-only recovery on a bad index
 * @return a reader for the (possibly recovered) tar file
 * @throws IOException if neither the original nor the recovered file can be opened
 */
static TarReader openRO(Map<Character, File> files, boolean memoryMapping, boolean recover)
        throws IOException {
    // for readonly store only try the latest generation of a given
    // tar file to prevent any rollback or rewrite
    File file = files.get(Collections.max(files.keySet()));
    TarReader reader = openFirstFileWithValidIndex(singletonList(file), memoryMapping);
    if (reader != null) {
        return reader;
    }
    if (recover) {
        log.warn("Could not find a valid tar index in {}, recovering read-only", file);
        // collecting the entries (without touching the original file) and
        // writing them into an artificial tar file '.ro.bak'
        LinkedHashMap<UUID, byte[]> entries = newLinkedHashMap();
        collectFileEntries(file, entries, false);
        file = findAvailGen(file, ".ro.bak");
        generateTarFile(entries, file);
        // Retry against the freshly generated backup file.
        reader = openFirstFileWithValidIndex(singletonList(file), memoryMapping);
        if (reader != null) {
            return reader;
        }
    }
    throw new IOException("Failed to open tar file " + file);
}
From source file:pt.webdetails.cda.utils.TableModelUtils.java
/**
 * Projects a table model onto the given column indexes and optionally drops
 * rows rejected by {@code rowFilter}.
 *
 * NOTE(review): when {@code outputIndexes} is empty and a row filter is
 * present, this method mutates the caller-supplied list by appending every
 * column index — confirm callers tolerate this side effect.
 *
 * @param table         the source table model
 * @param outputIndexes column indexes to keep, in output order; may be
 *                      extended in place (see note above)
 * @param rowFilter     (optional) row predicate; {@code null} keeps all rows
 * @return a new filtered model, or {@code table} itself when there is
 *         nothing to do or its metadata is empty
 * @throws InvalidOutputIndexException if an output index exceeds the column count
 */
private static TableModel filterTable(final TableModel table, List<Integer> outputIndexes,
        final DataTableFilter rowFilter) throws InvalidOutputIndexException {
    int columnCount = outputIndexes.size();
    if (columnCount == 0 && rowFilter != null) {
        // Still have to go through the motions if we need to filter rows:
        // select every column so the row filter can run.
        for (int i = 0; i < table.getColumnCount(); i++) {
            outputIndexes.add(i);
        }
        columnCount = outputIndexes.size();
    }
    if (columnCount != 0) {
        // Validate that the largest requested index exists in the table.
        if ((Collections.max(outputIndexes) > table.getColumnCount() - 1)) {
            String errorMessage = String.format(
                    "Output index higher than number of columns in tableModel. %s > %s",
                    Collections.max(outputIndexes), table.getColumnCount());
            logger.error(errorMessage);
            if (table.getColumnCount() > 0) {
                throw new InvalidOutputIndexException(errorMessage, null);
            } else {
                // Empty metadata: cannot validate, pass the table through.
                logger.warn(
                        "Unable to validate output indexes because table metadata is empty. Returning table.");
                return table;
            }
        }
        final int rowCount = table.getRowCount();
        logger.debug(rowCount == 0 ? "No data found" : "Found " + rowCount + " rows");
        final Class<?>[] colTypes = new Class[columnCount];
        final String[] colNames = new String[columnCount];
        // Just set the number of rows/columns; names/types are filled below.
        final TypedTableModel typedTableModel = new TypedTableModel(colNames, colTypes, rowCount);
        // rowOut lags rowIn by the number of filtered-out rows.
        for (int rowIn = 0, rowOut = 0; rowIn < rowCount; rowIn++, rowOut++) {
            // Filter rows.
            if (rowFilter != null && !rowFilter.rowContainsSearchTerms(table, rowIn)) {
                rowOut--;
                continue;
            }
            // Filter columns: copy only the selected indexes, in order.
            for (int j = 0; j < outputIndexes.size(); j++) {
                final int outputIndex = outputIndexes.get(j);
                typedTableModel.setValueAt(table.getValueAt(rowIn, outputIndex), rowOut, j);
            }
        }
        // Since we set the calculated table model to infer types, they will
        // be available after rows are evaluated.
        for (int i = 0; i < outputIndexes.size(); i++) {
            final int outputIndex = outputIndexes.get(i);
            typedTableModel.setColumnName(i, table.getColumnName(outputIndex));
            typedTableModel.setColumnType(i, table.getColumnClass(outputIndex));
        }
        return typedTableModel;
    }
    return table;
}