Example usage for java.util Collections reverseOrder

List of usage examples for java.util Collections reverseOrder

Introduction

On this page you can find example usages of java.util.Collections.reverseOrder.

Prototype

@SuppressWarnings("unchecked")
public static <T> Comparator<T> reverseOrder() 

Source Link

Document

Returns a comparator that imposes the reverse of the natural ordering on a collection of objects that implement the Comparable interface.

Usage

From source file:com.vgi.mafscaling.OpenLoop.java

/**
 * Applies averaged AFR error corrections to a fresh copy of the gs values.
 * For each index that has error samples in {@code result}, the samples are
 * averaged and applied as a percentage correction; indexes past the last
 * sampled one get an extrapolated correction derived from the 10 largest
 * errors of the last sampled cell.
 *
 * @param result map from gsCorrected index to the list of error percentages
 *               sampled for that cell
 *               (NOTE(review): keys are assumed to be gsCorrected indexes —
 *               confirm against the caller)
 */
private void calculateCorrectedGS(TreeMap<Integer, ArrayList<Double>> result) {
    ArrayList<Double> closestVolatageArray;
    double gs = 0;
    double avgError = 0;
    int lastErrIndex = 0;
    int i;
    // Start from the uncorrected gs values.
    gsCorrected.addAll(gsArray);
    for (i = 0; i < gsCorrected.size(); ++i) {
        gs = gsCorrected.get(i);
        avgError = 0;
        closestVolatageArray = result.get(i);
        if (closestVolatageArray != null) {
            // Average the error samples collected for this cell.
            for (int j = 0; j < closestVolatageArray.size(); ++j)
                avgError += closestVolatageArray.get(j);
            avgError /= closestVolatageArray.size();
            lastErrIndex = i;
        }
        // Cells without samples keep avgError == 0, i.e. no correction.
        gsCorrected.set(i, gs * (1 + avgError / 100.0));
    }
    avgError = 0;
    // Estimate the tail correction from the 10 largest errors of the last
    // sampled cell (sorted descending).
    // NOTE(review): if 'result' has no entry for index 0 and no cell was ever
    // sampled, result.get(lastErrIndex) is null and the sort below would NPE —
    // confirm callers always supply at least one sampled cell.
    ArrayList<Double> sortedAfrArray = result.get(lastErrIndex);
    Collections.sort(sortedAfrArray, Collections.reverseOrder());
    for (i = 0; i < 10 && i < sortedAfrArray.size(); ++i)
        avgError += sortedAfrArray.get(i);
    if (i > 0)
        avgError /= i;
    // Extrapolate the correction to the remaining (unsampled) tail cells.
    for (i = lastErrIndex + 1; i < gsCorrected.size(); ++i) {
        gs = gsCorrected.get(i);
        gsCorrected.set(i, gs + (gs * 0.01 * avgError));
    }
}

From source file:org.fenixedu.academic.domain.Teacher.java

/**
 * Streams this teacher's authorizations sorted by the reverse of their
 * natural ordering (i.e. "largest" authorization first).
 *
 * @return a sorted stream over the authorization set
 */
public Stream<TeacherAuthorization> getTeacherAuthorizationStream() {
    final java.util.Comparator<TeacherAuthorization> reversedNaturalOrder = Collections.reverseOrder();
    return getAuthorizationSet().stream().sorted(reversedNaturalOrder);
}

From source file:eu.planets_project.tb.gui.backing.data.DigitalObjectCompare.java

/**
 * @return/*  w ww  . j  av  a2 s.co  m*/
 */
public List<MeasuredComparisonBean> getMeasurementComparisons() {
    List<MeasuredComparisonBean> ms = new ArrayList<MeasuredComparisonBean>();
    ResultsForDigitalObjectBean res = new ResultsForDigitalObjectBean(this.getDobUri1());
    if (res == null || res.getExecutionRecord() == null) {
        if (this.me != null) {
            log.info("Pulling getExperimentMeasurements from the temporary space.");
            ms.addAll(MeasuredComparisonBean.createFromEvents(this.getDobUri1(), this.getDobUri2(), null,
                    this.me));
        }
        log.info("Got getExperimentMeasurements " + ms.size());
        return ms;
    }
    // Otherwise, pull from DB:
    log.info("Pulling getExperimentMeasurements from the DB.");
    Set<MeasurementEventImpl> measurementEvents = res.getExecutionRecord().getMeasurementEvents();
    List<MeasurementEventImpl> mevl = new ArrayList<MeasurementEventImpl>(measurementEvents);
    Collections.sort(mevl, Collections.reverseOrder());
    ms.addAll(MeasuredComparisonBean.createFromEvents(this.getDobUri1(), this.getDobUri2(),
            res.getExecutionRecord().getPropertyEvaluation(),
            mevl.toArray(new MeasurementEventImpl[mevl.size()])));
    log.info("Got getExperimentMeasurements from Events, " + ms.size() + " out of " + mevl.size());
    return ms;
}

From source file:org.csml.tommo.sugar.analysis.OpenedFileCache.java

/**
 * Collects the quality thresholds of all cached entries that refer to the
 * given sequence file and matrix size, sorted in descending order.
 *
 * @param selectedSequenceFile the sequence file to match against the cache
 * @param matrixSize the matrix size a cached entry must have
 * @return the matching quality thresholds, highest first
 */
public Integer[] getAvailableQualityThresholds(SequenceFile selectedSequenceFile, int matrixSize) {
    List<Integer> thresholds = new ArrayList<Integer>();

    for (CachedFile entry : cache.keySet()) {
        boolean sameFile = entry.getFile().equals(selectedSequenceFile.getFile());
        boolean sameMatrixSize = entry.getMatrixSize() == matrixSize;
        if (sameFile && sameMatrixSize) {
            thresholds.add(entry.getQualityThreshold());
        }
    }

    // Highest thresholds first.
    Collections.sort(thresholds, Collections.<Integer>reverseOrder());
    return thresholds.toArray(new Integer[0]);
}

From source file:org.dkpro.similarity.experiments.rte.util.Evaluator.java

/**
 * Evaluates a classifier's predictions on a dataset with the given metric
 * (Accuracy, CWS, or AveragePrecision), writes the resulting score to
 * {@code OUTPUT_DIR/<dataset>/<classifier>/<dataset>_<metric>.txt}, and
 * echoes it to stdout.
 *
 * @param wekaClassifier the classifier whose output files are evaluated
 * @param metric the evaluation metric to compute
 * @param dataset the dataset whose gold/experimental files are read
 * @throws IOException if any of the score files cannot be read or written
 */
public static void runEvaluationMetric(WekaClassifier wekaClassifier, EvaluationMetric metric, Dataset dataset)
        throws IOException {
    StringBuilder sb = new StringBuilder();

    if (metric == Accuracy) {
        // Read gold scores
        List<String> goldScores = FileUtils.readLines(new File(GOLD_DIR + "/" + dataset.toString() + ".txt"));

        // Read the experimental scores
        List<String> expScores = FileUtils.readLines(new File(OUTPUT_DIR + "/" + dataset.toString() + "/"
                + wekaClassifier.toString() + "/" + dataset.toString() + ".csv"));

        // Compute the accuracy
        double acc = 0.0;
        for (int i = 0; i < goldScores.size(); i++) {
            // The predictions have a max length of 8 characters...
            // ...so compare only the first (up to) 8 characters of each label.
            if (goldScores.get(i).substring(0, Math.min(goldScores.get(i).length(), 8))
                    .equals(expScores.get(i).substring(0, Math.min(expScores.get(i).length(), 8))))
                acc++;
        }
        acc = acc / goldScores.size();

        sb.append(acc);
    }
    if (metric == CWS) {
        // Read gold scores
        List<String> goldScores = FileUtils.readLines(new File(GOLD_DIR + "/" + dataset.toString() + ".txt"));

        // Read the experimental scores
        List<String> expScores = FileUtils.readLines(new File(OUTPUT_DIR + "/" + dataset.toString() + "/"
                + wekaClassifier.toString() + "/" + dataset.toString() + ".csv"));

        // Read the confidence scores
        List<String> probabilities = FileUtils.readLines(new File(OUTPUT_DIR + "/" + dataset.toString() + "/"
                + wekaClassifier.toString() + "/" + dataset.toString() + ".probabilities.csv"));

        // Combine the data: one CwsData per (probability, gold, experimental) triple.
        List<CwsData> data = new ArrayList<CwsData>();

        for (int i = 0; i < goldScores.size(); i++) {
            CwsData cws = (new Evaluator()).new CwsData(Double.parseDouble(probabilities.get(i)),
                    goldScores.get(i), expScores.get(i));
            data.add(cws);
        }

        // Sort in descending order
        Collections.sort(data, Collections.reverseOrder());

        // Compute the CWS score: the mean, over all prefixes of the
        // confidence-ranked list, of the precision within that prefix.
        double cwsScore = 0.0;
        for (int i = 0; i < data.size(); i++) {
            double cws_sub = 0.0;
            for (int j = 0; j <= i; j++) {
                if (data.get(j).isCorrect())
                    cws_sub++;
            }
            cws_sub /= (i + 1);

            cwsScore += cws_sub;
        }
        cwsScore /= data.size();

        sb.append(cwsScore);
    }
    if (metric == AveragePrecision) {
        // Read gold scores
        List<String> goldScores = FileUtils.readLines(new File(GOLD_DIR + "/" + dataset.toString() + ".txt"));

        // Trim to 8 characters (predictions are capped at 8 characters)
        for (int i = 0; i < goldScores.size(); i++)
            if (goldScores.get(i).length() > 8)
                goldScores.set(i, goldScores.get(i).substring(0, 8));

        // Read the experimental scores
        List<String> expScores = FileUtils.readLines(new File(OUTPUT_DIR + "/" + dataset.toString() + "/"
                + wekaClassifier.toString() + "/" + dataset.toString() + ".csv"));

        // Trim to 8 characters
        for (int i = 0; i < expScores.size(); i++)
            if (expScores.get(i).length() > 8)
                expScores.set(i, expScores.get(i).substring(0, 8));

        // Read the confidence scores
        List<String> probabilities = FileUtils.readLines(new File(OUTPUT_DIR + "/" + dataset.toString() + "/"
                + wekaClassifier.toString() + "/" + dataset.toString() + ".probabilities.csv"));

        // Conflate UNKONWN + CONTRADICTION classes for 3-way classifications
        // by mapping them all onto the single negative label "FALSE".
        if (RteUtil.hasThreeWayClassification(dataset)) {
            // Gold
            for (int i = 0; i < goldScores.size(); i++)
                if (goldScores.get(i).equals("CONTRADI") || goldScores.get(i).equals("NO")
                        || goldScores.get(i).equals("FALSE"))
                    goldScores.set(i, "FALSE");

            // Experimental
            for (int i = 0; i < expScores.size(); i++)
                if (expScores.get(i).equals("CONTRADI") || expScores.get(i).equals("NO")
                        || expScores.get(i).equals("FALSE"))
                    expScores.set(i, "FALSE");
        }

        // Combine the data
        List<CwsData> data = new ArrayList<CwsData>();

        for (int i = 0; i < goldScores.size(); i++) {
            CwsData cws = (new Evaluator()).new CwsData(Double.parseDouble(probabilities.get(i)),
                    goldScores.get(i), expScores.get(i));
            data.add(cws);
        }

        // Sort in descending order
        Collections.sort(data, Collections.reverseOrder());

        // Compute the average precision over the confidence-ranked list:
        // for every positive pair, take the precision of the prefix ending
        // at its rank, then average over the number of positive pairs.
        // NOTE(review): if there are no positive pairs, numPositive stays 0
        // and the final division yields NaN — confirm inputs always contain
        // at least one positive pair.
        double avgPrec = 0.0;
        int numPositive = 0;
        for (int i = 0; i < data.size(); i++) {
            double ap_sub = 0.0;
            if (data.get(i).isPositivePair()) {
                numPositive++;

                for (int j = 0; j <= i; j++) {
                    if (data.get(j).isCorrect())
                        ap_sub++;
                }
                ap_sub /= (i + 1);
            }

            avgPrec += ap_sub;
        }
        avgPrec /= numPositive;

        sb.append(avgPrec);
    }

    // Persist the metric value and echo it to the console.
    FileUtils.writeStringToFile(new File(OUTPUT_DIR + "/" + dataset.toString() + "/" + wekaClassifier.toString()
            + "/" + dataset.toString() + "_" + metric.toString() + ".txt"), sb.toString());

    System.out.println("[" + wekaClassifier.toString() + "] " + metric.toString() + ": " + sb.toString());
}

From source file:info.extensiblecatalog.OAIToolkit.api.Importer.java

/**
 * Converts all MARC files found under the configured source directory to
 * MARCXML, optionally loading them into the repository, moving each input
 * to the destination directory on success or to the error directories on
 * failure, and finally removing the (now empty) source subdirectories.
 *
 * Fix over the previous version: if listing the source files or directories
 * threw a FileNotFoundException, the {@code files}/{@code dirs} arrays
 * stayed {@code null} and the subsequent {@code files.length} /
 * {@code Arrays.sort(dirs, ...)} calls crashed with a
 * NullPointerException. Both cases are now guarded.
 */
private void convert() {
    if (!configuration.checkSourceDir() || !configuration.checkDestinationDir()
            || !configuration.checkDestinationXmlDir() || !configuration.errorDir()
            || !configuration.errorXmlDir()) {
        return;
    }

    // if there is a load command, change the xml destination dir to
    // temporary xml dir and store the original destination xml dir
    //String originalDestinationXmlDir = null;
    if (configuration.isNeedLoad()) {
        //originalDestinationXmlDir = configuration.getDestinationXmlDir();
        //Log.info("originalDestinationXmlDir: " + originalDestinationXmlDir);
        //File original = new File(originalDestinationXmlDir);
        //Log.info(original.getAbsolutePath());
        //File parent = original.getParentFile();
        //File tempXml = new File(parent, "tempXml");
        File tempXml = dirNameGiver.getConvertTarget();
        if (!tempXml.exists()) {
            boolean created = tempXml.mkdir();
            if (!created) {
                prglog.error("[PRG] Unable to create temporary dir: " + tempXml);
            }
        }
        //configuration.setDestinationXmlDir(tempXml.getName());
    }

    if (configuration.isNeedLogDetail()) {
        libconvertlog.info(" *********** START OF CONVERT PROCESS ************ \n");
        prglog.info("[PRG] Start conversion from MARC files " + "at " + dirNameGiver.getConvertSource()
                + " to MARCXML files at " + dirNameGiver.getConvertTarget());
        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
        Date convStartDate = new Date();
        libconvertlog.info("[LIB] Conversion started at " + dateFormat.format(convStartDate));
        libconvertlog.info("[LIB] Start conversion from MARC files " + "at " + dirNameGiver.getConvertSource()
                + " to MARCXML files at " + dirNameGiver.getConvertTarget() + "\n\n");
    }

    // Configure the converter from the current configuration.
    Converter converter = new Converter();
    if (null != configuration.getMarcEncoding()) {
        converter.setEncoding(configuration.getMarcEncoding());
    }

    if (null != configuration.getCharConversion()) {
        converter.setConvertEncoding(configuration.getCharConversion());
    }

    if (configuration.isNeedModify() && configuration.isProductionMode()) {
        converter.setModifier(new Modifier(configuration));
    }

    if (configuration.isNeedLoad() && configuration.isProductionMode()) {
        initRecordImporter();
        converter.setRecordImporter(recordImporter);
        importStatistics = new LoadStatistics();
    }

    converter.setSplitSize(configuration.getSplitSize());
    converter.setDoIndentXml(configuration.isDoIndentXml());
    converter.setErrorDir(dirNameGiver.getConvertError().getAbsolutePath());
    converter.setCreateXml11(configuration.isCreateXml11());
    converter.setTranslateLeaderBadCharsToZero(configuration.isTranslateLeaderBadCharsToZero());
    converter.setTranslateNonleaderBadCharsToSpaces(configuration.isTranslateNonleaderBadCharsToSpaces());
    converter.setIgnoreRepositoryCode(configuration.doesIgnoreRepositoryCode());
    converter.setDefaultRepositoryCode(configuration.getDefaultRepositoryCode());

    prglog.info("[PRG] " + converter.getSettings());
    File[] files = null;

    File fSourceDir = dirNameGiver.getConvertSource();
    FileListing f1 = new FileListing();
    try {
        List<File> fileslist = f1.getFileListing(fSourceDir);
        int filesize = fileslist.size();
        files = new File[filesize];
        files = fileslist.toArray(files);
    } catch (FileNotFoundException fe) {
        prglog.error("Exception" + fe);
    }

    // Guard against a failed listing: previously 'files' stayed null here
    // and 'files.length' below threw a NullPointerException.
    if (files == null) {
        prglog.error("[PRG] Unable to list MARC files in source directory: " + fSourceDir);
        return;
    }

    //File[] files = fSourceDir.listFiles(new MARCFileNameFilter());
    if (0 == files.length) {
        prglog.warn("[PRG] There's no MARC file in the source directory: " + configuration.getSourceDir());
    }
    Arrays.sort(files, new FileNameComparator());

    conversionStatistics = new ConversionStatistics();

    for (File marcFile : files) {
        ConversionStatistics fileStatistics = null;

        File xmlFile = new File(configuration.getDestinationXmlDir(),
                marcFile.getName().replaceAll(".mrc$", ".xml"));
        try {
            // setting the XML file
            if (configuration.isNeedLogDetail()) {
                prglog.info("[PRG] Converting " + marcFile.getName() + " to " + xmlFile.getName());
                libconvertlog
                        .info("[LIB] Converting " + marcFile.getName() + " to " + xmlFile.getName() + "\n\n");
            }

            // CONVERT !!!!
            fileStatistics = converter.convert(marcFile, xmlFile);

            if (configuration.isNeedLogDetail()) {
                prglog.info("[PRG] " + fileStatistics.toString(marcFile.getName()));
                if (importStatistics != null) {
                    prglog.info("[PRG] " + converter.getLoadStatistics().toString(marcFile.getName()));
                }
            }

            if (configuration.isNeedLogDetail()) {
                prglog.info(
                        "[PRG] Moving " + marcFile.getName() + " to " + dirNameGiver.getConvertDestination());
            }
            // setting the destination file
            File successFile = new File(dirNameGiver.getConvertDestination(), marcFile.getName());

            // delete if exists (otherwise the moving won't success)
            if (successFile.exists()) {
                boolean deleted = successFile.delete();
                prglog.info("[PRG] Delete " + successFile + " - " + deleted);
            }

            // remove
            boolean remove = marcFile.renameTo(successFile);
            if (configuration.isNeedLogDetail()) {
                prglog.info("[PRG] remove marc file (" + marcFile.getName() + ") to "
                        + dirNameGiver.getConvertDestination() + ": " + remove);
            }

        } catch (Exception e) {
            if (e instanceof MarcException) {
                prglog.error(
                        "[PRG] " + e.getMessage() + ". The last successfully read record's Control Number is "
                                + converter.getControlNumberOfLastReadRecord()
                                + ". The error may be in the next record.");
            } else {
                e.printStackTrace();
                prglog.error("[PRG] " + e);
            }
            // copy marcFile -> errorDir
            File errorFile = new File(configuration.getErrorDir(), marcFile.getName());
            if (errorFile.exists()) {
                boolean deleted = errorFile.delete();
                if (deleted) {
                    prglog.info("[PRG] Delete " + errorFile + ".");
                } else {
                    prglog.error("[PRG] Unable to delete " + errorFile + ".");
                }
            }
            boolean remove = marcFile.renameTo(errorFile);
            if (configuration.isNeedLogDetail()) {
                prglog.info("[PRG] remove MARC to error directory: " + remove);
            }

            if (xmlFile.exists()) {
                File xmlErrorFile = new File(configuration.getErrorXmlDir(), xmlFile.getName());
                if (xmlErrorFile.exists()) {
                    boolean deleted = xmlErrorFile.delete();
                    if (deleted) {
                        prglog.info("[PRG] Delete " + xmlErrorFile);
                    } else {
                        prglog.error("[PRG] Unable to delete " + xmlErrorFile);
                    }
                }
                remove = xmlFile.renameTo(xmlErrorFile);
                if (configuration.isNeedLogDetail()) {
                    prglog.info("[PRG] remove XML to error_xml directory: " + remove);
                }
            }
        }

        if (fileStatistics != null) {
            conversionStatistics.add(fileStatistics);
        }
        if (importStatistics != null) {
            importStatistics.add(converter.getLoadStatistics());
        }

    }

    //Delete the directories
    File[] dirs = null;

    DirectoryListing d1 = new DirectoryListing();
    try {
        List<File> dirslist = d1.getDirectoryListing(fSourceDir);
        int dirsize = dirslist.size();
        dirs = new File[dirsize];
        dirs = dirslist.toArray(dirs);
    } catch (FileNotFoundException fe) {
        prglog.error("Exception" + fe);
    }

    // Guard against a failed directory listing (previously an NPE).
    // Deepest paths first so children are deleted before their parents.
    if (dirs != null) {
        Arrays.sort(dirs, Collections.reverseOrder());
        for (File remfile : dirs) {
            if (remfile.isDirectory()) {
                remfile.delete();
            }
        }
    }

    if (configuration.isNeedLogDetail()) {
        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
        Date convEndDate = new Date();
        libconvertlog.info("[LIB] Conversion completed at " + dateFormat.format(convEndDate));
        prglog.info("[PRG] Conversion statistics summary: " + conversionStatistics.toString());
        libconvertlog.info("[LIB] Conversion statistics summary: " + conversionStatistics.toString() + "\n");
        libconvertlog.info(" *********** END OF CONVERT PROCESS ************ \n");
        if (importStatistics != null) {
            prglog.info("[PRG] Load statistics summary: " + importStatistics.toString());
            libloadlog.info("[LIB] Load statistics summary: " + importStatistics.toString());
        }
    }

    if (recordImporter != null) {
        recordImporter.optimize();
    }

    // if there is a load command, change the source dir to
    // temporary xml dir and restore the original destination xml dir
    /*
    if(configuration.isNeedLoad() && null != originalDestinationXmlDir) {
       configuration.setSourceDir(configuration.getDestinationXmlDir());
       configuration.setDestinationXmlDir(originalDestinationXmlDir);
    }
    */
}

From source file:com.sitewhere.hbase.device.HBaseDeviceEvent.java

/**
 * Find all event rows associated with a site and return values that match the search
 * criteria. TODO: This is not optimized at all and will take forever in cases where
 * there are ton of assignments and events. It has to go through every record
 * associated with the site. It works for now though.
 *
 * @param hbase client used to obtain the events table
 * @param siteToken token identifying the site whose events are scanned
 * @param eventType assignment record type the qualifier must match
 * @param criteria date range and paging criteria applied to each event
 * @return pager over the matching event payloads, newest first
 * @throws SiteWhereException if the site token is unknown or the scan fails
 */
protected static Pager<byte[]> getEventRowsForSite(ISiteWhereHBaseClient hbase, String siteToken,
        DeviceAssignmentRecordType eventType, IDateRangeSearchCriteria criteria) throws SiteWhereException {
    Long siteId = IdManager.getInstance().getSiteKeys().getValue(siteToken);
    if (siteId == null) {
        throw new SiteWhereSystemException(ErrorCode.InvalidSiteToken, ErrorLevel.ERROR);
    }
    // Scan the half-open row-key range covering all assignments of this site.
    byte[] startPrefix = HBaseSite.getAssignmentRowKey(siteId);
    byte[] afterPrefix = HBaseSite.getAfterAssignmentRowKey(siteId);

    HTableInterface events = null;
    ResultScanner scanner = null;
    try {
        events = hbase.getTableInterface(ISiteWhereHBase.EVENTS_TABLE_NAME);
        Scan scan = new Scan();
        scan.setStartRow(startPrefix);
        scan.setStopRow(afterPrefix);
        scanner = events.getScanner(scan);

        List<DatedByteArray> matches = new ArrayList<DatedByteArray>();
        Iterator<Result> results = scanner.iterator();
        while (results.hasNext()) {
            Result current = results.next();
            byte[] key = current.getRow();
            // Keys longer than 7 bytes carry event data (shorter ones are
            // assignment marker rows) — NOTE(review): threshold inferred
            // from usage; confirm against the row-key layout.
            if (key.length > 7) {
                Map<byte[], byte[]> cells = current.getFamilyMap(ISiteWhereHBase.FAMILY_ID);
                for (byte[] qual : cells.keySet()) {
                    byte[] value = cells.get(qual);
                    // Byte 3 of the qualifier encodes the event record type.
                    if ((qual.length > 3) && (qual[3] == eventType.getType())) {
                        Date eventDate = getDateForEventKeyValue(key, qual);
                        // Skip events outside the requested date window.
                        if ((criteria.getStartDate() != null) && (eventDate.before(criteria.getStartDate()))) {
                            continue;
                        }
                        if ((criteria.getEndDate() != null) && (eventDate.after(criteria.getEndDate()))) {
                            continue;
                        }
                        matches.add(new DatedByteArray(eventDate, value));
                    }
                }
            }
        }
        // Newest events first (reverse natural order of DatedByteArray).
        Collections.sort(matches, Collections.reverseOrder());
        Pager<byte[]> pager = new Pager<byte[]>(criteria);
        for (DatedByteArray match : matches) {
            pager.process(match.getJson());
        }
        return pager;
    } catch (IOException e) {
        throw new SiteWhereException("Error scanning event rows.", e);
    } finally {
        // Always release the scanner and table, even on failure.
        if (scanner != null) {
            scanner.close();
        }
        HBaseUtils.closeCleanly(events);
    }
}

From source file:de.fhg.fokus.odp.middleware.ckan.CKANGatewayUtil.java

/**
 * Sorts the passed map by its values in descending order. Entries with
 * equal values are ordered by descending key, matching the ordering the
 * previous implementation produced.
 * <p>
 * Fixes over the previous version: the passed map is no longer mutated
 * (it used to be emptied as a side effect), and the quadratic
 * value-matching scan is replaced by a single O(n log n) entry sort.
 * 
 * @param passedMap
 *            the HashMap to sort.
 * @return the sorted HashMap (a {@link LinkedHashMap} whose iteration
 *         order is by descending value).
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
private static LinkedHashMap sortHashMapByValues(HashMap<String, Long> passedMap) {
    List<Map.Entry<String, Long>> entries = new ArrayList<Map.Entry<String, Long>>(passedMap.entrySet());

    // Descending by value, ties broken by descending key.
    Collections.sort(entries, new Comparator<Map.Entry<String, Long>>() {
        public int compare(Map.Entry<String, Long> a, Map.Entry<String, Long> b) {
            int byValue = b.getValue().compareTo(a.getValue());
            if (byValue != 0) {
                return byValue;
            }
            return b.getKey().compareTo(a.getKey());
        }
    });

    LinkedHashMap<String, Long> sortedMap = new LinkedHashMap<String, Long>();
    for (Map.Entry<String, Long> entry : entries) {
        sortedMap.put(entry.getKey(), entry.getValue());
    }
    return sortedMap;
}

From source file:hivemall.topicmodel.OnlineLDAModel.java

/**
 * Returns, for topic {@code k}, the highest-weighted words grouped by their
 * normalized weight (lambda_k / sum of all lambda_k), ordered from highest
 * to lowest weight.
 *
 * @param k topic index into each word's lambda vector
 * @param topN maximum number of weight groups to return
 * @return map from normalized weight to the words sharing that weight,
 *         in descending weight order
 */
@Nonnull
public SortedMap<Float, List<String>> getTopicWords(@Nonnegative final int k, @Nonnegative int topN) {
    double lambdaSum = 0.d;
    // Group words by their raw lambda_k, highest value first.
    final SortedMap<Float, List<String>> sortedLambda = new TreeMap<Float, List<String>>(
            Collections.reverseOrder());

    for (Map.Entry<String, float[]> e : _lambda.entrySet()) {
        final float lambda_k = e.getValue()[k];
        lambdaSum += lambda_k;

        // Words with an identical lambda_k value share one map entry.
        List<String> labels = sortedLambda.get(lambda_k);
        if (labels == null) {
            labels = new ArrayList<String>();
            sortedLambda.put(lambda_k, labels);
        }
        labels.add(e.getKey());
    }

    final SortedMap<Float, List<String>> ret = new TreeMap<Float, List<String>>(Collections.reverseOrder());

    // NOTE(review): the bound uses the total word count (_lambda size), not the
    // number of distinct lambda values (sortedLambda size) — confirm intended.
    topN = Math.min(topN, _lambda.keySet().size());
    int tt = 0;
    for (Map.Entry<Float, List<String>> e : sortedLambda.entrySet()) {
        // Normalize each group's weight by the topic's total lambda mass.
        float key = (float) (e.getKey().floatValue() / lambdaSum);
        ret.put(Float.valueOf(key), e.getValue());

        if (++tt == topN) {
            break;
        }
    }

    return ret;
}

From source file:delfos.group.grs.consensus.ConsensusGRS.java

/**
 * Searches the consensus output directory for the "Consenso" output files
 * derived from the given input XML and returns the one with the highest
 * consensus degree that still satisfies the requested minimum.
 *
 * @param consensusInputXML the consensus input XML file whose outputs are searched
 * @param consensusDegree the minimum consensus degree required
 * @return the matching output file, or null when the input/output
 *         directories are invalid or no output files exist
 * @throws IllegalStateException if outputs exist but none reaches the
 *         requested consensus degree
 */
public File getConsensusOutputXMLwithDesiredConsensusDegree(File consensusInputXML, double consensusDegree) {
    File consensusOutputDirectory = (File) getParameterValue(CONSENSUS_OUTPUT_FILES_DIRECTORY);

    // Input file name without its extension; output files are matched by this prefix.
    String consensusInputXMLFileNameNoExtension = consensusInputXML.getName().substring(0,
            consensusInputXML.getName().lastIndexOf("."));

    String consensusInputXMLInOutputDirectoryAbsolutePath = consensusOutputDirectory.getAbsolutePath()
            + File.separator + consensusInputXMLFileNameNoExtension;

    File consensusInputXMLInOutputDirectory = new File(consensusInputXMLInOutputDirectoryAbsolutePath);

    // NOTE(review): this checks consensusInputXML but the warning refers to
    // consensusInputXMLInOutputDirectory — confirm which file should be tested.
    if (!consensusInputXML.exists()) {
        Global.showWarning("The input XML '" + consensusInputXMLInOutputDirectory
                + "' does not exists in the output directory");
        return null;
    }

    if (!consensusOutputDirectory.exists()) {
        Global.showWarning("'" + consensusOutputDirectory.getAbsolutePath() + "' not exists");
        return null;
    }

    if (!consensusOutputDirectory.isDirectory()) {
        Global.showWarning("'" + consensusOutputDirectory.getAbsolutePath() + "' is not a directory");
        return null;
    }

    // Collect candidate output files, highest consensus degree first.
    List<File> childrenFiles = new ArrayList<>(Arrays.asList(consensusOutputDirectory.listFiles()));
    PriorityQueue<PriorityItem<File>> queue = new PriorityQueue<>(Collections.reverseOrder());

    for (File consensusOutputFile : childrenFiles) {
        final String outputFileNameNoExtension = consensusOutputFile.getName().substring(0,
                consensusOutputFile.getName().lastIndexOf("."));
        // Only outputs derived from this input and marked as consensus files.
        if (outputFileNameNoExtension.startsWith(consensusInputXMLFileNameNoExtension)
                && outputFileNameNoExtension.contains("Consenso")) {
            try {
                Global.showln(consensusOutputFile.getAbsolutePath());
                double thisFileConsensusDegree = ConsensusOfIndividualRecommendationsToXML
                        .readConsensusOutputXML(consensusOutputFile).consensusDegree;

                queue.add(new PriorityItem<>(consensusOutputFile, thisFileConsensusDegree));
            } catch (JDOMException | IOException ex) {
                // Unreadable/invalid output files are skipped with a warning.
                Global.showWarning(ex);
            }
        }
    }

    if (queue.isEmpty()) {
        return null;
    }

    if (Global.isVerboseAnnoying()) {
        Global.showInfoMessage("Found " + queue.size() + " consensus files");
    }

    // Queue is ordered by descending priority, so the first file meeting the
    // threshold is the best available one.
    while (!queue.isEmpty()) {
        PriorityItem<File> priorityItem = queue.poll();

        double consensusDegreeThisFile = priorityItem.getPriority();

        if (consensusDegreeThisFile >= consensusDegree) {
            return priorityItem.getKey();
        }
    }

    throw new IllegalStateException(
            "Consensus degree not reached for '" + consensusInputXMLFileNameNoExtension + "'");
}