Example usage for java.util TreeMap firstKey

List of usage examples for java.util TreeMap firstKey

Introduction

On this page you can find example usage for java.util TreeMap firstKey.

Prototype

public K firstKey() 
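A minimal, self-contained sketch (not taken from the examples below): firstKey() returns the first (lowest) key currently in the map and throws NoSuchElementException if the map is empty.

import java.util.TreeMap;

public class FirstKeyDemo {
    public static void main(String[] args) {
        TreeMap<Double, String> distances = new TreeMap<>();
        distances.put(2.5, "b");
        distances.put(0.7, "a");
        distances.put(4.1, "c");

        // Keys are kept in sorted order, so firstKey() is the smallest key.
        double min = distances.firstKey();                    // 0.7
        String minValue = distances.firstEntry().getValue();  // "a"
        System.out.println(min + " -> " + minValue);

        // Calling firstKey() on an empty map throws NoSuchElementException.
        // new TreeMap<Double, String>().firstKey();
    }
}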

Usage

From source file:edu.dfci.cccb.mev.hcl.domain.simple.SimpleTwoDimensionalHclBuilder.java

private Node cluster(final Dataset dataset, Dimension dimension, Metric metric, Linkage linkage)
        throws DatasetException {
    final Type dimensionType = dimension.type();
    final RealMatrix original = toRealMatrix(dataset);
    final int size = dimensionType == ROW ? original.getRowDimension() : original.getColumnDimension();
    final int other = dimensionType == COLUMN ? original.getRowDimension() : original.getColumnDimension();
    Iterator<Integer> enumerator = new Iterator<Integer>() {

        private int counter = -1;

        @Override
        public boolean hasNext() {
            return true;
        }

        @Override
        public Integer next() {
            counter--;
            if (counter > 0)
                counter = -1;
            return counter;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    };
    final double[][] distances = new double[size][size];

    log.debug("Populating node hash");
    final Map<Integer, Node> genehash = new HashMap<Integer, Node>() {
        private static final long serialVersionUID = 1L;

        {
            for (int index = size; --index >= 0; put(index,
                    nodeBuilder().leaf(dataset.dimension(dimensionType).keys().get(index))))
                ;
        }
    };
    TreeMap<Double, int[]> sorted = new TreeMap<>();

    log.debug("Populating distance matrix");
    for (int i = 0; i < size; i++) {
        for (int j = i + 1; j < size; j++) {
            double distance = metric.distance(new AbstractList<Double>() {

                private int i;

                @Override
                public Double get(int index) {
                    return dimensionType == ROW ? original.getEntry(i, index) : original.getEntry(index, i);
                }

                @Override
                public int size() {
                    return other;
                }

                private List<Double> initializeProjection(int i) {
                    this.i = i;
                    return this;
                }
            }.initializeProjection(i), new AbstractList<Double>() {

                private int j;

                @Override
                public Double get(int index) {
                    return dimensionType == ROW ? original.getEntry(j, index) : original.getEntry(index, j);
                }

                @Override
                public int size() {
                    return other;
                }

                private List<Double> initializeProjection(int j) {
                    this.j = j;
                    return this;
                }
            }.initializeProjection(j));

            distances[i][j] = distance;
            distances[j][i] = distance;
            int[] genePair = { i, j };
            // Store the calculated distance and the gene pair in a TreeMap,
            // which keeps its entries sorted by distance automatically.
            sorted.put(distance, genePair);
        }
    }

    log.debug("Aggregating");
    while (true) {
        // Get the first key of the TreeMap; since keys are sorted, this is
        // the shortest remaining distance.
        final double minkey = (Double) sorted.firstKey();
        int[] minValues = (int[]) sorted.firstEntry().getValue();

        final int value1 = minValues[0], value2 = minValues[1];
        // find

        Node cluster = nodeBuilder().branch(minkey, genehash.get(value1), genehash.get(value2));
        int id = enumerator.next();

        genehash.put(id, cluster);
        genehash.remove(value1);
        genehash.remove(value2);

        if (genehash.size() <= 1)
            break;

        // Iterate over all the current clusters to remeasure distance with the
        // previously clustered group.
        for (Entry<Integer, Node> e : genehash.entrySet()) {
            Node c = e.getValue();
            // Skip measuring the new cluster with itself.
            if (c == cluster)
                continue;

            List<Double> aggregation = new ArrayList<>();
            // Get genes from each cluster. Distance is measured from each element
            // to every element.
            for (int current : traverse(dimension.keys(), c))
                for (int created : traverse(dimension.keys(), cluster))
                    aggregation.add(distances[current][created]);

            int[] valuePair = { e.getKey(), id };
            sorted.put(linkage.aggregate(aggregation), valuePair);
        }

        // Get the shortest distance, and make sure it does not include a gene
        // pair whose elements have already been clustered.
        boolean minimized = false;
        while (!minimized) {
            double mk = sorted.firstKey();
            minValues = sorted.firstEntry().getValue();
            // If the gene pair is not present in the current gene set, remove
            // this distance.
            if (!genehash.containsKey(minValues[0]) || !genehash.containsKey(minValues[1]))
                sorted.remove(mk);
            else
                minimized = true;
        }
    }

    Node result = genehash.entrySet().iterator().next().getValue();
    log.debug("Clustered " + result);
    return result;
}
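The method above uses the TreeMap as a minimum-priority lookup: distances are the keys, so firstKey() and firstEntry() always yield the smallest remaining distance. A minimal sketch of that pattern, with hypothetical data (note that a plain TreeMap keeps only one value per key, so two pairs at exactly the same distance would overwrite each other):

import java.util.TreeMap;

public class MinDistanceLookup {
    public static void main(String[] args) {
        // Distance -> pair of item indices; keys are kept sorted ascending.
        TreeMap<Double, int[]> sorted = new TreeMap<>();
        sorted.put(3.2, new int[] { 0, 2 });
        sorted.put(1.4, new int[] { 1, 3 });
        sorted.put(2.8, new int[] { 0, 1 });

        double minKey = sorted.firstKey();                   // 1.4
        int[] closestPair = sorted.firstEntry().getValue();  // { 1, 3 }
        System.out.println(minKey + " -> " + closestPair[0] + "," + closestPair[1]);

        // Remove the processed entry; firstKey() now returns the next smallest key (2.8).
        sorted.remove(minKey);
        System.out.println(sorted.firstKey());
    }
}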

From source file:statistic.ca.gui.JCAStatisticPanel.java

private void showDiagramm(String diagrammName) {
    if (diagrammName.equals("Grundinformationen")) {
        String[] columnNames = { "Bezeichnung", "Wert" };
        EvacuationCellularAutomaton tmpCA;
        int nrOfInd = 0;
        double evacSec = 0.0;
        double evacCAStep = 0;
        double notEvac = 0;
        double evac = 0;
        double notEvacNoExit = 0;
        double notEvacNoTime = 0;
        int bestEvacIndex = 0;
        int aveEvacIndex = 0;
        int worseEvacIndex = 0;
        TreeMap<Double, Integer> findMedian = new TreeMap<>();
        for (int i = 0; i < selectedBatchResultEntry.getCa().length; i++) {
            tmpCA = selectedBatchResultEntry.getCa()[i];
            nrOfInd += tmpCA.getInitialIndividualCount();
            evacSec += tmpCA.getSecondsPerStep() * tmpCA.getTimeStep();
            evacCAStep += tmpCA.getTimeStep();
            evac += tmpCA.getInitialIndividualCount() - tmpCA.deadIndividualsCount();
            notEvac += tmpCA.deadIndividualsCount();
            notEvacNoExit += tmpCA.getDeadIndividualCount(DeathCause.ExitUnreachable);// getNrOfExitUnreachableDeadIndividuals();
            notEvacNoTime += tmpCA.getDeadIndividualCount(DeathCause.NotEnoughTime);// getNrOfNotEnoughTimeDeadIndividuals();
            findMedian.put(tmpCA.getTimeStep() * tmpCA.getSecondsPerStep(), i);
        }
        bestEvacIndex = findMedian.firstEntry().getValue();
        for (int j = 0; j < findMedian.size() / 2; j++)
            findMedian.remove(findMedian.firstKey());
        aveEvacIndex = findMedian.get(findMedian.firstKey());
        worseEvacIndex = findMedian.get(findMedian.lastKey());
        Object[][] data = { { "Informationen für Modell", selectedBatchResultEntry.getName() },
                { "Evakuierungszeit in Sekunden", evacSec / selectedBatchResultEntry.getCa().length },
                { "Evakuierungszeit in ZA-Schritten", evacCAStep / selectedBatchResultEntry.getCa().length },
                { "Anzahl Individuen", (double) nrOfInd / selectedBatchResultEntry.getCa().length },
                { "evakuiert", evac / selectedBatchResultEntry.getCa().length },
                { "nicht evakuiert", notEvac / selectedBatchResultEntry.getCa().length },
                { "nicht evakuiert weil kein Ausgang erreichbar",
                        notEvacNoExit / selectedBatchResultEntry.getCa().length },
                { "nicht evakuiert weil die Zeit nicht gereicht hat",
                        notEvacNoTime / selectedBatchResultEntry.getCa().length },
                { "beste Evakuierungszeit (Durchlaufindex,Zeit)", ("(" + (bestEvacIndex + 1) + " - "
                        + (selectedBatchResultEntry.getCa()[bestEvacIndex].getTimeStep()
                                / selectedBatchResultEntry.getCa()[bestEvacIndex].getStepsPerSecond())
                        + ")") },
                { "durchschnit. Evakuierungszeit (Durchlaufindex,Zeit)", ("(" + (aveEvacIndex + 1) + " - "
                        + (selectedBatchResultEntry.getCa()[aveEvacIndex].getTimeStep()
                                / selectedBatchResultEntry.getCa()[bestEvacIndex].getStepsPerSecond())
                        + ")") },
                { "schlechteste Evakuierungszeit (Durchlaufindex,Zeit)",
                        ("(" + (worseEvacIndex + 1) + " - "
                                + (selectedBatchResultEntry.getCa()[worseEvacIndex].getTimeStep()
                                        / selectedBatchResultEntry.getCa()[bestEvacIndex].getStepsPerSecond())
                                + ")") } };
        basicInformationTable = new JTable(data, columnNames);
        basicInformationScrollPane = new JScrollPane(basicInformationTable);
        diagrams.addTable(diagrammName, basicInformationScrollPane, west);
    }

    if ((noIndividualsInAtLeastOneAssignmentIndex) && !(diagrammName.equals("Grundinformationen"))) {
        chartData = new ChartData("bar", "NO INDIVIDUALS in at least one of the choosed dataset(s)", "",
                new ArrayList<>(), new ArrayList<>());
        evakuierungsdauer = ChartFactory.createBarChart(
                "NO INDIVIDUALS in at least one of the choosed dataset(s)", "", chartData.getYAxisLabel(),
                chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true, false);
        diagrams.addChart("NO INDIVIDUALS in at least one of the choosed dataset(s)", evakuierungsdauer, west);
    } else {

        if (diagrammName.equals("Ausgangsverteilung")) {
            chartData = new ChartData("pie",
                    diagrammName + ":" + selectedBatchResultEntry.getName() + "-"
                            + assignmentGroups.get(assignmentIndexToShow.get(0)).toString(),
                    "Ausgänge", categoryDatasetValues, categoryDatasetAssignments);
            ausgangsverteilung = ChartFactory.createPieChart(
                    diagrammName + ":" + selectedBatchResultEntry.getName() + "-"
                            + assignmentGroups.get(assignmentIndexToShow.get(0)).toString(),
                    ChartData.getPieDataSet(), false, true, false);
            diagrams.addChart(diagrammName, ausgangsverteilung, west);
        }

        if (diagrammName.equals("Ankunftskurve")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            ankunftskurve = ChartFactory.createXYLineChart(diagrammName, chartData.getYAxisLabel(),
                    "Individuen", (XYDataset) datasetCollection, PlotOrientation.VERTICAL, true, true, false);
            diagrams.addChart(diagrammName, ankunftskurve, west);
        }

        if (diagrammName.equals("Evakuierungsdauer")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            evakuierungsdauer = ChartFactory.createBarChart(diagrammName, "Belegungen",
                    chartData.getYAxisLabel(), chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true,
                    false);
            diagrams.addChart(diagrammName, evakuierungsdauer, west);
        }

        if (diagrammName.equals("evakuierte Individuen in Prozent")) {
            chartData = new ChartData("pie",
                    diagrammName + ":" + selectedBatchResultEntry.getName() + "-"
                            + assignmentGroups.get(assignmentIndexToShow.get(0)).toString(),
                    "Individuen", categoryDatasetValues, categoryDatasetAssignments);
            evakuierteIndividueninProzent = ChartFactory.createPieChart(
                    diagrammName + ":" + selectedBatchResultEntry.getName() + "-"
                            + assignmentGroups.get(assignmentIndexToShow.get(0)).toString(),
                    ChartData.getPieDataSet(), false, true, false);
            diagrams.addChart(diagrammName, evakuierteIndividueninProzent, west);
        }

        if (diagrammName.equals("maximale Blockadezeit")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            maxblockadezeit = ChartFactory.createBarChart(diagrammName, "Belegungen", chartData.getYAxisLabel(),
                    chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true, false);
            diagrams.addChart(diagrammName, maxblockadezeit, west);
        }

        if (diagrammName.equals("durchschnittliche Blockadezeit")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            aveblockadezeit = ChartFactory.createBarChart(diagrammName, "Belegungen", chartData.getYAxisLabel(),
                    chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true, false);
            diagrams.addChart(diagrammName, aveblockadezeit, west);
        }

        if (diagrammName.equals("minimale Blockadezeit")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            minblockadezeit = ChartFactory.createBarChart(diagrammName, "Belegungen", chartData.getYAxisLabel(),
                    chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true, false);
            diagrams.addChart(diagrammName, minblockadezeit, west);
        }

        if (diagrammName.equals("zurückgelegte Distanz")) {
            chartData = new ChartData("bar", diagrammName, "Meter [m]", categoryDatasetValues,
                    categoryDatasetAssignments);
            zurueckgelegteDistanz = ChartFactory.createBarChart(diagrammName, "Belegungen",
                    chartData.getYAxisLabel(), chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true,
                    false);
            diagrams.addChart(diagrammName, zurueckgelegteDistanz, west);
        }

        if (diagrammName.equals("minimale Distanz zum initialen Ausgang")) {
            chartData = new ChartData("bar", diagrammName, "Meter [m]", categoryDatasetValues,
                    categoryDatasetAssignments);
            minimaleDistanzzuminitialenAusgang = ChartFactory.createBarChart(diagrammName, "Belegungen",
                    chartData.getYAxisLabel(), chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true,
                    false);
            diagrams.addChart(diagrammName, minimaleDistanzzuminitialenAusgang, west);
        }

        if (diagrammName.equals("minimale Distanz zum nächsten Ausgang")) {
            chartData = new ChartData("bar", diagrammName, "Meter [m]", categoryDatasetValues,
                    categoryDatasetAssignments);
            minimaleDistanzzumnaechstenAusgang = ChartFactory.createBarChart(diagrammName, "Belegungen",
                    chartData.getYAxisLabel(), chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true,
                    false);
            diagrams.addChart(diagrammName, minimaleDistanzzumnaechstenAusgang, west);
        }

        if (diagrammName.equals("maximale Zeit bis Safe")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            maxZeitBisSafe = ChartFactory.createBarChart(diagrammName, "Belegungen", chartData.getYAxisLabel(),
                    chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true, false);
            diagrams.addChart(diagrammName, maxZeitBisSafe, west);
        }

        if (diagrammName.equals("durchschnittliche Zeit bis Safe")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            aveZeitBisSafe = ChartFactory.createBarChart(diagrammName, "Belegungen", chartData.getYAxisLabel(),
                    chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true, false);
            diagrams.addChart(diagrammName, aveZeitBisSafe, west);
        }

        if (diagrammName.equals("minimale Zeit bis Safe")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            minZeitBisSafe = ChartFactory.createBarChart(diagrammName, "Belegungen", chartData.getYAxisLabel(),
                    chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true, false);
            diagrams.addChart(diagrammName, minZeitBisSafe, west);
        }

        if (diagrammName.equals("Distanz über Zeit")) {
            chartData = new ChartData("bar", diagrammName, "Meter [m]", categoryDatasetValues,
                    categoryDatasetAssignments);
            distanzueberZeit = ChartFactory.createXYLineChart(diagrammName, "Zeit [s]",
                    chartData.getYAxisLabel(), (XYDataset) datasetCollection, PlotOrientation.VERTICAL, true,
                    true, false);
            diagrams.addChart(diagrammName, distanzueberZeit, west);
        }

        if (diagrammName.equals("maximale Geschwindigkeit über Zeit")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            maximaleGeschwindigkeitueberZeit = ChartFactory.createXYLineChart(diagrammName,
                    chartData.getYAxisLabel(), "Meter pro Sekunde [m/s]", (XYDataset) datasetCollection,
                    PlotOrientation.VERTICAL, true, true, false);
            diagrams.addChart(diagrammName, maximaleGeschwindigkeitueberZeit, west);
        }

        if (diagrammName.equals("durschnittliche Geschwindigkeit über Zeit")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            durschnittlicheGeschwindigkeitueberZeit = ChartFactory.createXYLineChart(diagrammName,
                    chartData.getYAxisLabel(), "Meter pro Sekunde [m/s]", (XYDataset) datasetCollection,
                    PlotOrientation.VERTICAL, true, true, false);
            diagrams.addChart(diagrammName, durschnittlicheGeschwindigkeitueberZeit, west);
        }

        if (diagrammName.equals("maximale Geschwindigkeit")) {
            chartData = new ChartData("bar", diagrammName, "Meter pro Sekunde [m/s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            maximaleGeschwindigkeit = ChartFactory.createBarChart(diagrammName, "Belegungen",
                    chartData.getYAxisLabel(), chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true,
                    false);
            diagrams.addChart(diagrammName, maximaleGeschwindigkeit, west);
        }

        if (diagrammName.equals("durchschnittliche Geschwindigkeit")) {
            chartData = new ChartData("bar", diagrammName, "Meter pro Sekunde [m/s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            durchschnittlicheGeschwindigkeit = ChartFactory.createBarChart(diagrammName, "Belegungen",
                    chartData.getYAxisLabel(), chartData.getCDataSet(), PlotOrientation.VERTICAL, false, true,
                    false);
            diagrams.addChart(diagrammName, durchschnittlicheGeschwindigkeit, west);
        }

        if (diagrammName.equals("Panik über Zeit")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            panik = ChartFactory.createXYLineChart(diagrammName, chartData.getYAxisLabel(), "Panik",
                    (XYDataset) datasetCollection, PlotOrientation.VERTICAL, true, true, false);
            diagrams.addChart(diagrammName, panik, west);
        }

        if (diagrammName.equals("Erschöpfung über Zeit")) {
            chartData = new ChartData("bar", diagrammName, "Zeit [s]", categoryDatasetValues,
                    categoryDatasetAssignments);
            erschoepfung = ChartFactory.createXYLineChart(diagrammName, chartData.getYAxisLabel(),
                    "Erschöpfung", (XYDataset) datasetCollection, PlotOrientation.VERTICAL, true, true, false);
            diagrams.addChart(diagrammName, erschoepfung, west);
        }

    } //end else

    categoryDatasetValues = new ArrayList<>();
    categoryDatasetAssignments = new ArrayList<>();
    //dataset = new XYSeries("");
    datasetCollection = new XYSeriesCollection();
    diagrams.validate();
}
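The panel above removes the lower half of the entries via firstKey() so that the smallest remaining key is (approximately) the median evacuation time. A minimal sketch of that idea, with hypothetical run data:

import java.util.TreeMap;

public class MedianViaTreeMap {
    public static void main(String[] args) {
        // Evacuation time in seconds -> run index.
        TreeMap<Double, Integer> findMedian = new TreeMap<>();
        findMedian.put(12.5, 0);
        findMedian.put(9.0, 1);
        findMedian.put(15.3, 2);
        findMedian.put(11.1, 3);
        findMedian.put(10.4, 4);

        int bestRun = findMedian.firstEntry().getValue();   // run with the smallest time

        // Drop the lower half; the smallest remaining key is the (approximate) median.
        for (int i = 0; i < findMedian.size() / 2; i++) {
            findMedian.remove(findMedian.firstKey());
        }
        int medianRun = findMedian.get(findMedian.firstKey());
        int worstRun = findMedian.get(findMedian.lastKey());

        System.out.println(bestRun + " " + medianRun + " " + worstRun);
    }
}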

From source file:org.jahia.admin.sites.ManageSites.java

/**
 * Display page to let user choose a set of templates.
 *
 * @param request  Servlet request.
 * @param response Servlet response.
 * @param session  HttpSession object.
 */
private void displayTemplateSetChoice(HttpServletRequest request, HttpServletResponse response,
        HttpSession session) throws IOException, ServletException {
    try {
        logger.debug("Display template set choice started ");

        // retrieve previous form values...
        String jahiaDisplayMessage = (String) request.getAttribute(CLASS_NAME + "jahiaDisplayMessage");
        // set default values...
        if (jahiaDisplayMessage == null) {
            jahiaDisplayMessage = Jahia.COPYRIGHT;
        }

        String selectedTmplSet = (String) request.getAttribute("selectedTmplSet");

        TreeMap<String, JCRNodeWrapper> orderedTemplateSets = getTemplatesSets();

        // try to select the default set if not selected
        if (selectedTmplSet == null) {
            selectedTmplSet = SettingsBean.getInstance().getPropertiesFile()
                    .getProperty("default_templates_set", orderedTemplateSets.firstKey());
        }

        JCRNodeWrapper selectedPackage = selectedTmplSet != null
                && orderedTemplateSets.containsKey(selectedTmplSet) ? orderedTemplateSets.get(selectedTmplSet)
                        : orderedTemplateSets.get(orderedTemplateSets.firstKey());

        request.setAttribute("selectedTmplSet", selectedTmplSet);
        request.setAttribute("tmplSets", orderedTemplateSets.values());
        request.setAttribute("modules",
                getModulesOfType(JahiaTemplateManagerService.MODULE_TYPE_MODULE).values());
        request.setAttribute("jahiApps",
                getModulesOfType(JahiaTemplateManagerService.MODULE_TYPE_JAHIAPP).values());
        request.setAttribute("selectedModules", jParams.getParameterValues("selectedModules"));
        request.setAttribute("selectedPackage", selectedPackage);
        Locale currentLocale = (Locale) session.getAttribute(ProcessingContext.SESSION_LOCALE);
        if (currentLocale == null) {
            currentLocale = request.getLocale();
        }
        Locale selectedLocale = (Locale) session.getAttribute(CLASS_NAME + "selectedLocale");
        if (selectedLocale == null) {
            selectedLocale = LanguageCodeConverters
                    .languageCodeToLocale(Jahia.getSettings().getDefaultLanguageCode());
        }
        session.setAttribute(CLASS_NAME + "selectedLocale", selectedLocale);
        request.setAttribute("selectedLocale", selectedLocale);
        request.setAttribute("currentLocale", currentLocale);

        logger.debug("Nb template set found " + orderedTemplateSets.size());

        // redirect...
        JahiaAdministration.doRedirect(request, response, session, JSP_PATH + "site_choose_template_set.jsp");

        // set default values...
        session.setAttribute(CLASS_NAME + "jahiaDisplayMessage", Jahia.COPYRIGHT);
    } catch (RepositoryException e) {
        throw new ServletException(e);
    }
}

From source file:eu.edisonproject.training.wsd.BabelNet.java

private Set<Term> babelNetDisambiguation(String language, String lemma, Set<String> ngarms) {
    if (ngarms.isEmpty()) {
        return null;
    }
    if (ngarms.size() == 1 && ngarms.iterator().next().length() <= 1) {
        return null;
    }

    HashMap<CharSequence, Double> idsMap = new HashMap<>();
    Map<CharSequence, Term> termMap = new HashMap<>();
    Set<Term> terms = new HashSet<>();
    int count = 0;
    int breaklimit = 1000;
    int oneElementlimit = 65;
    int difflimit = 60;
    Double persent;
    for (String n : ngarms) {
        if (n.length() <= 1) {
            continue;
        }
        count++;
        if (idsMap.size() == 1 && count > oneElementlimit) {
            //                Double score = idsMap.values().iterator().next();
            //                if (score >= 10) {
            break;
            //                }
        }

        if ((count % 2) == 0 && idsMap.size() >= 2 && count > difflimit) {
            ValueComparator bvc = new ValueComparator(idsMap);
            TreeMap<CharSequence, Double> sorted_map = new TreeMap(bvc);
            sorted_map.putAll(idsMap);
            Iterator<CharSequence> iter = sorted_map.keySet().iterator();
            Double first = idsMap.get(iter.next());
            Double second = idsMap.get(iter.next());

            persent = first / (first + second);
            if (persent > 0.65) {
                break;
            }
        }
        if (count > breaklimit) {
            break;
        }

        String clearNg = n.replaceAll("_", " ");
        if (clearNg == null) {
            continue;
        }
        if (clearNg.startsWith(" ")) {
            clearNg = clearNg.replaceFirst(" ", "");
        }
        if (clearNg.endsWith(" ")) {
            clearNg = clearNg.substring(0, clearNg.length() - 1);
        }

        Pair<Term, Double> termPair = null;
        try {
            termPair = babelNetDisambiguation(language, lemma, clearNg);
        } catch (Exception ex) {
            if (ex.getMessage() != null && ex.getMessage().contains("Your key is not valid")) {
                try {
                    termPair = babelNetDisambiguation(language, lemma, clearNg);
                } catch (Exception ex1) {
                    //                       LOGGER.log(Level.WARNING, ex1, null);
                }
            } else {
                LOGGER.log(Level.WARNING, null, ex);
            }
        }
        if (termPair != null) {
            termMap.put(termPair.first.getUid(), termPair.first);
            Double score;
            if (idsMap.containsKey(termPair.first.getUid())) {
                score = idsMap.get(termPair.first.getUid());
                //                    score++;
                score += termPair.second;
            } else {
                //                    score = 1.0;
                score = termPair.second;
            }
            idsMap.put(termPair.first.getUid(), score);
        }
    }
    if (!idsMap.isEmpty()) {
        ValueComparator bvc = new ValueComparator(idsMap);
        TreeMap<CharSequence, Double> sorted_map = new TreeMap(bvc);
        sorted_map.putAll(idsMap);
        count = 0;
        Double firstScore = idsMap.get(sorted_map.firstKey());
        terms.add(termMap.get(sorted_map.firstKey()));
        idsMap.remove(sorted_map.firstKey());
        for (CharSequence tvID : sorted_map.keySet()) {
            if (count >= 1) {
                Double secondScore = idsMap.get(tvID);
                persent = secondScore / (firstScore + secondScore);
                if (persent > 0.2) {
                    terms.add(termMap.get(tvID));
                }
                if (count >= 2) {
                    break;
                }
            }
            count++;
        }
        return terms;
    }
    return null;
}

From source file:nl.uva.sne.disambiguators.BabelNet.java

private Set<Term> babelNetDisambiguation(String language, String lemma, Set<String> ngarms) {
    if (ngarms.isEmpty()) {
        return null;
    }
    if (ngarms.size() == 1 && ngarms.iterator().next().length() <= 1) {
        return null;
    }

    HashMap<String, Double> idsMap = new HashMap<>();
    Map<String, Term> termMap = new HashMap<>();
    Set<Term> terms = new HashSet<>();
    int count = 0;
    int breaklimit = 1000;
    int oneElementlimit = 65;
    int difflimit = 60;
    Double persent;
    for (String n : ngarms) {
        if (n.length() <= 1) {
            continue;
        }
        count++;
        if (idsMap.size() == 1 && count > oneElementlimit) {
            //                Double score = idsMap.values().iterator().next();
            //                if (score >= 10) {
            break;
            //                }
        }

        if ((count % 2) == 0 && idsMap.size() >= 2 && count > difflimit) {
            ValueComparator bvc = new ValueComparator(idsMap);
            TreeMap<String, Double> sorted_map = new TreeMap(bvc);
            sorted_map.putAll(idsMap);
            Iterator<String> iter = sorted_map.keySet().iterator();
            Double first = idsMap.get(iter.next());
            Double second = idsMap.get(iter.next());

            persent = first / (first + second);
            if (persent > 0.65) {
                break;
            }
        }
        if (count > breaklimit) {
            break;
        }

        String clearNg = n.replaceAll("_", " ");
        if (clearNg == null) {
            continue;
        }
        if (clearNg.startsWith(" ")) {
            clearNg = clearNg.replaceFirst(" ", "");
        }
        if (clearNg.endsWith(" ")) {
            clearNg = clearNg.substring(0, clearNg.length() - 1);
        }

        Pair<Term, Double> termPair = null;
        try {
            termPair = babelNetDisambiguation(language, lemma, clearNg);
        } catch (Exception ex) {
            if (ex.getMessage() != null && ex.getMessage().contains("Your key is not valid")) {
                try {
                    termPair = babelNetDisambiguation(language, lemma, clearNg);
                } catch (Exception ex1) {
                    //                        Logger.getLogger(BabelNet.class.getName()).log(Level.WARNING, ex1, null);
                }
            } else {
                Logger.getLogger(SemanticUtils.class.getName()).log(Level.WARNING, null, ex);
            }
        }
        if (termPair != null) {
            termMap.put(termPair.first.getUID(), termPair.first);
            Double score;
            if (idsMap.containsKey(termPair.first.getUID())) {
                score = idsMap.get(termPair.first.getUID());
                //                    score++;
                score += termPair.second;
            } else {
                //                    score = 1.0;
                score = termPair.second;
            }
            idsMap.put(termPair.first.getUID(), score);
        }
    }
    if (!idsMap.isEmpty()) {
        ValueComparator bvc = new ValueComparator(idsMap);
        TreeMap<String, Double> sorted_map = new TreeMap(bvc);
        sorted_map.putAll(idsMap);
        count = 0;
        Double firstScore = idsMap.get(sorted_map.firstKey());
        terms.add(termMap.get(sorted_map.firstKey()));
        idsMap.remove(sorted_map.firstKey());
        for (String tvID : sorted_map.keySet()) {
            if (count >= 1) {
                Double secondScore = idsMap.get(tvID);
                persent = secondScore / (firstScore + secondScore);
                if (persent > 0.2) {
                    terms.add(termMap.get(tvID));
                }
                if (count >= 2) {
                    break;
                }
            }
            count++;
        }
        return terms;
    }
    return null;
}

From source file:org.apache.druid.indexing.kafka.supervisor.KafkaSupervisor.java

/**
 * This method does two things -
 * 1. Makes sure the checkpoints information in the taskGroup is consistent with that of the tasks, if not kill
 * inconsistent tasks.
 * 2. truncates the checkpoints in the taskGroup corresponding to which segments have been published, so that any newly
 * created tasks for the taskGroup start indexing from after the latest published offsets.
 */
private void verifyAndMergeCheckpoints(final TaskGroup taskGroup) {
    final int groupId = taskGroup.groupId;
    final List<Pair<String, TreeMap<Integer, Map<Integer, Long>>>> taskSequences = new ArrayList<>();
    final List<ListenableFuture<TreeMap<Integer, Map<Integer, Long>>>> futures = new ArrayList<>();
    final List<String> taskIds = new ArrayList<>();

    for (String taskId : taskGroup.taskIds()) {
        final ListenableFuture<TreeMap<Integer, Map<Integer, Long>>> checkpointsFuture = taskClient
                .getCheckpointsAsync(taskId, true);
        taskIds.add(taskId);
        futures.add(checkpointsFuture);
    }

    try {
        List<TreeMap<Integer, Map<Integer, Long>>> futuresResult = Futures.successfulAsList(futures)
                .get(futureTimeoutInSeconds, TimeUnit.SECONDS);

        for (int i = 0; i < futuresResult.size(); i++) {
            final TreeMap<Integer, Map<Integer, Long>> checkpoints = futuresResult.get(i);
            final String taskId = taskIds.get(i);
            if (checkpoints == null) {
                try {
                    // catch the exception in failed futures
                    futures.get(i).get();
                } catch (Exception e) {
                    log.error(e, "Problem while getting checkpoints for task [%s], killing the task", taskId);
                    killTask(taskId);
                    taskGroup.tasks.remove(taskId);
                }
            } else if (checkpoints.isEmpty()) {
                log.warn("Ignoring task [%s], as probably it is not started running yet", taskId);
            } else {
                taskSequences.add(new Pair<>(taskId, checkpoints));
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    final KafkaDataSourceMetadata latestDataSourceMetadata = (KafkaDataSourceMetadata) indexerMetadataStorageCoordinator
            .getDataSourceMetadata(dataSource);
    final boolean hasValidOffsetsFromDb = latestDataSourceMetadata != null
            && latestDataSourceMetadata.getKafkaPartitions() != null
            && ioConfig.getTopic().equals(latestDataSourceMetadata.getKafkaPartitions().getTopic());
    final Map<Integer, Long> latestOffsetsFromDb;
    if (hasValidOffsetsFromDb) {
        latestOffsetsFromDb = latestDataSourceMetadata.getKafkaPartitions().getPartitionOffsetMap();
    } else {
        latestOffsetsFromDb = null;
    }

    // order tasks of this taskGroup by the latest sequenceId
    taskSequences.sort((o1, o2) -> o2.rhs.firstKey().compareTo(o1.rhs.firstKey()));

    final Set<String> tasksToKill = new HashSet<>();
    final AtomicInteger earliestConsistentSequenceId = new AtomicInteger(-1);
    int taskIndex = 0;

    while (taskIndex < taskSequences.size()) {
        TreeMap<Integer, Map<Integer, Long>> taskCheckpoints = taskSequences.get(taskIndex).rhs;
        String taskId = taskSequences.get(taskIndex).lhs;
        if (earliestConsistentSequenceId.get() == -1) {
            // find the first replica task with earliest sequenceId consistent with datasource metadata in the metadata
            // store
            if (taskCheckpoints.entrySet().stream()
                    .anyMatch(sequenceCheckpoint -> sequenceCheckpoint.getValue().entrySet().stream()
                            .allMatch(partitionOffset -> Longs.compare(partitionOffset.getValue(),
                                    latestOffsetsFromDb == null ? partitionOffset.getValue()
                                            : latestOffsetsFromDb.getOrDefault(partitionOffset.getKey(),
                                                    partitionOffset.getValue())) == 0)
                            && earliestConsistentSequenceId.compareAndSet(-1, sequenceCheckpoint.getKey()))
                    || (pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() > 0
                            && earliestConsistentSequenceId.compareAndSet(-1, taskCheckpoints.firstKey()))) {
                final SortedMap<Integer, Map<Integer, Long>> latestCheckpoints = new TreeMap<>(
                        taskCheckpoints.tailMap(earliestConsistentSequenceId.get()));
                log.info("Setting taskGroup sequences to [%s] for group [%d]", latestCheckpoints, groupId);
                taskGroup.sequenceOffsets.clear();
                taskGroup.sequenceOffsets.putAll(latestCheckpoints);
            } else {
                log.debug("Adding task [%s] to kill list, checkpoints[%s], latestoffsets from DB [%s]", taskId,
                        taskCheckpoints, latestOffsetsFromDb);
                tasksToKill.add(taskId);
            }
        } else {
            // check consistency with taskGroup sequences
            if (taskCheckpoints.get(taskGroup.sequenceOffsets.firstKey()) == null
                    || !(taskCheckpoints.get(taskGroup.sequenceOffsets.firstKey())
                            .equals(taskGroup.sequenceOffsets.firstEntry().getValue()))
                    || taskCheckpoints.tailMap(taskGroup.sequenceOffsets.firstKey())
                            .size() != taskGroup.sequenceOffsets.size()) {
                log.debug("Adding task [%s] to kill list, checkpoints[%s], taskgroup checkpoints [%s]", taskId,
                        taskCheckpoints, taskGroup.sequenceOffsets);
                tasksToKill.add(taskId);
            }
        }
        taskIndex++;
    }

    if ((tasksToKill.size() > 0 && tasksToKill.size() == taskGroup.tasks.size()) || (taskGroup.tasks.size() == 0
            && pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() == 0)) {
        // killing all tasks or no task left in the group ?
        // clear state about the taskgroup so that get latest offset information is fetched from metadata store
        log.warn("Clearing task group [%d] information as no valid tasks left the group", groupId);
        taskGroups.remove(groupId);
        partitionGroups.get(groupId).replaceAll((partition, offset) -> NOT_SET);
    }

    taskSequences.stream().filter(taskIdSequences -> tasksToKill.contains(taskIdSequences.lhs))
            .forEach(sequenceCheckpoint -> {
                log.warn(
                        "Killing task [%s], as its checkpoints [%s] are not consistent with group checkpoints[%s] or latest "
                                + "persisted offsets in metadata store [%s]",
                        sequenceCheckpoint.lhs, sequenceCheckpoint.rhs, taskGroup.sequenceOffsets,
                        latestOffsetsFromDb);
                killTask(sequenceCheckpoint.lhs);
                taskGroup.tasks.remove(sequenceCheckpoint.lhs);
            });
}

From source file:com.google.gwt.emultest.java.util.TreeMapTest.java

/**
 * Test method for 'java.util.TreeMap.TreeMap(SortedMap)'.
 *
 * @see java.util.TreeMap#TreeMap(SortedMap)
 */
public void testConstructor_SortedMap() {
    K[] keys = getKeys();
    V[] values = getValues();
    SortedMap<K, V> sourceMap = new TreeMap<K, V>();
    _assertEmpty(sourceMap);

    // populate the source map
    sourceMap.put(keys[0], values[0]);
    sourceMap.put(keys[1], values[1]);
    sourceMap.put(keys[2], values[2]);

    TreeMap<K, V> copyConstructed = new TreeMap<K, V>(sourceMap);
    _assertEquals(sourceMap, copyConstructed);

    Comparator<K> comp = Collections.reverseOrder(getComparator());
    TreeMap<K, V> reversedTreeMap = new TreeMap<K, V>(comp);
    reversedTreeMap.put(keys[0], values[0]);
    reversedTreeMap.put(keys[1], values[1]);
    TreeMap<K, V> anotherTreeMap = new TreeMap<K, V>(reversedTreeMap);
    assertTrue(anotherTreeMap.comparator() == comp);
    assertEquals(keys[1], anotherTreeMap.firstKey());
    assertEquals(keys[0], anotherTreeMap.lastKey());
}

From source file:com.enonic.vertical.adminweb.handlers.ContentBaseHandlerServlet.java

private void handlerPreviewSiteList(HttpServletRequest request, HttpServletResponse response,
        AdminService admin, ExtendedMap formItems, User user)
        throws VerticalAdminException, VerticalEngineException {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("page", formItems.get("page"));
    int unitKey = formItems.getInt("selectedunitkey", -1);
    int siteKey = formItems.getInt("menukey", -1);

    int contentKey = formItems.getInt("contentkey", -1);
    int contentTypeKey;
    if (contentKey >= 0) {
        parameters.put("contentkey", contentKey);
        contentTypeKey = admin.getContentTypeKey(contentKey);
        parameters.put("sessiondata", formItems.getBoolean("sessiondata", false));
    } else {
        contentTypeKey = formItems.getInt("contenttypekey", -1);
    }
    parameters.put("contenttypekey", contentTypeKey);

    int versionKey = formItems.getInt("versionkey", -1);
    if (versionKey != -1) {
        parameters.put("versionkey", versionKey);
    }

    Document doc = XMLTool.domparse(admin.getAdminMenu(user, -1));
    Element rootSitesElement = doc.getDocumentElement();
    Element[] allSiteElements = XMLTool.getElements(rootSitesElement);
    int defaultPageTemplateKey = -1;
    if (allSiteElements.length > 0) {
        TreeMap<String, Element> allSitesMap = new TreeMap<String, Element>();
        for (Element siteElement : allSiteElements) {
            int mKey = Integer.valueOf(siteElement.getAttribute("key"));
            if (admin.hasContentPageTemplates(mKey, contentTypeKey)) {
                String name = siteElement.getAttribute("name");
                allSitesMap.put(name, siteElement);
            }
            rootSitesElement.removeChild(siteElement);
        }

        if (allSitesMap.size() > 0) {
            Element firstMenuElem = allSitesMap.get(allSitesMap.firstKey());
            if (siteKey < 0) {
                siteKey = Integer.valueOf(firstMenuElem.getAttribute("key"));
            }

            for (Element siteElement : allSitesMap.values()) {
                rootSitesElement.appendChild(siteElement);
                int key = Integer.parseInt(siteElement.getAttribute("key"));
                if (key == siteKey) {
                    String defaultPageTemplateAttr = siteElement.getAttribute("defaultpagetemplate");
                    if (defaultPageTemplateAttr != null && !defaultPageTemplateAttr.equals("")) {
                        defaultPageTemplateKey = Integer.parseInt(defaultPageTemplateAttr);
                    }

                }
            }
        }
    }

    addCommonParameters(admin, user, request, parameters, unitKey, siteKey);

    if (siteKey >= 0) {
        int[] excludeTypeKeys = { 1, 2, 3, 4, 6 };
        String pageTemplateXML = admin.getPageTemplatesByMenu(siteKey, excludeTypeKeys);
        Document ptDoc = XMLTool.domparse(pageTemplateXML);
        XMLTool.mergeDocuments(doc, ptDoc, true);

        if (contentKey >= 0) {
            Document chDoc = XMLTool.domparse(admin.getContentHomes(contentKey));
            XMLTool.mergeDocuments(doc, chDoc, true);
        }

        if (formItems.containsKey("pagetemplatekey")) {
            int pageTemplateKey = formItems.getInt("pagetemplatekey");
            parameters.put("pagetemplatekey", String.valueOf(pageTemplateKey));
        } else {
            if (contentTypeKey >= 0) {
                org.jdom.Document pageTemplateDocument = XMLTool.jdomparse(pageTemplateXML);
                org.jdom.Element root = pageTemplateDocument.getRootElement();
                List<org.jdom.Element> pageTemplates = root.getChildren("pagetemplate");
                Set<KeyValue> pageTemplateKeys = new HashSet<KeyValue>();
                for (org.jdom.Element pageTemplate : pageTemplates) {

                    int pageTemplateKey = Integer.parseInt(pageTemplate.getAttribute("key").getValue());
                    org.jdom.Element contentTypesNode = pageTemplate.getChild("contenttypes");
                    List<org.jdom.Element> contentTypeElements = contentTypesNode.getChildren("contenttype");

                    if (checkMatchingContentType(contentTypeKey, contentTypeElements)) {
                        KeyValue keyValue = new KeyValue(pageTemplateKey, pageTemplate.getChildText("name"));
                        pageTemplateKeys.add(keyValue);
                    }
                }
                if (pageTemplateKeys.size() > 0) {
                    KeyValue[] keys = new KeyValue[pageTemplateKeys.size()];
                    keys = pageTemplateKeys.toArray(keys);
                    Arrays.sort(keys);
                    parameters.put("pagetemplatekey", keys[0].key);
                } else {
                    if (defaultPageTemplateKey < 0) {
                        throw new VerticalAdminException("Unable to resolve page template. "
                                + "No matching page template found and default page template is not set.");
                    }
                    parameters.put("pagetemplatekey", String.valueOf(defaultPageTemplateKey));
                }

            }
        }

        if (formItems.containsKey("menuitemkey")) {
            parameters.put("menuitemkey", formItems.get("menuitemkey"));
        }
    }

    transformXML(request, response, doc, "contenttype_preview_list.xsl", parameters);
}

From source file:com.irccloud.android.fragment.MessageViewFragment.java

private synchronized void refresh(MessageAdapter adapter, TreeMap<Long, EventsDataSource.Event> events) {
    synchronized (adapterLock) {
        hiddenMap = null;
        expandMap = null;

        if (getActivity() != null)
            textSize = PreferenceManager.getDefaultSharedPreferences(getActivity()).getInt("textSize",
                    getActivity().getResources().getInteger(R.integer.default_text_size));
        timestamp_width = -1;
        if (conn.getReconnectTimestamp() == 0)
            conn.cancel_idle_timer(); //This may take a while...
        collapsedEvents.clear();
        currentCollapsedEid = -1;
        lastCollapsedDay = -1;

        if (events == null || (events.size() == 0 && buffer.min_eid > 0)) {
            if (buffer != null && conn != null && conn.getState() == NetworkConnection.STATE_CONNECTED) {
                requestingBacklog = true;
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        conn.request_backlog(buffer.cid, buffer.bid, 0);
                    }
                });
            } else {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        headerView.setVisibility(View.GONE);
                        backlogFailed.setVisibility(View.GONE);
                        loadBacklogButton.setVisibility(View.GONE);
                    }
                });
            }
        } else if (events.size() > 0) {
            if (server != null) {
                ignore.setIgnores(server.ignores);
            } else {
                ignore.setIgnores(null);
            }
            collapsedEvents.setServer(server);
            earliest_eid = events.firstKey();
            if (events.firstKey() > buffer.min_eid && buffer.min_eid > 0 && conn != null
                    && conn.getState() == NetworkConnection.STATE_CONNECTED) {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        headerView.setVisibility(View.VISIBLE);
                        backlogFailed.setVisibility(View.GONE);
                        loadBacklogButton.setVisibility(View.GONE);
                    }
                });
            } else {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        headerView.setVisibility(View.GONE);
                        backlogFailed.setVisibility(View.GONE);
                        loadBacklogButton.setVisibility(View.GONE);
                    }
                });
            }
            if (events.size() > 0) {
                avgInsertTime = 0;
                //Debug.startMethodTracing("refresh");
                long start = System.currentTimeMillis();
                Iterator<EventsDataSource.Event> i = events.values().iterator();
                EventsDataSource.Event next = i.next();
                Calendar calendar = Calendar.getInstance();
                while (next != null) {
                    EventsDataSource.Event e = next;
                    next = i.hasNext() ? i.next() : null;
                    String type = (next == null) ? "" : next.type;

                    if (next != null && currentCollapsedEid != -1
                            && !expandedSectionEids.contains(currentCollapsedEid)
                            && (type.equalsIgnoreCase("joined_channel")
                                    || type.equalsIgnoreCase("parted_channel")
                                    || type.equalsIgnoreCase("nickchange") || type.equalsIgnoreCase("quit")
                                    || type.equalsIgnoreCase("user_channel_mode"))) {
                        calendar.setTimeInMillis(next.eid / 1000);
                        insertEvent(adapter, e, true, calendar.get(Calendar.DAY_OF_YEAR) == lastCollapsedDay);
                    } else {
                        insertEvent(adapter, e, true, false);
                    }
                }
                adapter.insertLastSeenEIDMarker();
                Log.i("IRCCloud", "Backlog rendering took: " + (System.currentTimeMillis() - start) + "ms");
                //Debug.stopMethodTracing();
                avgInsertTime = 0;
                //adapter.notifyDataSetChanged();
            }
        }
        if (conn.getReconnectTimestamp() == 0 && conn.getState() == NetworkConnection.STATE_CONNECTED)
            conn.schedule_idle_timer();
    }
}

From source file:org.apache.hadoop.mapred.HFSPScheduler.java

private void assignSizeBasedTasks(TaskType type, HelperForType helper,
        TreeMap<JobDurationInfo, JobInProgress> sizeBasedJobs,
        TreeMap<JobDurationInfo, TaskStatuses> taskStatusesSizeBased) throws IOException {

    final boolean isMap = type == TaskType.MAP;
    int totClaimedSlots = 0;

    // StringBuilder builder = new StringBuilder("SBJobs(");
    // builder.append(type).append("): [");
    // boolean first = true;
    // for (Entry<JobDurationInfo,JobInProgress> jip : sizeBasedJobs.entrySet())
    // {
    // if (first)
    // first = false;
    // else
    // builder.append(",");
    // builder.append(jip.getValue().getJobID())
    // .append(" -> ")
    // .append(jip.getKey().getPhaseDuration())
    // .append("/")
    // .append(jip.getKey().getPhaseTotalDuration())
    // .append(" p: ")
    // .append(this.getNumPendingNewTasks(jip.getValue(), type))
    // .append(" r: ")
    // .append(this.getNumRunningTasks(jip.getValue(), type))
    // .append(" f: ")
    // .append(this.getNumFinishedTasks(jip.getValue(), type));
    // }
    // builder.append("]");
    // LOG.debug(builder.toString());

    for (Entry<JobDurationInfo, JobInProgress> entry : sizeBasedJobs.entrySet()) {

        JobInProgress jip = entry.getValue();
        JobDurationInfo jdi = entry.getKey();
        TaskStatuses taskStatuses = taskStatusesSizeBased.get(jdi);

        if (!this.isJobReadyForTypeScheduling(jip, type)) {
            if (LOG.isDebugEnabled() && jip.getStatus().getRunState() != JobStatus.SUCCEEDED) {
                LOG.debug(
                        "SIZEBASED(" + jip.getJobID() + ":" + type + "):" + "job is not ready for scheduling ("
                                + "status: " + JobStatus.getJobRunState(jip.getStatus().getRunState())
                                + ", mapProgress: " + jip.getStatus().mapProgress() + ", reduceProgress: "
                                + jip.getStatus().reduceProgress() + ", scheduleReduces: "
                                + jip.scheduleReduces() + ")");
            }
            continue;
        }

        // NEW
        int pendingNewTasks = this.getNumPendingNewTasks(jip, type);
        int pendingResumableTasks = (taskStatuses == null) ? 0 : taskStatuses.suspendedTaskStatuses.size();

        int totAvailableSizeBasedSlots = helper.totAvailableSizeBasedSlots();

        // missing slots for resumable
        int missingResumableSlots = 0;
        if (pendingResumableTasks > 0 && pendingResumableTasks > totAvailableSizeBasedSlots) {
            if (totAvailableSizeBasedSlots <= 0)
                missingResumableSlots = pendingResumableTasks;
            else
                missingResumableSlots = pendingResumableTasks - totAvailableSizeBasedSlots;
            totAvailableSizeBasedSlots = (pendingResumableTasks > totAvailableSizeBasedSlots) ? 0
                    : totAvailableSizeBasedSlots - pendingResumableTasks;
        }

        int missingNewSlots = 0;
        if (pendingNewTasks > 0 && pendingNewTasks > totAvailableSizeBasedSlots) {
            if (totAvailableSizeBasedSlots <= 0)
                missingNewSlots = pendingNewTasks;
            else
                missingNewSlots = pendingNewTasks - totAvailableSizeBasedSlots;
            totAvailableSizeBasedSlots = (pendingNewTasks > totAvailableSizeBasedSlots) ? 0
                    : totAvailableSizeBasedSlots - pendingNewTasks;
        }

        TreeMap<TaskAttemptID, TaskStatus> suspended = null;
        if (taskStatuses != null)
            suspended = taskStatuses.suspendedTaskStatuses;

        if (pendingNewTasks > 0 || pendingResumableTasks > 0 || (suspended != null && !suspended.isEmpty())) {
            LOG.debug(jip.getJobID() + ":" + type + " (d: " + jdi.getPhaseDuration() + "/"
                    + jdi.getPhaseTotalDuration() + "):" + " pendingNewTasks: " + pendingNewTasks
                    + " pendingResumableTasks: " + pendingResumableTasks
                    // + " notResumableTasksOnThisTT: " + notResumableTasks
                    + " totAvailableSizeBasedSlots: "
                    + (helper.totAvailableSizeBasedSlots() <= 0 ? 0 : helper.totAvailableSizeBasedSlots())
                    + " currAvailableSlots: " + helper.currAvailableSlots + " => missingNewSlots: "
                    + missingNewSlots + " missingResumableSlots: " + missingResumableSlots);
        }

        if (this.preemptionStrategy.isPreemptionActive()
                && (missingNewSlots > 0 || missingResumableSlots > 0)) {
            ClaimedSlots claimedSlots = this.claimSlots(helper, Phase.SIZE_BASED, jip, missingNewSlots,
                    missingResumableSlots, totClaimedSlots, sizeBasedJobs, taskStatusesSizeBased);

            totClaimedSlots += claimedSlots.getNumPreemptedForNewTasks()
                    + claimedSlots.getNumPreemptedForResumableTasks();

            LOG.debug(jip.getJobID() + " taskStatusesOnTT: " + taskStatusesSizeBased.get(jdi)
                    + " pendingNewTasks: " + pendingNewTasks + " pendingResumableTasks: "
                    + pendingResumableTasks + " missingNewSlots: " + missingNewSlots
                    + " missingResumableSlots: " + missingResumableSlots);
        }

        while (pendingNewTasks > 0 || pendingResumableTasks > 0
                || (suspended != null && !suspended.isEmpty())) {

            if (helper.currAvailableSlots <= 0) {
                LOG.debug("SIZEBASED(" + jip.getJobID() + ":" + type + "):" + " no slots available on "
                        + taskHelper.ttStatus.getTrackerName());
                return;
            }

            LOG.debug("SIZEBASED(" + jip.getJobID() + ":" + type + "):" + " totAvailableSizeBasedSlots(): "
                    + helper.totAvailableSizeBasedSlots() + " pendingNewTasks: " + pendingNewTasks
                    + " pendingResumableTasks: " + pendingResumableTasks + " suspended("
                    + (suspended == null ? 0 : suspended.size()) + "): " + suspended);

            if (this.preemptionStrategy.isPreemptionActive() && (suspended != null && !suspended.isEmpty())) {
                TaskStatus toResume = suspended.remove(suspended.firstKey());
                // LOG.debug("RESUME: " + toResume.getTaskID() + " " +
                // toResume.getRunState());
                TaskAttemptID tAID = toResume.getTaskID();
                JobInProgress rJIP = this.taskTrackerManager.getJob(tAID.getTaskID().getJobID());
                TaskInProgress tip = rJIP.getTaskInProgress(tAID.getTaskID());
                if (this.preemptionStrategy.resume(tip, toResume)) {
                    taskHelper.resume(tAID, Phase.SIZE_BASED);
                    pendingResumableTasks -= 1;
                } else {
                    LOG.debug("SIZEBASED(" + jip.getJobID() + ":" + type + "):" + " cannot resume " + tAID
                            + " on " + taskHelper.ttStatus.getTrackerName());
                }
            } else {

                Task task = this.obtainNewTask(jip, taskHelper.ttStatus, isMap, taskHelper.currentTime);

                if (task == null) {
                    LOG.debug("SIZEBASED(" + jip.getJobID() + ":" + type + "):"
                            + " cannot obtain slot for new task on " + taskHelper.ttStatus.getTrackerName()
                            + " (#pendingNew: " + pendingNewTasks + ", #pendingResumable: "
                            + pendingResumableTasks + ", #free_" + type + "_slots: " + helper.currAvailableSlots
                            + ")");
                    break;
                }

                taskHelper.slotObtained(task, Phase.SIZE_BASED);
                pendingNewTasks -= 1;
            }
        }
    }
}