Example usage for java.util TreeSet descendingIterator

Introduction

This page lists usage examples for java.util.TreeSet.descendingIterator().

Prototype

public Iterator<E> descendingIterator() 

Document

Returns an iterator over the elements in this set in descending order.
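It is equivalent in effect to descendingSet().iterator().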

Usage

From source file:Main.java

import java.util.Iterator;
import java.util.TreeSet;

public class Main {
    public static void main(String[] args) {

        TreeSet<Integer> treeadd = new TreeSet<Integer>();

        treeadd.add(1);
        treeadd.add(13);
        treeadd.add(17);
        treeadd.add(2);

        // create a descending iterator over the set
        Iterator<Integer> iterator = treeadd.descendingIterator();

        while (iterator.hasNext()) {
            System.out.println(iterator.next());
        }
    }
}
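
The set keeps its elements in ascending natural order (1, 2, 13, 17), so the descending iterator prints 17, 13, 2 and 1.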

From source file:cz.matfyz.oskopek.learnr.tools.DatasetIO.java

/**
 * A manual method of importing a dataset from a text file.
 * <p/>
 * Used for plain dataset distribution. Does not import statistics of any kind.
 * <p/>
 * <b>Warning:</b> Expects a syntactically perfect dataset according to the TXT dataset format specification! (See documentation).
 *
 * @param filename the filename from which to import
 * @return the imported dataset
 * @throws IOException if an error during read occurs
 */
public static Dataset importTXTDataset(String filename) throws IOException {
    LOGGER.debug("Import dataset from TXT: \'{}\'", filename);
    Dataset dataset = new Dataset();
    BufferedReader br = new BufferedReader(new FileReader(filename));

    br.readLine(); // PREAMBLE
    dataset.setName(br.readLine().split(":")[1].trim());
    dataset.setDescription(br.readLine().split(":")[1].trim());
    dataset.setAuthor(br.readLine().split(":")[1].trim());
    dataset.setCreatedDate(Long.parseLong(br.readLine().split(":")[1].trim()));
    int initWeight = Integer.parseInt(br.readLine().split(":")[1].trim());
    String[] limitsStr = br.readLine().split("/");
    Limits limits = new Limits(Integer.parseInt(limitsStr[0].split(":")[1].trim()),
            Integer.parseInt(limitsStr[1]));
    dataset.setLimits(limits);
    String answerCheckTypeStr = br.readLine().split(":")[1].trim();
    dataset.setAnswerCheckType(Dataset.AnswerCheckType.valueOf(answerCheckTypeStr));
    dataset.setGoodAnswerPenalty(Integer.parseInt(br.readLine().split(":")[1].trim()));
    dataset.setBadAnswerPenalty(Integer.parseInt(br.readLine().split(":")[1].trim()));

    String buffer;
    br.readLine(); // QUESTIONS
    TreeSet<Question> questionSet = new TreeSet<>();
    while ((buffer = br.readLine()) != null) {
        if (StringUtils.isWhitespace(buffer))
            continue;
        String[] split = buffer.split(";");
        String text = split[0].trim();

        List<Answer> answerList = new ArrayList<>();
        for (int i = 1; i < split.length; i++) {
            Answer answer = new Answer();
            answer.setValue(split[i].trim());
            answerList.add(answer);
        }
        Question q = new Question(text, new Statistics(), answerList, initWeight);

        LOGGER.debug("Reading question \'{}\'; weight \'{}\'.", q.getText(), q.getWeight());
        if (!questionSet.add(q)) {
            LOGGER.warn("Question \'{}\' already in dataset, adding as an answer.", q.getText());
            Iterator<Question> descIter = questionSet.descendingIterator(); // Descending iterator, because it's probably last
            while (descIter.hasNext()) {
                Question current = descIter.next();
                if (current.equals(q)) {
                    current.getAnswerList().addAll(q.getAnswerList());
                    break;
                }
            }
        }
    }
    dataset.setQuestionSet(questionSet);
    dataset.setFinishedSet(new TreeSet<Question>());

    br.close();
    return dataset;
}
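
Here descendingIterator() doubles as a fast lookup for the element that just failed to insert: TreeSet.add() returns false on a duplicate, and the source comment notes the duplicate usually sorts near the end, so a descending scan finds it quickly. A minimal, self-contained sketch of the same idiom (class name and values are illustrative, not from the source):

import java.util.Iterator;
import java.util.TreeSet;

public class MergeOnDuplicate {
    public static void main(String[] args) {
        TreeSet<String> set = new TreeSet<>();
        set.add("alpha");
        set.add("beta");

        // add() returns false when an equal element is already present;
        // a descending scan then locates that element quickly if it sorts near the end.
        if (!set.add("beta")) {
            Iterator<String> descIter = set.descendingIterator();
            while (descIter.hasNext()) {
                String current = descIter.next();
                if (current.equals("beta")) {
                    System.out.println("found existing element: " + current);
                    break;
                }
            }
        }
    }
}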

From source file:io.wcm.handler.mediasource.dam.impl.CropRenditionHandler.java

/**
 * Searches for the biggest web-enabled rendition and, if one exists, adds a {@link VirtualCropRenditionMetadata} to the
 * list.
 * @param candidates the candidate renditions to post-process
 * @return {@link Set} of {@link RenditionMetadata}
 */
@Override
protected Set<RenditionMetadata> postProcessCandidates(Set<RenditionMetadata> candidates) {
    TreeSet<RenditionMetadata> processedCandidates = new TreeSet<>(candidates);
    Iterator<RenditionMetadata> descendingIterator = processedCandidates.descendingIterator();
    VirtualCropRenditionMetadata cropRendition = null;
    while (descendingIterator.hasNext()) {
        RenditionMetadata rendition = descendingIterator.next();
        if (DEFAULT_WEB_RENDITION_PATTERN.matcher(rendition.getRendition().getName()).matches()) {
            RenditionMetadata sourceRendition = new RenditionMetadata(rendition.getRendition());
            boolean isImage = FileExtension.isImage(assetFileExtension);
            if (isImage && sourceRendition.getWidth() >= cropDimension.getRight()
                    && sourceRendition.getHeight() >= cropDimension.getBottom()) {
                // found biggest virtual rendition for cropped image
                cropRendition = new VirtualCropRenditionMetadata(sourceRendition.getRendition(),
                        cropDimension.getWidth(), cropDimension.getHeight(), cropDimension);
                break;
            }
        }
    }
    if (cropRendition != null) {
        processedCandidates.add(cropRendition);
    }
    return processedCandidates;
}
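
Because the candidate set is sorted and traversed in descending order, the first web-enabled rendition that is large enough for the crop is also the biggest suitable one, which is why the loop can break at the first match.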

From source file:hu.ppke.itk.nlpg.purepos.decoder.AbstractDecoder.java

protected Set<Entry<Integer, Double>> pruneGuessedTags(Map<Integer, Double> guessedTags) {
    TreeSet<Entry<Integer, Double>> set = new TreeSet<Map.Entry<Integer, Double>>(
            /* reverse comparator */
            new Comparator<Entry<Integer, Double>>() {

                @Override
                public int compare(Entry<Integer, Double> o1, Entry<Integer, Double> o2) {
                    if (o1.getValue() > o2.getValue())
                        return -1;
                    else if (o1.getValue() < o2.getValue())
                        return 1;
                    else
                        return Double.compare(o1.getKey(), o2.getKey());
                }
            });

    int maxTag = SuffixGuesser.getMaxProbabilityTag(guessedTags);
    double maxVal = guessedTags.get(maxTag);
    double minval = maxVal - sufTheta;
    for (Entry<Integer, Double> entry : guessedTags.entrySet()) {
        if (entry.getValue() > minval) {
            set.add(entry);
        }
    }
    if (set.size() > maxGuessedTags) {
        Iterator<Entry<Integer, Double>> it = set.descendingIterator();
        while (set.size() > maxGuessedTags) {
            it.next();
            it.remove();
        }
    }

    return set;
}
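
The reverse comparator puts the highest-probability entries first, so descendingIterator() starts from the lowest-probability end and the loop removes the weakest guesses until only maxGuessedTags remain. A condensed sketch of the same trim-from-the-tail idiom on a plain TreeSet (names and values are illustrative):

import java.util.Iterator;
import java.util.TreeSet;

public class PruneToTopN {
    public static void main(String[] args) {
        TreeSet<Integer> set = new TreeSet<>();
        for (int i = 1; i <= 10; i++) {
            set.add(i);
        }

        int maxSize = 3;
        // The descending iterator starts at the largest element; removing
        // through it shrinks the set from that end until maxSize elements remain.
        Iterator<Integer> it = set.descendingIterator();
        while (set.size() > maxSize) {
            it.next();
            it.remove();
        }
        System.out.println(set); // prints [1, 2, 3]
    }
}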

From source file:org.apache.hadoop.yarn.server.resourcemanager.monitor.capacity.TestProportionalCapacityPreemptionPolicyForNodePartitions.java

/**
 * Format is:
 * <pre>
 * root (<partition-name-1>=[guaranteed max used pending],<partition-name-2>=..);
 * -A(...);
 * --A1(...);
 * --A2(...);
 * -B...
 * </pre>
 * ";" splits queues, and there should no empty lines, no extra spaces
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private ParentQueue mockQueueHierarchy(String queueExprs) {
    String[] queueExprArray = queueExprs.split(";");
    ParentQueue rootQueue = null;
    for (int idx = 0; idx < queueExprArray.length; idx++) {
        String q = queueExprArray[idx];
        CSQueue queue;

        // Initialize queue
        if (isParent(queueExprArray, idx)) {
            ParentQueue parentQueue = mock(ParentQueue.class);
            queue = parentQueue;
            List<CSQueue> children = new ArrayList<CSQueue>();
            when(parentQueue.getChildQueues()).thenReturn(children);
        } else {
            LeafQueue leafQueue = mock(LeafQueue.class);
            final TreeSet<FiCaSchedulerApp> apps = new TreeSet<>(CapacityScheduler.applicationComparator);
            when(leafQueue.getApplications()).thenReturn(apps);
            OrderingPolicy<FiCaSchedulerApp> so = mock(OrderingPolicy.class);
            when(so.getPreemptionIterator()).thenAnswer(new Answer() {
                public Object answer(InvocationOnMock invocation) {
                    return apps.descendingIterator();
                }
            });
            when(leafQueue.getOrderingPolicy()).thenReturn(so);

            Map<String, TreeSet<RMContainer>> ignorePartitionContainers = new HashMap<>();
            when(leafQueue.getIgnoreExclusivityRMContainers()).thenReturn(ignorePartitionContainers);
            queue = leafQueue;
        }

        setupQueue(queue, q, queueExprArray, idx);
        if (queue.getQueueName().equals(ROOT)) {
            rootQueue = (ParentQueue) queue;
        }
    }
    return rootQueue;
}
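
Note how the mocked OrderingPolicy wires getPreemptionIterator() to apps.descendingIterator(): preemption candidates are handed out in reverse of the comparator order used by the application TreeSet.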

From source file:org.apache.hadoop.yarn.server.resourcemanager.monitor.capacity.ProportionalCapacityPreemptionPolicyMockFramework.java

/**
 * Format is:
 * <pre>
 * root (<partition-name-1>=[guaranteed max used pending (reserved)],<partition-name-2>=..);
 * -A(...);
 * --A1(...);
 * --A2(...);
 * -B...
 * </pre>
 * ";" splits queues, and there should no empty lines, no extra spaces
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private ParentQueue mockQueueHierarchy(String queueExprs) {
    String[] queueExprArray = queueExprs.split(";");
    ParentQueue rootQueue = null;
    for (int idx = 0; idx < queueExprArray.length; idx++) {
        String q = queueExprArray[idx];
        CSQueue queue;

        // Initialize queue
        if (isParent(queueExprArray, idx)) {
            ParentQueue parentQueue = mock(ParentQueue.class);
            queue = parentQueue;
            List<CSQueue> children = new ArrayList<CSQueue>();
            when(parentQueue.getChildQueues()).thenReturn(children);
        } else {
            LeafQueue leafQueue = mock(LeafQueue.class);
            final TreeSet<FiCaSchedulerApp> apps = new TreeSet<>(new Comparator<FiCaSchedulerApp>() {
                @Override
                public int compare(FiCaSchedulerApp a1, FiCaSchedulerApp a2) {
                    if (a1.getPriority() != null && !a1.getPriority().equals(a2.getPriority())) {
                        return a1.getPriority().compareTo(a2.getPriority());
                    }

                    int res = a1.getApplicationId().compareTo(a2.getApplicationId());
                    return res;
                }
            });
            when(leafQueue.getApplications()).thenReturn(apps);
            when(leafQueue.getAllApplications()).thenReturn(apps);
            OrderingPolicy<FiCaSchedulerApp> so = mock(OrderingPolicy.class);
            when(so.getPreemptionIterator()).thenAnswer(new Answer() {
                public Object answer(InvocationOnMock invocation) {
                    return apps.descendingIterator();
                }
            });
            when(leafQueue.getOrderingPolicy()).thenReturn(so);

            Map<String, TreeSet<RMContainer>> ignorePartitionContainers = new HashMap<>();
            when(leafQueue.getIgnoreExclusivityRMContainers()).thenReturn(ignorePartitionContainers);
            queue = leafQueue;
        }

        setupQueue(queue, q, queueExprArray, idx);
        if (queue.getQueueName().equals(ROOT)) {
            rootQueue = (ParentQueue) queue;
        }
    }
    return rootQueue;
}
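
This variant of the mock orders applications by priority and then by application id; the preemption iterator again returns apps.descendingIterator(), i.e., the applications in reverse of that ordering.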

From source file:com.digipom.manteresting.android.processor.json.NailsJsonProcessor.java

@Override
public ArrayList<ContentProviderOperation> parse(JSONObject response, Meta meta) throws JSONException {
    final ArrayList<ContentProviderOperation> batch = Lists.newArrayList();
    final TreeSet<Integer> nailIds = new TreeSet<Integer>();
    final Cursor nails = resolver.query(ManterestingContract.Nails.CONTENT_URI, new String[] { Nails.NAIL_ID },
            null, null, Nails.NAIL_ID + " DESC");
    int greatestOfExisting = Integer.MIN_VALUE;

    if (nails != null && !nails.isClosed()) {
        try {
            nails.moveToFirst();

            final int idColumn = nails.getColumnIndex(Nails.NAIL_ID);

            while (!nails.isAfterLast()) {
                final int nailId = nails.getInt(idColumn);
                nailIds.add(nailId);
                greatestOfExisting = nailId > greatestOfExisting ? nailId : greatestOfExisting;
                nails.moveToNext();
            }
        } finally {
            if (nails != null) {
                nails.close();
            }
        }
    }

    final JSONArray objects = response.getJSONArray("objects");
    int smallestOfNew = Integer.MAX_VALUE;

    for (int i = 0; i < objects.length(); i++) {
        final JSONObject nailObject = objects.getJSONObject(i);

        final boolean isPrivate = nailObject.getJSONObject("workbench").getBoolean("private");

        if (!isPrivate) {
            final ContentProviderOperation.Builder builder = ContentProviderOperation
                    .newInsert(Nails.CONTENT_URI);
            final int nailId = nailObject.getInt("id");
            smallestOfNew = nailId < smallestOfNew ? nailId : smallestOfNew;

            builder.withValue(Nails.NAIL_ID, nailId);
            builder.withValue(Nails.NAIL_JSON, nailObject.toString());

            batch.add(builder.build());
            nailIds.add(nailId);
        }
    }

    // If more than LIMIT were fetched, and this was the initial fetch, then
    // we flush everything in the DB before adding the new nails (as
    // otherwise we would introduce a gap).
    if (meta.nextOffset == meta.nextLimit // For initial fetch
            && smallestOfNew > greatestOfExisting) {
        if (LoggerConfig.canLog(Log.DEBUG)) {
            Log.d(TAG, "Flushing all existing nails on initial fetch, so as to avoid a gap.");
        }

        resolver.delete(Nails.CONTENT_URI, null, null);
    } else {
        // If there are more than MAX_COUNT nails, skip past the MAX_COUNT
        // biggest ids and delete everything at or below the next one.
        if (nailIds.size() > MAX_COUNT) {
            Iterator<Integer> it = nailIds.descendingIterator();

            for (int i = 0; i < MAX_COUNT; i++) {
                it.next();
            }

            final Integer toDelete = it.next();

            if (LoggerConfig.canLog(Log.DEBUG)) {
                Log.d(TAG, "deleting from nails where NAIL_ID is less than or equal to " + toDelete);
            }

            SelectionBuilder selectionBuilder = new SelectionBuilder();
            selectionBuilder.where(ManterestingContract.Nails.NAIL_ID + " <= ?",
                    new String[] { String.valueOf(toDelete) });
            resolver.delete(ManterestingContract.Nails.CONTENT_URI, selectionBuilder.getSelection(),
                    selectionBuilder.getSelectionArgs());
        }
    }

    return batch;
}
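
The deletion step advances the descending iterator past the MAX_COUNT biggest ids; the next value returned is the cutoff, and every id at or below it is removed. A small sketch of just that cutoff computation (class name and values are illustrative):

import java.util.Iterator;
import java.util.TreeSet;

public class CutoffDemo {
    public static void main(String[] args) {
        TreeSet<Integer> ids = new TreeSet<>();
        for (int i = 1; i <= 8; i++) {
            ids.add(i);
        }

        int maxCount = 5;
        // Skip the maxCount largest ids; the next one is the cutoff.
        Iterator<Integer> it = ids.descendingIterator();
        for (int i = 0; i < maxCount; i++) {
            it.next();
        }
        int cutoff = it.next();
        System.out.println("delete where id <= " + cutoff); // delete where id <= 3
    }
}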

From source file:edu.umass.cs.gigapaxos.SQLPaxosLogger.java

private static SortedSet<Filename> getLatest(File[] files, int numLatest) {
    TreeSet<Filename> allFiles = new TreeSet<Filename>();
    TreeSet<Filename> oldFiles = new TreeSet<Filename>();
    for (File file : files)
        allFiles.add(new Filename(file));
    if (allFiles.size() <= numLatest)
        return allFiles;
    Iterator<Filename> iter = allFiles.descendingIterator();
    for (int i = 0; i < numLatest; i++)
        oldFiles.add(iter.next());

    return oldFiles;
}
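
Despite its name, oldFiles ends up holding the numLatest greatest Filename values, i.e., the newest files, since the descending iterator yields the largest elements first. A condensed sketch of the same take-the-N-largest idiom over plain longs (names and values are illustrative):

import java.util.Iterator;
import java.util.TreeSet;

public class LatestN {
    static TreeSet<Long> latest(TreeSet<Long> all, int n) {
        if (all.size() <= n) {
            return all;
        }
        TreeSet<Long> newest = new TreeSet<>();
        // The descending iterator yields the largest values first.
        Iterator<Long> iter = all.descendingIterator();
        for (int i = 0; i < n; i++) {
            newest.add(iter.next());
        }
        return newest;
    }

    public static void main(String[] args) {
        TreeSet<Long> timestamps = new TreeSet<>();
        for (long t = 100; t <= 500; t += 100) {
            timestamps.add(t);
        }
        System.out.println(latest(timestamps, 2)); // prints [400, 500]
    }
}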