Example usage for java.util SortedSet tailSet

List of usage examples for java.util SortedSet tailSet

Introduction

On this page you can find usage examples for java.util.SortedSet.tailSet.

Prototype

SortedSet<E> tailSet(E fromElement);

Document

Returns a view of the portion of this set whose elements are greater than or equal to fromElement.
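
Before the real-world examples below, here is a minimal, self-contained sketch of the behaviour described above (the class name TailSetDemo and the element values are purely illustrative):

import java.util.SortedSet;
import java.util.TreeSet;

public class TailSetDemo {

    public static void main(String[] args) {
        SortedSet<String> names = new TreeSet<String>();
        names.add("alice");
        names.add("bob");
        names.add("carol");
        names.add("dave");

        // tailSet returns a view containing every element >= the given element
        SortedSet<String> fromBob = names.tailSet("bob");
        System.out.println(fromBob); // prints [bob, carol, dave]

        // the view is backed by the original set, so removals write through
        fromBob.remove("bob");
        System.out.println(names); // prints [alice, carol, dave]
    }
}

Note that the returned set is a live view of the original: several of the examples on this page rely on this by removing an element through the tail set and seeing the change reflected in the backing set.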

Usage

From source file:net.sourceforge.fenixedu.domain.Lesson.java

public YearMonthDay getNextPossibleSummaryDate() {

    YearMonthDay currentDate = new YearMonthDay();
    HourMinuteSecond now = new HourMinuteSecond();
    Summary lastSummary = getLastSummary();

    if (lastSummary != null) {

        SortedSet<YearMonthDay> datesEvenToday = getAllLessonDatesUntil(currentDate);
        SortedSet<YearMonthDay> possibleDates = datesEvenToday
                .tailSet(lastSummary.getSummaryDateYearMonthDay());

        possibleDates.remove(lastSummary.getSummaryDateYearMonthDay());
        if (!possibleDates.isEmpty()) {
            YearMonthDay nextPossibleDate = possibleDates.first();
            return isTimeValidToInsertSummary(now, nextPossibleDate) ? nextPossibleDate : null;
        }

    } else {
        YearMonthDay nextPossibleDate = hasAnyLessonInstances() ? getFirstLessonInstance().getDay()
                : getLessonStartDay();
        return isTimeValidToInsertSummary(now, nextPossibleDate) ? nextPossibleDate : null;
    }

    return null;
}

From source file:net.sourceforge.fenixedu.domain.Lesson.java

public YearMonthDay getNextPossibleLessonInstanceDate() {

    SortedSet<YearMonthDay> allLessonDates = getAllLessonDates();
    LessonInstance lastLessonInstance = getLastLessonInstance();

    if (lastLessonInstance != null) {
        YearMonthDay day = lastLessonInstance.getDay();
        SortedSet<YearMonthDay> nextLessonDates = allLessonDates.tailSet(day);
        nextLessonDates.remove(day);
        return nextLessonDates.isEmpty() ? null : nextLessonDates.first();
    }

    return allLessonDates.isEmpty() ? null : allLessonDates.first();
}

From source file:cerrla.LocalCrossEntropyDistribution.java

/**
 * Modifies the policy values before updating (cutting the values down to
 * size).
 * 
 * @param elites
 *            The policy values to modify.
 * @param numElite
 *            The minimum number of elite samples.
 * @param staleValue
 *            The number of policies a sample hangs around for.
 * @param minValue
 *            The minimum observed value.
 * @return The policy values that were removed.
 */
private SortedSet<PolicyValue> preUpdateModification(SortedSet<PolicyValue> elites, int numElite,
        int staleValue, double minValue) {
    // Firstly, remove any policy values that have been around for more
    // than N steps

    // Make a backup - just in case the elites are empty afterwards
    SortedSet<PolicyValue> backup = new TreeSet<PolicyValue>(elites);

    // Only remove stuff if the elites are a representative solution
    if (!ProgramArgument.GLOBAL_ELITES.booleanValue()) {
        int iteration = policyGenerator_.getPoliciesEvaluated();
        for (Iterator<PolicyValue> iter = elites.iterator(); iter.hasNext();) {
            PolicyValue pv = iter.next();
            if (iteration - pv.getIteration() >= staleValue) {
                if (ProgramArgument.RETEST_STALE_POLICIES.booleanValue())
                    policyGenerator_.retestPolicy(pv.getPolicy());
                iter.remove();
            }
        }
    }
    if (elites.isEmpty())
        elites.addAll(backup);

    SortedSet<PolicyValue> tailSet = null;
    if (elites.size() > numElite) {
        // Find the N_E value
        Iterator<PolicyValue> pvIter = elites.iterator();
        PolicyValue currentPV = null;
        for (int i = 0; i < numElite; i++)
            currentPV = pvIter.next();

        // Iter at N_E value. Remove any values less than N_E's value
        tailSet = new TreeSet<PolicyValue>(elites.tailSet(new PolicyValue(null, currentPV.getValue(), -1)));
        elites.removeAll(tailSet);
    }

    return tailSet;
}
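
The tailSet call above passes a throwaway PolicyValue built only for comparison: tailSet does not require its argument to be an element of the set, only that it is comparable to the elements, so a probe object carrying the cut-off value selects everything from that value onward. A tiny sketch of the probe idiom (the values are illustrative):

import java.util.SortedSet;
import java.util.TreeSet;

public class ProbeDemo {

    public static void main(String[] args) {
        SortedSet<Double> scores = new TreeSet<Double>();
        scores.add(0.9);
        scores.add(0.7);
        scores.add(0.4);
        scores.add(0.1);

        // 0.5 is not an element of the set; tailSet only needs it to be comparable
        SortedSet<Double> atOrAbove = scores.tailSet(0.5);
        System.out.println(atOrAbove); // prints [0.7, 0.9]
    }
}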

From source file:de.clusteval.serverclient.BackendClientCompleter.java

@SuppressWarnings("unused")
@Override
public int complete(String buffer, int cursor, List<CharSequence> candidates) {
    SortedSet<String> strings = new TreeSet<String>();
    strings.add("performRun");
    strings.add("resumeRun");
    strings.add("terminateRun");
    strings.add("getRunStatus");
    strings.add("getOptRunStatus");
    strings.add("shutdown");
    strings.add("getRuns");
    strings.add("getQueue");
    strings.add("getRunResumes");
    strings.add("getDataSets");
    strings.add("getPrograms");
    strings.add("getRunResults");
    strings.add("generateDataSet");
    strings.add("randomizeDataConfig");
    strings.add("getActiveThreads");

    boolean exception = true;
    while (exception) {
        exception = false;
        try {
            if (buffer == null) {
                candidates.addAll(strings);
            } else if (buffer.equals("performRun ")) {
                this.updateRuns();
                candidates.addAll(runs);
                return buffer.length();
            } else if (buffer.equals("resumeRun ")) {
                this.updateRunResumes();
                candidates.addAll(runResumes);
                return buffer.length();
            } else if (buffer.equals("getRunResults ")) {
                this.updateRunResults();
                candidates.addAll(runResults);
                return buffer.length();
            } else if (buffer.equals("getRunResumes ")) {
                this.updateRunResumes();
                candidates.addAll(runResumes);
                return buffer.length();
            } else if (buffer.equals("terminateRun ") || buffer.equals("getRunStatus ")
                    || buffer.equals("getOptRunStatus ")) {
                this.updateRunningRuns();
                candidates.addAll(runningRuns);
                return buffer.length();
            } else if (buffer.startsWith("performRun ")) {
                updateRuns();
                int posSpace = buffer.indexOf(' ');
                for (String match : runs.tailSet(buffer.substring(posSpace + 1))) {
                    if (!match.startsWith(buffer.substring(posSpace + 1))) {
                        break;
                    }

                    candidates.add(match);
                }

                return posSpace + 1;
            } else if (buffer.startsWith("resumeRun ")) {
                this.updateRunResumes();
                int posSpace = buffer.indexOf(' ');
                for (String match : runResumes.tailSet(buffer.substring(posSpace + 1))) {
                    if (!match.startsWith(buffer.substring(posSpace + 1))) {
                        break;
                    }

                    candidates.add(match);
                }

                return posSpace + 1;
            } else if (buffer.startsWith("getRunResults ")) {
                this.updateRunResults();
                int posSpace = buffer.indexOf(' ');
                for (String match : runResults.tailSet(buffer.substring(posSpace + 1))) {
                    if (!match.startsWith(buffer.substring(posSpace + 1))) {
                        break;
                    }

                    candidates.add(match);
                }

                return posSpace + 1;
            } else if (buffer.startsWith("getRunResumes ")) {
                this.updateRunResumes();
                int posSpace = buffer.indexOf(' ');
                for (String match : runResumes.tailSet(buffer.substring(posSpace + 1))) {
                    if (!match.startsWith(buffer.substring(posSpace + 1))) {
                        break;
                    }

                    candidates.add(match);
                }

                return posSpace + 1;
            } else if (buffer.startsWith("generateDataSet ")) {
                this.updateDataSetGenerators();
                int posSpace = buffer.indexOf(' ');
                for (String match : dataSetGenerators.tailSet(buffer.substring(posSpace + 1))) {
                    if (!match.startsWith(buffer.substring(posSpace + 1))) {
                        break;
                    }

                    candidates.add(match);
                }

                return posSpace + 1;
            } else if (buffer.startsWith("randomizeDataConfig ")) {
                this.updateDataRandomizers();
                int posSpace = buffer.indexOf(' ');
                for (String match : dataRandomizers.tailSet(buffer.substring(posSpace + 1))) {
                    if (!match.startsWith(buffer.substring(posSpace + 1))) {
                        break;
                    }

                    candidates.add(match);
                }

                return posSpace + 1;
            } else if (buffer.startsWith("generateDataSet ")) {
                this.updateDataSetGenerators();
                candidates.addAll(dataSetGenerators);
                return buffer.length();
            } else if (buffer.startsWith("terminateRun ") || buffer.startsWith("getRunStatus ")) {
                this.updateRunningRuns();
                int posSpace = buffer.indexOf(' ');
                for (String match : runningRuns.tailSet(buffer.substring(posSpace + 1))) {
                    if (!match.startsWith(buffer.substring(posSpace + 1))) {
                        break;
                    }

                    candidates.add(match);
                }

                return posSpace + 1;
            } else {
                for (String match : strings.tailSet(buffer)) {
                    if (!match.startsWith(buffer)) {
                        break;
                    }

                    candidates.add(match);
                }
            }
        } catch (RemoteException e) {
            exception = true;
            try {
                // client = new EvalClient(new String[]{"-clientId",
                // clientId});
                client = new BackendClient(newArgs);
            } catch (ConnectException e1) {
                // e1.printStackTrace();
            } catch (ParseException e1) {
                // e1.printStackTrace();
            }
            // return -1;
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e1) {
                e1.printStackTrace();
            }
        }

        if (candidates.size() == 1) {
            candidates.set(0, candidates.get(0) + " ");
        }
    }

    return candidates.isEmpty() ? -1 : 0;
}
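
The completer above repeats a single idiom: take the tail set starting at the typed prefix and stop at the first element that no longer starts with that prefix, since in a sorted set all matches form one contiguous run. A stripped-down sketch of that idiom (the class and command names are illustrative):

import java.util.SortedSet;
import java.util.TreeSet;

public class PrefixCompleter {

    // Collects every element of the sorted set that starts with the given prefix.
    static SortedSet<String> complete(SortedSet<String> commands, String prefix) {
        SortedSet<String> matches = new TreeSet<String>();
        // tailSet(prefix) starts at the first element >= prefix; the matching
        // elements, if any, follow immediately in sorted order.
        for (String candidate : commands.tailSet(prefix)) {
            if (!candidate.startsWith(prefix)) {
                break; // past the run of matches, stop early
            }
            matches.add(candidate);
        }
        return matches;
    }

    public static void main(String[] args) {
        SortedSet<String> commands = new TreeSet<String>();
        commands.add("getRuns");
        commands.add("getRunStatus");
        commands.add("getQueue");
        commands.add("shutdown");
        System.out.println(complete(commands, "getRun")); // prints [getRunStatus, getRuns]
    }
}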

From source file:com.google.gwt.emultest.java.util.TreeSetTest.java

/**
 * Test method for 'java.util.SortedSet.tailSet(Object)'.
 *
 * @see java.util.SortedSet#tailSet(Object)
 */
@SuppressWarnings("unchecked")
public void testTailSet_throwsClassCastException() {
    SortedSet SortedSet = createNavigableSet();
    SortedSet.add(getKeys()[0]);
    if (isNaturalOrder()) {
        // TODO Why does this succeed with natural ordering when subSet doesn't?
        SortedSet.tailSet(getConflictingKey());
    } else {
        try {
            SortedSet.tailSet(getConflictingKey());
            assertTrue("CCE expected in Development Mode", !TestUtils.isJvm());
        } catch (ClassCastException e) {
            // expected outcome
        }
    }
}

From source file:org.apache.accumulo.server.gc.SimpleGarbageCollector.java

/**
 * This method removes candidates from the candidate list under two conditions:
 * 1. They are in the same folder as a bulk processing file, if that option is selected.
 * 2. They are still in use in the file column family in the METADATA table.
 */
public void confirmDeletes(SortedSet<String> candidates) throws AccumuloException {

    Scanner scanner;
    if (offline) {
        try {
            scanner = new OfflineMetadataScanner();
        } catch (IOException e) {
            throw new IllegalStateException("Unable to create offline metadata scanner", e);
        }
    } else {
        try {
            scanner = new IsolatedScanner(instance.getConnector(credentials)
                    .createScanner(Constants.METADATA_TABLE_NAME, Constants.NO_AUTHS));
        } catch (AccumuloSecurityException ex) {
            throw new AccumuloException(ex);
        } catch (TableNotFoundException ex) {
            throw new AccumuloException(ex);
        }
    }

    // skip candidates that are in a bulk processing folder
    if (checkForBulkProcessingFiles) {

        log.debug("Checking for bulk processing flags");

        scanner.setRange(Constants.METADATA_BLIP_KEYSPACE);

        // WARNING: This block is IMPORTANT
        // You MUST REMOVE candidates that are in the same folder as a bulk
        // processing flag!

        for (Entry<Key, Value> entry : scanner) {
            String blipPath = entry.getKey().getRow().toString()
                    .substring(Constants.METADATA_BLIP_FLAG_PREFIX.length());
            Iterator<String> tailIter = candidates.tailSet(blipPath).iterator();
            int count = 0;
            while (tailIter.hasNext()) {
                if (tailIter.next().startsWith(blipPath)) {
                    count++;
                    tailIter.remove();
                } else {
                    break;
                }
            }

            if (count > 0)
                log.debug("Folder has bulk processing flag: " + blipPath);

        }
    }

    // skip candidates that are still in use in the file column family in
    // the metadata table
    scanner.clearColumns();
    scanner.fetchColumnFamily(Constants.METADATA_DATAFILE_COLUMN_FAMILY);
    scanner.fetchColumnFamily(Constants.METADATA_SCANFILE_COLUMN_FAMILY);
    ColumnFQ.fetch(scanner, Constants.METADATA_DIRECTORY_COLUMN);

    TabletIterator tabletIterator = new TabletIterator(scanner, Constants.METADATA_KEYSPACE, false, true);

    while (tabletIterator.hasNext()) {
        Map<Key, Value> tabletKeyValues = tabletIterator.next();

        for (Entry<Key, Value> entry : tabletKeyValues.entrySet()) {
            if (entry.getKey().getColumnFamily().equals(Constants.METADATA_DATAFILE_COLUMN_FAMILY)
                    || entry.getKey().getColumnFamily().equals(Constants.METADATA_SCANFILE_COLUMN_FAMILY)) {

                String cf = entry.getKey().getColumnQualifier().toString();
                String delete;
                if (cf.startsWith("../")) {
                    delete = cf.substring(2);
                } else {
                    String table = new String(KeyExtent.tableOfMetadataRow(entry.getKey().getRow()));
                    delete = "/" + table + cf;
                }
                // WARNING: This line is EXTREMELY IMPORTANT.
                // You MUST REMOVE candidates that are still in use
                if (candidates.remove(delete))
                    log.debug("Candidate was still in use in the METADATA table: " + delete);

                String path = delete.substring(0, delete.lastIndexOf('/'));
                if (candidates.remove(path))
                    log.debug("Candidate was still in use in the METADATA table: " + path);
            } else if (Constants.METADATA_DIRECTORY_COLUMN.hasColumns(entry.getKey())) {
                String table = new String(KeyExtent.tableOfMetadataRow(entry.getKey().getRow()));
                String delete = "/" + table + entry.getValue().toString();
                if (candidates.remove(delete))
                    log.debug("Candidate was still in use in the METADATA table: " + delete);
            } else
                throw new AccumuloException(
                        "Scanner over metadata table returned unexpected column : " + entry.getKey());
        }
    }
}

From source file:org.apache.hadoop.hbase.regionserver.DefaultMemStore.java

private boolean walkForwardInSingleRow(final SortedSet<KeyValue> set, final KeyValue firstOnRow,
        final GetClosestRowBeforeTracker state) {
    boolean foundCandidate = false;
    SortedSet<KeyValue> tail = set.tailSet(firstOnRow);
    if (tail.isEmpty())
        return foundCandidate;
    for (Iterator<KeyValue> i = tail.iterator(); i.hasNext();) {
        KeyValue kv = i.next();
        // Did we go beyond the target row? If so break.
        if (state.isTooFar(kv, firstOnRow))
            break;
        if (state.isExpired(kv)) {
            i.remove();
            continue;
        }
        // If we added something, this row is a contender. break.
        if (state.handle(kv)) {
            foundCandidate = true;
            break;
        }
    }
    return foundCandidate;
}

From source file:org.apache.hadoop.hbase.regionserver.Memcache.java

/**
 *
 * @param set
 * @param sget
 * @param result
 * @return -1 if error, 0 if not ready and 1 if ready
 */
private int internalGetRow(SortedSet<KeyValue> set, ServerGet sget, List<KeyValue> result, boolean multiFamily)
        throws IOException {
    if (set.isEmpty()) {
        return 0;
    }
    //Getting only the things that are related to this row
    //TODO add family and column in the future, right now it just adds
    //complexity since you need the first column and/or ts and maybe there is
    //no column to look for yet as in the cases of getFamilies and getTop
    set = set.tailSet(new KeyValue(sget.getRow()));

    //TODO have to remember to check the order of the set, so that tailSet
    //returns the things that are smaller and not bigger
    int retCode = 0;
    // The cases that we need at this level:
    //0 next
    //1 include
    //2 next store
    //3 done
    for (KeyValue kv : set) {
        retCode = sget.compareTo(kv, multiFamily);
        switch (retCode) {
        //Do not include in result, look at next kv
        case 0:
            break;

        //Include in result
        case 1:
            result.add(kv);
            break;

        //Go to next storefile
        case 2:
            return 0;

        //Done, early out
        case 3:
            return 1;

        default:
            return -1;
        }
    }
    return 0;
}

From source file:org.apache.hadoop.hdfs.server.namenode.JournalSet.java

/**
 * Return a manifest of what finalized edit logs are available. All available
 * edit logs are returned starting from the transaction id passed. If
 * 'fromTxId' falls in the middle of a log, that log is returned as well.
 *
 * @param fromTxId Starting transaction id to read the logs.
 * @return RemoteEditLogManifest object.
 */
public synchronized RemoteEditLogManifest getEditLogManifest(long fromTxId) {
    // Collect RemoteEditLogs available from each FileJournalManager
    List<RemoteEditLog> allLogs = Lists.newArrayList();
    for (JournalAndStream j : journals) {
        if (j.getManager() instanceof FileJournalManager) {
            FileJournalManager fjm = (FileJournalManager) j.getManager();
            try {
                allLogs.addAll(fjm.getRemoteEditLogs(fromTxId, false));
            } catch (Throwable t) {
                LOG.warn("Cannot list edit logs in " + fjm, t);
            }
        }
    }

    // Group logs by their starting txid
    ImmutableListMultimap<Long, RemoteEditLog> logsByStartTxId = Multimaps.index(allLogs,
            RemoteEditLog.GET_START_TXID);
    long curStartTxId = fromTxId;

    List<RemoteEditLog> logs = Lists.newArrayList();
    while (true) {
        ImmutableList<RemoteEditLog> logGroup = logsByStartTxId.get(curStartTxId);
        if (logGroup.isEmpty()) {
            // we have a gap in logs - for example because we recovered some old
            // storage directory with ancient logs. Clear out any logs we've
            // accumulated so far, and then skip to the next segment of logs
            // after the gap.
            SortedSet<Long> startTxIds = Sets.newTreeSet(logsByStartTxId.keySet());
            startTxIds = startTxIds.tailSet(curStartTxId);
            if (startTxIds.isEmpty()) {
                break;
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Found gap in logs at " + curStartTxId + ": "
                            + "not returning previous logs in manifest.");
                }
                logs.clear();
                curStartTxId = startTxIds.first();
                continue;
            }
        }

        // Find the one that extends the farthest forward
        RemoteEditLog bestLog = Collections.max(logGroup);
        logs.add(bestLog);
        // And then start looking from after that point
        curStartTxId = bestLog.getEndTxId() + 1;
    }
    RemoteEditLogManifest ret = new RemoteEditLogManifest(logs);

    if (LOG.isDebugEnabled()) {
        LOG.debug("Generated manifest for logs since " + fromTxId + ":" + ret);
    }
    return ret;
}

From source file:org.jclouds.blobstore.integration.internal.StubAsyncBlobStore.java

public Future<? extends ListContainerResponse<? extends ResourceMetadata>> list(final String name,
        ListContainerOptions... optionsList) {
    final ListContainerOptions options = (optionsList.length == 0) ? new ListContainerOptions()
            : optionsList[0];
    return new FutureBase<ListContainerResponse<ResourceMetadata>>() {
        public ListContainerResponse<ResourceMetadata> get() throws InterruptedException, ExecutionException {
            final Map<String, Blob> realContents = getContainerToBlobs().get(name);

            if (realContents == null)
                throw new ContainerNotFoundException(name);

            SortedSet<ResourceMetadata> contents = Sets.newTreeSet(
                    Iterables.transform(realContents.keySet(), new Function<String, ResourceMetadata>() {
                        public ResourceMetadata apply(String key) {
                            return copy(realContents.get(key).getMetadata());
                        }
                    }));

            if (options.getMarker() != null) {
                final String finalMarker = options.getMarker();
                ResourceMetadata lastMarkerMetadata = Iterables.find(contents,
                        new Predicate<ResourceMetadata>() {
                            public boolean apply(ResourceMetadata metadata) {
                                return metadata.getName().equals(finalMarker);
                            }
                        });
                contents = contents.tailSet(lastMarkerMetadata);
                contents.remove(lastMarkerMetadata);
            }

            final String prefix = options.getPath();
            if (prefix != null) {
                contents = Sets.newTreeSet(Iterables.filter(contents, new Predicate<ResourceMetadata>() {
                    public boolean apply(ResourceMetadata o) {
                        return (o != null && o.getName().startsWith(prefix));
                    }
                }));
            }

            int maxResults = contents.size();
            boolean truncated = false;
            String marker = null;
            if (options.getMaxResults() != null && contents.size() > 0) {
                SortedSet<ResourceMetadata> contentsSlice = firstSliceOfSize(contents,
                        options.getMaxResults().intValue());
                maxResults = options.getMaxResults();
                if (!contentsSlice.contains(contents.last())) {
                    // Partial listing
                    truncated = true;
                    marker = contentsSlice.last().getName();
                } else {
                    marker = null;
                }
                contents = contentsSlice;
            }

            final String delimiter = options.isRecursive() ? null : "/";
            if (delimiter != null) {
                SortedSet<String> commonPrefixes = null;
                Iterable<String> iterable = Iterables.transform(contents,
                        new CommonPrefixes(prefix != null ? prefix : null, delimiter));
                commonPrefixes = iterable != null ? Sets.newTreeSet(iterable) : new TreeSet<String>();
                commonPrefixes.remove(CommonPrefixes.NO_PREFIX);

                contents = Sets.newTreeSet(Iterables.filter(contents,
                        new DelimiterFilter(prefix != null ? prefix : null, delimiter)));

                Iterables.<ResourceMetadata>addAll(contents,
                        Iterables.transform(commonPrefixes, new Function<String, ResourceMetadata>() {
                            public ResourceMetadata apply(String o) {
                                MutableResourceMetadata md = new MutableResourceMetadataImpl();
                                md.setType(ResourceType.RELATIVE_PATH);
                                md.setName(o);
                                return md;
                            }
                        }));
            }
            return new ListContainerResponseImpl<ResourceMetadata>(contents, prefix, marker, maxResults,
                    truncated);
        }
    };
}