Example usage for java.util SortedSet size

List of usage examples for java.util SortedSet size

Introduction

This page collects usage examples of java.util.SortedSet.size() drawn from open-source projects.

Prototype

int size();

Document

Returns the number of elements in this set (its cardinality).
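
As a quick illustration before the project examples below, here is a minimal, self-contained sketch (the class name and element values are invented for this page). It fills a TreeSet, the standard java.util implementation of SortedSet, and reads the cardinality with size(); the Memcache example further down calls size() on a tailSet view in the same way.

import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetSizeExample {
    public static void main(String[] args) {
        // TreeSet is the standard SortedSet implementation in java.util.
        SortedSet<String> names = new TreeSet<String>();
        names.add("alice");
        names.add("bob");
        names.add("alice"); // duplicate; a set keeps a single copy

        // size() returns the number of distinct elements (the cardinality).
        System.out.println(names.size()); // prints 2

        // Views such as headSet/tailSet/subSet report their own size().
        System.out.println(names.headSet("bob").size()); // prints 1
    }
}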

Usage

From source file: org.cloudata.core.tabletserver.Tablet.java

Reads size() of the in-memory row-key set and of the disk map-file index to decide how many row keys to sample as split points for a tablet.

public Row.Key[] getSplitedRowKeyRanges(int splitPerTablet) throws IOException {
    if (splitPerTablet < 0) {
        return null;
    }

    SortedSet<Row.Key> memoryRowKeys = memorySSTable.getAllRowKeys();
    SortedSet<MapFileIndexRecord> indexRecords = diskSSTable.get().getMapFileIndex();

    SortedSet<Row.Key> rowKeys = null;

    int memoryRowKeySize = memoryRowKeys.size();
    int indexSize = indexRecords.size();
    if (indexSize > 0 && memoryRowKeySize > indexSize * 2) {
        rowKeys = new TreeSet<Row.Key>();
        int gap = memoryRowKeySize / indexSize;

        Row.Key[] rowKeyArr = new Row.Key[memoryRowKeySize];
        memoryRowKeys.toArray(rowKeyArr);

        for (int i = 0; i < indexSize; i++) {
            rowKeys.add(rowKeyArr[(i + 1) * gap - 1]);
        }
    } else {
        rowKeys = memoryRowKeys;
    }

    for (MapFileIndexRecord eachIndex : indexRecords) {
        rowKeys.add(eachIndex.getRowKey());
    }

    int rowKeySize = rowKeys.size();

    if (splitPerTablet >= rowKeySize) {
        return rowKeys.toArray(new Row.Key[] {});
    } else {
        Row.Key[] result = new Row.Key[splitPerTablet];

        Row.Key[] rowKeyArr = new Row.Key[rowKeySize];
        rowKeys.toArray(rowKeyArr);
        int gap = rowKeySize / splitPerTablet;
        for (int i = 0; i < splitPerTablet - 1; i++) {
            result[i] = rowKeyArr[(i + 1) * gap];
        }
        result[splitPerTablet - 1] = tabletInfo.getEndRowKey();

        return result;
    }
}

From source file: com.palantir.atlasdb.sweep.SweepTaskRunner.java

Builds a sorted set of committed start timestamps and, under the conservative sweep strategy, checks its size() to decide whether a sweep sentinel must be added before committed values are removed.

private Set<Long> getTimestampsToSweep(Cell cell, Collection<Long> timestamps /* start timestamps */,
        @Modified Map<Long, Long> startTsToCommitTs, @Output Set<Cell> sentinelsToAdd, long sweepTimestamp,
        boolean sweepLastCommitted, SweepStrategy sweepStrategy) {
    Set<Long> uncommittedTimestamps = Sets.newHashSet();
    SortedSet<Long> committedTimestampsToSweep = Sets.newTreeSet();
    long maxStartTs = TransactionConstants.FAILED_COMMIT_TS;
    boolean maxStartTsIsCommitted = false;
    for (long startTs : timestamps) {
        long commitTs = ensureCommitTimestampExists(startTs, startTsToCommitTs);

        if (startTs > maxStartTs && commitTs < sweepTimestamp) {
            maxStartTs = startTs;
            maxStartTsIsCommitted = commitTs != TransactionConstants.FAILED_COMMIT_TS;
        }
        // Note: there could be an open transaction whose start timestamp is equal to
        // sweepTimestamp; thus we want to sweep all cells such that:
        // (1) their commit timestamp is less than sweepTimestamp
        // (2) their start timestamp is NOT the greatest possible start timestamp
        //     passing condition (1)
        if (commitTs > 0 && commitTs < sweepTimestamp) {
            committedTimestampsToSweep.add(startTs);
        } else if (commitTs == TransactionConstants.FAILED_COMMIT_TS) {
            uncommittedTimestamps.add(startTs);
        }
    }

    if (committedTimestampsToSweep.isEmpty()) {
        return uncommittedTimestamps;
    }

    if (sweepStrategy == SweepStrategy.CONSERVATIVE && committedTimestampsToSweep.size() > 1) {
        // We need to add a sentinel if we are removing a committed value
        sentinelsToAdd.add(cell);
    }

    if (sweepLastCommitted && maxStartTsIsCommitted) {
        return Sets.union(uncommittedTimestamps, committedTimestampsToSweep);
    }
    return Sets.union(uncommittedTimestamps,
            committedTimestampsToSweep.subSet(0L, committedTimestampsToSweep.last()));
}

From source file: com.android.mms.transaction.MessagingNotification.java

Collects unseen-message notifications into a sorted set and logs its size() as the pending notification count.

/**
 * Checks to see if there are any "unseen" messages or delivery
 * reports and builds a sorted (by delivery date) list of unread notifications.
 *
 * @param context the context to use
 * @param newMsgThreadId The thread ID of a new message that we're to notify about; if there's
 *  no new message, use THREAD_NONE. If we should notify about multiple or unknown thread IDs,
 *  use THREAD_ALL.
 * @param isStatusMessage
 */
public static void blockingUpdateNewMessageIndicator(Context context, long newMsgThreadId,
        boolean isStatusMessage) {
    if (DEBUG) {
        Contact.logWithTrace(TAG, "blockingUpdateNewMessageIndicator: newMsgThreadId: " + newMsgThreadId);
    }
    final boolean isDefaultSmsApp = MmsConfig.isSmsEnabled(context);
    if (!isDefaultSmsApp) {
        cancelNotification(context, NOTIFICATION_ID);
        if (DEBUG || Log.isLoggable(LogTag.APP, Log.VERBOSE)) {
            Log.d(TAG,
                    "blockingUpdateNewMessageIndicator: not the default sms app - skipping " + "notification");
        }
        return;
    }

    // notificationSet is kept sorted by the incoming message delivery time, with the
    // most recent message first.
    SortedSet<NotificationInfo> notificationSet = new TreeSet<NotificationInfo>(INFO_COMPARATOR);

    Set<Long> threads = new HashSet<Long>(4);

    addMmsNotificationInfos(context, threads, notificationSet);
    addSmsNotificationInfos(context, threads, notificationSet);

    if (notificationSet.isEmpty()) {
        if (DEBUG) {
            Log.d(TAG, "blockingUpdateNewMessageIndicator: notificationSet is empty, "
                    + "canceling existing notifications");
        }
        cancelNotification(context, NOTIFICATION_ID);
    } else {
        if (DEBUG || Log.isLoggable(LogTag.APP, Log.VERBOSE)) {
            Log.d(TAG, "blockingUpdateNewMessageIndicator: count=" + notificationSet.size()
                    + ", newMsgThreadId=" + newMsgThreadId);
        }

        if (isInCurrentConversation(newMsgThreadId, threads)) {
            if (DEBUG) {
                Log.d(TAG,
                        "blockingUpdateNewMessageIndicator: newMsgThreadId == "
                                + "sCurrentlyDisplayedThreadId so NOT showing notification,"
                                + " but playing soft sound. threadId: " + newMsgThreadId);
            }
            playInConversationNotificationSound(context, newMsgThreadId);
            return;
        }
        updateNotification(context, newMsgThreadId, threads.size(), notificationSet);
    }

    // And deals with delivery reports (which use Toasts). It's safe to call in a worker
    // thread because the toast will eventually get posted to a handler.
    MmsSmsDeliveryInfo delivery = getSmsNewDeliveryInfo(context);
    if (delivery != null) {
        delivery.deliver(context, isStatusMessage);
    }

    notificationSet.clear();
    threads.clear();
}

From source file: com.revetkn.ios.analyzer.ArtworkAnalyzer.java

Uses size() of the set of files referencing each image to flag images that are unreferenced or referenced only by the Xcode project file.

/** Modifies the passed-in {@code applicationArtwork} instance to include image reference data. */
protected void detectImageReferences(File projectRootDirectory, final ApplicationArtwork applicationArtwork,
        final ArtworkExtractionProgressCallback progressCallback) throws Exception {
    final Map<File, String> contentsOfReferencingFiles = extractContentsOfReferencingFiles(
            projectRootDirectory);
    final SortedSet<File> unreferencedImageFiles = synchronizedSortedSet(new TreeSet<File>());
    final SortedSet<File> onlyProjectFileReferencedImageFiles = synchronizedSortedSet(new TreeSet<File>());
    final SortedMap<File, SortedSet<File>> allImageFilesAndReferencingFiles = synchronizedSortedMap(
            new TreeMap<File, SortedSet<File>>());

    final AtomicInteger imageFilesProcessed = new AtomicInteger(0);

    Set<Callable<Object>> imageReferenceProcessingTasks = new HashSet<Callable<Object>>();

    for (final File imageFile : applicationArtwork.getAllImageFiles()) {
        imageReferenceProcessingTasks.add(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                String imageFilename = imageFile.getName();
                SortedSet<File> filesWhereImageIsReferenced = new TreeSet<File>();
                Set<String> imageFilenameVariants = imageFilenameVariants(imageFilename);

                for (Entry<File, String> entry : contentsOfReferencingFiles.entrySet()) {
                    String fileContents = entry.getValue();

                    for (String imageFilenameVariant : imageFilenameVariants) {
                        // Quoted references, e.g. "aboutBackground"
                        if (fileContents.indexOf(format("\"%s\"", imageFilenameVariant)) != -1)
                            filesWhereImageIsReferenced.add(entry.getKey());

                        // Nib files, e.g. <string key="NSResourceName">aboutBackground~ipad.png</string>
                        else if (fileContents.indexOf(format(">%s<", imageFilenameVariant)) != -1)
                            filesWhereImageIsReferenced.add(entry.getKey());
                    }
                }

                if (filesWhereImageIsReferenced.size() == 1 && "project.pbxproj"
                        .equals(filesWhereImageIsReferenced.first().getName().toLowerCase()))
                    onlyProjectFileReferencedImageFiles.add(imageFile);

                if (filesWhereImageIsReferenced.size() == 0) {
                    unreferencedImageFiles.add(imageFile);
                } else {
                    allImageFilesAndReferencingFiles.put(imageFile, filesWhereImageIsReferenced);
                }

                progressCallback.onProcessedImageReferences(imageFile, filesWhereImageIsReferenced,
                        imageFilesProcessed.incrementAndGet(), applicationArtwork.getAllImageFiles().size());

                return null;
            }
        });
    }

    for (Future<Object> future : getExecutorService().invokeAll(imageReferenceProcessingTasks))
        future.get();

    applicationArtwork.setAllImageFilesAndReferencingFiles(allImageFilesAndReferencingFiles);
    applicationArtwork.setUnreferencedImageFiles(unreferencedImageFiles);
    applicationArtwork.setOnlyProjectFileReferencedImageFiles(onlyProjectFileReferencedImageFiles);
}

From source file: org.dllearner.scripts.evaluation.EnrichmentEvaluation.java

Uses size() of the sorted set of negative examples to report how many were found before running the CELOE learning algorithm.

private List<EvaluatedAxiom> applyCELOE(SparqlEndpointKS ks, NamedClass nc, boolean equivalence)
        throws ComponentInitException {
    // get instances of class as positive examples
    SPARQLReasoner sr = new SPARQLReasoner(ks);
    SortedSet<Individual> posExamples = sr.getIndividuals(nc, 20);

    // get negative examples via various strategies
    System.out.print("finding negatives ... ");
    AutomaticNegativeExampleFinderSPARQL2 finder = new AutomaticNegativeExampleFinderSPARQL2(ks.getEndpoint());
    SortedSet<Individual> negExamples = finder.getNegativeExamples(nc, posExamples, 20);
    SortedSetTuple<Individual> examples = new SortedSetTuple<Individual>(posExamples, negExamples);
    System.out.println("done (" + negExamples.size() + ")");

    ComponentManager cm = ComponentManager.getInstance();

    SparqlKnowledgeSource ks2 = cm.knowledgeSource(SparqlKnowledgeSource.class);
    ks2.setInstances(Datastructures.individualSetToStringSet(examples.getCompleteSet()));
    ks2.setUrl(ks.getEndpoint().getURL());
    ks2.setDefaultGraphURIs(new TreeSet<String>(ks.getEndpoint().getDefaultGraphURIs()));
    ks2.setUseLits(false);
    ks2.setUseCacheDatabase(true);
    ks2.setRecursionDepth(2);
    ks2.setCloseAfterRecursion(true);
    //        ks2.getConfigurator().setSaveExtractedFragment(true);
    System.out.println("getting fragment ... ");
    ks2.init();
    System.out.println("done");

    AbstractReasonerComponent rc = cm.reasoner(FastInstanceChecker.class, ks2);
    rc.init();

    // TODO: super class learning
    ClassLearningProblem lp = cm.learningProblem(ClassLearningProblem.class, rc);
    //        lp.setPositiveExamples(posExamples);
    //        lp.setNegativeExamples(negExamples);
    //        try {
    lp.setClassToDescribe(nc);
    //      } catch (MalformedURLException e1) {
    //         e1.printStackTrace();
    //      }
    //        lp.setType("equivalence");
    lp.setEquivalence(true);
    //        lp.setAccuracyMethod("fmeasure");
    lp.setHeuristic(HeuristicType.FMEASURE);
    lp.setUseApproximations(false);
    lp.setMaxExecutionTimeInSeconds(10);
    lp.init();

    CELOE la = null;
    try {
        la = cm.learningAlgorithm(CELOE.class, lp, rc);
    } catch (LearningProblemUnsupportedException e) {
        e.printStackTrace();
    }
    //        CELOEConfigurator cc = la.getConfigurator();
    la.setMaxExecutionTimeInSeconds(10);
    la.setNoisePercentage(25);
    la.init();
    System.out.print("running CELOE ... ");
    la.start();
    System.out.println("done");

    // convert the result to axioms (to make it compatible with the other algorithms)
    List<? extends EvaluatedDescription> learnedDescriptions = la
            .getCurrentlyBestEvaluatedDescriptions(threshold);
    List<EvaluatedAxiom> evaluatedAxioms = new LinkedList<EvaluatedAxiom>();
    for (EvaluatedDescription learnedDescription : learnedDescriptions) {
        Axiom axiom;
        if (equivalence) {
            axiom = new EquivalentClassesAxiom(nc, learnedDescription.getDescription());
        } else {
            axiom = new SubClassAxiom(nc, learnedDescription.getDescription());
        }
        Score score = lp.computeScore(learnedDescription.getDescription());
        evaluatedAxioms.add(new EvaluatedAxiom(axiom, score));
    }

    cm.freeAllComponents();
    return evaluatedAxioms;
}

From source file: org.apache.hadoop.hbase.regionserver.Memcache.java

Reads size() of the memcache tail set while collecting the KeyValues affected by a delete.

/**
 * Write an update
 * @param kv
 * @return approximate size of the passed key and value.
 */
long delete(final KeyValue kv, boolean multiFamily) {
    long size = -1;
    this.lock.readLock().lock();
    long deleteSize = 0L;
    try {
        //Have to find out what we want to do here, to find the fastest way of removing
        //things that are under a delete.
        //Actions that will take place here are:
        //1. Insert a put at the right place
        //2. Insert a deleteFamily and a deleteColumn entry and deleting all the
        //related entries already in there.
        //3. Insert a delete, with timestamp, and finding the put in memcache,
        //deleting both of them. 

        //first check what type the current kv is
        byte type = kv.getType();

        boolean notpresent = false;
        List<KeyValue> deletes = new ArrayList<KeyValue>();
        SortedSet<KeyValue> tailSet = null;
        if (type == KeyValue.Type.DeleteFamily.getCode()) {
            //need to check row/fam and bigger ts
            //cases for mem:
            //1. r/f same but ts bigger, next from headset
            //2. r/f same and ts smaller or equal, add to deleteList
            //3. r or f not the same, done get next kv
            tailSet = this.memcache.tailSet(kv);
            int tailsize = tailSet.size();
            int ret = 0;
            for (KeyValue mem : tailSet) {
                ret = deleteFamilyCompare(mem, kv, multiFamily);
                if (ret == 0) {
                    deletes.add(mem);
                    continue;
                } else if (ret == 1) {
                    break;
                }
            }
            notpresent = this.memcache.add(kv);
            size = heapSize(kv, notpresent);
        } else if (type == KeyValue.Type.DeleteColumn.getCode()) {
            deletes = new ArrayList<KeyValue>();
            //Need to check row/fam/col and bigger ts
            tailSet = this.memcache.tailSet(kv);
            int ret = 0;
            for (KeyValue mem : tailSet) {
                ret = deleteColumnCompare(mem, kv, multiFamily);
                if (ret == 0) {
                    deletes.add(mem);
                    continue;
                } else if (ret == 1) {
                    break;
                }
            }
            notpresent = this.memcache.add(kv);
            size = heapSize(kv, notpresent);
        } else {
            deletes = new ArrayList<KeyValue>();
            //Need to check row/fam/col/ts
            tailSet = this.memcache.tailSet(kv);
            int tailsize = tailSet.size();

            int ret = 0;
            for (KeyValue mem : tailSet) {
                ret = deleteCompare(mem, kv, multiFamily);
                if (ret == 0) {
                    deletes.add(mem);
                    break;
                } else if (ret == 1) {
                    break;
                }
            }
        }

        for (KeyValue delete : deletes) {
            notpresent = this.memcache.remove(delete);
            deleteSize += heapSize(delete, notpresent);
        }

    } finally {
        this.lock.readLock().unlock();
    }
    return size - deleteSize;
}

From source file: no.abmu.finances.service.hibernate3.FinanceServiceHelperH3Impl.java

Uses size() of the report set to size the resulting SchemaList and the progress statistics.

public SchemaList getReportDataBySchemaTypeAndVersion(SortedSet<OrgUnitReport> orgSortedSet,
        String schemaTypeName, String schemaVersion, String prefix) {

    Assert.checkRequiredArgument("orgSortedSet", orgSortedSet);
    Assert.checkRequiredArgument("schemaTypeName", schemaTypeName);
    Assert.checkRequiredArgument("schemaVersion", schemaVersion);

    int numberOfReports = orgSortedSet.size();

    SchemaList schemaList = new SchemaList(numberOfReports);
    ProgressStatistics ps = new ProgressStatistics(numberOfReports);
    for (OrgUnitReport orgUnitReport : orgSortedSet) {
        Map<String, Object> report = orgUnitReport.createReport();
        fillInReportValues(report, schemaTypeName, schemaVersion, prefix);
        schemaList.add(report);
        ps.increaseCountAndDumpStatus();
    }
    ps.dumpStatus(true);

    return schemaList;
}

From source file: org.eclipse.gyrex.p2.internal.commands.ListCommand.java

Collects matching installable units into a sorted set and uses size() to report how many artifacts were found.

private void listArtifacts() throws Exception {
    IProvisioningAgent agent = null;
    try {
        // get agent
        agent = P2Activator.getInstance().getService(IProvisioningAgentProvider.class).createAgent(null);
        if (agent == null)
            throw new IllegalStateException(
                    "The current system has not been provisioned using p2. Unable to acquire provisioning agent.");

        final IMetadataRepositoryManager manager = (IMetadataRepositoryManager) agent
                .getService(IMetadataRepositoryManager.SERVICE_NAME);
        if (manager == null)
            throw new IllegalStateException(
                    "The provision system is broken. Unable to acquire metadata repository service.");

        // sync repos
        RepoUtil.configureRepositories(manager,
                (IArtifactRepositoryManager) agent.getService(IArtifactRepositoryManager.SERVICE_NAME));

        // load repos
        final URI[] knownRepositories = manager
                .getKnownRepositories(IRepositoryManager.REPOSITORIES_NON_SYSTEM);
        for (final URI uri : knownRepositories) {
            printf("Loading %s", uri.toString());
            manager.loadRepository(uri, new NullProgressMonitor());
        }

        // query for everything that provides an OSGi bundle and features
        IQuery<IInstallableUnit> query = QueryUtil.createMatchQuery(
                "properties[$0] == true || providedCapabilities.exists(p | p.namespace == 'osgi.bundle')", //$NON-NLS-1$
                new Object[] { MetadataFactory.InstallableUnitDescription.PROP_TYPE_GROUP });

        // wrap query if necessary
        if (latestVersionOnly) {
            query = QueryUtil.createPipeQuery(query, QueryUtil.createLatestIUQuery());
        }

        // execute
        printf("Done loading. Searching...");
        final SortedSet<String> result = new TreeSet<>();
        for (final Iterator stream = manager.query(query, new NullProgressMonitor()).iterator(); stream
                .hasNext();) {
            final IInstallableUnit iu = (IInstallableUnit) stream.next();

            // exclude fragments
            if ((iu.getFragments() != null) && (iu.getFragments().size() > 0)) {
                continue;
            }

            final String id = iu.getId();

            // exclude source IUs
            if (StringUtils.endsWith(id, ".source") || StringUtils.endsWith(id, ".source.feature.group")) {
                continue;
            }

            // get name
            String name = iu.getProperty(IInstallableUnit.PROP_NAME, null);
            if ((name == null) || name.startsWith("%")) {
                name = ""; //$NON-NLS-1$
            }

            // check if filter is provided
            if (StringUtils.isBlank(filterString) || StringUtils.containsIgnoreCase(id, filterString)
                    || StringUtils.containsIgnoreCase(name, filterString)) {
                result.add(String.format("%s (%s, %s)", name, id, iu.getVersion()));
            }
        }

        if (result.isEmpty()) {
            printf("No artifacts found!");
        } else {
            printf("Found %d artifacts:", result.size());
            for (final String artifact : result) {
                printf(artifact);
            }
        }
    } finally {
        if (null != agent) {
            agent.stop();
        }
    }
}

From source file: de.ailis.xadrian.data.Complex.java

Uses size() of the sorted set of available factory sizes to allocate the array it is copied into.

/**
 * Adds the factories needed to fulfill the need of the specified complex
 * ware.
 *
 * @param complexWare
 *            The complex ware for which factories must be added
 * @param race
 *            The race from which factories should be bought. If null then
 *            the cheapest factory is used.
 * @return True if new factories were added, false if this was not
 *         possible
 */
private boolean addBaseComplexForWare(final ComplexWare complexWare, final Race race) {
    final Ware ware = complexWare.getWare();
    final FactoryFactory factoryFactory = this.game.getFactoryFactory();

    // Remove all automatically added factories which produces the
    // specified ware and calculate the real need which must be
    // fulfilled.
    double need = complexWare.getMissing();
    final double oldNeed = need;
    for (final ComplexFactory complexFactory : new ArrayList<ComplexFactory>(this.autoFactories)) {
        if (complexFactory.getFactory().getProduct().getWare().equals(ware)) {
            need += complexFactory.getProductPerHour(getSuns()).getQuantity();
            this.autoFactories.remove(complexFactory);
        }
    }

    // Determine the available factory sizes
    final SortedSet<FactorySize> sizesSet = factoryFactory.getFactorySizes(ware, race);
    final FactorySize[] sizes = sizesSet.toArray(new FactorySize[sizesSet.size()]);

    // Abort if no factories were found
    if (sizes.length == 0)
        return false;

    // Get the cheapest factories for the sizes
    final Map<FactorySize, Factory> factories = new HashMap<FactorySize, Factory>();
    for (final FactorySize size : sizes) {
        if (race == null)
            factories.put(size, factoryFactory.getCheapestFactory(ware, size));
        else
            factories.put(size, factoryFactory.getFactory(ware, size, race));
    }

    // Get the smallest possible production quantity
    final double minProduction = factories.get(sizes[0]).getProductPerHour(getSuns(), 0).getQuantity();

    // Iterate the available sizes (from largest to smallest) and add
    // the factories producing an adequate number of products
    for (int i = sizes.length - 1; i >= 0; i--) {
        final FactorySize size = sizes[i];
        final Factory factory = factories.get(size);
        final double product = factory.getProductPerHour(getSuns(), 0).getQuantity();

        // Calculate the number of factories of the current size needed
        log.debug("Need " + need + " units of " + ware + ". Considering " + factory + " which produces "
                + product + " units");
        final int quantity = (int) Math.floor((need + minProduction - 0.1) / product);

        // Add the number of factories and decrease the need
        if (quantity > 0) {
            log.debug("Adding " + quantity + "x " + factory);
            this.autoFactories.add(new ComplexFactory(this.game, factory, quantity, 0));
            need -= quantity * product;
        } else
            log.debug("Not adding any " + factory);
    }
    if (Math.abs(need - oldNeed) < .0000001) {
        log.debug("Unable to calculate best matching factory. Aborting");
        return false;
    }
    return true;
}

From source file: edu.harvard.med.screensaver.model.screenresults.ScreenResult.java

Compares size() of the most recent assay-plate set with the number of replicates loaded to decide whether missing assay plates must be created.

private SortedSet<AssayPlate> findOrCreateAssayPlatesDataLoaded(int plateNumber, int replicatesDataLoaded) {
    SortedSet<AssayPlate> mostRecentAssayPlatesForPlateNumber = Sets.newTreeSet();
    SortedSet<AssayPlate> allAssayPlatesForPlateNumber = getScreen().findAssayPlates(plateNumber);
    if (!allAssayPlatesForPlateNumber.isEmpty()) {
        final LibraryScreening lastLibraryScreening = ImmutableSortedSet
                .copyOf(Iterables.transform(allAssayPlatesForPlateNumber, AssayPlate.ToLibraryScreening))
                .last();
        assert lastLibraryScreening != null;
        mostRecentAssayPlatesForPlateNumber
                .addAll(Sets.filter(allAssayPlatesForPlateNumber, new Predicate<AssayPlate>() {
                    public boolean apply(AssayPlate ap) {
                        return lastLibraryScreening.equals(ap.getLibraryScreening());
                    }
                }));
    }
    SortedSet<AssayPlate> assayPlatesDataLoaded = Sets.newTreeSet();
    // if there are fewer assay plates screened replicates than we have data
    // for, then a library screening must not have been recorded for the assay
    // plates that were used to generate this data, so we'll create them now
    if (mostRecentAssayPlatesForPlateNumber.size() < replicatesDataLoaded) {
        //log.warn("creating missing assay plate(s) for plate number " + plateNumber);
        for (int r = 0; r < replicatesDataLoaded; r++) {
            assayPlatesDataLoaded.add(getScreen().createAssayPlate(plateNumber, r));
        }
    } else {
        for (AssayPlate assayPlate : mostRecentAssayPlatesForPlateNumber) {
            if (assayPlate.getReplicateOrdinal() < replicatesDataLoaded) {
                assayPlatesDataLoaded.add(assayPlate);
            }
        }
    }
    return assayPlatesDataLoaded;
}