Example usage for java.util Set removeAll

List of usage examples for java.util Set removeAll

Introduction

On this page you can find example usage for java.util Set removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).
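
Before the project excerpts below, here is a quick, self-contained sketch of that contract (RemoveAllDemo and its variable names are invented for illustration): removeAll performs an in-place set difference and reports whether the set changed.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RemoveAllDemo {
    public static void main(String[] args) {
        Set<String> ids = new HashSet<>(Arrays.asList("a", "b", "c", "d"));
        Set<String> inUse = new HashSet<>(Arrays.asList("b", "d", "x"));

        // removeAll is an in-place set difference: every element of inUse
        // is removed from ids; elements only in inUse (here "x") are ignored.
        boolean changed = ids.removeAll(inUse);

        System.out.println(changed); // true  (the set was modified)
        System.out.println(ids);     // [a, c] (HashSet iteration order is unspecified)
    }
}

Note that removeAll mutates the receiving set; when the original must be preserved, the usual idiom is to copy it first with new HashSet<String>(original), which is exactly what several of the examples below do before calling removeAll.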

Usage

From source file:hoot.services.writers.review.ReviewPrepareDbWriter.java

private void removeUnusedElementIdMappings() throws Exception {
    final String logMsgStart = "Removing unused ID mappings for map with ID: " + mapId + ".  Step 4 of 4.";
    log.info(logMsgStart);

    //get all unique reviewable item ids in review_items
    //TODO: these need to be buffered queries
    final Set<String> reviewableItemIds = new HashSet<String>(
            new SQLQuery(conn, DbUtils.getConfiguration(mapId)).from(reviewItems)
                    .where(reviewItems.mapId.eq(mapId)).list(reviewItems.reviewableItemId));

    final Set<String> reviewAgainstItemIds = new HashSet<String>(
            new SQLQuery(conn, DbUtils.getConfiguration(mapId)).from(reviewItems)
                    .where(reviewItems.mapId.eq(mapId)).list(reviewItems.reviewAgainstItemId));

    final Set<String> elementUniqueIds = new HashSet<String>(
            new SQLQuery(conn, DbUtils.getConfiguration(mapId)).from(elementIdMappings)
                    .where(elementIdMappings.mapId.eq(mapId)).list(elementIdMappings.elementId));
    //anything in elementUniqueIds that's not in reviewableItemIds or reviewAgainstItemIds,
    //regardless of what map it belongs to, must be a unique id not being used and should be
    //deleted
    Set<String> uniqueIdsNotInReviewItems = new HashSet<String>(elementUniqueIds);
    uniqueIdsNotInReviewItems.removeAll(reviewableItemIds);
    uniqueIdsNotInReviewItems.removeAll(reviewAgainstItemIds);
    if (!uniqueIdsNotInReviewItems.isEmpty()) {
        final long result = new SQLDeleteClause(conn, DbUtils.getConfiguration(mapId), elementIdMappings)
                .where(elementIdMappings.elementId.in(uniqueIdsNotInReviewItems)
                        .and(elementIdMappings.mapId.eq(mapId)))
                .execute();
        if (result != uniqueIdsNotInReviewItems.size()) {
            throw new Exception("Error deleting redundant existing data from element ID mappings table during "
                    + "review prepare job.");
        }
        log.debug(result + " redundant element ID mappings deleted.");

    }
}

From source file:com.mothsoft.alexis.dao.DocumentDaoImpl.java

@SuppressWarnings("unchecked")
public Graph getRelatedTerms(final String queryString, final Long userId, final int howMany) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final FullTextQuery fullTextQuery = this.buildFullTextQuery(queryString, userId, NO_DATE, NO_DATE, false,
            DocumentState.MATCHED_TO_TOPICS, FullTextQuery.ID);

    // find the specified number of terms from the most recent 100 documents
    // that match the query
    final Sort sort = new Sort(new SortField("creationDate", SortField.LONG, true));
    fullTextQuery.setSort(sort);
    fullTextQuery.setFirstResult(0);
    fullTextQuery.setMaxResults(100);

    final List<Long> documentIds = new ArrayList<Long>(100);
    final List<Long> termIds = new ArrayList<Long>(100);

    final List<Object[]> results = fullTextQuery.list();

    for (final Object[] ith : results) {
        final Long id = (Long) ith[0];
        documentIds.add(id);
    }

    final Map<String, Node> nodes = new LinkedHashMap<String, Node>();
    final Node root = new Node(queryString, Boolean.TRUE);
    nodes.put(queryString, root);

    final Map<String, Edge> edges = new HashMap<String, Edge>();

    if (!documentIds.isEmpty()) {
        final Session session = (Session) this.em.getDelegate();
        final org.hibernate.SQLQuery termsQuery = session.createSQLQuery("SELECT term.id "
                + "        FROM document_term dt INNER JOIN term on term.id = dt.term_id "
                + "        WHERE dt.document_id IN (:documentIds) GROUP BY term.id ORDER BY SUM(dt.tf_idf) DESC");
        termsQuery.setParameterList("documentIds", documentIds);
        termsQuery.setMaxResults(100);
        termIds.addAll((List<Long>) termsQuery.list());
    }

    if (!documentIds.isEmpty() && !termIds.isEmpty()) {

        final Session session = (Session) this.em.getDelegate();
        final org.hibernate.SQLQuery associationsQuery = session.createSQLQuery(
                "SELECT CONCAT(a.term_value) term_a_value, CONCAT(b.term_value) term_b_value, SUM(da.association_weight) sum_weight "
                        + "      FROM document_association da "
                        + "      INNER JOIN term a ON da.term_a_id = a.id "
                        + "        AND a.part_of_speech NOT IN (1, 3, 18, 19, 25, 39, 40) "
                        + "        AND length(a.term_value) > 2 "
                        + "      INNER JOIN term b ON da.term_b_id = b.id "
                        + "        AND b.part_of_speech NOT IN (1, 3, 18, 19, 25, 39, 40) "
                        + "        AND length(b.term_value) > 2 "
                        + "      WHERE da.document_id IN (:documentIds) AND (da.term_a_id IN (:termIds) OR da.term_b_id IN (:termIds)) "
                        + "      GROUP BY a.id, b.id ORDER BY sum_weight DESC");
        associationsQuery.setParameterList("documentIds", documentIds);
        associationsQuery.setParameterList("termIds", termIds);
        associationsQuery.setMaxResults(howMany);

        final List<Object[]> relatedTermsResults = associationsQuery.list();

        final Set<String> aNodeKeys = new HashSet<String>();
        final Set<String> bNodeKeys = new HashSet<String>();

        for (final Object[] ith : relatedTermsResults) {
            final String a = (String) ith[0];
            final String b = (String) ith[1];

            if (!nodes.containsKey(a)) {
                final Node node = new Node(a);
                nodes.put(a, node);
            }

            if (!nodes.containsKey(b)) {
                final Node node = new Node(b);
                nodes.put(b, node);
            }

            if (a.equals(b)) {
                continue;
            }

            final String edgeKey = a + "||" + b;
            final String edgeKeyInverse = b + "||" + a;
            if (!edges.containsKey(edgeKey) && !edges.containsKey(edgeKeyInverse)) {
                final Node nodeA = nodes.get(a);
                final Node nodeB = nodes.get(b);

                aNodeKeys.add(a);
                bNodeKeys.add(b);

                final Edge edge = new Edge(nodeA, nodeB);
                edges.put(edgeKey, edge);
            }
        }

        // "orphan" handling, any b that is not also an a needs an edge from
        // root
        final Set<String> orphanKeys = new HashSet<String>();
        orphanKeys.addAll(bNodeKeys);
        orphanKeys.removeAll(aNodeKeys);

        for (final String orphanKey : orphanKeys) {
            final Node orphan = nodes.get(orphanKey);
            final Edge orphanToParent = new Edge(root, orphan);
            edges.put(root.getName() + "||" + orphan.getName(), orphanToParent);
        }
    }

    final List<Node> nodeList = new ArrayList<Node>(nodes.size());
    // keep root as first element
    nodes.remove(root.getName());
    nodeList.add(root);
    nodeList.addAll(nodes.values());

    final Graph graph = new Graph(nodeList, new ArrayList<Edge>(edges.values()));

    stopWatch.stop();
    logger.info("Related terms search took: " + stopWatch.toString());

    return graph;
}

From source file:org.openmrs.module.drugorderexport.web.controller.ViewPatientRegimenController.java

@SuppressWarnings("static-access")
protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response)
        throws Exception {

    ModelAndView mav = new ModelAndView();

    List<Object[]> patientHistory = new ArrayList<Object[]>();

    DrugOrderService service = Context.getService(DrugOrderService.class);

    String linkStr = request.getParameter("linkId");
    Integer linkId = 1;
    if (linkStr != null && !linkStr.equals("")) {
        linkId = Integer.parseInt(linkStr);
    }

    int patientId = 0;
    try {
        patientId = Integer.parseInt(request.getParameter("patient"));
    } catch (Exception e) {
        e.printStackTrace();
        mav.addObject("msg", "There is no patient with id= " + request.getParameter("patient"));
    }
    patientHistory.clear();

    Patient patient = new Patient();
    RegimenUtils regimenHistory = new RegimenUtils();

    if (patientId != 0) {
        patient = Context.getPatientService().getPatient(patientId);
        RegimenHistory history = regimenHistory.getRegimenHistory(patient);
        List<Regimen> regimens = history.getRegimenList();

        Set<RegimenComponent> regimenComponents = new HashSet<RegimenComponent>();
        Set<RegimenComponent> componentsStopped = new HashSet<RegimenComponent>();

        for (Regimen r : regimens) {
            regimenComponents = r.getComponents();

            if (r.getEndDate() == null) {
                r.setEndDate(new Date());
            }
            for (RegimenComponent rc : regimenComponents) {
                if (rc.getStopDate() != null && rc.getStopDate().getTime() <= r.getStartDate().getTime()) {
                    componentsStopped.add(rc);
                }
            }
            // componentsStopped is initialized above and never null; removeAll on an
            // empty collection is a no-op, so only call it when something was stopped
            if (!componentsStopped.isEmpty()) {
                regimenComponents.removeAll(componentsStopped);
            }
        }

        Concept weightConcept = Context.getConceptService().getConcept(5089);
        Concept CD4CountConcept = Context.getConceptService().getConcept(5497);
        Concept hivViralLoad = Context.getConceptService().getConcept(856);

        if (patient != null) {
            for (Regimen regimen : regimens) {
                String viralLoad = "";
                if (service.getPatientObsValue(patient, hivViralLoad, regimen.getStartDate(),
                        regimen.getEndDate()) != null) {
                    viralLoad = service.getPatientObsValue(patient, hivViralLoad, regimen.getStartDate(),
                            regimen.getEndDate()).toString();
                } else {
                    viralLoad = "-";
                }

                if (service.getPatientObsValue(patient, CD4CountConcept, regimen.getStartDate(),
                        regimen.getEndDate()) == null) {
                    patientHistory.add(new Object[] {
                            regimen, " No Test In This Period", service.getPatientObsValue(patient,
                                    weightConcept, regimen.getStartDate(), regimen.getEndDate()) + " (Kg)",
                            viralLoad });
                }
                if (service.getPatientObsValue(patient, weightConcept, regimen.getStartDate(),
                        regimen.getEndDate()) == null) {
                    patientHistory.add(new Object[] {
                            regimen, service.getPatientObsValue(patient, CD4CountConcept,
                                    regimen.getStartDate(), regimen.getEndDate()),
                            "No Test In This Period", viralLoad });

                }
                if (service.getPatientObsValue(patient, CD4CountConcept, regimen.getStartDate(),
                        regimen.getEndDate()) != null
                        && service.getPatientObsValue(patient, weightConcept, regimen.getStartDate(),
                                regimen.getEndDate()) != null) {
                    patientHistory.add(new Object[] { regimen,
                            service.getPatientObsValue(patient, CD4CountConcept, regimen.getStartDate(),
                                    regimen.getEndDate()),
                            service.getPatientObsValue(patient, weightConcept, regimen.getStartDate(),
                                    regimen.getEndDate()) + " (Kg)",
                            viralLoad });

                }

            }
        }

    }

    mav.addObject("patient", patient);
    mav.addObject("program", Context.getProgramWorkflowService().getPatientPrograms(patient, null, null, null,
            null, null, false));
    mav.addObject("listPatientHistory", patientHistory);
    mav.setViewName("/module/drugorderexport/showpatientregimens");
    mav.addObject("linkId", linkId);
    return mav;
}

From source file:com.projity.pm.task.ProjectFactory.java

public synchronized Set getOpenOrLoadingProjects() {
    final Set projectIds = new HashSet();
    ProjectFactory.getInstance().getPortfolio().forProjects(new Closure() {
        public void execute(Object impl) {
            Project project = (Project) impl;
            projectIds.add(new Long(project.getUniqueId()));
        }
    });
    projectIds.addAll(loadingProjects);
    projectIds.removeAll(closingProjects);
    return projectIds;
}

From source file:edu.uci.ics.hyracks.algebricks.rewriter.rules.RemoveUnusedAssignAndAggregateRule.java

private boolean removeUnusedVarsFromUnionAll(UnionAllOperator unionOp, Set<LogicalVariable> toRemove) {
    Iterator<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> iter = unionOp.getVariableMappings()
            .iterator();
    boolean modified = false;
    Set<LogicalVariable> removeFromRemoveSet = new HashSet<LogicalVariable>();
    while (iter.hasNext()) {
        Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping = iter.next();
        if (toRemove.contains(varMapping.third)) {
            iter.remove();
            modified = true;
        }
        // In any case, make sure we do not remove these variables.
        removeFromRemoveSet.add(varMapping.first);
        removeFromRemoveSet.add(varMapping.second);
    }
    toRemove.removeAll(removeFromRemoveSet);
    return modified;
}

From source file:org.apache.ambari.server.controller.metrics.timeline.AMSPropertyProvider.java

/**
 * Supports properties with aggregate functions and the metrics padding method.
 */
@Override
public Set<String> checkPropertyIds(Set<String> propertyIds) {
    Set<String> supportedIds = new HashSet<String>();
    for (String propertyId : propertyIds) {
        if (propertyId.startsWith(ZERO_PADDING_PARAM)
                || PropertyHelper.hasAggregateFunctionSuffix(propertyId)) {
            supportedIds.add(propertyId);
        }
    }
    // Strip the supported ids in place so the returned set contains only the
    // property ids this provider does not support.
    propertyIds.removeAll(supportedIds);
    return propertyIds;
}

From source file:arekkuusu.grimoireOfAlice.item.crafting.ShapedRecipe.java

public void build() throws IllegalArgumentException {
    List<Object> objects = new LinkedList<>();
    if (mirrored) {
        objects.add(true);
    }

    if (row1 == null && row2 == null && row3 == null)
        throw new IllegalArgumentException("Please specify at least one grid row for recipe builder");
    if (out == null)
        throw new IllegalArgumentException("Output not specified");

    // Use mutable sets: Guava's ImmutableSet always throws UnsupportedOperationException
    // from removeAll, so the removeAll calls below would crash for any unspecified row.
    Set<Character> row1Chars = new HashSet<>();
    Set<Character> row2Chars = new HashSet<>();
    Set<Character> row3Chars = new HashSet<>();

    if (row1 != null) {
        objects.add(row1);
        row1Chars = new HashSet<>(Arrays.asList(ArrayUtils.toObject(row1.toCharArray())));
    }

    if (row2 != null) {
        objects.add(row2);
        row2Chars = new HashSet<>(Arrays.asList(ArrayUtils.toObject(row2.toCharArray())));
    }

    if (row3 != null) {
        objects.add(row3);
        row3Chars = new HashSet<>(Arrays.asList(ArrayUtils.toObject(row3.toCharArray())));
    }

    Set<Character> mappedCharacters = new HashSet<>(characters.keySet());
    mappedCharacters.add(' ');

    row1Chars.removeAll(mappedCharacters);
    row2Chars.removeAll(mappedCharacters);
    row3Chars.removeAll(mappedCharacters);

    if (!row1Chars.isEmpty()) {
        throw new IllegalArgumentException(
                "The first row has characters which are not mapped. They are: " + row1Chars);
    }

    if (!row2Chars.isEmpty()) {
        throw new IllegalArgumentException(
                "The second row has characters which are not mapped. They are: " + row2Chars);
    }

    if (!row3Chars.isEmpty()) {
        throw new IllegalArgumentException(
                "The third row has characters which are not mapped. They are: " + row3Chars);
    }

    characters.forEach((key, obj) -> {
        objects.add(key);
        objects.add(obj);
    });

    ShapedOreRecipe recipe = new ShapedOreRecipe(out, objects.toArray());
    //noinspection unchecked
    CraftingManager.getInstance().getRecipeList().add(recipe);
}

From source file:ca.weblite.codename1.ios.CodenameOneIOSBuildTask.java

/**
 * Generates an updated project file by adding new source files and removing
 * files that no longer exist.  This does not write the project file; it simply
 * processes the content and returns the modified content.
 * @param pbxProjContent  The contents of the project file.
 * @return String with the updated project file contents.
 */
protected String updatePbxProj(String pbxProjContent) {
    Set<String> inProject = getCurrentXcodeFiles(pbxProjContent);
    Set<String> inFileSystem = getCurrentAppSrcFiles();
    Set<String> missingFromProject = new HashSet<String>();
    missingFromProject.addAll(inFileSystem);
    missingFromProject.removeAll(inProject);
    Set<String> missingFromFileSystem = new HashSet<String>();
    missingFromFileSystem.addAll(inProject);
    missingFromFileSystem.removeAll(inFileSystem);

    File appDir = new File(getOut(), "build/xcode/src/app");

    if (!missingFromProject.isEmpty()) {
        System.out.println(
                "Found " + missingFromProject.size() + " file(s) missing from the Xcode project.  Adding them now...");
        System.out.println(missingFromProject);
        List<File> filesToAdd = new ArrayList<File>();
        for (String s : missingFromProject) {
            filesToAdd.add(new File(appDir, s));
        }
        pbxProjContent = this.injectFilesIntoXcodeProject(pbxProjContent, filesToAdd.toArray(new File[0]));
    }

    if (!missingFromFileSystem.isEmpty()) {
        System.out.println("Found " + missingFromFileSystem.size()
                + " missing from the fileSystem.  Removing them in Xcode...");
        System.out.println(missingFromFileSystem);
        List<File> filesToRemove = new ArrayList<File>();
        for (String s : missingFromFileSystem) {
            filesToRemove.add(new File(appDir, s));
        }
        pbxProjContent = removeFilesFromXcodeProject(pbxProjContent, filesToRemove.toArray(new File[0]));
    }
    return pbxProjContent;

}

From source file:de.saly.elasticsearch.mailsource.ParallelPollingIMAPMailSource.java

@SuppressWarnings({ "rawtypes", "unchecked" })
protected void fetch(final Folder folder) throws MessagingException, IOException {

    if ((folder.getType() & Folder.HOLDS_MESSAGES) == 0) {
        logger.warn("Folder {} cannot hold messages", folder.getFullName());
        return;

    }

    final int messageCount = folder.getMessageCount();

    final UIDFolder uidfolder = (UIDFolder) folder;
    final long servervalidity = uidfolder.getUIDValidity();
    final RiverState riverState = stateManager.getRiverState(folder);
    final Long localvalidity = riverState.getUidValidity();

    logger.info("Fetch mails from folder {} ({})", folder.getURLName().toString(), messageCount);

    logger.debug("Server uid validity: {}, Local uid validity: {}", servervalidity, localvalidity);

    if (localvalidity == null || localvalidity.longValue() != servervalidity) {

        logger.debug("UIDValidity fail, full resync " + localvalidity + "!=" + servervalidity);

        if (localvalidity != null) {
            mailDestination.clearDataForFolder(folder.getFullName());
        }

        final ProcessResult result = process(messageCount, 1, folder.getFullName());

        riverState.setLastCount(result.getProcessedCount());

        if (result.getProcessedCount() > 0) {
            riverState.setLastIndexed(new Date());
            riverState.setLastTook(result.getTook());
        }

        riverState.setLastSchedule(new Date());

        if (result.getProcessedCount() > 0 && result.getHighestUid() > 0) {
            riverState.setLastUid(result.getHighestUid());
        }

        riverState.setUidValidity(servervalidity);
        stateManager.setRiverState(riverState);

        logger.info("Initiailly processed {} mails for folder {}", result.getProcessedCount(),
                folder.getFullName());
        logger.debug("Processed result {}", result.toString());

    } else {

        if (messageCount == 0) {
            logger.debug("Folder {} is empty", folder.getFullName());
        } else {

            if (withFlagSync) {
                // detect flag change
                final Message[] flagMessages = folder.getMessages();
                folder.fetch(flagMessages, IMAPUtils.FETCH_PROFILE_FLAGS_UID);

                for (final Message message : flagMessages) {
                    try {

                        final long uid = ((UIDFolder) message.getFolder()).getUID(message);

                        final String id = uid + "::" + message.getFolder().getURLName();

                        final int storedHashcode = mailDestination.getFlaghashcode(id);

                        if (storedHashcode == -1) {
                            // New mail which is not indexed yet
                            continue;
                        }

                        final int flagHashcode = message.getFlags().hashCode();

                        if (flagHashcode != storedHashcode) {
                            // flags change for this message, must update
                            mailDestination.onMessage(message);

                            if (logger.isDebugEnabled()) {
                                logger.debug("Update " + id + " because of flag change");
                            }
                        }
                    } catch (final Exception e) {
                        logger.error("Error detecting flagchanges for message "
                                + ((MimeMessage) message).getMessageID(), e);
                        stateManager.onError("Error detecting flagchanges", message, e);
                    }
                }
            }

            final long highestUID = riverState.getLastUid(); // this uid is
                                                             // already
                                                             // processed

            logger.debug("highestUID: {}", highestUID);

            final Message[] msgsnew = uidfolder.getMessagesByUID(highestUID, UIDFolder.LASTUID);

            // msgsnew.length is always >= 1; the first element may be the
            // already-processed message with uid == highestUID
            if (highestUID > 0 && uidfolder.getUID(msgsnew[0]) <= highestUID) {
                // msgsnew = (Message[]) ArrayUtils.remove(msgsnew, 0);
            }

            if (msgsnew.length > 0) {

                logger.info("{} new messages in folder {}", msgsnew.length, folder.getFullName());

                final int start = msgsnew[0].getMessageNumber();

                final ProcessResult result = process(messageCount, start, folder.getFullName());

                riverState.setLastCount(result.getProcessedCount());

                if (result.getProcessedCount() > 0) {
                    riverState.setLastIndexed(new Date());
                    riverState.setLastTook(result.getTook());
                }

                riverState.setLastSchedule(new Date());

                if (result.getProcessedCount() > 0 && result.getHighestUid() > 0) {
                    riverState.setLastUid(result.getHighestUid());
                }

                riverState.setUidValidity(servervalidity);
                stateManager.setRiverState(riverState);

                logger.info("Not initiailly processed {} mails for folder {}", result.getProcessedCount(),
                        folder.getFullName());
                logger.debug("Processed result {}", result.toString());
            } else {
                logger.debug("no new messages");
            }

        }
        // check for expunged/deleted messages

        final Set<Long> serverMailSet = new HashSet<Long>();

        final long oldmailUid = riverState.getLastUid();
        logger.debug("oldmailuid {}", oldmailUid);

        final Message[] msgsold = uidfolder.getMessagesByUID(1, oldmailUid);

        folder.fetch(msgsold, IMAPUtils.FETCH_PROFILE_UID);

        for (final Message m : msgsold) {
            try {
                final long uid = uidfolder.getUID(m);
                serverMailSet.add(uid);

            } catch (final Exception e) {
                stateManager.onError("Unable to handle old message ", m, e);
                logger.error("Unable to handle old message due to {}", e, e.toString());

                IMAPUtils.open(folder);
            }
        }

        final Set localMailSet = new HashSet(
                mailDestination.getCurrentlyStoredMessageUids(folder.getFullName(), false));

        logger.debug("Check now " + localMailSet.size() + " server mails for expunge");

        localMailSet.removeAll(serverMailSet);
        // localMailSet has now the ones that are not on server

        logger.info(localMailSet.size()
                + " messages will be deleted locally because they were expunged on the server.");

        mailDestination.onMessageDeletes(localMailSet, folder.getFullName(), false);

    }

}

From source file:ddf.test.itests.platform.TestSolrCommands.java

@Ignore
@Test
public void testSolrBackupNumToKeep() throws InterruptedException {
    int numToKeep = 2;

    String command = BACKUP_COMMAND + " --numToKeep " + numToKeep;

    // Run this 3 times to make sure 2 backups are kept
    // On run 1, backup A is created.
    console.runCommand(command);
    Set<File> firstBackupDirSet = waitForBackupDirsToBeCreated(CATALOG_CORE_NAME, 1, 1);

    // On run 2, backup B is created (2 backups now: A and B).
    console.runCommand(command);
    Set<File> secondBackupDirSet = waitForBackupDirsToBeCreated(CATALOG_CORE_NAME, 2, 2);
    assertTrue("Unexpected backup directories found on pass 2.",
            secondBackupDirSet.containsAll(firstBackupDirSet));

    // On run 3, backup C is created (backup A is deleted and backups B and C remain).
    console.runCommand(command);
    // Wait for the 3rd backup to replace the 1st backup
    Set<File> thirdBackupDirSet = waitForFirstBackupDirToBeDeleted(CATALOG_CORE_NAME, firstBackupDirSet);

    assertThat("Wrong number of backup directories kept. Number of backups found in "
            + getSolrDataPath(CATALOG_CORE_NAME).getAbsolutePath() + " is : [" + thirdBackupDirSet.size()
            + "]; Expected: [2].", thirdBackupDirSet, hasSize(2));

    secondBackupDirSet.removeAll(firstBackupDirSet);
    assertTrue("Unexpected backup directories found on pass 3.",
            thirdBackupDirSet.containsAll(secondBackupDirSet));
}