Example usage for java.util Set removeAll

List of usage examples for java.util Set removeAll

Introduction

On this page you can find example usage for java.util Set removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).
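
Below is a minimal, self-contained sketch of that contract (the class name and values are illustrative, not taken from the examples that follow): removeAll mutates the receiving set, drops every element that is also present in the argument collection, and returns true only if the set changed as a result.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RemoveAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<String>(Arrays.asList("red", "green", "blue"));

        boolean changed = colors.removeAll(Arrays.asList("green", "purple"));

        // "green" was removed and "purple" was simply not present, so the set changed.
        System.out.println(changed); // true
        System.out.println(colors);  // red and blue remain (iteration order may vary)
    }
}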

Usage

From source file:com.glaf.base.modules.sys.rest.SysUserResource.java

/**
 * Assigns department roles to the specified user.
 *
 * @param request
 * @param uriInfo
 * @return
 */
@Path("setRole")
@POST
@Produces(MediaType.TEXT_PLAIN)
public ModelAndView setRole(@Context HttpServletRequest request, @Context UriInfo uriInfo) {
    RequestUtils.setRequestParameterToAttribute(request);
    logger.debug(RequestUtils.getParameterMap(request));
    ViewMessages messages = new ViewMessages();
    long userId = ParamUtil.getIntParameter(request, "user_id", 0);
    SysUser user = sysUserService.findById(userId);

    if (user != null) {
        long[] id = ParamUtil.getLongParameterValues(request, "id");// ids of the selected roles
        if (id != null) {
            Set<SysDeptRole> oldRoles = user.getRoles();
            // Start from a copy of the current roles so removeAll below yields the roles to drop.
            Set<SysDeptRole> delRoles = new HashSet<SysDeptRole>(oldRoles);
            Set<SysDeptRole> newRoles = new HashSet<SysDeptRole>();
            for (int i = 0; i < id.length; i++) {
                logger.debug("id[" + i + "]=" + id[i]);
                SysDeptRole role = sysDeptRoleService.findById(id[i]);
                if (role != null) {
                    newRoles.add(role);
                }
            }

            oldRoles.retainAll(newRoles);// roles the user keeps
            delRoles.removeAll(newRoles);// roles to remove
            newRoles.removeAll(oldRoles);// roles to add
            user.setUpdateBy(RequestUtils.getActorId(request));

            if (sysUserService.updateRole(user, delRoles, newRoles)) {// persist the changes
                messages.add(ViewMessages.GLOBAL_MESSAGE, new ViewMessage("user.role_success"));
            } else {
                messages.add(ViewMessages.GLOBAL_MESSAGE, new ViewMessage("user.role_failure"));
            }
        }
    }
    MessageUtils.addMessages(request, messages);
    return new ModelAndView("show_json_msg");
}
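
Several of the examples on this page rely on the same idiom used above: copy a set, then use removeAll to split an update into the elements to delete and the elements to add. A generic, hedged sketch of that pattern (types and values are illustrative only):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SetDiffSketch {
    public static void main(String[] args) {
        Set<Long> current = new HashSet<Long>(Arrays.asList(1L, 2L, 3L));   // e.g. roles already assigned
        Set<Long> requested = new HashSet<Long>(Arrays.asList(2L, 3L, 4L)); // e.g. roles selected in the request

        Set<Long> toRemove = new HashSet<Long>(current);
        toRemove.removeAll(requested);  // in current but not requested -> delete (here: 1)

        Set<Long> toAdd = new HashSet<Long>(requested);
        toAdd.removeAll(current);       // requested but not yet present -> insert (here: 4)

        System.out.println("remove=" + toRemove + " add=" + toAdd);
    }
}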

From source file:cc.kave.commons.pointsto.analysis.unification.UnificationAnalysisVisitorContext.java

private void mergeIdentifiers(ReferenceLocation refLoc1, ReferenceLocation refLoc2) {
    Set<LocationIdentifier> loc1Identifiers = refLoc1.getIdentifiers();
    Set<LocationIdentifier> loc2Identifiers = refLoc2.getIdentifiers();

    {
        Set<LocationIdentifier> missingInLoc1 = new HashSet<>(loc2Identifiers);
        missingInLoc1.removeAll(loc1Identifiers);
        for (LocationIdentifier identifier : missingInLoc1) {
            refLoc1.setLocation(identifier, createBottomLocation());
        }
    }

    {
        Set<LocationIdentifier> missingInLoc2 = new HashSet<>(loc1Identifiers);
        missingInLoc2.removeAll(loc2Identifiers);
        for (LocationIdentifier identifier : missingInLoc2) {
            refLoc2.setLocation(identifier, createBottomLocation());
        }
    }
}

From source file:edu.cornell.mannlib.vitro.webapp.dao.jena.JenaBaseDao.java

/**
 * convenience method to update the value(s) of a one-to-many object
 * property
 * 
 * NOTE: this should be run from within a CriticalSection(WRITE)
 */
protected void updatePropertyResourceURIValues(Resource res, Property prop, Collection<String> uris,
        Model model) {
    log.debug("updatePropertyResourceURIValues(), resource=" + (res == null ? "null" : res.getURI())
            + ", property=" + (prop == null ? "null" : prop.getURI()) + ", uris=" + uris);

    if ((res == null) || (prop == null)) {
        return;
    }

    // figure existing URIs
    Set<String> existingUris = new HashSet<String>();
    StmtIterator stmts = model.listStatements(res, prop, (RDFNode) null);
    while (stmts.hasNext()) {
        Statement stmt = stmts.next();
        RDFNode o = stmt.getObject();
        if (o instanceof Resource) {
            existingUris.add(((Resource) o).getURI());
        }
    }

    // figure which to add and which to remove
    Set<String> addingUris = new HashSet<String>(uris);
    addingUris.removeAll(existingUris);
    Set<String> removingUris = new HashSet<String>(existingUris);
    removingUris.removeAll(uris);

    // for each to remove, remove it.
    for (String removeUri : removingUris) {
        Resource o = model.getResource(removeUri);
        model.remove(res, prop, o);
    }

    // for each to add, add it, unless it is null, empty, or invalid.
    for (String addUri : addingUris) {
        if ((addUri != null) && (!addUri.isEmpty())) {
            String badUriErrorStr = checkURI(addUri);
            if (badUriErrorStr == null) {
                Resource o = model.getResource(addUri);
                model.add(res, prop, o);
            } else {
                log.warn(badUriErrorStr);
            }
        }
    }
}

From source file:com.epam.ta.reportportal.core.item.UpdateTestItemHandlerImpl.java

@Override
public List<Issue> defineTestItemsIssues(String projectName, DefineIssueRQ defineIssue, String userName) {
    List<String> errors = new ArrayList<>();
    List<IssueDefinition> definitions = defineIssue.getIssues();

    expect(definitions, NOT_EMPTY_COLLECTION).verify(FAILED_TEST_ITEM_ISSUE_TYPE_DEFINITION, "");

    List<Issue> updated = new ArrayList<>(defineIssue.getIssues().size());
    ImmutableMap.Builder<IssueDefinition, TestItem> eventData = ImmutableMap.builder();
    for (IssueDefinition issueDefinition : definitions) {
        try {
            TestItem testItem = testItemRepository.findOne(issueDefinition.getId());
            verifyTestItem(testItem, issueDefinition.getId());

            eventData.put(issueDefinition, testItem);

            final Launch launch = launchRepository.findOne(testItem.getLaunchRef());
            expect(analyzerService.isPossible(launch.getId()), equalTo(true)).verify(FORBIDDEN_OPERATION,
                    Suppliers.formattedSupplier(
                            "Cannot update specified '{}' Test Item cause target Launch '{}' is processing by Auto-Analyze",
                            testItem.getId(), launch.getId()));

            final Project project = projectRepository.findOne(launch.getProjectRef());

            Issue issue = issueDefinition.getIssue();
            String issueType = verifyTestItemDefinedIssueType(issue.getIssueType(), project.getConfiguration());

            testItem = statisticsFacadeFactory
                    .getStatisticsFacade(project.getConfiguration().getStatisticsCalculationStrategy())
                    .resetIssueStatistics(testItem);

            TestItemIssue testItemIssue = testItem.getIssue();
            testItemIssue.setIssueType(issueType);

            String comment = issueDefinition.getIssue().getComment();
            if (null != comment) {
                comment = comment.trim();
            }

            if (null != issue.getExternalSystemIssues()) {
                Set<TestItemIssue.ExternalSystemIssue> issuesFromDB = null == testItemIssue
                        .getExternalSystemIssues() ? new HashSet<>() : testItemIssue.getExternalSystemIssues();
                Set<TestItemIssue.ExternalSystemIssue> issuesFromRequest = issue.getExternalSystemIssues()
                        .stream().map(TestItemUtils.externalIssueDtoConverter(userName)).collect(toSet());
                Set<TestItemIssue.ExternalSystemIssue> difference = Sets
                        .newHashSet(Sets.difference(issuesFromRequest, issuesFromDB));
                if (!difference.isEmpty()) {
                    for (TestItemIssue.ExternalSystemIssue externalSystemIssue : difference) {
                        externalSystemIssue.setSubmitter(userName);
                        externalSystemIssue.setSubmitDate(new Date().getTime());

                    }
                    Set<TestItemIssue.ExternalSystemIssue> externalSystemIssues;
                    if (issuesFromRequest.size() < issuesFromDB.size()) {
                        issuesFromRequest.removeAll(difference);
                        issuesFromRequest.addAll(difference);
                        externalSystemIssues = issuesFromRequest;
                    } else {
                        externalSystemIssues = issuesFromDB;
                        externalSystemIssues.addAll(difference);
                    }
                    testItemIssue.setExternalSystemIssues(externalSystemIssues);
                } else {
                    issuesFromDB.removeAll(Sets.newHashSet(Sets.difference(issuesFromDB, issuesFromRequest)));
                    testItemIssue.setExternalSystemIssues(issuesFromDB);
                }
            }

            testItemIssue.setIssueDescription(comment);
            testItem.setIssue(testItemIssue);

            testItemRepository.save(testItem);
            testItem = statisticsFacadeFactory
                    .getStatisticsFacade(project.getConfiguration().getStatisticsCalculationStrategy())
                    .updateIssueStatistics(testItem);
            updated.add(TestItemUtils.ISSUE_CONVERTER.apply(testItem.getIssue()));

        } catch (BusinessRuleViolationException e) {
            errors.add(e.getMessage());
        }
    }

    expect(!errors.isEmpty(), equalTo(FALSE)).verify(FAILED_TEST_ITEM_ISSUE_TYPE_DEFINITION, errors.toString());

    eventPublisher.publishEvent(new ItemIssueTypeDefined(eventData.build(), userName, projectName));
    return updated;
}
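
This example mixes java.util Set removeAll with Guava's Sets.difference, and the two behave differently: removeAll mutates the receiving set in place, while Sets.difference returns an unmodifiable view and leaves both inputs untouched (which is why the code above wraps it in Sets.newHashSet before changing the elements). A small sketch of the distinction, assuming Guava is on the classpath and using illustrative values:

import com.google.common.collect.Sets;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class DifferenceVsRemoveAll {
    public static void main(String[] args) {
        Set<String> fromRequest = new HashSet<String>(Arrays.asList("A", "B", "C"));
        Set<String> fromDb = new HashSet<String>(Arrays.asList("B", "C", "D"));

        // Non-mutating: a read-only view of the elements in fromRequest that are not in fromDb.
        Set<String> onlyInRequest = Sets.difference(fromRequest, fromDb);
        System.out.println(onlyInRequest);      // [A]
        System.out.println(fromRequest.size()); // still 3, nothing was removed

        // Mutating: removeAll changes the receiver, so copy first if the original must be kept.
        Set<String> copy = Sets.newHashSet(fromRequest);
        copy.removeAll(fromDb);
        System.out.println(copy);               // [A]
    }
}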

From source file:com.surevine.alfresco.repo.action.SafeMoveCopyServiceImpl.java

/**
 * Recursive copy algorithm
 * 
 * @param dropName      drop the name property when associations don't allow duplicately named children
 */
private NodeRef recursiveCopy(CopyDetails copyDetails, boolean copyChildren, boolean dropName,
        Map<NodeRef, NodeRef> copiesByOriginal, Set<NodeRef> copies,
        Map<QName, CopyBehaviourCallback> callbacks) {
    NodeRef sourceNodeRef = copyDetails.getSourceNodeRef();
    Set<QName> sourceNodeAspectQNames = copyDetails.getSourceNodeAspectQNames();
    NodeRef targetParentNodeRef = copyDetails.getTargetParentNodeRef();
    QName assocTypeQName = copyDetails.getAssocTypeQName();
    QName assocQName = copyDetails.getAssocQName();

    // Avoid duplicate and cyclic copies
    if (copies.contains(sourceNodeRef)) {
        throw new IllegalStateException("Nested copy prevention has failed: \n" + "   " + copyDetails + "\n"
                + "   Copies by original: " + copiesByOriginal);
    } else if (copiesByOriginal.containsKey(sourceNodeRef)) {
        throw new IllegalStateException("Multiple child assocs between same two nodes detected: \n" + "   "
                + copyDetails + "\n" + "   Copies by original: " + copiesByOriginal);
    }

    // Extract Type Definition
    QName sourceNodeTypeQName = copyDetails.getSourceNodeTypeQName();

    // Does this node get copied at all?
    // The source node's type-bound behaviour has an effective veto.
    CopyBehaviourCallback sourceTypeBehaviour = callbacks.get(sourceNodeTypeQName);
    if (sourceTypeBehaviour == null) {
        throw new IllegalStateException("Source node type has no callback: " + sourceNodeTypeQName);
    }
    if (!sourceTypeBehaviour.getMustCopy(sourceNodeTypeQName, copyDetails)) {
        // Nothing to do
        return null;
    }

    // Get the type properties to copy
    Map<QName, Serializable> targetNodeProperties = buildCopyProperties(copyDetails,
            Collections.singleton(sourceNodeTypeQName), callbacks);

    // Some aspects are going to be applied automatically.  For efficiency, the initial node properties
    // for these aspects should be provided.
    Set<QName> defaultAspectQNames = getDefaultAspects(sourceNodeTypeQName);
    Map<QName, Serializable> defaultAspectsProperties = buildCopyProperties(copyDetails, defaultAspectQNames,
            callbacks);
    targetNodeProperties.putAll(defaultAspectsProperties);

    // Drop the name property, if required.  This prevents duplicate names and leaves it up to the client
    // to assign a new name.
    AssociationDefinition assocDef = dictionaryService.getAssociation(assocTypeQName);
    if (!assocDef.isChild()) {
        throw new AlfrescoRuntimeException("Association is not a child association: " + assocTypeQName);
    } else {
        ChildAssociationDefinition childAssocDef = (ChildAssociationDefinition) assocDef;
        if (dropName && !childAssocDef.getDuplicateChildNamesAllowed()) {
            // duplicate children are not allowed.
            targetNodeProperties.remove(ContentModel.PROP_NAME);
        }
    }

    // Lastly, make sure the Node UUID is set correctly; after all, the contract
    // of the CopyDetails says that the targetNodeRef was already determined.
    String targetNodeUuid = copyDetails.getTargetNodeRef().getId();
    targetNodeProperties.put(ContentModel.PROP_NODE_UUID, targetNodeUuid);

    // The initial node copy is good to go
    ChildAssociationRef targetChildAssocRef = this.nodeService.createNode(targetParentNodeRef, assocTypeQName,
            assocQName, sourceNodeTypeQName, targetNodeProperties);
    NodeRef copyTarget = targetChildAssocRef.getChildRef();
    // Save the mapping for later
    copiesByOriginal.put(sourceNodeRef, copyTarget);
    copies.add(copyTarget);

    // We now have a node, so fire the BeforeCopyPolicy
    invokeBeforeCopy(sourceNodeRef, copyTarget);

    // Work out which aspects still need copying.  The source aspects less the default aspects
    // will give this set.
    Set<QName> remainingAspectQNames = new HashSet<QName>(sourceNodeAspectQNames);
    remainingAspectQNames.removeAll(defaultAspectQNames);

    // Prevent any rules being fired on the new destination node
    this.ruleService.disableRules(copyTarget);
    try {
        // Apply the remaining aspects and properties
        for (QName remainingAspectQName : remainingAspectQNames) {
            copyProperties(copyDetails, copyTarget, remainingAspectQName, callbacks);
        }

        // Copy residual properties
        copyResidualProperties(copyDetails, copyTarget);

        //  Apply the copy aspect to the new node   
        Map<QName, Serializable> copyProperties = new HashMap<QName, Serializable>();
        copyProperties.put(ContentModel.PROP_COPY_REFERENCE, sourceNodeRef);
        internalNodeService.addAspect(copyTarget, ContentModel.ASPECT_COPIEDFROM, copyProperties);

        // Copy permissions
        copyPermissions(sourceNodeRef, copyTarget);

        // We present the recursion option regardless of what the client chooses
        copyChildren(copyDetails, copyTarget, true, // We know that the node has been created
                copyChildren, copiesByOriginal, copies, callbacks);
    } finally {
        this.ruleService.enableRules(copyTarget);
    }

    return copyTarget;
}

From source file:com.aurel.track.fieldType.bulkSetters.ProjectPickerBulkSetter.java

/**
 * Sets the workItemBean's attribute depending on the value and bulkRelation
 * @param workItemBean
 * @param fieldID
 * @param parameterCode
 * @param bulkTranformContext
 * @param selectContext
 * @param value   
 * @return ErrorData if an error is found
 */
@Override
public ErrorData setWorkItemAttribute(TWorkItemBean workItemBean, Integer fieldID, Integer parameterCode,
        BulkTranformContext bulkTranformContext, SelectContext selectContext, Object value) {
    if (getRelation() == BulkRelations.SET_NULL) {
        workItemBean.setAttribute(fieldID, parameterCode, null);
        return null;
    }
    Integer[] selectedValues = (Integer[]) value;
    if (getRelation() == BulkRelations.SET_TO) {
        workItemBean.setAttribute(fieldID, selectedValues);
        return null;
    }
    Object originalValue = workItemBean.getAttribute(fieldID, parameterCode);
    Object[] originalSelections = null;
    if (originalValue != null) {
        try {
            //multiple values are loaded in the workItem as Object[], not as Integer[] !!! 
            originalSelections = (Object[]) originalValue;
        } catch (Exception e) {
            LOGGER.info(
                    "Getting the original object array value for " + value + " failed with " + e.getMessage());
            LOGGER.debug(ExceptionUtils.getStackTrace(e));
        }
    }
    Set<Integer> originalSet = new HashSet<Integer>();
    if (originalSelections != null && originalSelections.length > 0) {
        for (int i = 0; i < originalSelections.length; i++) {
            try {
                originalSet.add((Integer) originalSelections[i]);
            } catch (Exception e) {
                LOGGER.info("Transforming the original object value " + originalSelections[i]
                        + " to Integer failed with " + e.getMessage());
                LOGGER.debug(ExceptionUtils.getStackTrace(e));
            }
        }
    }
    Set<Integer> bulkSelectionsSet = GeneralUtils.createSetFromIntegerArr(selectedValues);
    switch (getRelation()) {
    case BulkRelations.ADD_ITEMS:
        originalSet.addAll(bulkSelectionsSet);
        workItemBean.setAttribute(fieldID, parameterCode,
                GeneralUtils.createIntegerArrFromCollection(originalSet));
        break;
    case BulkRelations.REMOVE_ITEMS:
        originalSet.removeAll(bulkSelectionsSet);
        workItemBean.setAttribute(fieldID, parameterCode,
                GeneralUtils.createIntegerArrFromCollection(originalSet));
        break;
    default:
        break;
    }
    return null;
}

From source file:com.diversityarrays.dal.db.TestDalDatabase.java

private void checkJsonResult(String where, DalResponseBuilder responseBuilder, String... keysExpected) {

    String response = responseBuilder.asString();

    assertNotEquals(where + ": No record returned", "{}", response);

    try {
        JsonParser parser = new JsonParser(response);
        if (!parser.isMapResult()) {
            fail(where + ": result is not a JsonMap");
        }

        JsonMap jsonMap = parser.getMapResult();
        List<String> responseKeys = jsonMap.getKeysInOrder();

        if (responseKeys.size() != (keysExpected.length + 1)) {
            fail(where + " : expected key count mismatch: " + responseKeys.size() + "<>"
                    + (keysExpected.length + 1) + "\n\tactual= " + StringUtil.join(",", responseKeys)
                    + " \n\texpected= " + DALClient.TAG_RECORD_META + ","
                    + StringUtil.join(",", (Object[]) keysExpected));
        }

        Set<String> keySet;

        keySet = new HashSet<String>(Arrays.asList(keysExpected));
        keySet.add(DALClient.TAG_RECORD_META); // must ALWAYS expect 'RecordMeta'

        boolean checkPagination = keySet.contains(DALClient.TAG_PAGINATION);

        keySet.removeAll(responseKeys);
        if (!keySet.isEmpty()) {
            fail(where + ": missing keys in response: " + StringUtil.join(",", keySet));
        }

        keySet = new HashSet<String>(responseKeys);

        keySet.removeAll(Arrays.asList(keysExpected));
        keySet.remove("RecordMeta");

        if (!keySet.isEmpty()) {
            fail(where + ": unexpected keys in response: " + StringUtil.join(",", keySet));
        }

        if (checkPagination) {
            Object paginationObject = jsonMap.get(DALClient.TAG_PAGINATION);
            assertNotNull("Missing tag '" + DALClient.TAG_PAGINATION + "'", paginationObject);

            if (!(paginationObject instanceof List)) {
                fail(DALClient.TAG_PAGINATION + " is not a List: " + paginationObject.getClass().getName());
            }

            @SuppressWarnings("rawtypes")
            List list = (List) paginationObject;
            assertEquals(DALClient.TAG_PAGINATION + " is not a List of size 1", 1, list.size());

            Object mapObject = list.get(0);
            assertEquals(DALClient.TAG_PAGINATION + "[0] is not a JsonMap", JsonMap.class,
                    mapObject.getClass());

            JsonMap pagination = (JsonMap) mapObject;

            Set<String> paginationKeys = new HashSet<String>(pagination.getKeysInOrder());

            for (String attrName : PAGINATION_ATTRIBUTES) {
                Object attrObject = pagination.get(attrName);

                assertNotNull("Missing attribute " + DALClient.TAG_PAGINATION + "." + attrName, attrObject);

                assertEquals(DALClient.TAG_PAGINATION + "." + attrName + " is not a String", String.class,
                        attrObject.getClass());

                try {
                    Integer.parseInt((String) attrObject);
                } catch (NumberFormatException e) {
                    fail(DALClient.TAG_PAGINATION + "." + attrName + " is not a valid Integer");
                }

                paginationKeys.remove(attrName);
            }

            if (!paginationKeys.isEmpty()) {
                fail("Unexpected keys in " + DALClient.TAG_PAGINATION + ": "
                        + StringUtil.join(",", paginationKeys));
            }
        }
    } catch (ParseException e) {
        fail(where + ": invalid JSON : " + e.getMessage());
    }
}
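
One detail worth noting in this test: the argument to removeAll does not have to be another Set. keySet.removeAll(responseKeys) passes a List, and any Collection<?> (including one with duplicates) is accepted. A minimal sketch with illustrative values:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class RemoveAllWithList {
    public static void main(String[] args) {
        Set<String> expectedKeys = new HashSet<String>(Arrays.asList("Pagination", "RecordMeta", "Extra"));
        List<String> responseKeys = Arrays.asList("Pagination", "RecordMeta", "RecordMeta"); // duplicates are harmless

        expectedKeys.removeAll(responseKeys);
        System.out.println(expectedKeys); // [Extra] -> the key missing from the response
    }
}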

From source file:edu.umd.cs.buildServer.BuildServer.java

/**
 * Build and run tests on given project submission.
 *
 * @param projectSubmission
 *            the ProjectSubmission
 * @throws CompileFailureException
 *             if the project can't be compiled
 * @throws BuilderException
 * @throws IOException
 */
private <T extends TestProperties> void buildAndTestProject(ProjectSubmission<T> projectSubmission)
        throws CompileFailureException, MissingConfigurationPropertyException, IOException, BuilderException {
    // FIXME Should throw InternalBuildServerException instead of
    // BuilderException
    // Need to differentiate between problems with test-setup and bugs in my
    // servers
    File buildDirectory = getBuildServerConfiguration().getBuildDirectory();

    // Extract test properties and security policy files into build
    // directory
    TestPropertiesExtractor testPropertiesExtractor = null;
    try {
        testPropertiesExtractor = new TestPropertiesExtractor(projectSubmission.getTestSetup());
        testPropertiesExtractor.extract(buildDirectory);
    } catch (ZipExtractorException e) {
        throw new BuilderException(e);
    }

    // We absolutely have to have test.properties
    if (!testPropertiesExtractor.extractedTestProperties())
        throw new BuilderException("Test setup did not contain test.properties");

    T testProperties;
    try {
        // Load test.properties
        File testPropertiesFile = new File(buildDirectory, "test.properties");
        testProperties = (T) TestProperties.load(testPropertiesFile);
    } catch (Exception e) {
        throw new BuilderException(e.getMessage(), e);
    }

    // Set test properties in the ProjectSubmission.
    projectSubmission.setTestProperties(testProperties);

    // validate required files
    Set<String> requiredFiles = testProperties.getRequiredFiles();
    Set<String> providedFiles = projectSubmission.getFilesInSubmission();

    requiredFiles.removeAll(providedFiles);
    if (!requiredFiles.isEmpty()) {
        if (requiredFiles.size() == 1) {
            String missingFile = requiredFiles.iterator().next();
            throw new CompileFailureException("Missing required file " + missingFile, "");
        }

        throw new CompileFailureException("Missing required files", requiredFiles.toString());
    }

    // Create a BuilderAndTesterFactory, based on the language specified
    // in the test properties file
    BuilderAndTesterFactory<T> builderAndTesterFactory = projectSubmission.createBuilderAndTesterFactory();

    if (getDownloadOnly()) {
        log.error("Download only; skipping build and test");
        builderAndTesterFactory.setDownloadOnly();
    }
    builderAndTesterFactory.buildAndTest(buildDirectory, testPropertiesExtractor);
}

From source file:com.alibaba.jstorm.daemon.supervisor.SyncSupervisorEvent.java

private Set<String> getNeedReDownloadTopologys(Map<Integer, LocalAssignment> localAssignment) {
    Set<String> reDownloadTopologys = syncProcesses.getTopologyIdNeedDownload().getAndSet(null);
    if (reDownloadTopologys == null || reDownloadTopologys.size() == 0)
        return null;
    Set<String> needRemoveTopologys = new HashSet<String>();
    Map<Integer, String> portToStartWorkerId = syncProcesses.getPortToWorkerId();
    for (Entry<Integer, LocalAssignment> entry : localAssignment.entrySet()) {
        if (portToStartWorkerId.containsKey(entry.getKey()))
            needRemoveTopologys.add(entry.getValue().getTopologyId());
    }
    LOG.debug(
            "worker is starting on these topology, so delay download topology binary: " + needRemoveTopologys);
    reDownloadTopologys.removeAll(needRemoveTopologys);
    if (reDownloadTopologys.size() > 0)
        LOG.info("Following topologys is going to re-download the jars, " + reDownloadTopologys);
    return reDownloadTopologys;
}

From source file:fll.scheduler.TableOptimizer.java

/**
 * Run the table optimizer.
 * 
 * @param checkCanceled if non-null, checked to see if the optimizer should
 *          exit early
 */
public void optimize(final CheckCanceled checkCanceled) {
    final Set<Integer> optimizedTeams = new HashSet<Integer>();
    final Set<LocalTime> optimizedTimes = new HashSet<>();

    List<ConstraintViolation> teamViolations = pickTeamWithMostViolations(optimizedTeams);
    while ((null != checkCanceled && !checkCanceled.isCanceled()) && !teamViolations.isEmpty()) {
        final int team = teamViolations.get(0).getTeam();
        optimizedTeams.add(team);

        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Optimize tables for team: " + team);
        }

        final Set<LocalTime> perfTimes = gatherPerformanceTimes(teamViolations);
        optimize(perfTimes, checkCanceled);

        optimizedTimes.addAll(perfTimes);

        teamViolations = pickTeamWithMostViolations(optimizedTeams);
    } // while team violations

    if (null != checkCanceled && !checkCanceled.isCanceled()) {
        // optimize non-full table times if we haven't already touched them while
        // optimizing teams
        final Set<LocalTime> perfTimes = findNonFullTableTimes();
        perfTimes.removeAll(optimizedTimes);
        if (!perfTimes.isEmpty()) {
            optimize(perfTimes, checkCanceled);
        }
    }

}