Example usage for java.util HashSet removeAll

List of usage examples for java.util HashSet removeAll

Introduction

On this page you can find example usage for java.util HashSet removeAll.

Prototype

boolean removeAll(Collection<?> c);

Source Link

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).

Usage

From source file:xc.mst.services.marcaggregation.MarcAggregationService.java

/**
 * Handles an update to a bib record that is currently active.
 *
 * If the record's match points have not changed, the aggregated output record
 * for its existing match set is re-merged and re-emitted in place. Otherwise
 * (or when the former match set is empty), the record is deleted and re-added
 * so the whole set gets re-matched and re-merged.
 *
 * @param r    the updated input record
 * @param smr  the parsed MARCXML view of {@code r}
 * @param repo the repository the record belongs to
 * @return the output records produced by this update
 */
private List<OutputRecord> processBibUpdateActive(InputRecord r, SaxMarcXmlRecord smr, Repository repo) {
    LOG.info("MAS:  processBibUpdateActive: " + r.getId());
    List<OutputRecord> results = new ArrayList<OutputRecord>();
    // Set to true once the fast path (unchanged match points) has handled the record.
    boolean processedAlready = false;

    // If the match points are the same, then we do not need to worry about the match set changing; just update the record payload
    if (!changedMatchpoints.contains(r.getId())) {
        LOG.info(
                "MAS:  processBibUpdateActive: matchpoints have NOT changed; going to re-use the current matchset's agg record.");
        OutputRecord oldOutput;
        String xml;

        HashSet<Long> formerMatchSet = getCurrentMatchSetForRecord(r);
        LOG.info("MAS:  processBibUpdateActive formerMatchSet [" + formerMatchSet.size() + "] = "
                + formerMatchSet);

        // Although rare, it's possible that one or more bibs in this set are marked for deletion, but haven't actually been deleted yet.
        // So, let's not include any deleted records in the creation of this new aggregated record (because it will throw an exception later when MAS tries to read a potentially empty record).
        // Although, MAS *should* fix this scenario later on when the deleted record gets processed, let's be pro-active.
        if (formerMatchSet.size() > 0) {
            List<Long> deleteThese = new ArrayList<Long>();
            for (Long num : formerMatchSet) {
                if (repo.getRecord(num).getDeleted()) {
                    LOG.info(
                            "MAS:  processBibUpdateActive: we found a bib that's been marked for deletion in formerMatchSet; we are skipping this record: "
                                    + num);
                    deleteThese.add(num);
                }
            }
            formerMatchSet.removeAll(deleteThese);
        }

        // Re-check the size: removing deletion-flagged bibs may have emptied the set.
        if (formerMatchSet.size() > 0) {
            // Any member of the set maps to the same aggregated output record.
            Long oldOutputId = getBibOutputId(formerMatchSet.iterator().next());
            oldOutput = getRecord(oldOutputId);

            InputRecord record = masRsm.getRecordOfSourceRecord(formerMatchSet, repo, scores);
            xml = mergeBibSet(record, formerMatchSet, repo);

            // inject 001/003 if necessary (based on custom.properties settings)
            xml = injectNew001(oldOutputId, xml);

            oldOutput.setMode(Record.STRING_MODE);
            oldOutput.setFormat(marc21);
            oldOutput.setStatus(Record.ACTIVE);

            // Set the XML to the updated XML - remerged and reconstituted the xml

            // Do NOT create a new record, update, the OLD record!
            // Set the XML to the updated XML - reconstituted the xml
            oldOutput.setOaiXml(xml);

            // we need the clear out the old updatedAt value
            // so that the MST will correctly set it later (when repo is persisted)
            // issue: mst-549
            ((Record) oldOutput).setUpdatedAt(null);

            // Add the updated record
            oldOutput.setType("b");
            results.add(oldOutput);
            processedAlready = true;
        }
    }

    // If the match points change at all, we must re-match/merge all records in the set
    if (!processedAlready) {
        LOG.info(
                "MAS:  processBibUpdateActive: matchpoints HAVE changed; need to re-match/merge, i.e., delete-then-re-add.");

        results = processBibDelete(r);

        // processBibDelete nukes all the record's score data; must re-add it
        addAndPersistScores(r, smr);

        results.addAll(processBibNewActive(r, smr, repo));
    }

    return results;
}

From source file:net.cbtltd.rest.nextpax.A_Handler.java

/**
 * Moves supplier products that were not seen in the current feed to the final
 * (inactive) state.
 *
 * Every product currently active for this supplier that is absent from
 * {@code productsProceeded} is read back and updated to {@code Product.FINAL}.
 *
 * @param productsProceeded alt-ids of the products processed (still active) in this run
 */
private void updateInactiveProducts(HashSet<String> productsProceeded) {
    LOG.debug("Starting update for inactive products");
    String partyId = getAltpartyid();
    final SqlSession sqlSession = RazorServer.openSession();
    HashSet<String> activeProducts = new HashSet<String>();
    activeProducts.addAll(sqlSession.getMapper(ProductMapper.class).activeProductAltIdListBySupplier(partyId));
    try {
        // Whatever remains after removing the processed ids is no longer offered
        // by the supplier and should be deactivated.
        activeProducts.removeAll(productsProceeded);
        for (String altId : activeProducts) {
            Product product = sqlSession.getMapper(ProductMapper.class).altread(new NameId(partyId, altId));
            product.setState(Product.FINAL);
            sqlSession.getMapper(ProductMapper.class).update(product);
            LOG.debug("Product " + product.getName() + ", " + product.getId()
                    + " inactive. Moving to Final state.");
        }
        LOG.debug("Update for inactive products finished, no errors");
    } catch (Throwable e) {
        // Pass the throwable to the logger so the full stack trace is kept;
        // e.getMessage() alone can be null and always drops the trace.
        LOG.error("Could not finish update for inactive products", e);
    }
}

From source file:org.nuxeo.ecm.platform.usermanager.UserManagerImpl.java

/**
 * Resolves the set of user names that effectively hold the permission
 * {@code perm} under the merged ACLs of {@code acp}.
 *
 * ACEs are applied in merged-ACL order: grants add users, denies remove them.
 * A deny of {@code perm} for EVERYONE cuts off all subsequent ACEs.
 *
 * @param perm    the (possibly compound) permission to check
 * @param acp     the ACP whose merged ACLs are evaluated
 * @param context document context used for group resolution
 * @return the matching user names, without duplicates
 */
@Override
public String[] getUsersForPermission(String perm, ACP acp, DocumentModel context) {
    PermissionProvider permissionProvider = Framework.getService(PermissionProvider.class);
    // Set semantics guarantee each username appears at most once.
    HashSet<String> resolvedUsers = new HashSet<String>();

    ACL mergedAcl = acp.getMergedACLs("merged");
    // Leaf permissions that "perm" expands to (handles compound permissions).
    List<String> wantedLeafPerms = getLeafPermissions(perm);

    // Pass 1: keep only the ACEs whose permission covers "perm".
    ArrayList<ACE> relevantAces = new ArrayList<ACE>();
    for (ACE ace : mergedAcl.getACEs()) {
        // Expand the ACE's permission via the security service as well.
        List<String> aceLeafPerms = getLeafPermissions(ace.getPermission());
        // EVERYTHING is a special (non-compound) permission covering all leaves.
        if (SecurityConstants.EVERYTHING.equals(ace.getPermission())) {
            aceLeafPerms = Arrays.asList(permissionProvider.getPermissions());
        }

        if (!aceLeafPerms.containsAll(wantedLeafPerms)) {
            continue;
        }
        // Special case: a deny for EVERYONE masks every ACE that follows it.
        if (SecurityConstants.EVERYONE.equals(ace.getUsername()) && !ace.isGranted()) {
            break;
        }
        relevantAces.add(ace);
    }

    // Pass 2: expand each relevant ACE's principal and apply grant/deny.
    for (ACE ace : relevantAces) {
        String principal = ace.getUsername();
        List<String> affected = null;
        // EVERYONE expands to all known user ids.
        if (SecurityConstants.EVERYONE.equals(principal)) {
            affected = getUserIds();
        }
        // A group expands to all its members, subgroups included.
        if (affected == null) {
            NuxeoGroup group = getGroup(principal, context);
            if (group != null) {
                affected = getUsersInGroupAndSubGroups(principal, context);
            }

        }
        // Otherwise treat the principal as a single user.
        if (affected == null) {
            affected = new ArrayList<String>();
            affected.add(principal);
        }
        if (ace.isGranted()) {
            resolvedUsers.addAll(affected);
        } else {
            resolvedUsers.removeAll(affected);
        }
    }
    return resolvedUsers.toArray(new String[resolvedUsers.size()]);
}

From source file:org.metaservice.core.OntologyToLatexConverter.java

/**
 * Generates the Java source of a vocabulary constants class for the ontology
 * held by {@code connection}.
 *
 * @param prefix    vocabulary prefix; when {@code null} it is read from the
 *                  ontology's {@code vann:preferredNamespacePrefix}
 * @param namespace vocabulary namespace; when {@code null} it is read from
 *                  {@code vann:preferredNamespaceUri}, falling back to the
 *                  URI of the {@code owl:Ontology} resource
 * @return the generated class source as a string
 */
public String generate(String prefix, String namespace)
        throws RepositoryException, IOException, RDFParseException, SailException {

    RepositoryResult<Statement> result;

    // Resolve namespace and prefix from the ontology itself when not supplied.
    if (namespace == null) {
        result = connection.getStatements(null, VANN.PREFERRED_NAMESPACE_URI, null, true);
        if (result.hasNext()) {
            namespace = result.next().getObject().stringValue();
        } else {
            result = connection.getStatements(null, RDF.TYPE, OWL.ONTOLOGY, true);
            namespace = result.next().getSubject().stringValue();
        }
    }
    if (prefix == null) {
        prefix = connection.getStatements(null, VANN.PREFERRED_NAMESPACE_PREFIX, null, true).next().getObject()
                .stringValue();
    }
    StringBuilder out = new StringBuilder();

    // One query per RDF type; the helper deduplicates statements and keeps only
    // subjects inside the target namespace.
    HashSet<URI> classes = subjectsOfType(RDFS.CLASS, namespace);
    HashSet<URI> objectProperties = subjectsOfType(OWL.OBJECTPROPERTY, namespace);
    HashSet<URI> dataProperties = subjectsOfType(OWL.DATATYPEPROPERTY, namespace);
    HashSet<URI> annotationProperties = subjectsOfType(OWL.ANNOTATIONPROPERTY, namespace);
    HashSet<URI> properties = subjectsOfType(RDF.PROPERTY, namespace);
    HashSet<URI> things = subjectsOfType(OWL.THING, namespace);

    out.append("package org.metaservice.api.rdf.vocabulary;\n" + "\n" + "import org.openrdf.model.*;\n"
            + "import org.openrdf.model.impl.*;\n\n\n\n");
    out.append("/**\n").append(" * This is an automatically generated class\n")
            .append(" * Generator: " + OntologyToLatexConverter.class.getCanonicalName() + "\n")
            .append(" * @see <a href=\"" + namespace + "\">" + prefix + "</a>\n").append(" */\n")
            .append("public class ").append(prefix.toUpperCase()).append("{\n\n");

    out.append("    public static final String NAMESPACE = \"").append(namespace).append("\";\n\n");
    out.append("    public static final String PREFIX = \"").append(prefix).append("\";\n\n");

    out.append("    public static final Namespace NS = new NamespaceImpl(PREFIX, NAMESPACE);\n\n");

    // rdf:Property is a superclass of the OWL property kinds: keep only plain
    // properties here, and keep owl:Thing individuals that are nothing else.
    properties.removeAll(objectProperties);
    properties.removeAll(dataProperties);
    properties.removeAll(annotationProperties);
    things.removeAll(properties);
    things.removeAll(classes);
    things.removeAll(objectProperties);
    things.removeAll(dataProperties);
    things.removeAll(annotationProperties);

    if (classes.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// CLASSES\n");
        out.append("////////////////////////\n\n\n");
        addPrettyClass(classes, out, connection, "_CLASS");
    }
    if (objectProperties.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// OBJECT PROPERTIES\n");
        out.append("////////////////////////\n\n\n");
        addPretty(objectProperties, out, connection, "_PROPERTY");
    }
    if (dataProperties.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// DATA PROPERTIES\n");
        out.append("////////////////////////\n\n\n");
        addPretty(dataProperties, out, connection, "_PROPERTY");
    }
    if (annotationProperties.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// ANNOTATION PROPERTIES\n");
        out.append("////////////////////////\n\n\n");
        addPretty(annotationProperties, out, connection, "_PROPERTY");
    }
    if (properties.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// PROPERTIES\n");
        out.append("////////////////////////\n\n\n");
        addPretty(properties, out, connection, "_PROPERTY");
    }
    if (things.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// THINGS\n");
        out.append("////////////////////////\n\n\n");
        addPretty(things, out, connection, "_THING");
    }

    return out.toString();
}

/**
 * Collects every subject typed {@code type} whose URI starts with
 * {@code namespace}. Replaces six copy-pasted query loops and applies the
 * duplicate filter consistently (the original enabled it only for the first
 * query; results land in a HashSet, so output is unchanged).
 */
private HashSet<URI> subjectsOfType(URI type, String namespace) throws RepositoryException {
    HashSet<URI> subjects = new HashSet<>();
    RepositoryResult<Statement> result = connection.getStatements(null, RDF.TYPE, type, true);
    result.enableDuplicateFilter();
    while (result.hasNext()) {
        Resource resource = result.next().getSubject();
        if (resource instanceof URI && resource.toString().startsWith(namespace)) {
            subjects.add((URI) resource);
        }
    }
    return subjects;
}

From source file:org.metaservice.core.OntologyToJavaConverter.java

/**
 * Generates the Java source of a vocabulary constants class for the ontology
 * held by {@code connection}, including a static initializer that creates the
 * URI constants.
 *
 * @param prefix    vocabulary prefix; when {@code null} it is read from the
 *                  ontology's {@code vann:preferredNamespacePrefix}
 * @param namespace vocabulary namespace; when {@code null} it is read from
 *                  {@code vann:preferredNamespaceUri}, falling back to the
 *                  URI of the {@code owl:Ontology} resource
 * @return the generated class source as a string
 */
public String generate(String prefix, String namespace)
        throws RepositoryException, IOException, RDFParseException, SailException {

    RepositoryResult<Statement> result;

    // Resolve namespace and prefix from the ontology itself when not supplied.
    if (namespace == null) {
        result = connection.getStatements(null, VANN.PREFERRED_NAMESPACE_URI, null, true);
        if (result.hasNext()) {
            namespace = result.next().getObject().stringValue();
        } else {
            result = connection.getStatements(null, RDF.TYPE, OWL.ONTOLOGY, true);
            namespace = result.next().getSubject().stringValue();
        }
    }
    if (prefix == null) {
        prefix = connection.getStatements(null, VANN.PREFERRED_NAMESPACE_PREFIX, null, true).next().getObject()
                .stringValue();
    }
    StringBuilder out = new StringBuilder();

    // One query per RDF type; the helper deduplicates statements and keeps only
    // subjects inside the target namespace.
    HashSet<URI> classes = subjectsOfType(RDFS.CLASS, namespace);
    HashSet<URI> objectProperties = subjectsOfType(OWL.OBJECTPROPERTY, namespace);
    HashSet<URI> dataProperties = subjectsOfType(OWL.DATATYPEPROPERTY, namespace);
    HashSet<URI> annotationProperties = subjectsOfType(OWL.ANNOTATIONPROPERTY, namespace);
    HashSet<URI> properties = subjectsOfType(RDF.PROPERTY, namespace);
    HashSet<URI> things = subjectsOfType(OWL.THING, namespace);

    out.append("package org.metaservice.api.rdf.vocabulary;\n" + "\n" + "import org.openrdf.model.*;\n"
            + "import org.openrdf.model.impl.*;\n\n\n\n");
    out.append("/**\n").append(" * This is an automatically generated class\n")
            .append(" * Generator: " + OntologyToJavaConverter.class.getCanonicalName() + "\n")
            .append(" * @see <a href=\"" + namespace + "\">" + prefix + "</a>\n").append(" */\n")
            .append("public class ").append(prefix.toUpperCase()).append("{\n\n");

    out.append("    public static final String NAMESPACE = \"").append(namespace).append("\";\n\n");
    out.append("    public static final String PREFIX = \"").append(prefix).append("\";\n\n");

    out.append("    public static final Namespace NS = new NamespaceImpl(PREFIX, NAMESPACE);\n\n");

    // rdf:Property is a superclass of the OWL property kinds: keep only plain
    // properties here, and keep owl:Thing individuals that are nothing else.
    properties.removeAll(objectProperties);
    properties.removeAll(dataProperties);
    properties.removeAll(annotationProperties);
    things.removeAll(properties);
    things.removeAll(classes);
    things.removeAll(objectProperties);
    things.removeAll(dataProperties);
    things.removeAll(annotationProperties);

    if (classes.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// CLASSES\n");
        out.append("////////////////////////\n\n\n");
        addPretty(classes, out, connection, "_CLASS");
    }
    if (objectProperties.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// OBJECT PROPERTIES\n");
        out.append("////////////////////////\n\n\n");
        addPretty(objectProperties, out, connection, "_PROPERTY");
    }
    if (dataProperties.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// DATA PROPERTIES\n");
        out.append("////////////////////////\n\n\n");
        addPretty(dataProperties, out, connection, "_PROPERTY");
    }
    if (annotationProperties.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// ANNOTATION PROPERTIES\n");
        out.append("////////////////////////\n\n\n");
        addPretty(annotationProperties, out, connection, "_PROPERTY");
    }
    if (properties.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// PROPERTIES\n");
        out.append("////////////////////////\n\n\n");
        addPretty(properties, out, connection, "_PROPERTY");
    }
    if (things.size() > 0) {
        out.append("////////////////////////\n");
        out.append("// THINGS\n");
        out.append("////////////////////////\n\n\n");
        addPretty(things, out, connection, "_THING");
    }

    // Static initializer: create each URI constant registered in nameMap by
    // the addPretty calls above.
    out.append("    static{\n");
    out.append("        ValueFactory valueFactory = ValueFactoryImpl.getInstance();\n\n");
    for (URI c : Iterables.concat(classes, objectProperties, dataProperties, annotationProperties, properties,
            things)) {
        out.append("        ").append(nameMap.get(c)).append(" = valueFactory.createURI(NAMESPACE,\"")
                .append(c.getLocalName()).append("\");\n");
    }
    out.append("    }\n");//static
    out.append("}\n");//class

    return out.toString();
}

/**
 * Collects every subject typed {@code type} whose URI starts with
 * {@code namespace}. Replaces six copy-pasted query loops and applies the
 * duplicate filter consistently (the original enabled it only for the first
 * query; results land in a HashSet, so output is unchanged).
 */
private HashSet<URI> subjectsOfType(URI type, String namespace) throws RepositoryException {
    HashSet<URI> subjects = new HashSet<>();
    RepositoryResult<Statement> result = connection.getStatements(null, RDF.TYPE, type, true);
    result.enableDuplicateFilter();
    while (result.hasNext()) {
        Resource resource = result.next().getSubject();
        if (resource instanceof org.openrdf.model.URI && resource.toString().startsWith(namespace)) {
            subjects.add((URI) resource);
        }
    }
    return subjects;
}

From source file:org.wso2.carbon.apimgt.hostobjects.APIProviderHostObject.java

/**
 * Updates the design-time details of an API in the API Provider.
 * @param cx      Rhino context
 * @param thisObj Scriptable object
 * @param args    Passing arguments
 * @param funObj  Function object
 * @return true if the API was added successfully
 * @throws APIManagementException Wrapped exception by org.wso2.carbon.apimgt.api.APIManagementException
 */
public static boolean jsFunction_updateAPIDesign(Context cx, Scriptable thisObj, Object[] args, Function funObj)
        throws APIManagementException, ScriptException, FaultGatewaysException {

    if (args == null || args.length == 0) {
        handleException("Invalid number of input parameters.");
    }

    boolean success = false;

    // args[0] is a Rhino NativeObject holding the API design form fields.
    NativeObject apiData = (NativeObject) args[0];
    String provider = String.valueOf(apiData.get("provider", apiData));
    String name = (String) apiData.get("apiName", apiData);
    String version = (String) apiData.get("version", apiData);
    FileHostObject fileHostObject = (FileHostObject) apiData.get("imageUrl", apiData);
    //        String contextVal = (String) apiData.get("context", apiData);
    String description = (String) apiData.get("description", apiData);

    /* Business Information*/
    String techOwner = (String) apiData.get("techOwner", apiData);
    String techOwnerEmail = (String) apiData.get("techOwnerEmail", apiData);
    String bizOwner = (String) apiData.get("bizOwner", apiData);
    String bizOwnerEmail = (String) apiData.get("bizOwnerEmail", apiData);

    //        String context = contextVal.startsWith("/") ? contextVal : ("/" + contextVal);
    //        String providerDomain = MultitenantUtils.getTenantDomain(provider);

    //TODO: check and remove
    /*  if(!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equalsIgnoreCase(providerDomain)) {
    //Create tenant aware context for API
    context= "/t/"+ providerDomain+context;
      }*/

    // Tags arrive as a single comma-separated string; split into a set.
    String tags = (String) apiData.get("tags", apiData);
    Set<String> tag = new HashSet<String>();

    if (tags != null) {
        if (tags.contains(",")) {
            String[] userTag = tags.split(",");
            tag.addAll(Arrays.asList(userTag).subList(0, tags.split(",").length));
        } else {
            tag.add(tags);
        }
    }

    // Visible roles only apply when the API's visibility is "restricted".
    String visibility = (String) apiData.get("visibility", apiData);
    String visibleRoles = "";

    if (visibility != null && visibility.equals(APIConstants.API_RESTRICTED_VISIBILITY)) {
        visibleRoles = (String) apiData.get("visibleRoles", apiData);
    }

    if (provider != null) {
        provider = APIUtil.replaceEmailDomain(provider);
    }
    provider = (provider != null ? provider.trim() : null);
    name = (name != null ? name.trim() : null);
    version = (version != null ? version.trim() : null);
    APIIdentifier apiId = new APIIdentifier(provider, name, version);
    APIProvider apiProvider = getAPIProvider(thisObj);
    API api = null;
    boolean isTenantFlowStarted = false;
    String tenantDomain;
    try {
        // Switch to the provider's tenant before reading the API, and always
        // end the tenant flow afterwards.
        tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(provider));
        if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
            isTenantFlowStarted = true;
            PrivilegedCarbonContext.startTenantFlow();
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        }
        api = apiProvider.getAPI(apiId);
        boolean isValid = apiProvider.isAPIUpdateValid(api);
        if (!isValid) {
            throw new APIManagementException(" User doesn't have permission for update");
        }
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    if (apiData.containsKey("wsdl")) {
        String wsdl = (String) apiData.get("wsdl", apiData);
        if (StringUtils.isNotEmpty(wsdl)) {
            api.setWsdlUrl(wsdl);
        }
    }

    if (apiData.get("swagger", apiData) != null) {
        // Read URI Templates from swagger resource and set it to api object
        Set<URITemplate> uriTemplates = definitionFromSwagger20.getURITemplates(api,
                (String) apiData.get("swagger", apiData));
        api.setUriTemplates(uriTemplates);

        // Save the swagger definition in the registry
        apiProvider.saveSwagger20Definition(api.getId(), (String) apiData.get("swagger", apiData));
    }

    api.setDescription(StringEscapeUtils.escapeHtml(description));
    // Tags to delete = existing tags minus the newly submitted ones.
    HashSet<String> deletedTags = new HashSet<String>(api.getTags());
    deletedTags.removeAll(tag);
    api.removeTags(deletedTags);
    api.addTags(tag);
    api.setBusinessOwner(bizOwner);
    api.setBusinessOwnerEmail(bizOwnerEmail);
    api.setTechnicalOwner(techOwner);
    api.setTechnicalOwnerEmail(techOwnerEmail);
    api.setVisibility(visibility);
    api.setVisibleRoles(visibleRoles != null ? visibleRoles.trim() : null);
    api.setLastUpdated(new Date());

    return saveAPI(apiProvider, api, fileHostObject, false);
}

From source file:ca.uhn.fhir.jpa.dao.SearchBuilder.java

/**
 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later (so it can't be Collections.emptySet())
 *
 * @param theLastUpdated
 */
public static HashSet<Long> loadReverseIncludes(FhirContext theContext, EntityManager theEntityManager,
        Collection<Long> theMatches, Set<Include> theRevIncludes, boolean theReverseMode,
        DateRangeParam theLastUpdated) {
    // Nothing to expand: no matches, or no (rev)includes requested.
    if (theMatches.size() == 0) {
        return new HashSet<Long>();
    }
    if (theRevIncludes == null || theRevIncludes.isEmpty()) {
        return new HashSet<Long>();
    }
    // Reverse mode (_revinclude) follows links INTO the matches; forward mode
    // (_include) follows links OUT of them.
    String searchFieldName = theReverseMode ? "myTargetResourcePid" : "mySourceResourcePid";

    Collection<Long> nextRoundMatches = theMatches;
    HashSet<Long> allAdded = new HashSet<Long>();
    // Snapshot of the original matches, so newly added pids can be told apart.
    HashSet<Long> original = new HashSet<Long>(theMatches);
    ArrayList<Include> includes = new ArrayList<Include>(theRevIncludes);

    int roundCounts = 0;
    StopWatch w = new StopWatch();

    boolean addedSomeThisRound;
    do {
        roundCounts++;

        HashSet<Long> pidsToInclude = new HashSet<Long>();
        Set<Long> nextRoundOmit = new HashSet<Long>();

        for (Iterator<Include> iter = includes.iterator(); iter.hasNext();) {
            Include nextInclude = iter.next();
            // Non-recursive includes only apply for one round.
            if (nextInclude.isRecurse() == false) {
                iter.remove();
            }

            // "*" means: follow every link, regardless of path or param.
            boolean matchAll = "*".equals(nextInclude.getValue());
            if (matchAll) {
                String sql;
                sql = "SELECT r FROM ResourceLink r WHERE r." + searchFieldName + " IN (:target_pids)";
                TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
                q.setParameter("target_pids", nextRoundMatches);
                List<ResourceLink> results = q.getResultList();
                for (ResourceLink resourceLink : results) {
                    if (theReverseMode) {
                        // if (theEverythingModeEnum.isEncounter()) {
                        // if (resourceLink.getSourcePath().equals("Encounter.subject") ||
                        // resourceLink.getSourcePath().equals("Encounter.patient")) {
                        // nextRoundOmit.add(resourceLink.getSourceResourcePid());
                        // }
                        // }
                        pidsToInclude.add(resourceLink.getSourceResourcePid());
                    } else {
                        pidsToInclude.add(resourceLink.getTargetResourcePid());
                    }
                }
            } else {

                // Resolve the include parameter to the set of search paths it covers.
                List<String> paths;
                RuntimeSearchParam param = null;
                if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU1) {
                    // DSTU1 include values are raw paths.
                    paths = Collections.singletonList(nextInclude.getValue());
                } else {
                    String resType = nextInclude.getParamType();
                    if (isBlank(resType)) {
                        continue;
                    }
                    RuntimeResourceDefinition def = theContext.getResourceDefinition(resType);
                    if (def == null) {
                        ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue());
                        continue;
                    }

                    String paramName = nextInclude.getParamName();
                    param = isNotBlank(paramName) ? def.getSearchParam(paramName) : null;
                    if (param == null) {
                        ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue());
                        continue;
                    }

                    paths = param.getPathsSplit();
                }

                String targetResourceType = defaultString(nextInclude.getParamTargetType(), null);
                for (String nextPath : paths) {
                    String sql;
                    // Constrain by target type when the include names one explicitly,
                    // or when the search param declares allowed target types.
                    boolean haveTargetTypesDefinedByParam = param != null && param.getTargets() != null
                            && param.getTargets().isEmpty() == false;
                    if (targetResourceType != null) {
                        sql = "SELECT r FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r."
                                + searchFieldName
                                + " IN (:target_pids) AND r.myTargetResourceType = :target_resource_type";
                    } else if (haveTargetTypesDefinedByParam) {
                        sql = "SELECT r FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r."
                                + searchFieldName
                                + " IN (:target_pids) AND r.myTargetResourceType in (:target_resource_types)";
                    } else {
                        sql = "SELECT r FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r."
                                + searchFieldName + " IN (:target_pids)";
                    }
                    TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
                    q.setParameter("src_path", nextPath);
                    q.setParameter("target_pids", nextRoundMatches);
                    if (targetResourceType != null) {
                        q.setParameter("target_resource_type", targetResourceType);
                    } else if (haveTargetTypesDefinedByParam) {
                        q.setParameter("target_resource_types", param.getTargets());
                    }
                    List<ResourceLink> results = q.getResultList();
                    for (ResourceLink resourceLink : results) {
                        if (theReverseMode) {
                            Long pid = resourceLink.getSourceResourcePid();
                            pidsToInclude.add(pid);
                        } else {
                            Long pid = resourceLink.getTargetResourcePid();
                            pidsToInclude.add(pid);
                        }
                    }
                }
            }
        }

        // Optionally restrict this round's pids by the _lastUpdated range.
        if (theLastUpdated != null && (theLastUpdated.getLowerBoundAsInstant() != null
                || theLastUpdated.getUpperBoundAsInstant() != null)) {
            pidsToInclude = new HashSet<Long>(
                    filterResourceIdsByLastUpdated(theEntityManager, theLastUpdated, pidsToInclude));
        }
        // Add genuinely new pids to the caller's match collection (mutated on purpose).
        for (Long next : pidsToInclude) {
            if (original.contains(next) == false && allAdded.contains(next) == false) {
                theMatches.add(next);
            }
        }

        // NOTE(review): omitted pids are removed only AFTER the theMatches
        // additions above — they are excluded from the next round but may
        // already have been added to theMatches. Confirm this is intended.
        pidsToInclude.removeAll(nextRoundOmit);

        // Stop when a round discovers nothing new, or no recursive includes remain.
        addedSomeThisRound = allAdded.addAll(pidsToInclude);
        nextRoundMatches = pidsToInclude;
    } while (includes.size() > 0 && nextRoundMatches.size() > 0 && addedSomeThisRound);

    ourLog.info("Loaded {} {} in {} rounds and {} ms", new Object[] { allAdded.size(),
            theReverseMode ? "_revincludes" : "_includes", roundCounts, w.getMillisAndRestart() });

    return allAdded;
}