Example usage for java.util Set retainAll

Introduction

This page lists usage examples for java.util.Set.retainAll.

Prototype

boolean retainAll(Collection<?> c);

Document

Retains only the elements in this set that are contained in the specified collection (optional operation).
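
In effect, retainAll replaces the set's contents with the intersection of the set and the given collection, and returns true if the set was modified. A minimal standalone sketch (not taken from any of the projects below):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RetainAllDemo {
    public static void main(String[] args) {
        Set<String> letters = new HashSet<>(Arrays.asList("a", "b", "c"));
        // keep only elements also present in the argument collection
        boolean changed = letters.retainAll(Arrays.asList("b", "c", "d"));
        System.out.println(letters); // [b, c] (iteration order of HashSet may vary)
        System.out.println(changed); // true, because "a" was removed
    }
}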

Usage

From source file:org.efaps.db.QueryBuilder.java

/**
 * Add a type to the QueryBuilder. Search for the common parent
 * and use it as baseType.
 * @param _type types to be added to the QueryBuilder
 * @throws EFapsException on error
 */
public void addType(final Type... _type) throws EFapsException {
    final List<Type> allType = new ArrayList<>();
    if (this.types.isEmpty()) {
        final Type type = Type.get(this.typeUUID);
        if (type.isAbstract()) {
            final List<Type> childTypes = type.getChildTypes().stream().filter(t -> !t.isAbstract())
                    .collect(Collectors.toList());
            allType.addAll(childTypes);
        } else {
            allType.add(type);
        }
    }
    for (final UUID type : this.types) {
        allType.add(Type.get(type));
    }
    for (final Type type : _type) {
        if (type.isAbstract()) {
            final List<Type> childTypes = type.getChildTypes().stream().filter(t -> !t.isAbstract())
                    .collect(Collectors.toList());
            allType.addAll(childTypes);
        } else {
            allType.add(type);
        }
    }

    // for each type, build its ancestor chain from the type itself up to the root
    final List<List<Type>> typeLists = new ArrayList<>();
    for (final Type type : allType) {
        final List<Type> typesTmp = new ArrayList<>();
        typeLists.add(typesTmp);
        Type tmpType = type;
        while (tmpType != null) {
            typesTmp.add(tmpType);
            tmpType = tmpType.getParentType();
        }
    }

    final Set<Type> common = new LinkedHashSet<>();
    if (!typeLists.isEmpty()) {
        final Iterator<List<Type>> iterator = typeLists.iterator();
        common.addAll(iterator.next());
        while (iterator.hasNext()) {
            common.retainAll(iterator.next());
        }
    }
    if (common.isEmpty()) {
        throw new EFapsException(QueryBuilder.class, "noCommon", allType);
    } else {
        // first common type: LinkedHashSet keeps the first path's child-to-parent order,
        // so this is the closest common ancestor
        this.typeUUID = common.iterator().next().getUUID();
        for (final Type type : allType) {
            this.types.add(type.getUUID());
        }
    }
    // special case: if the only registered type is the base type itself, no extra type filter is needed
    if (this.types.size() == 1 && this.types.iterator().next().equals(this.typeUUID)) {
        this.types.clear();
    }
}
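
The example above leans on LinkedHashSet preserving insertion order: common is seeded with the first type's ancestor path (most specific type first), each retainAll keeps that order, so common.iterator().next() is the closest common ancestor. The same pattern distilled into a hypothetical helper (names are illustrative, not eFaps API):

import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;

public class CommonAncestor {
    // Each path lists a node's ancestors starting from the node itself.
    static <T> Optional<T> nearestCommon(List<List<T>> ancestorPaths) {
        if (ancestorPaths.isEmpty()) {
            return Optional.empty();
        }
        Iterator<List<T>> it = ancestorPaths.iterator();
        Set<T> common = new LinkedHashSet<>(it.next()); // keeps the first path's order
        while (it.hasNext()) {
            common.retainAll(it.next()); // intersect with each remaining path
        }
        return common.stream().findFirst(); // first survivor = most specific common ancestor
    }
}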

From source file:fi.vm.sade.organisaatio.dao.impl.OrganisaatioDAOImpl.java

private List<OrgPerustieto> retrieveParentsAndChildren(List<OrgPerustieto> baseResult, Set<String> oids,
        boolean suunnitellut, boolean lakkautetut) {
    Set<String> procOids = new TreeSet<>();
    procOids.add(ophOid);
    List<OrgPerustieto> ret = new ArrayList<>();

    Set<String> ppoids = new TreeSet<>();

    for (OrgPerustieto opt : baseResult) {
        if (procOids.add(opt.getOid())) {
            ret.add(opt);
            appendChildOrganisations(ret, procOids, opt, oids, suunnitellut, lakkautetut);
        }
        for (String poid : opt.getParentOidPath().split("\\|")) {
            ppoids.add(poid);
        }
    }

    // remove the empty string if there is one
    ppoids.remove("");

    if (!oids.isEmpty()) {
        ppoids.retainAll(oids);
    }

    for (String poid : ppoids) {
        if (procOids.add(poid)) {
            appendParentOrganisation(ret, poid, suunnitellut, lakkautetut);
        }
    }

    return ret;
}
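
Here retainAll implements an optional whitelist: an empty oids set means "no restriction", so the parent oids are only narrowed when a filter was actually supplied. The idiom in isolation (a sketch with illustrative names):

import java.util.Set;

public class WhitelistFilter {
    // Narrow candidates to an optional whitelist; an empty whitelist keeps everything.
    static <T> Set<T> applyOptionalFilter(Set<T> candidates, Set<T> whitelist) {
        if (!whitelist.isEmpty()) {
            candidates.retainAll(whitelist); // drop anything not explicitly allowed
        }
        return candidates;
    }
}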

From source file:ubic.pubmedgate.resolve.ResolutionRDFModel.java

public Set<Resource> getTermsFromConcepts(Set<Resource> concepts) {
    Set<Resource> result = new HashSet<Resource>();
    for (Resource r : concepts) {
        result.addAll(JenaUtil.getObjects(model.listStatements(r, null, (RDFNode) null)));
    }
    // intersect with all neuroterms
    Set<Resource> terms = getTerms();
    result.retainAll(terms);
    return result;
}

From source file:com.limegroup.gnutella.xml.LimeXMLReplyCollection.java

/**
 * Returns a Set of matching {@link LimeXMLDocument}s for a passed in Set of metadata fields.
 * The query string is broken down into keywords, and only results common
 * to all keywords are returned.
 * <p/>
 * <ol>
 *    <li>Extract keywords from query</li>
 *    <li>For each keyword, search the metadata fields for matches (names of metadata fields are passed in)</li>
 *    <li>Return the matching LimeXMLDocuments common to all keywords</li>
 * </ol>
 * <p/>
 * NOTE: Caller of this method MUST SYNCHRONIZE on {@link #LOCK}
 *
 * @param metadataFields names of metadata fields to search for matches
 * @param query the query string to use for the search
 * @return LimeXMLDocuments
 */
private Set<LimeXMLDocument> getMatchingDocumentsIntersectKeywords(Set<String> metadataFields, String query) {
    Set<LimeXMLDocument> matches = new IdentityHashSet<LimeXMLDocument>();
    Set<String> keywords = QueryUtils.extractKeywords(query, true);

    for (String keyword : keywords) {

        Set<LimeXMLDocument> allMatchedDocsForKeyword = getMatchingDocumentsForMetadata(metadataFields,
                keyword);

        // matches contains all common lime xml docs that match
        // all keywords in the query
        if (matches.size() == 0) {
            matches.addAll(allMatchedDocsForKeyword);
        } else {
            matches.retainAll(allMatchedDocsForKeyword);
        }

        // if no docs in common, there is no chance of a match
        if (matches.size() == 0) {
            return Collections.emptySet();
        }
    }
    return matches;
}
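
The accumulate-then-intersect loop above generalizes to any sequence of candidate sets, with an early exit once the intersection is empty. A generic sketch of the pattern (hypothetical helper, not LimeWire API):

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class IntersectAll {
    static <T> Set<T> intersectAll(Iterable<Set<T>> groups) {
        Set<T> result = new HashSet<>();
        boolean first = true;
        for (Set<T> group : groups) {
            if (first) {
                result.addAll(group); // seed with the first group
                first = false;
            } else {
                result.retainAll(group); // keep only elements common so far
            }
            if (result.isEmpty()) {
                return Collections.emptySet(); // nothing can re-enter the intersection
            }
        }
        return result;
    }
}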

From source file:org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs.BaseGraph.java

/**
 * Remove all vertices in the graph that aren't on a path from the reference source vertex to the reference sink vertex
 *
 * More aggressive reference pruning algorithm than removeVerticesNotConnectedToRefRegardlessOfEdgeDirection,
 * as it requires vertices to not only be connected by a series of directed edges but also prunes away
 * paths that do not also meet eventually with the reference sink vertex
 */
public void removePathsNotConnectedToRef() {
    if (getReferenceSourceVertex() == null || getReferenceSinkVertex() == null) {
        throw new IllegalStateException("Graph must have ref source and sink vertices");
    }

    // get the set of vertices we can reach by going forward from the ref source
    final Set<V> onPathFromRefSource = new HashSet<>(vertexSet().size());
    for (final V v : new BaseGraphIterator<>(this, getReferenceSourceVertex(), false, true)) {
        onPathFromRefSource.add(v);
    }

    // get the set of vertices we can reach by going backward from the ref sink
    final Set<V> onPathFromRefSink = new HashSet<>(vertexSet().size());
    for (final V v : new BaseGraphIterator<>(this, getReferenceSinkVertex(), true, false)) {
        onPathFromRefSink.add(v);
    }

    // we want to remove anything that's not in both the sink and source sets
    final Set<V> verticesToRemove = new HashSet<>(vertexSet());
    onPathFromRefSource.retainAll(onPathFromRefSink);
    verticesToRemove.removeAll(onPathFromRefSource);
    removeAllVertices(verticesToRemove);

    // simple sanity checks that this algorithm is working.
    if (getSinks().size() > 1) {
        throw new IllegalStateException(
                "Should have eliminated all but the reference sink, but found " + getSinks());
    }

    if (getSources().size() > 1) {
        throw new IllegalStateException(
                "Should have eliminated all but the reference source, but found " + getSources());
    }
}
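
The core of the pruning is a single intersection: a vertex lies on a source-to-sink path exactly when it is reachable forward from the source and backward from the sink. On a plain adjacency-map graph the same idea looks like this (an illustrative sketch, not GATK code):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class PathVertices {
    // Vertices lying on at least one source->sink path.
    static <V> Set<V> onPath(Map<V, Set<V>> forward, Map<V, Set<V>> backward, V source, V sink) {
        Set<V> fromSource = reachable(forward, source);
        Set<V> toSink = reachable(backward, sink);
        fromSource.retainAll(toSink); // keep vertices reachable from both ends
        return fromSource;
    }

    static <V> Set<V> reachable(Map<V, Set<V>> edges, V start) {
        Set<V> seen = new HashSet<>();
        Deque<V> work = new ArrayDeque<>();
        work.push(start);
        while (!work.isEmpty()) {
            V v = work.pop();
            if (seen.add(v)) {
                work.addAll(edges.getOrDefault(v, Set.of()));
            }
        }
        return seen;
    }
}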

From source file:com.davidsoergel.trees.AbstractRootedPhylogeny.java

/**
 * {@inheritDoc}
 */
public BasicRootedPhylogeny<T> extractIntersectionTree(Collection<T> leafIdsA, Collection<T> leafIdsB,
        NodeNamer<T> namer) throws NoSuchNodeException, TreeException {
    Set<PhylogenyNode<T>> allTreeNodesA = new HashSet<PhylogenyNode<T>>();
    for (T id : leafIdsA) {
        allTreeNodesA.addAll(getNode(id).getAncestorPath());
    }

    Set<PhylogenyNode<T>> allTreeNodesB = new HashSet<PhylogenyNode<T>>();
    for (T id : leafIdsB) {
        allTreeNodesB.addAll(getNode(id).getAncestorPath());
    }

    allTreeNodesA.retainAll(allTreeNodesB);

    // now allTreeNodesA contains all nodes that are in common between the two input leaf sets, including internal nodes

    // remove internal nodes
    for (PhylogenyNode<T> node : new HashSet<PhylogenyNode<T>>(allTreeNodesA)) {
        allTreeNodesA.remove(node.getParent());
    }

    return extractTreeWithLeaves(allTreeNodesA, false, MutualExclusionResolutionMode.EXCEPTION);
}

From source file:org.apache.juddi.query.FindEntityByCombinedCategoryQuery.java

public List<?> select(EntityManager em, FindQualifiers fq, CategoryBag categoryBag, List<?> keysIn,
        DynamicQuery.Parameter... restrictions) {

    // If keysIn is not null but empty, the search is already over.
    if ((keysIn != null) && (keysIn.size() == 0))
        return keysIn;

    if (categoryBag == null)
        return keysIn;

    List<KeyedReference> keyRefsInCategories = categoryBag.getKeyedReference();
    if (keyRefsInCategories == null || keyRefsInCategories.size() == 0)
        return keysIn;

    Map<KeyedReference, Set<String>> map = new HashMap<KeyedReference, Set<String>>();
    //1. First match at the top level (i.e. categoryBag on business)
    findEntityByCategoryQuery(map, em, fq, categoryBag, entityField, entityNameChild, keysIn, restrictions);
    //2. Now match at the second level (i.e. categoryBag on services for businesses)
    findEntityByCategoryQuery(map, em, fq, categoryBag, entityField2, entityNameChild2, keysIn, restrictions);
    //3. Now match at the third level (i.e. categoryBag on binding for businesses)
    //   This does only apply to businesses (not for services)
    if (entityNameChild3 != null) {
        findEntityByCategoryQuery(map, em, fq, categoryBag, entityField3, entityNameChild3, keysIn,
                restrictions);
    }

    //Now build the results taking into account AND/OR/LIKE
    Set<String> resultingEntityKeys = new HashSet<String>();
    if (fq.isOrAllKeys()) {
        //in this case get ALL businessKeys
        for (KeyedReference keyRef : map.keySet()) {
            resultingEntityKeys.addAll(map.get(keyRef));
        }
    } else if (fq.isOrLikeKeys()) {
        // any keyedReference filters that come from the same namespace (e.g. have the same tModelKey value) 
        // are OR'd together rather than AND'd
        // 1. OR if we have keys with similar namespaces (keyValue)
        Map<String, Set<String>> likeMap = new HashMap<String, Set<String>>();
        for (KeyedReference keyRef : map.keySet()) {
            String keyValue = keyRef.getKeyValue();
            if (likeMap.containsKey(keyValue)) {
                likeMap.get(keyValue).addAll(map.get(keyRef));
            } else {
                likeMap.put(keyValue, map.get(keyRef));
            }
        }
        // 2. Now AND the likeMap
        boolean firstTime = true;
        for (String keyValue : likeMap.keySet()) {
            if (firstTime) {
                resultingEntityKeys = likeMap.get(keyValue);
                firstTime = false;
            } else {
                resultingEntityKeys.retainAll(likeMap.get(keyValue));
            }
        }
    } else {
        // AND keys by default, in this case each entity (business or service)
        // needs to have ALL keys
        boolean firstTime = true;
        for (KeyedReference keyRef : map.keySet()) {
            if (firstTime) {
                resultingEntityKeys = map.get(keyRef);
                firstTime = false;
            } else {
                resultingEntityKeys.retainAll(map.get(keyRef));
            }
        }
    }
    return new ArrayList<String>(resultingEntityKeys);
}

From source file:com.glaf.base.modules.sys.springmvc.SysUserController.java

/**
 * Assigns the selected department roles to a user, adding and removing
 * roles relative to the user's current assignments.
 *
 * @param request
 * @param modelMap
 * @return
 */
@RequestMapping(params = "method=setRole")
public ModelAndView setRole(HttpServletRequest request, ModelMap modelMap) {
    logger.debug(RequestUtils.getParameterMap(request));
    ViewMessages messages = new ViewMessages();
    long userId = ParamUtil.getIntParameter(request, "user_id", 0);
    SysUser user = sysUserService.findById(userId);

    if (user != null) {
        long[] id = ParamUtil.getLongParameterValues(request, "id");// ids of the selected roles
        if (id != null) {
            Set<SysDeptRole> delRoles = new HashSet<SysDeptRole>();
            Set<SysDeptRole> oldRoles = user.getRoles();
            Set<SysDeptRole> newRoles = new HashSet<SysDeptRole>();
            for (int i = 0; i < id.length; i++) {
                logger.debug("id[" + i + "]=" + id[i]);
                SysDeptRole role = sysDeptRoleService.findById(id[i]);
                if (role != null) {
                    newRoles.add(role);
                }
            }

            delRoles.addAll(oldRoles); // start from the user's current roles
            oldRoles.retainAll(newRoles); // keep the roles present in both sets
            delRoles.removeAll(newRoles); // roles to revoke: current roles no longer selected
            newRoles.removeAll(oldRoles); // roles to grant: selections the user lacks
            user.setUpdateBy(RequestUtils.getActorId(request));

            if (sysUserService.updateRole(user, delRoles, newRoles)) {
                messages.add(ViewMessages.GLOBAL_MESSAGE, new ViewMessage("user.role_success"));
            } else {
                messages.add(ViewMessages.GLOBAL_MESSAGE, new ViewMessage("user.role_failure"));
            }
        }
    }
    MessageUtils.addMessages(request, messages);
    return new ModelAndView("show_msg", modelMap);
}
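
The retainAll/removeAll combination above is a standard three-way diff between an old and a new assignment set. Distilled into a hypothetical helper:

import java.util.Set;

public class SetDiff {
    // Splits an old->new transition into removals and additions;
    // afterwards oldSet holds the unchanged intersection.
    static <T> void diff(Set<T> oldSet, Set<T> newSet, Set<T> toRemove, Set<T> toAdd) {
        toRemove.addAll(oldSet);
        toRemove.removeAll(newSet); // in old but not new -> revoke
        toAdd.addAll(newSet);
        toAdd.removeAll(oldSet);    // in new but not old -> grant
        oldSet.retainAll(newSet);   // shared elements stay as they were
    }
}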

From source file:org.jactr.modules.pm.aural.audicon.map.KindFeatureMap.java

/**
 * @see org.jactr.modules.pm.common.memory.map.IFeatureMap#getCandidateRealObjects(ChunkTypeRequest, Set)
 */
public void getCandidateRealObjects(ChunkTypeRequest request, Set<IIdentifier> container) {
    Set<IIdentifier> identifiers = new HashSet<IIdentifier>();

    boolean firstIteration = true;
    for (IConditionalSlot cSlot : request.getConditionalSlots())
        if (cSlot.getName().equalsIgnoreCase(IAuralModule.KIND_SLOT)) {
            Object value = toKindString(cSlot.getValue());
            Collection<IIdentifier> eval = Collections.emptyList();
            switch (cSlot.getCondition()) {
            case IConditionalSlot.EQUALS:
                if (value != null)
                    eval = equals(value.toString());
                break;
            case IConditionalSlot.NOT_EQUALS:
                if (value != null)
                    eval = not(cSlot.getValue().toString());
                else
                    eval = all();
                break;
            default:
                if (LOGGER.isWarnEnabled())
                    LOGGER.warn(getClass().getSimpleName() + " can only handle equals and not equals");
                break;
            }

            if (eval.size() == 0)
                break;

            if (firstIteration) {
                identifiers.addAll(eval);
                firstIteration = false;
            } else
                identifiers.retainAll(eval);
        }

    // publish the accumulated matches to the caller's container
    container.addAll(identifiers);
}

From source file:net.sf.jabref.gui.entryeditor.EntryEditor.java

private void setupFieldPanels() {
    tabbed.removeAll();
    tabs.clear();

    EntryType type = EntryTypes.getTypeOrDefault(entry.getType(),
            this.frame.getCurrentBasePanel().getBibDatabaseContext().getMode());

    // required fields
    List<String> requiredFields = addRequiredTab(type);

    // optional fields
    List<String> displayedOptionalFields = new ArrayList<>();

    if ((type.getOptionalFields() != null) && !type.getOptionalFields().isEmpty()) {
        if (!frame.getCurrentBasePanel().getBibDatabaseContext().isBiblatexMode()) {
            addOptionalTab(type);
        } else {
            displayedOptionalFields.addAll(type.getPrimaryOptionalFields());
            displayedOptionalFields.addAll(type.getSecondaryOptionalFields());

            addOptionalTab(type);

            Set<String> deprecatedFields = new HashSet<>(EntryConverter.FIELD_ALIASES_TEX_TO_LTX.keySet());
            deprecatedFields.add(FieldName.YEAR);
            deprecatedFields.add(FieldName.MONTH);
            List<String> secondaryOptionalFields = type.getSecondaryOptionalFields();
            List<String> optionalFieldsNotPrimaryOrDeprecated = new ArrayList<>(secondaryOptionalFields);
            optionalFieldsNotPrimaryOrDeprecated.removeAll(deprecatedFields);

            // Get list of all optional fields of this entry and their aliases
            Set<String> optionalFieldsAndAliases = new HashSet<>();
            for (String field : type.getOptionalFields()) {
                optionalFieldsAndAliases.add(field);
                if (EntryConverter.FIELD_ALIASES_LTX_TO_TEX.containsKey(field)) {
                    optionalFieldsAndAliases.add(EntryConverter.FIELD_ALIASES_LTX_TO_TEX.get(field));
                }
            }

            // Get all optional fields which are deprecated
            Set<String> usedOptionalFieldsDeprecated = new HashSet<>(deprecatedFields);
            usedOptionalFieldsDeprecated.retainAll(optionalFieldsAndAliases);

            // Add tabs
            EntryEditorTab optPan2 = new EntryEditorTab(frame, panel, optionalFieldsNotPrimaryOrDeprecated,
                    this, false, true, Localization.lang("Optional fields 2"));
            if (optPan2.fileListEditor != null) {
                fileListEditor = optPan2.fileListEditor;
            }
            tabbed.addTab(Localization.lang("Optional fields 2"), IconTheme.JabRefIcon.OPTIONAL.getSmallIcon(),
                    optPan2.getPane(), Localization.lang("Show optional fields"));
            tabs.add(optPan2);

            if (!usedOptionalFieldsDeprecated.isEmpty()) {
                EntryEditorTab optPan3;
                optPan3 = new EntryEditorTab(frame, panel, new ArrayList<>(usedOptionalFieldsDeprecated), this,
                        false, true, Localization.lang("Deprecated fields"));
                if (optPan3.fileListEditor != null) {
                    fileListEditor = optPan3.fileListEditor;
                }
                tabbed.addTab(Localization.lang("Deprecated fields"),
                        IconTheme.JabRefIcon.OPTIONAL.getSmallIcon(), optPan3.getPane(),
                        Localization.lang("Show deprecated BibTeX fields"));
                tabs.add(optPan3);
            }
        }
    }

    // other fields
    List<String> displayedFields = Stream.concat(requiredFields.stream(), displayedOptionalFields.stream())
            .map(String::toLowerCase).collect(Collectors.toList());
    List<String> otherFields = entry.getFieldNames().stream().map(String::toLowerCase)
            .filter(f -> !displayedFields.contains(f)).collect(Collectors.toList());
    otherFields.remove(BibEntry.KEY_FIELD);
    otherFields.removeAll(Globals.prefs.getCustomTabFieldNames());

    if (!otherFields.isEmpty()) {
        addOtherTab(otherFields);
    }

    // general fields from preferences
    addGeneralTabs();
    // source tab
    addSourceTab();
}
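
Note the copy-then-retainAll idiom used for usedOptionalFieldsDeprecated: copying one set before the call yields the intersection without disturbing either input. As a standalone helper (illustrative, not JabRef API):

import java.util.HashSet;
import java.util.Set;

public class SetOps {
    // Non-destructive intersection: neither input set is modified.
    static <T> Set<T> intersection(Set<T> a, Set<T> b) {
        Set<T> result = new HashSet<>(a); // defensive copy of a
        result.retainAll(b);              // keep only elements also in b
        return result;
    }
}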