Example usage for java.util Set removeAll

List of usage examples for java.util Set removeAll

Introduction

On this page you can find usage examples for java.util Set removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).
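
Before the longer examples below, here is a minimal, self-contained sketch of the method's behavior; the set contents are made up for illustration. Note that removeAll returns true if the set changed.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RemoveAllExample {
    public static void main(String[] args) {
        // Use a mutable set; removeAll is an optional operation and throws
        // UnsupportedOperationException on immutable sets such as Set.of(...).
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green", "blue"));

        // Removes every element that also appears in the given collection.
        boolean changed = colors.removeAll(Arrays.asList("green", "blue", "yellow"));

        System.out.println(changed); // true
        System.out.println(colors);  // [red]
    }
}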

Usage

From source file:org.jasig.portlet.contacts.control.PortletViewController.java

@ModelAttribute("domains")
public Set<ContactDomain> getDomains(PortletPreferences prefs) {
    log.debug("finding Domains to return");
    final List<String> domainActive = Arrays.asList(prefs.getValues("domainsActive", new String[0]));

    String[] defaultOn = prefs.getValues("defaultOn", new String[0]);
    String[] userOn = prefs.getValues("domainOn", new String[0]);
    String[] userOff = prefs.getValues("domainOff", new String[0]);

    Set<String> domains = new HashSet<String>();
    domains.addAll(Arrays.asList(defaultOn));
    domains.addAll(Arrays.asList(userOn));
    domains.removeAll(Arrays.asList(userOff));
    domains.retainAll(domainActive);

    Set<ContactDomain> activeDomains = new TreeSet<ContactDomain>(new Comparator<ContactDomain>() {

        @Override
        public int compare(ContactDomain o1, ContactDomain o2) {
            int index1 = domainActive.indexOf(o1.getName());
            int index2 = domainActive.indexOf(o2.getName());
            return index1 - index2;
        }
    });
    for (ContactDomain domain : contactDomains) {
        if (domains.contains(domain.getName())) {
            activeDomains.add(domain);
        }
    }
    log.debug("returning " + activeDomains.size() + "domains");
    return activeDomains;
}
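
This controller combines addAll, removeAll, and retainAll to work out which domains are active. As a quick aside on the two Set operations used here: removeAll computes a set difference, while retainAll computes an intersection. A minimal sketch with made-up values:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class DifferenceVsIntersection {
    public static void main(String[] args) {
        Set<String> on = new HashSet<>(Arrays.asList("mail", "phone", "chat"));

        // removeAll: drop every element that appears in the argument (set difference).
        Set<String> difference = new HashSet<>(on);
        difference.removeAll(Arrays.asList("phone"));
        System.out.println(difference); // [mail, chat] in some order

        // retainAll: keep only the elements that appear in the argument (intersection).
        Set<String> intersection = new HashSet<>(on);
        intersection.retainAll(Arrays.asList("phone", "fax"));
        System.out.println(intersection); // [phone]
    }
}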

From source file:com.textocat.textokit.eval.GoldStandardBasedEvaluation.java

private void evaluate(CAS goldCas, CAS sysCas) {
    FSIterator<AnnotationFS> goldAnnoIter = annotationExtractor.extract(goldCas);
    Set<AnnotationFS> goldProcessed = new HashSet<AnnotationFS>();
    // system annotations that exactly match a gold one
    Set<AnnotationFS> sysMatched = newHashSet();
    // matches
    LinkedHashMap<AnnotationFS, MatchInfo> matchesMap = newLinkedHashMap();
    while (goldAnnoIter.hasNext()) {
        AnnotationFS goldAnno = goldAnnoIter.next();
        if (goldProcessed.contains(goldAnno)) {
            continue;
        }
        MatchInfo mi = new MatchInfo();
        matchesMap.put(goldAnno, mi);

        Set<AnnotationFS> candidates = newLinkedHashSet(matchingStrategy.searchCandidates(goldAnno));

        candidates.removeAll(sysMatched);
        AnnotationFS exactSys = matchingStrategy.searchExactMatch(goldAnno, candidates);
        if (exactSys != null) {
            // sanity check
            assert candidates.contains(exactSys);
            mi.exact = exactSys;
            sysMatched.add(exactSys);
        }
        mi.partialSet.addAll(candidates);

        goldProcessed.add(goldAnno);
    }

    // filter partials that match a next gold
    for (MatchInfo mi : matchesMap.values()) {
        mi.partialSet.removeAll(sysMatched);
    }

    // report for each gold anno
    for (AnnotationFS goldAnno : matchesMap.keySet()) {
        // assert order declared in EvaluationListener javadoc
        MatchInfo mi = matchesMap.get(goldAnno);
        boolean matchedExactly = mi.exact != null;
        if (matchedExactly) {
            evalCtx.reportExactMatch(goldAnno, mi.exact);
        }
        for (AnnotationFS partialSys : mi.partialSet) {
            evalCtx.reportPartialMatch(goldAnno, partialSys);
        }
        if (!matchedExactly) {
            evalCtx.reportMissing(goldAnno);
        }
    }

    // report spurious (false positives)
    FSIterator<AnnotationFS> sysAnnoIter = annotationExtractor.extract(sysCas);
    while (sysAnnoIter.hasNext()) {
        AnnotationFS sysAnno = sysAnnoIter.next();
        if (!sysMatched.contains(sysAnno)) {
            evalCtx.reportSpurious(sysAnno);
        }
    }
}

From source file:org.biopax.validator.rules.ConversionToComplexAssemblyRule.java

public void check(final Validation validation, Conversion thing) {
    // for thread safety (concurrency) we're using a new fetcher here rather than a static one
    Fetcher fetcher = new Fetcher(SimpleEditorMap.L3, new Filter<PropertyEditor>() {
        //complex.component only
        public boolean filter(PropertyEditor editor) {
            return editor.getProperty().equals("component");
        }
    });
    Set<PhysicalEntity> left = new HashSet<PhysicalEntity>(getPEsRecursively(thing.getLeft(), fetcher)); //need a mutable set
    Set<PhysicalEntity> right = getPEsRecursively(thing.getRight(), fetcher);
    left.removeAll(right);

    int complexDiff = getComplexCount(thing.getLeft()) - getComplexCount(thing.getRight());
    if (left.isEmpty() // when there are no modifications really, but different no. complexes or participants
            && (complexDiff != 0 || thing.getLeft().size() - thing.getRight().size() != 0))
        error(validation, thing, "wrong.conversion.class", false, thing.getModelInterface());

}

From source file:com.qmetry.qaf.automation.util.PropertyUtil.java

public void addAll(Map<String, String> props) {
    Set<String> keys = props.keySet();
    keys.removeAll(System.getProperties().keySet());
    Iterator<String> iterator = keys.iterator();
    while (iterator.hasNext()) {
        String key = iterator.next();
        ConfigurationManager.getBundle().setProperty(key, props.get(key));
    }
}
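
A note on this example: Map.keySet() returns a live view backed by the map, so calling removeAll on keys also removes those entries from props itself. A minimal sketch of that side effect, using made-up property names:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class KeySetViewExample {
    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("browser", "chrome");
        props.put("timeout", "30");

        // keySet() is a view; removing keys from it removes the entries from the map.
        Set<String> keys = props.keySet();
        keys.removeAll(Collections.singleton("timeout"));

        System.out.println(props); // {browser=chrome}
    }
}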

From source file:com.gsoc.ijosa.liquidgalaxycontroller.PW.collection.PwsClient.java

/**
 * Send an HTTP request to the PWS to resolve a set of URLs.
 * @param broadcastUrls The URLs to resolve.
 * @param pwsResultCallback The callback to be run when the response is received.
 */
public void resolve(final Collection<String> broadcastUrls, final PwsResultCallback pwsResultCallback) {
    // Create the response callback.
    final long startTime = new Date().getTime();
    JsonObjectRequest.RequestCallback requestCallback = new JsonObjectRequest.RequestCallback() {
        private void recordResponse() {
            pwsResultCallback.onResponseReceived(new Date().getTime() - startTime);
        }

        public void onResponse(JSONObject result) {
            recordResponse();

            // Build the metadata from the response.
            JSONArray foundMetadata;
            try {
                foundMetadata = result.getJSONArray("metadata");
            } catch (JSONException e) {
                pwsResultCallback.onPwsResultError(broadcastUrls, 200, e);
                return;
            }

            // Loop through the metadata for each url.
            Set<String> foundUrls = new HashSet<>();
            for (int i = 0; i < foundMetadata.length(); i++) {
                PwsResult pwsResult = null;
                try {
                    JSONObject jsonUrlMetadata = foundMetadata.getJSONObject(i);
                    pwsResult = new PwsResult.Builder(jsonUrlMetadata.getString("id"),
                            jsonUrlMetadata.getString("url")).setTitle(jsonUrlMetadata.optString("title"))
                                    .setDescription(jsonUrlMetadata.optString("description"))
                                    .setIconUrl(jsonUrlMetadata.optString("icon"))
                                    .setGroupId(jsonUrlMetadata.optString("groupId")).build();
                } catch (JSONException e) {
                    continue;
                }
                pwsResultCallback.onPwsResult(pwsResult);
                foundUrls.add(pwsResult.getRequestUrl());
            }

            // See which urls the PWS didn't give us a response for.
            Set<String> missed = new HashSet<>(broadcastUrls);
            missed.removeAll(foundUrls);
            for (String url : missed) {
                pwsResultCallback.onPwsResultAbsent(url);
            }
        }

        public void onError(int responseCode, Exception e) {
            recordResponse();
            pwsResultCallback.onPwsResultError(broadcastUrls, responseCode, e);
        }
    };

    // Create the request.
    String targetUrl = constructPwsUrl(RESOLVE_SCAN_PATH);
    JSONObject payload = new JSONObject();
    try {
        JSONArray urls = new JSONArray();
        for (String url : broadcastUrls) {
            JSONObject obj = new JSONObject();
            obj.put("url", url);
            urls.put(obj);
        }
        payload.put("objects", urls);
    } catch (JSONException e) {
        pwsResultCallback.onPwsResultError(broadcastUrls, 0, e);
        return;
    }
    Request request;
    try {
        request = new JsonObjectRequest(targetUrl, payload, requestCallback);
    } catch (MalformedURLException e) {
        pwsResultCallback.onPwsResultError(broadcastUrls, 0, e);
        return;
    }
    makeRequest(request);
}

From source file:net.projectmonkey.spring.acl.service.SimpleACLService.java

@Override
public Map<ObjectIdentity, Acl> readAclsById(final List<ObjectIdentity> identities, final List<Sid> sids)
        throws NotFoundException {
    Assert.notNull(identities, "At least one Object Identity required");
    Assert.isTrue(identities.size() > 0, "At least one Object Identity required");
    Assert.noNullElements(identities.toArray(new ObjectIdentity[0]),
            "Null object identities are not permitted");

    Map<ObjectIdentity, Acl> result = aclRepository.getAclsById(identities, sids);

    /*
     * Check we found an ACL for every requested object. Where ACL's do not
     * exist for some objects throw a suitable exception.
     */
    Set<ObjectIdentity> remainingIdentities = new HashSet<ObjectIdentity>(identities);
    if (result.size() != remainingIdentities.size()) {
        remainingIdentities.removeAll(result.keySet());
        throw new NotFoundException(
                "Unable to find ACL information for object identities '" + remainingIdentities + "'");
    }
    return result;
}

From source file:hudson.plugins.sonar.utils.PathResolverOperator.java

protected Map<String, Set<String>> mergeInternalProperty(Map<String, Set<String>> rawResults, String mergedKey,
        String includeKey, String excludeKey) {
    Map<String, Set<String>> resultMap = Maps.newHashMap(rawResults);
    Set<String> mergedSet = retrieveSet(rawResults, mergedKey);
    mergedSet.addAll(retrieveSet(rawResults, includeKey));
    mergedSet.removeAll(retrieveSet(rawResults, excludeKey));

    resultMap.put(mergedKey, mergedSet);
    resultMap.remove(includeKey);
    resultMap.remove(excludeKey);
    return resultMap;
}

From source file:com.github.horrorho.inflatabledonkey.cloud.AssetPool.java

public Map<Asset, Optional<List<ByteString>>> put(StorageHostChunkList container,
        Collection<ByteString> chunkChecksums) {
    synchronized (lock) {
        if (assetChunks == null) {
            throw new IllegalStateException("not authorized");
        }
        Set<ByteString> missing = container.getChunkInfoList().stream().map(ChunkInfo::getChunkChecksum)
                .collect(toSet());
        missing.removeAll(chunkChecksums);
        logger.debug("-- debug() - missing: {}", missing);

        Map<Asset, Optional<List<ByteString>>> map = putElements(chunkChecksums);
        map.putAll(voidElements(missing));
        return map;
    }
}

From source file:ilarkesto.form.MultiItemFormField.java

public Collection<T> getSelectableItems() {
    Set<T> result = new HashSet<T>(selectableItems);
    result.removeAll(value);
    return result;
}

From source file:com.vmware.photon.controller.cloudstore.dcp.monitors.CloudStoreCache.java

/**
 * Given an expand query response and a path, this method will update the cache's
 * view for that particular path. As a side effect of the update, certain events will
 * be triggered when resources are added, removed and modified.
 *
 * @param queryResponse     The response from expanding the factory service path
 * @param path              A path to the factory service
 */
private void processQuery(ExpandQueryResponse queryResponse, String path) {
    Map<String, CachedDocument> pathResources = currentData.get(path);
    Set<String> newPaths = new HashSet<>();
    for (String fullPath : queryResponse.documents.keySet()) {
        newPaths.add(getResourceId(fullPath));
    }

    Class documentType = pathTypes.get(path);

    Set<String> purgePaths = new HashSet<>(pathResources.keySet());
    purgePaths.removeAll(newPaths);

    for (String uri : queryResponse.documents.keySet()) {
        String id = getResourceId(uri);

        String newJson = (String) queryResponse.documents.get(uri);
        Operation op = new Operation();
        op.setBody(newJson);
        ServiceDocument document = (ServiceDocument) op.getBody(documentType);
        CachedDocument newDocument = new CachedDocument(document, getVersion(newJson));

        CachedDocument currentDocument = pathResources.get(id);
        if (currentDocument == null) {
            // First time seeing this path, add it to the current view and
            // emit a notification
            pathResources.put(id, newDocument);
            onAdd(path, id, newDocument.getDocument());
        } else {
            if (currentDocument.getVersion() < newDocument.getVersion()) {
                // There is a newer version of this path, update
                // the current view
                pathResources.put(id, newDocument);
                onUpdate(path, id, newDocument.getDocument());
            } else if (currentDocument.getVersion() > newDocument.getVersion()) {
                logger.error("Ignoring event, resource {}/{} version decreased from {} to {}", path, id,
                        currentDocument.getVersion(), newDocument.getVersion());
            }
        }
    }

    // Only keep the paths that are in the intersection of
    // current paths and newly seen paths

    for (String oldId : purgePaths) {
        // can delete an existing path from currentData, if we are querying
        // inconsistent replicas between queries
        CachedDocument document = pathResources.get(oldId);
        pathResources.remove(oldId);
        onRemove(path, oldId, document.document);
    }
}