List of usage examples for java.util.Set#retainAll
boolean retainAll(Collection<?> c);
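Before the real-world examples below, here is a minimal, self-contained sketch of what retainAll does: it keeps only the elements of the receiving set that are also contained in the argument collection (a set intersection) and returns true if the receiver was modified. The class name and set contents are invented purely for illustration.

    import java.util.HashSet;
    import java.util.Set;

    public class RetainAllDemo {
        public static void main(String[] args) {
            Set<String> fruits = new HashSet<>(Set.of("apple", "banana", "cherry"));
            Set<String> wanted = Set.of("banana", "cherry", "date");

            // retainAll keeps only elements present in both collections (intersection)
            // and reports whether the receiving set changed.
            boolean changed = fruits.retainAll(wanted);

            System.out.println(changed); // true ("apple" was removed)
            System.out.println(fruits);  // [banana, cherry] (iteration order not guaranteed by HashSet)
        }
    }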
From source file:org.wso2.carbon.identity.consent.mgt.services.ConsentUtilityService.java
    /**
     * Filters the given key set so that it retains only the PIIs for which consent
     * has been given in the receipt.
     *
     * @param keySet  the PII keys to filter
     * @param receipt the consent receipt
     * @return the filtered key set
     * @throws ConsentUtilityServiceException if the key set or receipt is null
     */
    public Set<String> filterPIIsFromReceipt(Set<String> keySet, ReceiptInput receipt)
            throws ConsentUtilityServiceException {
        if (keySet == null || receipt == null) {
            throw new ConsentUtilityServiceException("Key set and receipt should not be null");
        }
        List<ReceiptServiceInput> services = receipt.getServices();
        Set<String> consentedPIIs = new HashSet<>();
        for (ReceiptServiceInput service : services) {
            List<ReceiptPurposeInput> purposes = service.getPurposes();
            for (ReceiptPurposeInput consentPurpose : purposes) {
                List<PIICategoryValidity> piiCategories = consentPurpose.getPiiCategory();
                for (PIICategoryValidity piiCategory : piiCategories) {
                    consentedPIIs.add(getPIIName(consentPurpose.getPurposeId(), piiCategory.getId()));
                }
            }
        }
        keySet.retainAll(consentedPIIs);
        return keySet;
    }
From source file:org.structr.core.property.EndNode.java
    @Override
    public SearchAttribute getSearchAttribute(SecurityContext securityContext, BooleanClause.Occur occur,
            T searchValue, boolean exactMatch, final Query query) {

        final Predicate<GraphObject> predicate = query != null ? query.toPredicate() : null;
        final SourceSearchAttribute attr = new SourceSearchAttribute(occur);
        final Set<GraphObject> intersectionResult = new LinkedHashSet<>();
        boolean alreadyAdded = false;

        if (searchValue != null && !StringUtils.isBlank(searchValue.toString())) {

            if (exactMatch) {

                switch (occur) {

                    case MUST:
                        if (!alreadyAdded) {
                            // the first result is the basis of all subsequent intersections
                            intersectionResult.addAll(
                                    getRelatedNodesReverse(securityContext, searchValue, declaringClass, predicate));
                            // the next additions are intersected with this one
                            alreadyAdded = true;
                        } else {
                            intersectionResult.retainAll(
                                    getRelatedNodesReverse(securityContext, searchValue, declaringClass, predicate));
                        }
                        break;

                    case SHOULD:
                        intersectionResult.addAll(
                                getRelatedNodesReverse(securityContext, searchValue, declaringClass, predicate));
                        break;

                    case MUST_NOT:
                        break;
                }

            } else {
                intersectionResult.addAll(
                        getRelatedNodesReverse(securityContext, searchValue, declaringClass, predicate));
            }

            attr.setResult(intersectionResult);

        } else {
            // experimental filter attribute that removes entities
            // with a non-empty value in the given field
            return new EmptySearchAttribute(this, null);
        }

        return attr;
    }
From source file:nl.strohalm.cyclos.services.ads.AdServiceImpl.java
    private void setupGroupFilters(final AbstractAdQuery query) {
        Collection<GroupFilter> groupFilters = query.getGroupFilters();
        if (CollectionUtils.isNotEmpty(groupFilters)) {
            // The full text search cannot handle group filters directly. Groups must be assigned
            groupFilters = fetchService.fetch(groupFilters, GroupFilter.Relationships.GROUPS);
            Set<MemberGroup> groups = new HashSet<MemberGroup>();
            Set<MemberGroup> xGroups = new HashSet<MemberGroup>();
            if (query.getGroups() != null) {
                groups.addAll(query.getGroups());
            }
            for (final GroupFilter groupFilter : groupFilters) {
                xGroups.addAll(groupFilter.getGroups());
            }
            if (groups.isEmpty()) {
                groups = xGroups;
            } else {
                groups.retainAll(xGroups);
            }
            query.setGroupFilters(null);
            query.setGroups(groups);
        }
    }
From source file:org.structr.core.property.StartNode.java
    @Override
    public SearchAttribute getSearchAttribute(SecurityContext securityContext, BooleanClause.Occur occur,
            S searchValue, boolean exactMatch, final Query query) {

        final Predicate<GraphObject> predicate = query != null ? query.toPredicate() : null;
        final SourceSearchAttribute attr = new SourceSearchAttribute(occur);
        final Set<GraphObject> intersectionResult = new LinkedHashSet<>();
        boolean alreadyAdded = false;

        if (searchValue != null && !StringUtils.isBlank(searchValue.toString())) {

            if (exactMatch) {

                switch (occur) {

                    case MUST:
                        if (!alreadyAdded) {
                            // the first result is the basis of all subsequent intersections
                            intersectionResult.addAll(
                                    getRelatedNodesReverse(securityContext, searchValue, declaringClass, predicate));
                            // the next additions are intersected with this one
                            alreadyAdded = true;
                        } else {
                            intersectionResult.retainAll(
                                    getRelatedNodesReverse(securityContext, searchValue, declaringClass, predicate));
                        }
                        break;

                    case SHOULD:
                        intersectionResult.addAll(
                                getRelatedNodesReverse(securityContext, searchValue, declaringClass, predicate));
                        break;

                    case MUST_NOT:
                        break;
                }

            } else {
                intersectionResult.addAll(
                        getRelatedNodesReverse(securityContext, searchValue, declaringClass, predicate));
            }

            attr.setResult(intersectionResult);

        } else {
            // experimental filter attribute that removes entities
            // with a non-empty value in the given field
            return new EmptySearchAttribute(this, null);
        }

        return attr;
    }
From source file:edu.rice.cs.bioinfo.programs.phylonet.algos.network.NetworkLikelihoodFromGTTBL.java
    private void computeNodeHeightUpperbound(Network network, Map<NetNode, Double> node2constraints) {
        Map<NetNode, Set<String>> node2taxa = new HashMap<>();
        for (Object o : Networks.postTraversal(network)) {
            NetNode node = (NetNode) o;
            Set<String> taxa = new HashSet<>();
            double upperBound = Double.POSITIVE_INFINITY;
            if (node.isLeaf()) {
                taxa.add(node.getName());
            } else if (node.isNetworkNode()) {
                // A network node has a single child; inherit its constraint and taxa.
                NetNode childNode = (NetNode) node.getChildren().iterator().next();
                if (!childNode.isLeaf())
                    upperBound = node2constraints.get(childNode);
                taxa.addAll(node2taxa.get(childNode));
            } else {
                // Tree node: collect the taxa of all children and their intersection.
                Set<String> intersection = null;
                List<NetNode> childNodes = null;
                for (Object childO : node.getChildren()) {
                    NetNode childNode = (NetNode) childO;
                    if (childNodes == null) {
                        childNodes = new ArrayList<>();
                    }
                    childNodes.add(childNode);
                    if (intersection == null) {
                        intersection = new HashSet<>();
                        intersection.addAll(node2taxa.get(childNode));
                    } else {
                        intersection.retainAll(node2taxa.get(childNode));
                    }
                    taxa.addAll(node2taxa.get(childNode));
                }
                // The upper bound is the smallest pairwise time between taxa that appear
                // under different children but not in the shared intersection.
                for (int i = 0; i < childNodes.size(); i++) {
                    Set<String> taxa1 = node2taxa.get(childNodes.get(i));
                    for (int j = i + 1; j < childNodes.size(); j++) {
                        Set<String> taxa2 = node2taxa.get(childNodes.get(j));
                        for (String taxon1 : taxa1) {
                            if (intersection.contains(taxon1))
                                continue;
                            for (String taxon2 : taxa2) {
                                if (intersection.contains(taxon2))
                                    continue;
                                upperBound = Math.min(upperBound,
                                        _pair2time.get(new UnorderedPair(taxon1, taxon2)));
                            }
                        }
                    }
                }
            }
            if (!node.isLeaf()) {
                node2constraints.put(node, upperBound);
            }
            node2taxa.put(node, taxa);
        }
    }
From source file:org.rhq.core.pc.content.ContentManager.java
    /**
     * Handles the results received from the call to the facet to discover content. See
     * {@link ContentFacet#discoverDeployedPackages(org.rhq.core.domain.content.PackageType)}.
     *
     * @param details    description of content that was returned from the facet
     * @param resourceId resource against which the content was found
     *
     * @return domain model representation of the details specified
     *
     * @throws Exception if there is an error from any subsequent calls made to the facet
     */
    private ContentDiscoveryReport handleDiscoveredContent(Set<ResourcePackageDetails> details, int resourceId)
            throws Exception {
        // The plugin should at least return an empty set, but check for null too.
        if (details == null) {
            return null;
        }

        InventoryManager inventoryManager = PluginContainer.getInstance().getInventoryManager();
        ResourceContainer container = inventoryManager.getResourceContainer(resourceId);

        Set<ResourcePackageDetails> updatedPackageSet = new HashSet<ResourcePackageDetails>(details);
        Set<ResourcePackageDetails> existingInstalledPackagesSet = container.getInstalledPackages();
        if (existingInstalledPackagesSet == null) {
            existingInstalledPackagesSet = new HashSet<ResourcePackageDetails>();
        }

        // Strip out content that has been removed (i.e. not returned on the latest discovery)
        int originalPackageCount = existingInstalledPackagesSet.size();
        existingInstalledPackagesSet.retainAll(updatedPackageSet);
        int removedPackagesCount = originalPackageCount - existingInstalledPackagesSet.size();
        if (removedPackagesCount > 0) {
            if (log.isDebugEnabled()) {
                log.debug("Removed [" + removedPackagesCount + "] obsolete packages for resource id ["
                        + resourceId + "]");
            }
        }

        // Strip from the updated list content that is already known for the resource; nothing to do for those
        updatedPackageSet.removeAll(existingInstalledPackagesSet);

        // Remaining content in the updated list is "new" content
        if (!updatedPackageSet.isEmpty()) {
            if (log.isDebugEnabled()) {
                log.debug("Found [" + updatedPackageSet.size() + "] new packages for resource id ["
                        + resourceId + "]");
            }
        }

        // Add new content (yes, existingInstalledPackagesSet is same as details, but use the container's reference)
        existingInstalledPackagesSet.addAll(updatedPackageSet);

        // Add merged (current) list to the resource container
        container.setInstalledPackages(existingInstalledPackagesSet);

        // Package and send to server
        ContentDiscoveryReport report = new ContentDiscoveryReport();
        report.addAllDeployedPackages(existingInstalledPackagesSet);
        report.setResourceId(resourceId);

        ContentServerService contentServerService = getContentServerService();
        if (contentServerService != null) {
            // if there are 1+ installed packages to report OR there are 0 but there used to be packages installed,
            // then send up the report to be merged
            if (!existingInstalledPackagesSet.isEmpty() || originalPackageCount != 0) {
                if (log.isDebugEnabled()) {
                    log.debug("Merging [" + existingInstalledPackagesSet.size()
                            + "] discovered packages for resource id [" + resourceId + "] with Server");
                }
                contentServerService.mergeDiscoveredPackages(report);
            }
        }

        return report;
    }
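The ContentManager example above combines retainAll with removeAll to diff an old snapshot against a new one: retainAll drops entries that disappeared, removeAll isolates entries that are genuinely new. A minimal, hypothetical sketch of that bookkeeping (class name and package names invented for illustration):

    import java.util.HashSet;
    import java.util.Set;

    public class SnapshotDiffDemo {
        public static void main(String[] args) {
            Set<String> existing = new HashSet<>(Set.of("pkgA", "pkgB", "pkgC"));
            Set<String> discovered = new HashSet<>(Set.of("pkgB", "pkgC", "pkgD"));

            int before = existing.size();

            // Keep only packages that are still present in the latest discovery.
            existing.retainAll(discovered);
            int removedCount = before - existing.size(); // 1 ("pkgA" is obsolete)

            // Whatever was discovered but not already known is new.
            Set<String> added = new HashSet<>(discovered);
            added.removeAll(existing);                   // [pkgD]

            // Merge the new packages back into the surviving set.
            existing.addAll(added);

            System.out.println("removed=" + removedCount + ", added=" + added + ", current=" + existing);
        }
    }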
From source file:org.apache.hyracks.algebricks.rewriter.rules.PushSortDownRule.java
    @Override
    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        ILogicalOperator operator = opRef.getValue();
        if (operator.getOperatorTag() != LogicalOperatorTag.ORDER) {
            return false;
        }

        // Gets used variables in the sort operator.
        OrderOperator orderOperator = (OrderOperator) operator;
        List<Pair<IOrder, Mutable<ILogicalExpression>>> orderKeys = orderOperator.getOrderExpressions();
        Set<LogicalVariable> orderUsedVars = new HashSet<LogicalVariable>();
        for (Pair<IOrder, Mutable<ILogicalExpression>> orderKey : orderKeys) {
            orderKey.second.getValue().getUsedVariables(orderUsedVars);
        }

        Mutable<ILogicalOperator> inputOpRef = orderOperator.getInputs().get(0);
        ILogicalOperator inputOperator = inputOpRef.getValue();

        // Only pushes sort through assign:
        // 1. Blocking operators like sort/group/join cannot be pushed through.
        // 2. Data reducing operators like select/project should not be pushed through.
        // 3. Order-destroying operators like unnest/unnest-map cannot be pushed through.
        if (inputOperator.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
            return false;
        }

        Set<LogicalVariable> inputProducedVars = new HashSet<LogicalVariable>();
        VariableUtilities.getProducedVariables(inputOperator, inputProducedVars);

        // Intersects used variables in the sort and variables produced by inputOperator.
        orderUsedVars.retainAll(inputProducedVars);
        if (!orderUsedVars.isEmpty()) {
            // The sort uses a variable that is produced by this operator, so it cannot be pushed down.
            return false;
        }

        // Switches sort and its input operator.
        opRef.setValue(inputOperator);
        inputOpRef.setValue(inputOperator.getInputs().get(0).getValue());
        inputOperator.getInputs().get(0).setValue(orderOperator);

        // Re-computes the type environments.
        context.computeAndSetTypeEnvironmentForOperator(orderOperator);
        context.computeAndSetTypeEnvironmentForOperator(inputOperator);
        return true;
    }
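The PushSortDownRule above uses retainAll simply to test whether two variable sets overlap: intersect in place, then check isEmpty(). Because retainAll mutates the receiving set, copy it first if the original is still needed. A small illustrative sketch of that check (names invented for illustration); for a non-mutating alternative the JDK also provides java.util.Collections.disjoint(Collection, Collection).

    import java.util.HashSet;
    import java.util.Set;

    public class OverlapCheckDemo {

        // True if the two sets share at least one element.
        // Copies 'a' first so neither input is mutated.
        static <T> boolean overlaps(Set<T> a, Set<T> b) {
            Set<T> copy = new HashSet<>(a);
            copy.retainAll(b);
            return !copy.isEmpty();
        }

        public static void main(String[] args) {
            System.out.println(overlaps(Set.of("x", "y"), Set.of("y", "z"))); // true
            System.out.println(overlaps(Set.of("x"), Set.of("z")));           // false
        }
    }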
From source file:gov.nih.nci.ispy.service.clinical.ClinicalFileBasedQueryService.java
    private Set<String> addToPatientDIDs(Set<String> patientDIDs, Set<String> queryResult) {
        if (patientDIDs == null) {
            patientDIDs = new HashSet<String>();
            patientDIDs.addAll(queryResult);
        } else {
            patientDIDs.retainAll(queryResult);
        }
        return patientDIDs;
    }
From source file:org.etudes.jforum.dao.generic.GenericSearchDAO.java
    private void topicsByKeyword(SearchData sd) throws Exception {
        boolean isLike = "like".equals(SystemGlobals.getValue(ConfigKeys.SEARCH_WORD_MATCHING).trim());

        String sql = isLike ? SystemGlobals.getSql("SearchModel.searchByLikeWord")
                : SystemGlobals.getSql("SearchModel.searchByWord");

        PreparedStatement p = JForum.getConnection().prepareStatement(sql);

        Map eachWordMap = new HashMap();
        String context = ToolManager.getCurrentPlacement().getContext();

        // Get the post ids to which the words are associated
        for (int i = 0; i < sd.getKeywords().length; i++) {
            if (isLike) {
                // Mallika's new code begin
                p.setString(1, context);
                // Mallika's new code end, line below changes from 1 to 2
                p.setString(2, "%" + sd.getKeywords()[i].toLowerCase() + "%");
            } else {
                // Mallika's new code begin
                p.setString(1, context);
                // Mallika's new code end, line below changes from 1 to 2
                p.setString(2, sd.getKeywords()[i].toLowerCase());
            }

            Set postsIds = new HashSet();
            ResultSet rs = p.executeQuery();
            while (rs.next()) {
                postsIds.add(new Integer(rs.getInt("post_id")));
            }

            if (postsIds.size() > 0) {
                eachWordMap.put(sd.getKeywords()[i], postsIds);
            }
        }

        // [wordName] = { each, post, id }
        // If the search type is OR, then collect the ids of all words.
        // If it is AND, then we want only the ids common to all words.
        Set postsIds = null;
        if (sd.getUseAllWords()) {
            for (Iterator iter = eachWordMap.values().iterator(); iter.hasNext();) {
                if (postsIds == null) {
                    postsIds = new HashSet(eachWordMap.values().size());
                    postsIds.addAll((HashSet) iter.next());
                } else {
                    postsIds.retainAll((HashSet) iter.next());
                }
            }
        } else {
            postsIds = new HashSet();
            for (Iterator iter = eachWordMap.values().iterator(); iter.hasNext();) {
                postsIds.addAll((HashSet) iter.next());
            }
        }

        if (postsIds == null || postsIds.size() == 0) {
            return;
        }

        // Build the comma-separated list of post ids used to search for the topic ids
        StringBuffer sb = new StringBuffer(1024);
        for (Iterator iter = postsIds.iterator(); iter.hasNext();) {
            sb.append(iter.next()).append(",");
        }
        sb.delete(sb.length() - 1, sb.length());

        // Search for the ids, inserting them in the helper table
        sql = SystemGlobals.getSql("SearchModel.insertTopicsIds");
        sql = sql.replaceAll(":posts:", sb.toString());

        p = JForum.getConnection().prepareStatement(sql);
        p.setString(1, SessionFacade.getUserSession().getSessionId());
        p.executeUpdate();

        // Now that we have the topic ids, copy the relevant rows from the topics table
        // to make the search faster
        this.selectTopicData();

        p.close();
    }
From source file:org.ohmage.service.CampaignServices.java
    /**
     * Begins with all of the campaigns that exist in the system and then
     * removes those that don't match the parameterized criteria. If a
     * parameter is null, it is ignored. Therefore, if all parameters are null,
     * then all campaign IDs are returned.
     *
     * @param partialCampaignId Only return campaigns whose ID contains this
     *                          value.
     *
     * @param partialCampaignName Only return campaigns whose name contains
     *                            this value.
     *
     * @param partialDescription Only return campaigns whose description
     *                           contains this value.
     *
     * @param partialXml Only return campaigns whose XML contains this value.
     *
     * @param partialAuthoredBy Only return campaigns whose authored by value
     *                          contains this value.
     *
     * @param startDate Only return campaigns that were created on or after
     *                  this date.
     *
     * @param endDate Only return campaigns that were created on or before this
     *                date.
     *
     * @param privacyState Only return campaigns with this privacy state.
     *
     * @param runningState Only return campaigns with this running state.
     *
     * @return The set of campaign IDs.
     *
     * @throws ServiceException There was an error.
     */
    public Set<String> campaignIdSearch(final String partialCampaignId, final String partialCampaignName,
            final String partialDescription, final String partialXml, final String partialAuthoredBy,
            final DateTime startDate, final DateTime endDate, final Campaign.PrivacyState privacyState,
            final Campaign.RunningState runningState) throws ServiceException {

        try {
            Set<String> result = null;

            if (partialCampaignId != null) {
                result = new HashSet<String>(campaignQueries.getCampaignsFromPartialId(partialCampaignId));
            }

            if (partialCampaignName != null) {
                List<String> campaignIds = campaignQueries.getCampaignsFromPartialName(partialCampaignName);
                if (result == null) {
                    result = new HashSet<String>(campaignIds);
                } else {
                    result.retainAll(campaignIds);
                }
            }

            if (partialDescription != null) {
                List<String> campaignIds = campaignQueries.getCampaignsFromPartialDescription(partialDescription);
                if (result == null) {
                    result = new HashSet<String>(campaignIds);
                } else {
                    result.retainAll(campaignIds);
                }
            }

            if (partialAuthoredBy != null) {
                List<String> campaignIds = campaignQueries.getCampaignsFromPartialAuthoredBy(partialAuthoredBy);
                if (result == null) {
                    result = new HashSet<String>(campaignIds);
                } else {
                    result.retainAll(campaignIds);
                }
            }

            if (startDate != null) {
                List<String> campaignIds = campaignQueries.getCampaignsOnOrAfterDate(startDate);
                if (result == null) {
                    result = new HashSet<String>(campaignIds);
                } else {
                    result.retainAll(campaignIds);
                }
            }

            if (endDate != null) {
                List<String> campaignIds = campaignQueries.getCampaignsOnOrBeforeDate(endDate);
                if (result == null) {
                    result = new HashSet<String>(campaignIds);
                } else {
                    result.retainAll(campaignIds);
                }
            }

            if (privacyState != null) {
                List<String> campaignIds = campaignQueries.getCampaignsWithPrivacyState(privacyState);
                if (result == null) {
                    result = new HashSet<String>(campaignIds);
                } else {
                    result.retainAll(campaignIds);
                }
            }

            if (runningState != null) {
                List<String> campaignIds = campaignQueries.getCampaignsWithRunningState(runningState);
                if (result == null) {
                    result = new HashSet<String>(campaignIds);
                } else {
                    result.retainAll(campaignIds);
                }
            }

            if (result == null) {
                result = new HashSet<String>(campaignQueries.getAllCampaignIds());
            }

            return result;
        } catch (DataAccessException e) {
            throw new ServiceException(e);
        }
    }
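Several of the examples above (the PhyloNet taxa intersection, the jforum keyword search, and the ohmage campaign search) share the same idiom: start with a null accumulator, seed it from the first collection, then retainAll each subsequent collection so the final set is the intersection of all of them. A stripped-down, hypothetical sketch of that pattern (the helper name and data are invented for illustration):

    import java.util.Collection;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class IntersectAllDemo {

        // Returns the intersection of all collections; an empty input yields an empty set.
        static <T> Set<T> intersectAll(List<? extends Collection<T>> collections) {
            Set<T> result = null;
            for (Collection<T> c : collections) {
                if (result == null) {
                    // The first collection seeds the result.
                    result = new HashSet<>(c);
                } else {
                    // Each subsequent collection narrows it down.
                    result.retainAll(c);
                }
            }
            return result == null ? new HashSet<>() : result;
        }

        public static void main(String[] args) {
            List<Set<Integer>> sets = List.of(
                    Set.of(1, 2, 3, 4),
                    Set.of(2, 3, 4, 5),
                    Set.of(3, 4, 6));
            System.out.println(intersectAll(sets)); // [3, 4] (iteration order not guaranteed)
        }
    }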