List of usage examples for java.util Set retainAll
boolean retainAll(Collection<?> c);
From source file:edu.stanford.muse.index.IndexUtils.java
/**
 * Version that stores actual dates instead of just counts for each facet.
 * <p>
 * Builds a map of facet-group name (e.g. "groups", "correspondent", "direction",
 * "annotations", "folders") to the {@link DetailedFacetItem}s computed over the
 * given documents. Several groups are only emitted when they would show at least
 * two distinct values, and some are suppressed entirely in public mode.
 *
 * @param docs    the documents to facet over
 * @param archive supplies the address book and group assigner used for partitioning
 * @return ordered map of facet-group name to its (sorted, heaviest-first) facet items
 */
public static Map<String, Collection<DetailedFacetItem>> computeDetailedFacets(Collection<Document> docs, Archive archive) {
    AddressBook addressBook = archive.addressBook;
    GroupAssigner groupAssigner = archive.groupAssigner;
    Map<String, Collection<DetailedFacetItem>> facetMap = new LinkedHashMap<String, Collection<DetailedFacetItem>>();
    // Note: order is important here -- the facets will be displayed in the order they are inserted in facetMap
    // current order: sentiments, groups, people, direction, folders
    // (Sentiment facets are currently disabled; the previous indexer/lexicon-based
    // implementation that intersected docsForQuery results with docSet was removed
    // from the active path -- see version history if it needs to be restored.)
    Set<Document> docSet = new LinkedHashSet<Document>(docs);
    // Bucket docs by their (lower-cased) annotation/comment text.
    Map<String, Set<Document>> tagToDocs = new LinkedHashMap<String, Set<Document>>();
    for (Document d : docs) {
        if (!Util.nullOrEmpty(d.comment)) {
            String tag = d.comment.toLowerCase();
            Set<Document> set = tagToDocs.get(tag);
            if (set == null) {
                set = new LinkedHashSet<Document>();
                tagToDocs.put(tag, set);
            }
            set.add(d);
        }
    }
    if (addressBook != null) {
        // groups
        if (!ModeConfig.isPublicMode() && groupAssigner != null) {
            Map<SimilarGroup<String>, DetailedFacetItem> groupMap = partitionDocsByGroup(docs, groupAssigner);
            facetMap.put("groups", groupMap.values());
        }
        // people
        Map<Contact, DetailedFacetItem> peopleMap = partitionDocsByPerson(docs, addressBook);
        facetMap.put("correspondent", peopleMap.values());
        // direction (only useful if more than one direction is present)
        Map<String, DetailedFacetItem> directionMap = partitionDocsByDirection(docs, addressBook);
        if (directionMap.size() > 1)
            facetMap.put("direction", directionMap.values());
        // flags -- provide them only if they have at least 2 types in these docs.
        // if all docs have the same value for a particular flag, no point showing it.
        Map<String, DetailedFacetItem> doNotTransferMap = partitionDocsByDoNotTransfer(docs);
        if (doNotTransferMap.size() > 1)
            facetMap.put("transfer", doNotTransferMap.values());
        Map<String, DetailedFacetItem> transferWithRestrictionsMap = partitionDocsByTransferWithRestrictions(
                docs);
        if (transferWithRestrictionsMap.size() > 1)
            facetMap.put("restrictions", transferWithRestrictionsMap.values());
        Map<String, DetailedFacetItem> reviewedMap = partitionDocsByReviewed(docs);
        if (reviewedMap.size() > 1)
            facetMap.put("reviewed", reviewedMap.values());
        // annotations: one facet per tag, plus a "none" bucket for unannotated docs
        List<DetailedFacetItem> tagItems = new ArrayList<DetailedFacetItem>();
        Set<Document> unannotatedDocs = new LinkedHashSet<Document>(docSet);
        for (String tag : tagToDocs.keySet()) {
            Set<Document> docsForTag = tagToDocs.get(tag);
            docsForTag.retainAll(docSet); // restrict to the docs under consideration
            unannotatedDocs.removeAll(docsForTag);
            tagItems.add(new DetailedFacetItem(tag, tag, new HashSet<Document>(docsForTag), "annotation", tag));
        }
        if (unannotatedDocs.size() > 0)
            tagItems.add(new DetailedFacetItem("none", "none", new HashSet<Document>(unannotatedDocs),
                    "annotation", "" /* empty value for annotation */));
        if (tagItems.size() > 1)
            facetMap.put("annotations", tagItems);
        // attachments
        if (!ModeConfig.isPublicMode()) {
            Map<String, DetailedFacetItem> attachmentTypesMap = partitionDocsByAttachmentType(docs);
            facetMap.put("attachment type", attachmentTypesMap.values());
        }
    }
    if (!ModeConfig.isPublicMode()) {
        Map<String, DetailedFacetItem> folderNameMap = partitionDocsByFolder(docs);
        if (folderNameMap.size() > 1)
            facetMap.put("folders", folderNameMap.values());
    }
    // sort so that in each topic, the heaviest facets are first
    for (String s : facetMap.keySet()) {
        Collection<DetailedFacetItem> detailedFacets = facetMap.get(s);
        List<DetailedFacetItem> list = new ArrayList<DetailedFacetItem>(detailedFacets);
        Collections.sort(list);
        facetMap.put(s, list);
    }
    return facetMap;
}
From source file:org.apache.falcon.resource.proxy.SchedulableEntityManagerProxy.java
/**
 * Proxies an entity update across colos.
 * <p>
 * Computes three colo sets from the old and new entity definitions:
 * colos only in the old definition get a "delete", colos common to both get an
 * "update", and colos only in the new definition get a "submit". If every remote
 * call succeeds (and we are not in embedded mode), the update is finally applied
 * to the local/prism store as well.
 */
@POST
@Path("update/{type}/{entity}")
@Produces({ MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON })
@Monitored(event = "update")
@Override
public APIResult update(@Context HttpServletRequest request,
        @Dimension("entityType") @PathParam("type") final String type,
        @Dimension("entityName") @PathParam("entity") final String entityName,
        @Dimension("colo") @QueryParam("colo") String ignore,
        @QueryParam("skipDryRun") final Boolean skipDryRun) {
    // Buffer the request body so it can be re-read for each per-colo invocation.
    final HttpServletRequest bufferedRequest = new BufferedRequest(request);
    final Set<String> oldColos = getApplicableColos(type, entityName);
    final Set<String> newColos = getApplicableColos(type, getEntity(bufferedRequest, type));
    final Set<String> mergedColos = new HashSet<String>();
    mergedColos.addAll(oldColos);
    mergedColos.retainAll(newColos); // Common colos where update should be called
    newColos.removeAll(oldColos); // New colos where submit should be called
    oldColos.removeAll(mergedColos); // Old colos where delete should be called
    Map<String, APIResult> results = new HashMap<String, APIResult>();
    boolean result = true;
    if (!oldColos.isEmpty()) {
        // Entity no longer applies to these colos: remove it there.
        results.put(FALCON_TAG + "/delete", new EntityProxy(type, entityName) {
            @Override
            protected Set<String> getColosToApply() {
                return oldColos;
            }

            @Override
            protected APIResult doExecute(String colo) throws FalconException {
                return getConfigSyncChannel(colo).invoke("delete", bufferedRequest, type, entityName, colo);
            }
        }.execute());
    }
    if (!mergedColos.isEmpty()) {
        // Entity applies both before and after: plain update.
        results.put(FALCON_TAG + "/update", new EntityProxy(type, entityName) {
            @Override
            protected Set<String> getColosToApply() {
                return mergedColos;
            }

            @Override
            protected APIResult doExecute(String colo) throws FalconException {
                return getConfigSyncChannel(colo).invoke("update", bufferedRequest, type, entityName, colo,
                        skipDryRun);
            }
        }.execute());
    }
    if (!newColos.isEmpty()) {
        // Entity newly applies to these colos: submit it there.
        results.put(FALCON_TAG + "/submit", new EntityProxy(type, entityName) {
            @Override
            protected Set<String> getColosToApply() {
                return newColos;
            }

            @Override
            protected APIResult doExecute(String colo) throws FalconException {
                return getConfigSyncChannel(colo).invoke("submit", bufferedRequest, type, colo);
            }
        }.execute());
    }
    for (APIResult apiResult : results.values()) {
        if (apiResult.getStatus() != APIResult.Status.SUCCEEDED) {
            result = false;
        }
    }
    // update only if all are updated
    if (!embeddedMode && result) {
        results.put(PRISM_TAG, super.update(bufferedRequest, type, entityName, currentColo, skipDryRun));
    }
    return consolidateResult(results, APIResult.class);
}
From source file:org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.ProducedVariableVisitor.java
@Override public Void visitSubplanOperator(SubplanOperator op, Void arg) throws AlgebricksException { Set<LogicalVariable> producedVars = new HashSet<>(); Set<LogicalVariable> liveVars = new HashSet<>(); for (ILogicalPlan p : op.getNestedPlans()) { for (Mutable<ILogicalOperator> r : p.getRoots()) { VariableUtilities.getProducedVariablesInDescendantsAndSelf(r.getValue(), producedVars); VariableUtilities.getSubplanLocalLiveVariables(r.getValue(), liveVars); }//from www .j a v a2s . c o m } producedVars.retainAll(liveVars); producedVariables.addAll(producedVars); return null; }
From source file:com.bluexml.side.Integration.alfresco.sql.synchronization.nodeService.NodeServiceImpl.java
/**
 * Builds and executes one INSERT statement per type in the node's type hierarchy
 * (self plus parents), synchronizing the node's properties into the SQL database.
 * For each type, only the properties declared by that type (or its default aspects)
 * and accepted by the filterer are written.
 *
 * NOTE(review): the SQL is assembled by string formatting/joining. This is safe only
 * if getSQLFormatFromSerializable() reliably escapes/quotes every value -- confirm,
 * or migrate to parameterized statements.
 */
private void createCore(NodeRef nodeRef) {
    QName nodeType = nodeService.getType(nodeRef);
    String type_name = nodeType.getLocalName();
    List<String> sqlQueries = new ArrayList<String>();
    // The node's own type plus all ancestor types -- one INSERT per level.
    List<QName> parentNames = nodeHelper.getParentAndSelfQNames(nodeRef);
    Map<QName, Serializable> nodeProperties = nodeService.getProperties(nodeRef);
    for (QName type_qname : parentNames) {
        type_name = type_qname.getLocalName();
        String simplified_type_name = databaseDictionary.resolveClassAsTableName(type_name);
        Map<String, String> properties = new LinkedHashMap<String, String>();
        // We will only process the properties which are related
        // to the current type
        TypeDefinition currentTypeDefinition = dictionaryService.getType(type_qname);
        Map<QName, PropertyDefinition> currentTypeProperties = new HashMap<QName, PropertyDefinition>();
        currentTypeProperties.putAll(currentTypeDefinition.getProperties());
        for (AspectDefinition ad : currentTypeDefinition.getDefaultAspects()) {
            currentTypeProperties.putAll(ad.getProperties());
        }
        // Intersect the node's actual properties with those declared by this type.
        // Copy first: retainAll on nodeProperties.keySet() would mutate the node map.
        Set<QName> iterablePropertiesKeySet = new HashSet<QName>(nodeProperties.keySet());
        Set<QName> currentTypePropertiesKeySet = currentTypeProperties.keySet();
        iterablePropertiesKeySet.retainAll(currentTypePropertiesKeySet);
        for (QName key : iterablePropertiesKeySet) {
            if (filterer.acceptPropertyQName(key)) {
                PropertyDefinition propertyDefinition = dictionaryService.getProperty(key);
                String value = getSQLFormatFromSerializable(nodeProperties.get(key), propertyDefinition);
                String originalName = key.getLocalName();
                // Column name falls back to the model-local name when no mapping exists.
                String resolvedColumnName = databaseDictionary.resolveAttributeAsColumnName(originalName,
                        type_name);
                properties.put((resolvedColumnName != null ? resolvedColumnName : originalName), value);
            }
        }
        String ids = StringUtils.join(properties.keySet().iterator(), " , ");
        String values = StringUtils.join(properties.values().iterator(), " , ");
        if (!properties.isEmpty()) {
            String sql_query = String.format("INSERT INTO %1$s ( %2$s ) VALUES ( %3$s )",
                    simplified_type_name, ids, values);
            sqlQueries.add(sql_query);
        } else {
            logger.error("You must accept at least the node id in the definition of the node filterer");
        }
    }
    executeSQLQuery(sqlQueries);
}
From source file:pt.ua.scaleus.api.API.java
/**
 * Returns the (possibly cached) Jena dataset for the given name, creating and
 * text-indexing it on first access. The TDB store is wrapped with a Lucene text
 * index over predicates that are used exclusively with literal objects; if the
 * Lucene directory cannot be opened, the raw TDB dataset is used instead.
 *
 * @param name logical dataset name; also used as the TDB and index directory suffix
 * @return the ready-to-use dataset (never null)
 */
public Dataset getDataset(String name) {
    Dataset dataset = null;
    if (datasets.containsKey(name)) {
        dataset = datasets.get(name);
    } else {
        log.debug("Loading... " + name);
        Dataset dataset_aux = TDBFactory.createDataset(directory + name);
        Model dataset_model = ModelFactory.createOntologyModel();
        // Read the default model inside a READ transaction.
        dataset_aux.begin(ReadWrite.READ);
        try {
            dataset_model = dataset_aux.getDefaultModel();
        } finally {
            dataset_aux.end();
        }
        // Define the index mapping
        EntityDefinition entDef = new EntityDefinition("uri", "text");
        entDef.setPrimaryPredicate(RDFS.label.asNode());
        entDef.setUidField("uid"); //synchronized within the index
        entDef.setLangField("lang");
        // Disable properties from index used for literals and resources at same time:
        // partition predicates by whether their objects are literals, and keep only
        // those that are literal-only.
        ExtendedIterator<Statement> op = dataset_model.listStatements();
        Set<Property> literal_props = new HashSet<>();
        Set<Property> other_props = new HashSet<>();
        while (op.hasNext()) {
            Statement stat = op.next();
            if (stat.getObject().isLiteral()) {
                literal_props.add(stat.getPredicate());
            } else {
                other_props.add(stat.getPredicate());
            }
        }
        Set<Property> intersection = new HashSet<>(literal_props);
        intersection.retainAll(other_props);
        literal_props.removeAll(intersection);
        // NOTE(review): setPrimaryPredicate is called once per literal-only property;
        // if the jena-text API replaces (rather than accumulates) the primary
        // predicate, only the last property would end up indexed -- confirm against
        // the jena-text EntityDefinition javadoc for the version in use.
        for (Property prop : literal_props) {
            entDef.setPrimaryPredicate(prop);
        }
        // Lucene, in memory.
        // Join together into a dataset
        TextIndexConfig config = new TextIndexConfig(entDef);
        config.setValueStored(true); //save literals
        File indexFile = new File(index + name);
        boolean indexIsCreated = indexFile.exists();
        try {
            Directory dir = new SimpleFSDirectory(indexFile);
            dataset = TextDatasetFactory.createLucene(dataset_aux, dir, config);
        } catch (IOException ex) {
            // Fall back to the plain TDB dataset when the index cannot be opened.
            log.error("Index disabled in " + name);
            dataset = dataset_aux; //Lucene is disable here
        }
        // First run: copy the model in so Lucene indexes it; otherwise just sync prefixes.
        dataset.begin(ReadWrite.WRITE);
        try {
            Model model = dataset.getDefaultModel();
            if (!indexIsCreated) {
                log.debug("Indexing... " + name);
                model.removeAll();
                model.add(dataset_model);
            } else {
                model.setNsPrefixes(dataset_model.getNsPrefixMap());
            }
            dataset.commit();
        } finally {
            dataset.end();
        }
        datasets.put(name, dataset);
    }
    return dataset;
}
From source file:com.amalto.core.storage.inmemory.InMemoryJoinResults.java
public CloseableIterator<DataRecord> process(Storage storage, InMemoryJoinNode node) { // Additional logging if (LOGGER.isTraceEnabled()) { InMemoryJoinResults.trace(node); }/*from ww w. ja va 2 s .com*/ if (LOGGER.isDebugEnabled()) { InMemoryJoinResults.debug(node); } // Evaluate direct children Set<Object> childIds = new HashSet<Object>(); for (InMemoryJoinNode child : node.children.keySet()) { Set<Object> childrenIds = InMemoryJoinResults._evaluateConditions(storage, child); switch (node.merge) { case UNION: case NONE: childIds.addAll(childrenIds); break; case INTERSECTION: childIds.retainAll(childrenIds); break; default: throw new IllegalArgumentException("Not supported: " + child.merge); } } // Compute DataRecord results if (childIds.isEmpty()) { return EmptyIterator.INSTANCE; } else { FieldMetadata field = node.type.getKeyFields().iterator().next(); // TODO Support compound keys. Condition condition = InMemoryJoinResults.buildConditionFromValues(node.expression.getCondition(), field, childIds); UserQueryBuilder qb = from(node.type).where(condition); for (TypedExpression typedExpression : node.expression.getSelectedFields()) { TypedExpression mappedExpression = typedExpression.accept(new VisitorAdapter<TypedExpression>() { @Override public TypedExpression visit(Alias alias) { return new Alias(alias.getTypedExpression().accept(this), alias.getAliasName()); } @Override public TypedExpression visit(Field field) { FieldMetadata fieldMetadata = field.getFieldMetadata(); TypeMapping typeMapping = mappings .getMappingFromDatabase(fieldMetadata.getContainingType()); FieldMetadata user = typeMapping.getUser(fieldMetadata); return new Field(user); } }); qb.select(mappedExpression); } return (CloseableIterator<DataRecord>) storage.fetch(qb.getSelect()).iterator(); } }
From source file:com.intel.podm.allocation.validation.ComputerSystemCollector.java
/**
 * Intersects the computer-system sets gathered from each requested chassis.
 * Every chassis must contribute at least one computer system, and the running
 * intersection must never become empty -- either condition aborts allocation
 * with a validation error.
 *
 * @param allComputerSystems one set of computer systems per chassis
 * @return the computer systems common to every chassis
 * @throws RequestValidationException when a chassis has no computer system, or
 *         when no single computer system spans all chassis
 */
private Set<ComputerSystem> getCommonComputerSystems(List<Set<ComputerSystem>> allComputerSystems)
        throws RequestValidationException {
    Violations violations = new Violations();
    Set<ComputerSystem> intersection = newHashSet();
    for (Set<ComputerSystem> systemsForChassis : allComputerSystems) {
        if (isEmpty(systemsForChassis)) {
            throw new RequestValidationException(violations
                    .addViolation("Allocation of assets on chassis without computer system is not supported."));
        }
        // Seed the intersection from the first chassis, then narrow it down.
        if (isEmpty(intersection)) {
            intersection.addAll(systemsForChassis);
            continue;
        }
        intersection.retainAll(systemsForChassis);
        if (isEmpty(intersection)) {
            throw new RequestValidationException(
                    violations.addViolation("Allocation of assets on multiple chassis is not supported."));
        }
    }
    return intersection;
}
From source file:opennlp.tools.fca.BasicLevelMetrics.java
/**
 * Computes two similarity measures between two attribute intents:
 * the Jaccard index |A∩B| / |A∪B| and the simple matching coefficient
 * (|A∩B| + |attributes \ (A∪B)|) / |attributes|. Either measure is 0 when its
 * denominator would be 0.
 *
 * @param intent1 first intent (attribute ids)
 * @param intent2 second intent (attribute ids)
 * @return {@code [jaccard, smc]}
 */
public double[] simJ_SMC(ArrayList<Integer> intent1, ArrayList<Integer> intent2) {
    // Shared attributes.
    Set<Integer> common = new HashSet<Integer>(intent1);
    common.retainAll(intent2);
    // All attributes present in either intent.
    Set<Integer> combined = new HashSet<Integer>(intent1);
    combined.addAll(intent2);
    // Attributes of the context absent from both intents (count as matches for SMC).
    Set<Integer> absentFromBoth = new HashSet<Integer>();
    absentFromBoth.addAll(this.attributes);
    absentFromBoth.removeAll(combined);
    double smc = (this.attributes.size() > 0)
            ? 1. * (common.size() + absentFromBoth.size()) / this.attributes.size()
            : 0;
    double jaccard = (combined.size() > 0) ? 1. * common.size() / combined.size() : 0;
    return new double[] { jaccard, smc };
}
From source file:org.sakaiproject.lessonbuildertool.service.LessonsGradeInfoProvider.java
public Map<String, List<String>> getAllExternalAssignments(String gradebookUid, Collection<String> studentIds) { //System.out.println("isassignmentgrouped lesson-builder:1788 " + isAssignmentGrouped("lesson-builder:1788")); //System.out.println("isassignmentgrouped lesson-builder:comment:7289 " + isAssignmentGrouped("lesson-builder:comment:7289")); //System.out.println("isUserInPath " + isUserInPath("c08d3ac9-c717-472a-ad91-7ce0b434f42f", lessonsAccess.getPagePaths(1788L,false),"60c04ab8-40e5-4eb6-9f8d-7006ed023109")); //System.out.println("isUserInPath " + isUserInPath("c08d3ac9-c717-472a-ad91-7ce0b434f42f", lessonsAccess.getItemPaths(7289L),"60c04ab8-40e5-4eb6-9f8d-7006ed023109")); //HashSet<String> userset = new HashSet<String>(); //userset.add("c08d3ac9-c717-472a-ad91-7ce0b434f42f"); //userset.add("9d1a25ba-4735-48c4-bd5e-ff329f9f7749"); //System.out.println("usersInPath " + usersInPath(userset, lessonsAccess.getPagePaths(1788L,false),"60c04ab8-40e5-4eb6-9f8d-7006ed023109")); //System.out.println("userInPath " + usersInPath(userset, lessonsAccess.getItemPaths(7289L),"60c04ab8-40e5-4eb6-9f8d-7006ed023109")); //System.out.println(isAssignmentVisible("lesson-builder:1788", "c08d3ac9-c717-472a-ad91-7ce0b434f42f")); //System.out.println(isAssignmentVisible("lesson-builder:comment:7289", "c08d3ac9-c717-472a-ad91-7ce0b434f42f")); //System.out.println(getExternalAssignmentsForCurrentUser("60c04ab8-40e5-4eb6-9f8d-7006ed023109")); Map<String, List<String>> allExternals = new HashMap<String, List<String>>(); String ref = "/site/" + gradebookUid; List<User> allowedUsers = securityService.unlockUsers(SimplePage.PERMISSION_LESSONBUILDER_READ, ref); if (allowedUsers.size() < 1) return allExternals; // no users allowed, nothing to do List<String> allowedIds = new ArrayList<String>(); for (User user : allowedUsers) allowedIds.add(user.getId());/*from w w w. ja v a 2 s . 
co m*/ // remove any user without lesson builder read in the site studentIds.retainAll(allowedIds); for (String studentId : studentIds) { allExternals.put(studentId, new ArrayList<String>()); } List<SimplePageItem> externalItems = dao.findGradebookItems(gradebookUid); for (SimplePageItem item : externalItems) { Set<Path> paths = lessonsAccess.getItemPaths(item.getId()); Set<String> users = usersInPath(studentIds, paths, gradebookUid); // add this assignment to all users that are in the groups for (String userId : users) if (allExternals.containsKey(userId)) { if (item.getGradebookId() != null) allExternals.get(userId).add(item.getGradebookId()); if (item.getAltGradebook() != null) allExternals.get(userId).add(item.getAltGradebook()); } } List<SimplePage> externalPages = dao.findGradebookPages(gradebookUid); for (SimplePage page : externalPages) { Set<Path> paths = lessonsAccess.getPagePaths(page.getPageId(), false); Set<String> users = usersInPath(studentIds, paths, gradebookUid); // add this assignment to all users that are in the groups for (String userId : users) if (allExternals.containsKey(userId)) { if (page.getGradebookPoints() != null) allExternals.get(userId).add("lesson-builder:" + page.getPageId()); } } // now handle other tools. 
If we modified their groups, we need to find the original groups // and return the users that match those groups // find list of items we've modified // map of external ID to list of original groups for that item Map<String, ArrayList<String>> otherTools = getExternalAssigns(gradebookUid); // for each externalId for (Map.Entry<String, ArrayList<String>> entry : otherTools.entrySet()) { String externalId = entry.getKey(); // if no group restriction if (entry.getValue() == null) { // add this item to all students for (String userId : studentIds) if (allExternals.containsKey(userId)) allExternals.get(userId).add(externalId); } else { // otherwise find users that are in the groups Set<String> okUsers = new HashSet<String>( authzGroupService.getAuthzUsersInGroups(new HashSet<String>(entry.getValue()))); okUsers.retainAll(studentIds); // and add this item just to them for (String u : okUsers) if (allExternals.containsKey(u)) { allExternals.get(u).add(externalId); } } } //System.out.println("getAllExternalAssignments " + studentIds + " " + allExternals); return allExternals; }
From source file:org.alfresco.repo.version.VersionableAspect.java
/**
 * On update properties policy behaviour.
 *
 * If applicable and "cm:autoVersionOnUpdateProps" is TRUE then version the node on
 * properties update (even if no content updates). The node is skipped when it does
 * not exist, is locked read-only, lacks the versionable aspect, carries the
 * temporary aspect, or was already versioned in this transaction. If any explicitly
 * excluded property actually changed value, no auto-version is created.
 *
 * @since 3.2
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public void onUpdateProperties(NodeRef nodeRef, Map<QName, Serializable> before,
        Map<QName, Serializable> after) {
    if ((this.nodeService.exists(nodeRef) == true) && !lockService.isLockedAndReadOnly(nodeRef)
            && (this.nodeService.hasAspect(nodeRef, ContentModel.ASPECT_VERSIONABLE) == true)
            && (this.nodeService.hasAspect(nodeRef, ContentModel.ASPECT_TEMPORARY) == false)) {
        // Disable this behaviour while we work, so creating the version does not
        // re-trigger onUpdateProperties recursively; re-enabled in the finally.
        onUpdatePropertiesBehaviour.disable();
        try {
            // Nodes already versioned within this transaction are skipped.
            Map<NodeRef, NodeRef> versionedNodeRefs = (Map) AlfrescoTransactionSupport
                    .getResource(KEY_VERSIONED_NODEREFS);
            if (versionedNodeRefs == null || versionedNodeRefs.containsKey(nodeRef) == false) {
                boolean autoVersionProps = false;
                Boolean value = (Boolean) this.nodeService.getProperty(nodeRef,
                        ContentModel.PROP_AUTO_VERSION_PROPS);
                if (value != null) {
                    // If the value is not null then
                    autoVersionProps = value.booleanValue();
                }
                if (autoVersionProps == true) {
                    // Check for explicitly excluded props - if one or more excluded props changes
                    // then do not auto-version on this event (even if other props changed)
                    if (excludedOnUpdatePropQNames.size() > 0) {
                        // Union of before/after keys, narrowed to the excluded set.
                        Set<QName> propNames = new HashSet<QName>(after.size() * 2);
                        propNames.addAll(after.keySet());
                        propNames.addAll(before.keySet());
                        propNames.retainAll(excludedOnUpdatePropQNames);
                        if (propNames.size() > 0) {
                            for (QName prop : propNames) {
                                Serializable beforeValue = before.get(prop);
                                Serializable afterValue = after.get(prop);
                                if (EqualsHelper.nullSafeEquals(beforeValue, afterValue) != true) {
                                    // excluded - do not version
                                    return;
                                }
                            }
                        }
                        // drop through and auto-version
                    }
                    // Create the auto-version
                    Map<String, Serializable> versionProperties = new HashMap<String, Serializable>(4);
                    versionProperties.put(Version.PROP_DESCRIPTION,
                            I18NUtil.getMessage(MSG_AUTO_VERSION_PROPS));
                    versionProperties.put(VersionModel.PROP_VERSION_TYPE, VersionType.MINOR);
                    createVersionImpl(nodeRef, versionProperties);
                }
            }
        } finally {
            onUpdatePropertiesBehaviour.enable();
        }
    }
}