List of usage examples for java.util.Set.remove
boolean remove(Object o);
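Before the examples from real projects, a minimal self-contained sketch of the contract (class name and values are illustrative): remove deletes the element if present, and its boolean return value reports whether the set changed.

import java.util.HashSet;
import java.util.Set;

public class SetRemoveDemo {
    public static void main(String[] args) {
        Set<String> names = new HashSet<>();
        names.add("alice");
        names.add("bob");

        // returns true: "alice" was present and has now been removed
        System.out.println(names.remove("alice")); // true

        // returns false: the element is already gone, the set is unchanged
        System.out.println(names.remove("alice")); // false

        // removing an element that was never present is also a harmless no-op
        System.out.println(names.remove("carol")); // false
    }
}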
From source file:base.Engine.java
static Set<TreeSet<PatternInstance>> resolveTree(Set<TreeSet<PatternInstance>> returnSet,
        Set<PatternInstance> colls) {
    TreeSet<PatternInstance> workingTree = new TreeSet<>(new InstanceComparator());
    for (PatternInstance p : colls) {
        TreeSet<PatternInstance> onThisTree = null;
        for (TreeSet<PatternInstance> it : returnSet) {
            if (it.contains(p)) {
                onThisTree = it;
                break;
            }
        }
        if (onThisTree == null) {
            workingTree.add(p);
        } else {
            workingTree.addAll(onThisTree);
            returnSet.remove(onThisTree);
        }
    }
    returnSet.add(workingTree);
    return returnSet;
}
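Note that returnSet.remove(onThisTree) removes an entire TreeSet from the outer set. If returnSet is hash-based, the lookup depends on the element set's hashCode(), which for java.util collections is derived from the contained elements; this is safe here only because onThisTree is not mutated between being found in returnSet and being removed.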
From source file:models.NotificationEvent.java
private static Set<User> getReceivers(User sender, PullRequest pullRequest) {
    Set<User> watchers = getDefaultReceivers(pullRequest);
    watchers.remove(sender);
    return watchers;
}
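Here remove drops the sender from the computed receiver set, so the user who triggered the event is not notified about their own action. Because remove is a no-op when the element is absent, no membership check is needed first.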
From source file:net.ontopia.topicmaps.utils.TopicMapSynchronizer.java
/**
 * INTERNAL: Updates the target topic in the usual way, but does not
 * delete associations. Instead, it registers its findings using the
 * AssociationTracker. It is then up to the caller to delete
 * unwanted associations. The general principle is that associations
 * are wanted as long as there is one source that wants them; the
 * method will therefore feel free to copy new associations from the
 * source. In addition, associations to topics outside the set of
 * topics being synchronized must be kept because they cannot be
 * synchronized (they belong to the topics not being synchronized).
 */
private static void update(TopicMapIF target, TopicIF source, DeciderIF<TMObjectIF> tfilter,
        DeciderIF<TMObjectIF> sfilter, AssociationTracker tracker) {
    TopicMapBuilderIF builder = target.getBuilder();

    // find target
    TopicIF targett = getTopic(target, source);
    if (targett == null) {
        targett = builder.makeTopic();
        log.debug("Updating new target {} with source {}", targett, source);
    } else {
        log.debug("Updating existing target {} with source {}", targett, source);
    }
    targett = copyIdentifiers(targett, source);

    // synchronize types
    Set<TopicIF> origtypes = new CompactHashSet<TopicIF>(targett.getTypes());
    Iterator<TopicIF> topicIterator = source.getTypes().iterator();
    while (topicIterator.hasNext()) {
        TopicIF stype = topicIterator.next();
        TopicIF ttype = getOrCreate(target, stype);
        if (origtypes.contains(ttype))
            origtypes.remove(ttype);
        else
            targett.addType(ttype);
    }
    topicIterator = origtypes.iterator();
    while (topicIterator.hasNext())
        targett.removeType(topicIterator.next());

    // synchronize names
    Map<String, TopicNameIF> originalTopicNames = new HashMap<String, TopicNameIF>();
    Iterator<TopicNameIF> topicnameIterator = targett.getTopicNames().iterator();
    while (topicnameIterator.hasNext()) {
        TopicNameIF bn = topicnameIterator.next();
        if (tfilter.ok(bn)) {
            log.debug(" target name included {}", bn);
            originalTopicNames.put(KeyGenerator.makeTopicNameKey(bn), bn);
        } else {
            log.debug(" target name excluded {}", bn);
        }
    }
    topicnameIterator = source.getTopicNames().iterator();
    while (topicnameIterator.hasNext()) {
        TopicNameIF sbn = topicnameIterator.next();
        if (!sfilter.ok(sbn)) {
            log.debug(" source name excluded {}", sbn);
            continue;
        }
        log.debug(" source name included {}", sbn);
        TopicIF ttype = getOrCreate(target, sbn.getType());
        Collection<TopicIF> tscope = translateScope(target, sbn.getScope());
        String key = KeyGenerator.makeScopeKey(tscope) + "$" + KeyGenerator.makeTopicKey(ttype)
                + "$$" + sbn.getValue();
        if (originalTopicNames.containsKey(key)) {
            TopicNameIF tbn = originalTopicNames.get(key);
            update(tbn, sbn, tfilter);
            originalTopicNames.remove(key);
        } else {
            TopicNameIF tbn = builder.makeTopicName(targett, ttype, sbn.getValue());
            addScope(tbn, tscope);
            addReifier(tbn, sbn.getReifier(), tfilter, sfilter, tracker);
            update(tbn, sbn, tfilter);
            log.debug(" target name added {}", tbn);
        }
    }
    topicnameIterator = originalTopicNames.values().iterator();
    while (topicnameIterator.hasNext()) {
        TopicNameIF tbn = topicnameIterator.next();
        log.debug(" target name removed {}", tbn);
        tbn.remove();
    }

    // synchronize occurrences
    Map<String, OccurrenceIF> originalOccurrences = new HashMap<String, OccurrenceIF>();
    Iterator<OccurrenceIF> occurrenceIterator = targett.getOccurrences().iterator();
    while (occurrenceIterator.hasNext()) {
        OccurrenceIF occ = occurrenceIterator.next();
        if (tfilter.ok(occ)) {
            log.debug(" target occurrence included: {}", occ);
            originalOccurrences.put(KeyGenerator.makeOccurrenceKey(occ), occ);
        } else {
            log.debug(" target occurrence excluded {}", occ);
        }
    }
    occurrenceIterator = source.getOccurrences().iterator();
    while (occurrenceIterator.hasNext()) {
        OccurrenceIF socc = occurrenceIterator.next();
        if (!sfilter.ok(socc)) {
            log.debug(" source occurrence excluded {}", socc);
            continue;
        }
        log.debug(" source occurrence included: {}", socc);
        TopicIF ttype = getOrCreate(target, socc.getType());
        Collection<TopicIF> tscope = translateScope(target, socc.getScope());
        String key = KeyGenerator.makeScopeKey(tscope) + "$" + KeyGenerator.makeTopicKey(ttype)
                + KeyGenerator.makeDataKey(socc);
        if (originalOccurrences.containsKey(key))
            originalOccurrences.remove(key);
        else {
            OccurrenceIF tocc = builder.makeOccurrence(targett, ttype, "");
            CopyUtils.copyOccurrenceData(tocc, socc);
            addScope(tocc, tscope);
            addReifier(tocc, socc.getReifier(), tfilter, sfilter, tracker);
            log.debug(" target occurrence added {}", tocc);
        }
    }
    occurrenceIterator = originalOccurrences.values().iterator();
    while (occurrenceIterator.hasNext()) {
        OccurrenceIF tocc = occurrenceIterator.next();
        log.debug(" target occurrence removed {}", tocc);
        tocc.remove();
    }

    // synchronize associations
    // originals tracked by AssociationTracker, not the 'origs' set
    Iterator<AssociationRoleIF> roleIterator = targett.getRoles().iterator();
    while (roleIterator.hasNext()) {
        AssociationRoleIF role = roleIterator.next();
        AssociationIF assoc = role.getAssociation();
        if (tfilter.ok(assoc) && tracker.isWithinSyncSet(assoc)) {
            log.debug(" target association included: {}", assoc);
            tracker.unwanted(assoc); // means: unwanted if not found in source
        } else {
            log.debug(" target association excluded {}", assoc);
        }
    }
    roleIterator = source.getRoles().iterator();
    while (roleIterator.hasNext()) {
        AssociationRoleIF role = roleIterator.next();
        AssociationIF sassoc = role.getAssociation();
        if (!sfilter.ok(sassoc)) {
            log.debug(" source association excluded {}", sassoc);
            continue;
        }
        log.debug(" source association included: {}", sassoc);
        TopicIF ttype = getOrCreate(target, sassoc.getType());
        Collection<TopicIF> tscope = translateScope(target, sassoc.getScope());
        String key = KeyGenerator.makeTopicKey(ttype) + "$" + KeyGenerator.makeScopeKey(tscope)
                + "$" + makeRoleKeys(target, sassoc.getRoles());
        if (!tracker.isKnown(key)) {
            // if the key is not known it means this association does not
            // exist in the target, and so we must create it
            AssociationIF tassoc = builder.makeAssociation(ttype);
            addScope(tassoc, tscope);
            addReifier(tassoc, sassoc.getReifier(), tfilter, sfilter, tracker);
            Iterator<AssociationRoleIF> it2 = sassoc.getRoles().iterator();
            while (it2.hasNext()) {
                role = it2.next();
                builder.makeAssociationRole(tassoc, getOrCreate(target, role.getType()),
                        getOrCreate(target, role.getPlayer()));
            }
            log.debug(" target association added {}", tassoc);
        }
        tracker.wanted(key);
    }

    // run duplicate suppression
    DuplicateSuppressionUtils.removeDuplicates(targett);
    DuplicateSuppressionUtils.removeDuplicateAssociations(targett);
}
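The recurring idiom in this method is diff-by-removal: collect the target's existing types, names, and occurrences into a set or map, call remove for every item that is matched by the source, and then delete whatever is left over. Set.remove (and Map.remove) thus doubles as a "mark as seen" operation during synchronization.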
From source file:com.github.sevntu.checkstyle.internal.ChecksTest.java
private static void validateEclipseCsMetaXmlFileRules(String pkg, Set<Class<?>> pkgModules,
        Set<Node> rules) throws Exception {
    for (Node rule : rules) {
        final NamedNodeMap attributes = rule.getAttributes();
        final Node internalNameNode = attributes.getNamedItem("internal-name");

        Assert.assertNotNull(pkg + " checkstyle-metadata.xml must contain an internal name",
                internalNameNode);

        final String internalName = internalNameNode.getTextContent();
        final String classpath = "com.github.sevntu.checkstyle.checks." + pkg + "." + internalName;
        final Class<?> module = findModule(pkgModules, classpath);
        pkgModules.remove(module);

        Assert.assertNotNull("Unknown class found in " + pkg + " checkstyle-metadata.xml: "
                + internalName, module);

        final Node nameAttribute = attributes.getNamedItem("name");

        Assert.assertNotNull(pkg + " checkstyle-metadata.xml requires a name for " + internalName,
                nameAttribute);
        Assert.assertEquals(pkg + " checkstyle-metadata.xml requires a valid name for " + internalName,
                "%" + internalName + ".name", nameAttribute.getTextContent());

        final Node parentAttribute = attributes.getNamedItem("parent");

        Assert.assertNotNull(pkg + " checkstyle-metadata.xml requires a parent for " + internalName,
                parentAttribute);
        Assert.assertEquals(pkg + " checkstyle-metadata.xml requires a valid parent for " + internalName,
                "TreeWalker", parentAttribute.getTextContent());

        final Set<Node> children = XmlUtil.getChildrenElements(rule);

        validateEclipseCsMetaXmlFileRule(pkg, module, children);
    }
}
From source file:de.tudarmstadt.ukp.wikipedia.util.GraphUtilities.java
/**
 * Get a random subset (of size pResultSetSize) of the page set passed to the method.
 * @param pPageIDs The pages.
 * @param pResultSetSize The size of the result set.
 * @return A random subset of the original page set of the given size, or null if the requested
 *         subset size is larger than the original page set.
 */
public static Set<Integer> getRandomPageSubset(Set<Integer> pPageIDs, int pResultSetSize) {
    Set<Integer> uniqueRandomSet = new HashSet<Integer>();
    if (pPageIDs.size() < pResultSetSize) {
        logger.error("Requested subset size is larger than the original page set size.");
        return null;
    }
    Random rand = new Random();
    Object[] pageIdArray = pPageIDs.toArray();
    // If pResultSetSize is close to the size of the original page set, the probability of
    // randomly generating the offsets of the last missing pageIDs is quite low, which makes
    // the run-time unpredictable.
    // => if more than half of the pages should be included in the result set, it is better
    //    to remove random elements than to add them
    if (pResultSetSize > (pPageIDs.size() / 2)) {
        uniqueRandomSet.addAll(pPageIDs);
        while (uniqueRandomSet.size() > pResultSetSize) {
            int randomOffset = rand.nextInt(pPageIDs.size());
            if (uniqueRandomSet.contains(pageIdArray[randomOffset])) {
                uniqueRandomSet.remove(pageIdArray[randomOffset]);
            }
        }
    } else {
        while (uniqueRandomSet.size() < pResultSetSize) {
            int randomOffset = rand.nextInt(pPageIDs.size());
            if (!uniqueRandomSet.contains(pageIdArray[randomOffset])) {
                uniqueRandomSet.add((Integer) pageIdArray[randomOffset]);
            }
        }
    }
    return uniqueRandomSet;
}
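The first branch above is the interesting use of Set.remove: when more than half of the input pages are requested, it is cheaper on average to copy the whole set and evict random elements than to draw distinct random elements one by one. A stripped-down sketch of that eviction loop (set contents and target size are illustrative):

import java.util.HashSet;
import java.util.Random;
import java.util.Set;

public class EvictToSizeDemo {
    public static void main(String[] args) {
        Set<Integer> copy = new HashSet<>(Set.of(1, 2, 3, 4, 5, 6, 7, 8));
        Integer[] elements = copy.toArray(new Integer[0]);
        int targetSize = 6;
        Random rand = new Random();
        while (copy.size() > targetSize) {
            // remove returns false and leaves the set untouched if the candidate
            // was already evicted, so the loop simply draws again
            copy.remove(elements[rand.nextInt(elements.length)]);
        }
        System.out.println(copy.size()); // 6
    }
}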
From source file:MapSet.java
public void remove(K key, V value) {
    Set<V> values = get(key);
    if (values != null) {
        values.remove(value);
    }
}
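Only the remove method of MapSet is shown above; the rest of the class is not in the source. A hypothetical, self-contained version of such a multimap wrapper, to show the method in context (the add and get helpers are assumptions, not taken from the original):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class MapSet<K, V> {
    private final Map<K, Set<V>> map = new HashMap<>();

    public void add(K key, V value) { // assumed helper, not in the source
        map.computeIfAbsent(key, k -> new HashSet<>()).add(value);
    }

    public Set<V> get(K key) { // assumed helper, not in the source
        return map.get(key);
    }

    public void remove(K key, V value) {
        Set<V> values = get(key);
        if (values != null) {
            values.remove(value);
        }
    }

    public static void main(String[] args) {
        MapSet<String, Integer> scores = new MapSet<>();
        scores.add("alice", 1);
        scores.add("alice", 2);
        scores.remove("alice", 2);   // removes 2 from alice's set
        scores.remove("bob", 7);     // unknown key: get returns null, no-op
        System.out.println(scores.get("alice")); // [1]
    }
}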
From source file:com.github.sevntu.checkstyle.internal.ChecksTest.java
private static void validateSonarProperties(Class<?> module, Set<Node> parameters) {
    final String moduleName = module.getName();
    final Set<String> properties = getFinalProperties(module);

    for (Node parameter : parameters) {
        final NamedNodeMap attributes = parameter.getAttributes();
        final Node paramKeyNode = attributes.getNamedItem("key");

        Assert.assertNotNull(moduleName + " requires a key for unknown parameter in sonar",
                paramKeyNode);

        final String paramKey = paramKeyNode.getTextContent();

        Assert.assertFalse(moduleName + " requires a valid key for unknown parameter in sonar",
                paramKey.isEmpty());
        Assert.assertTrue(moduleName + " has an unknown parameter in sonar: " + paramKey,
                properties.remove(paramKey));
    }

    for (String property : properties) {
        Assert.fail(moduleName + " parameter not found in sonar: " + property);
    }
}
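Note the idiom in the assertTrue call: properties.remove(paramKey) both verifies, via its boolean return value, that paramKey names a known property and consumes that entry, so the final loop can fail the test for every property that no sonar parameter ever matched.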
From source file:com.espertech.esper.epl.join.plan.NStreamOuterQueryPlanBuilder.java
/**
 * Recursively builds a substream-per-stream ordered tree graph using the
 * join information supplied for outer joins and from the query graph (where clause).
 * <p>
 * Required streams are considered first and their lookup is placed first in the list
 * to gain performance.
 * @param streamNum is the root stream number that supplies the incoming event to build the tree for
 * @param queryGraph contains where-clause stream relationship info
 * @param completedStreams is a temporary holder for streams already considered
 * @param substreamsPerStream is the ordered, tree-like structure to be filled
 * @param streamCallStack the query plan call stack of streams available via cursor
 * @param dependencyGraph - dependencies between historical streams
 * @throws ExprValidationException if the query planning failed
 */
protected static void recursiveBuildInnerJoin(int streamNum, Stack<Integer> streamCallStack,
        QueryGraph queryGraph, Set<Integer> completedStreams,
        LinkedHashMap<Integer, int[]> substreamsPerStream, DependencyGraph dependencyGraph)
        throws ExprValidationException {
    // add this stream to the set of completed streams
    completedStreams.add(streamNum);

    // check if the dependencies have been satisfied
    if (dependencyGraph.hasDependency(streamNum)) {
        Set<Integer> dependencies = dependencyGraph.getDependenciesForStream(streamNum);
        for (Integer dependentStream : dependencies) {
            if (!streamCallStack.contains(dependentStream)) {
                throw new ExprValidationException(
                        "Historical stream " + streamNum + " parameter dependency originating in stream "
                                + dependentStream + " cannot or may not be satisfied by the join");
            }
        }
    }

    // Determine the streams we can navigate to from this stream
    Set<Integer> navigableStreams = queryGraph.getNavigableStreams(streamNum);

    // remove streams with a dependency on other streams not yet processed
    Integer[] navigableStreamArr = navigableStreams.toArray(new Integer[navigableStreams.size()]);
    for (int navigableStream : navigableStreamArr) {
        if (dependencyGraph.hasUnsatisfiedDependency(navigableStream, completedStreams)) {
            navigableStreams.remove(navigableStream);
        }
    }

    // remove those already done
    navigableStreams.removeAll(completedStreams);

    // if we are a leaf node, we are done
    if (navigableStreams.isEmpty()) {
        substreamsPerStream.put(streamNum, new int[0]);
        return;
    }

    // First the outer (required) streams to this stream, then the inner (optional) streams
    int[] substreams = new int[navigableStreams.size()];
    substreamsPerStream.put(streamNum, substreams);

    int count = 0;
    for (int stream : navigableStreams) {
        substreams[count++] = stream;
        completedStreams.add(stream);
    }

    for (int stream : navigableStreams) {
        streamCallStack.push(stream);
        recursiveBuildInnerJoin(stream, streamCallStack, queryGraph, completedStreams,
                substreamsPerStream, dependencyGraph);
        streamCallStack.pop();
    }
}
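One detail worth copying from this method: navigableStreams is snapshotted into an array before the loop that calls navigableStreams.remove(...), because removing from a set while iterating over it directly (other than through Iterator.remove) throws ConcurrentModificationException. A minimal sketch of that snapshot-then-remove pattern with illustrative values:

import java.util.HashSet;
import java.util.Set;

public class SnapshotRemoveDemo {
    public static void main(String[] args) {
        Set<Integer> streams = new HashSet<>(Set.of(1, 2, 3, 4, 5));

        // Snapshot the set first; iterating the array leaves the set free to mutate.
        Integer[] snapshot = streams.toArray(new Integer[0]);
        for (int stream : snapshot) {
            if (stream % 2 == 0) { // stand-in for hasUnsatisfiedDependency(...)
                streams.remove(stream); // safe: we are not iterating 'streams' itself
            }
        }
        System.out.println(streams); // 1, 3, 5 remain
    }
}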
From source file:models.NotificationEvent.java
public static void afterNewComment(Comment comment) {
    AbstractPosting post = comment.getParent();

    NotificationEvent notiEvent = createFromCurrentUser(comment);
    notiEvent.title = formatReplyTitle(post);
    Set<User> receivers = getReceivers(post);
    receivers.addAll(getMentionedUsers(comment.contents));
    receivers.remove(UserApp.currentUser());
    notiEvent.receivers = receivers;
    notiEvent.eventType = NEW_COMMENT;
    notiEvent.oldValue = null;
    notiEvent.newValue = comment.contents;
    notiEvent.resourceType = comment.asResource().getType();
    notiEvent.resourceId = comment.asResource().getId();

    NotificationEvent.add(notiEvent);
}
From source file:models.NotificationEvent.java
public static void afterNewCommentWithState(Comment comment, State state) {
    AbstractPosting post = comment.getParent();

    NotificationEvent notiEvent = createFromCurrentUser(comment);
    notiEvent.title = formatReplyTitle(post);
    Set<User> receivers = getReceivers(post);
    receivers.addAll(getMentionedUsers(comment.contents));
    receivers.remove(UserApp.currentUser());
    notiEvent.receivers = receivers;
    notiEvent.eventType = NEW_COMMENT;
    notiEvent.oldValue = null;
    notiEvent.newValue = comment.contents + "\n" + state.state();
    notiEvent.resourceType = comment.asResource().getType();
    notiEvent.resourceId = comment.asResource().getId();

    NotificationEvent.add(notiEvent);
}