List of usage examples for com.google.common.collect Sets difference
public static <E> SetView<E> difference(final Set<E> set1, final Set<?> set2)
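Before the project-specific examples below, here is a minimal standalone sketch of the call (the set names are illustrative, not from any project): Sets.difference(set1, set2) returns an unmodifiable SetView of the elements that are in set1 but not in set2, and SetView.immutableCopy() materializes that view into an ImmutableSet.

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.Set;

public class SetsDifferenceDemo {
    public static void main(String[] args) {
        Set<String> deployed = ImmutableSet.of("a", "b", "c");
        Set<String> desired = ImmutableSet.of("b", "c", "d");

        // Elements in 'deployed' that are not in 'desired': prints [a]
        Sets.SetView<String> toRemove = Sets.difference(deployed, desired);
        System.out.println(toRemove);

        // The view is unmodifiable and backed by the input sets;
        // immutableCopy() snapshots it into an ImmutableSet.
        ImmutableSet<String> snapshot = toRemove.immutableCopy();
        System.out.println(snapshot);
    }
}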
From source file:co.mitro.core.server.data.DBHistoricalUserState.java
public DBHistoricalUserState(ListMySecretsAndGroupKeysResponse resp, Map<Integer, GroupInfo> orgIdToOrg,
        long timestampMs) {
    this.userId = resp.myUserId;
    this.timestampMs = timestampMs;
    // sort the users
    this.visibleUsers = Sets.newTreeSet(resp.autocompleteUsers);
    numVisibleUsersInSameDomain = 0;
    String myDomain = userId.split("@")[1];
    for (String u : visibleUsers) {
        if (myDomain.equals(u.split("@")[1])) {
            ++numVisibleUsersInSameDomain;
        }
    }
    this.organizations = Lists.newArrayList();
    this.secrets = resp.secretToPath.values();
    this.groups = resp.groups.values();
    numGroups = groups.size();
    Set<Integer> myPrivateGroups = Sets.newHashSet();
    Set<Integer> seenOrgs = Sets.newHashSet();
    for (GroupInfo gi : groups) {
        if (gi.isNonOrgPrivateGroup || gi.isOrgPrivateGroup) {
            myPrivateGroups.add(gi.groupId);
        }
        if (gi.owningOrgId != null && seenOrgs.add(gi.owningOrgId)) {
            organizations.add(orgIdToOrg.get(gi.owningOrgId));
        }
        if (gi.isTopLevelOrg && seenOrgs.add(gi.groupId)) {
            organizations.add(orgIdToOrg.get(gi.groupId));
        }
    }
    numOrganizations = organizations.size();
    numSecrets = secrets.size();
    numVisibleUsers = visibleUsers.size();
    Set<Integer> sharedSecrets = new HashSet<Integer>();
    for (Secret s : secrets) {
        // the user should be excluded from this list.
        Set<String> usersExcludingMe = Sets.difference(Sets.newHashSet(s.users), ImmutableSet.of(userId));
        Set<Integer> groupsExcludingMe = Sets.difference(Sets.newHashSet(s.groups), myPrivateGroups);
        if (!(usersExcludingMe.isEmpty() && groupsExcludingMe.isEmpty())) {
            sharedSecrets.add(s.secretId);
        }
    }
    numSharedSecrets = sharedSecrets.size();
}
From source file:org.onosproject.routing.RouterInterfaceManager.java
/**
 * Changes the set of interfaces configured on the router.
 *
 * @param newConfiguredInterfaces new set of router interfaces
 */
public void changeConfiguredInterfaces(Set<String> newConfiguredInterfaces) {
    Set<String> oldConfiguredInterfaces = configuredInterfaces;
    configuredInterfaces = ImmutableSet.copyOf(newConfiguredInterfaces);

    if (newConfiguredInterfaces.isEmpty() && !oldConfiguredInterfaces.isEmpty()) {
        // Reverted to using all interfaces. Provision interfaces that
        // weren't previously in the configured list
        getInterfacesForDevice(routerDeviceId)
                .filter(intf -> !oldConfiguredInterfaces.contains(intf.name()))
                .forEach(this::provision);
    } else if (!newConfiguredInterfaces.isEmpty() && oldConfiguredInterfaces.isEmpty()) {
        // Began using an interface list. Unprovision interfaces that
        // are not in the new interface list.
        getInterfacesForDevice(routerDeviceId)
                .filter(intf -> !newConfiguredInterfaces.contains(intf.name()))
                .forEach(this::unprovision);
    } else {
        // The existing interface list was changed.
        Set<String> toUnprovision = Sets.difference(oldConfiguredInterfaces, newConfiguredInterfaces);
        Set<String> toProvision = Sets.difference(newConfiguredInterfaces, oldConfiguredInterfaces);

        toUnprovision.forEach(name -> getInterfacesForDevice(routerDeviceId)
                .filter(intf -> intf.name().equals(name))
                .findFirst()
                .ifPresent(this::unprovision));

        toProvision.forEach(name -> getInterfacesForDevice(routerDeviceId)
                .filter(intf -> intf.name().equals(name))
                .findFirst()
                .ifPresent(this::provision));
    }

    configuredInterfaces = newConfiguredInterfaces;
}
From source file:org.apache.cassandra.db.lifecycle.View.java
public Sets.SetView<SSTableReader> nonCompactingSStables() {
    return Sets.difference(sstables, compacting);
}
From source file:org.eclipse.sw360.portal.tags.TagUtils.java
private static <U extends TFieldIdEnum, T extends TBase<T, U>> void displaySet(StringBuilder display,
        Set<String> oldFieldValue, Set<String> updateFieldValue, Set<String> deletedFieldValue, U field,
        String prefix, String key) {
    String oldDisplay = null;
    String deleteDisplay = "n.a. (modified list)";
    String updateDisplay = null;

    if (oldFieldValue != null) {
        oldDisplay = getDisplayString(TType.SET, oldFieldValue);
    }
    if (updateFieldValue != null) {
        updateDisplay = getDisplayString(TType.SET,
                Sets.difference(Sets.union(nullToEmptySet(oldFieldValue), nullToEmptySet(updateFieldValue)),
                        nullToEmptySet(deletedFieldValue)));
    }
    if (isNullOrEmpty(updateDisplay) && isNullOrEmpty(oldDisplay)) {
        return;
    }
    if (isNullOrEmpty(updateDisplay)) {
        updateDisplay = NOT_SET;
    }
    if (isNullOrEmpty(oldDisplay)) {
        oldDisplay = NOT_SET;
    }

    String keyString = isNullOrEmpty(key) ? "" : " [" + key + "]";
    display.append(String.format("<tr><td>%s:</td>", prefix + field.getFieldName() + keyString));
    display.append(String.format("<td>%s</td>", oldDisplay, prefix + field.getFieldName() + keyString));
    display.append(String.format("<td>%s</td>", deleteDisplay, prefix + field.getFieldName() + keyString));
    display.append(
            String.format("<td>%s</td></tr> ", updateDisplay, prefix + field.getFieldName() + keyString));
}
From source file:org.opencms.ui.apps.CmsAppHierarchyBuilder.java
/**
 * Builds the tree of categories and apps.<p>
 *
 * This tree will only include those categories which are reachable by following the parent chain of
 * an available app configuration up to the root category (null).
 *
 * @return the root node of the tree
 */
public CmsAppCategoryNode buildHierarchy() {

    // STEP 0: Initialize everything and sort categories by priority
    Collections.sort(m_appCategoryList, new Comparator<I_CmsAppCategory>() {

        public int compare(I_CmsAppCategory cat1, I_CmsAppCategory cat2) {

            return ComparisonChain.start().compare(cat1.getPriority(), cat2.getPriority()).result();
        }
    });
    m_rootNode = new CmsAppCategoryNode();
    m_nodes.clear();
    m_nodes.put(null, m_rootNode);

    // STEP 1: Create a node for each category
    for (I_CmsAppCategory category : m_appCategoryList) {
        m_nodes.put(category.getId(), new CmsAppCategoryNode(category));
    }

    // STEP 2: Assign category nodes to nodes for their parent category
    for (CmsAppCategoryNode node : m_nodes.values()) {
        if (node != m_rootNode) {
            addNodeToItsParent(node);
        }
    }

    // STEP 3: Assign app configs to category nodes
    for (I_CmsWorkplaceAppConfiguration appConfig : m_appConfigs) {
        addAppConfigToCategory(appConfig);
    }

    // STEP 4: Validate whether there are unused categories / apps
    Set<String> usedNodes = findReachableNodes(m_rootNode, new HashSet<String>());
    if (usedNodes.size() < m_nodes.size()) {
        LOG.warn("Unused app categories: " + Sets.difference(m_nodes.keySet(), usedNodes));
    }
    Set<String> unusedApps = Sets.newHashSet();
    for (I_CmsWorkplaceAppConfiguration appConfig : m_appConfigs) {
        if (!usedNodes.contains(appConfig.getAppCategory())) {
            unusedApps.add(appConfig.getId());
        }
    }
    if (unusedApps.size() > 0) {
        LOG.warn("Unused apps: " + unusedApps);
    }

    // STEP 5: Remove parts of the hierarchy which don't contain any apps
    m_rootNode.removeApplessSubtrees();

    // STEP 6: Sort all categories and app configurations for each node
    m_rootNode.sortRecursively();

    return m_rootNode;
}
From source file:google.registry.util.CollectionUtils.java
/** Copy an {@link ImmutableSet} and remove members. */
@SafeVarargs
public static <T> ImmutableSet<T> difference(Set<T> set, T... toRemove) {
    return Sets.difference(set, ImmutableSet.copyOf(toRemove)).immutableCopy();
}
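A hedged sketch of how a varargs helper like the one above might be called; the call site and the values are hypothetical, not taken from the project:

import com.google.common.collect.ImmutableSet;
import google.registry.util.CollectionUtils;

// Hypothetical call site: drop known members from an immutable set.
ImmutableSet<String> tlds = ImmutableSet.of("com", "net", "org", "test");
ImmutableSet<String> production = CollectionUtils.difference(tlds, "test", "sandbox");
// production -> [com, net, org]; "sandbox" is simply ignored because it was not in the input set.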
From source file:com.nearinfinity.honeycomb.mysql.Util.java
/**
 * Retrieve from a list of indices which ones have been changed.
 *
 * @param indices    Table indices
 * @param oldRecords Old MySQL row
 * @param newRecords New MySQL row
 * @return List of changed indices
 */
public static ImmutableList<IndexSchema> getChangedIndices(Collection<IndexSchema> indices,
        Map<String, ByteBuffer> oldRecords, Map<String, ByteBuffer> newRecords) {
    if (indices.isEmpty()) {
        return ImmutableList.of();
    }

    MapDifference<String, ByteBuffer> diff = Maps.difference(oldRecords, newRecords);

    Set<String> changedColumns = Sets.difference(Sets.union(newRecords.keySet(), oldRecords.keySet()),
            diff.entriesInCommon().keySet());

    ImmutableList.Builder<IndexSchema> changedIndices = ImmutableList.builder();

    for (IndexSchema index : indices) {
        Set<String> indexColumns = ImmutableSet.copyOf(index.getColumns());
        if (!Sets.intersection(changedColumns, indexColumns).isEmpty()) {
            changedIndices.add(index);
        }
    }
    return changedIndices.build();
}
From source file:org.dllearner.algorithms.properties.MultiPropertyAxiomLearner.java
public void start() {
    startTime = System.currentTimeMillis();

    checkConfigOptions();

    // check if entity is empty
    int popularity = reasoner.getPopularity(entity);
    if (popularity == 0) {
        logger.warn("Cannot make axiom suggestions for empty " + entity.getEntityType().getName() + " "
                + entity.toStringID());
        return;
    }

    results = Maps.newConcurrentMap();

    EntityType<?> entityType = entity.getEntityType();

    // check for axiom types that are not appropriate for the given entity
    Set<AxiomType<? extends OWLAxiom>> possibleAxiomTypes = AxiomAlgorithms.getAxiomTypes(entityType);
    SetView<AxiomType<? extends OWLAxiom>> notAllowed = Sets.difference(axiomTypes, possibleAxiomTypes);
    if (!notAllowed.isEmpty()) {
        logger.warn("Not supported axiom types for entity " + entity + " :" + notAllowed);
    }

    Set<AxiomType<? extends OWLAxiom>> todo = Sets.intersection(axiomTypes, possibleAxiomTypes);

    // compute samples for axiom types
    Set<AxiomTypeCluster> sampleClusters = AxiomAlgorithms.getSameSampleClusters(entityType);

    ExecutorService tp = Executors.newFixedThreadPool(maxNrOfThreads);

    for (final AxiomTypeCluster cluster : sampleClusters) {
        final SetView<AxiomType<? extends OWLAxiom>> sampleAxiomTypes = Sets
                .intersection(cluster.getAxiomTypes(), todo);

        if (!sampleAxiomTypes.isEmpty()) {
            tp.submit(() -> {
                try {
                    SparqlEndpointKS ks1 = MultiPropertyAxiomLearner.this.ks;

                    // get sample if enabled
                    if (useSampling) {
                        Model sample = generateSample(entity, cluster);

                        // if sampling failed, we skip
                        if (sample == null) {
                            return;
                        }

                        // if the sample is empty, we skip and show warning
                        if (sample.isEmpty()) {
                            logger.warn("Empty sample. Skipped learning.");
                            return;
                        }

                        ks1 = new LocalModelBasedSparqlEndpointKS(sample);
                    }

                    // process each axiom type
                    for (AxiomType<? extends OWLAxiom> axiomType : sampleAxiomTypes) {
                        try {
                            List<EvaluatedAxiom<OWLAxiom>> result = applyAlgorithm(axiomType, ks1);
                            results.put(axiomType, result);
                        } catch (Exception e) {
                            logger.error("An error occurred while generating " + axiomType.getName()
                                    + " axioms for " + OWLAPIUtils.getPrintName(entity.getEntityType()) + " "
                                    + entity.toStringID(), e);
                        }
                    }
                } catch (Exception e) {
                    logger.error("Failed to process " + cluster, e);
                }
            });
        }
    }

    try {
        tp.shutdown();
        tp.awaitTermination(1, TimeUnit.HOURS);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }

    //
    //    for (AxiomType<? extends OWLAxiom> axiomType : todo) {
    //        try {
    //            applyAlgorithm(entity, axiomType, useSampling ? axiomType2Ks.get(axiomType) : ks);
    //        } catch (Exception e) {
    //            logger.error("Error occurred while generating " + axiomType.getName() + " for entity " + entity, e);
    //        }
    //    }
}
From source file:dagger.internal.codegen.SubcomponentFactoryMethodValidation.java
private SetView<TypeElement> ownedModules(ComponentNode component, BindingGraph graph) {
    return Sets.difference(
            ((ComponentNodeImpl) component).componentDescriptor().transitiveModuleTypes(),
            inheritedModules(component, graph));
}
From source file:es.usc.citius.composit.core.composition.search.ForwardServiceDiscoverer.java
public ServiceMatchNetwork<E, T> search(Signature<E> signature) {
    Set<E> availableInputs = new HashSet<E>(signature.getInputs());
    Set<E> newOutputs = new HashSet<E>(signature.getInputs());
    Set<E> unmatchedOutputs = new HashSet<E>(signature.getOutputs());
    Set<Operation<E>> usedServices = new HashSet<Operation<E>>();
    Map<Operation<E>, Set<E>> unmatchedInputMap = new HashMap<Operation<E>, Set<E>>();
    List<Set<Operation<E>>> leveledOps = new LinkedList<Set<Operation<E>>>();

    boolean checkExpectedOutputs = !signature.getOutputs().isEmpty();
    boolean stop;

    Stopwatch timer = Stopwatch.createStarted();
    Stopwatch levelTimer = Stopwatch.createUnstarted();
    int level = 0;
    do {
        HashSet<Operation<E>> candidates = new HashSet<Operation<E>>();
        levelTimer.start();
        candidates.addAll(discovery.findOperationsConsumingSome(newOutputs));
        log.info("(Level {}) {} potential candidates selected in {}", level++, candidates.size(),
                levelTimer.toString());
        // Remove services that cannot be invoked with the available inputs
        for (Iterator<Operation<E>> it = candidates.iterator(); it.hasNext();) {
            Operation<E> candidate = it.next();
            // Retrieve the unmatched inputs for this operation
            Set<E> unmatchedInputs = unmatchedInputMap.get(candidate);
            if (unmatchedInputs == null) {
                unmatchedInputs = candidate.getSignature().getInputs();
            }
            // Check if the new concepts match some unmatched inputs
            Set<E> matched = matcher.partialMatch(newOutputs, unmatchedInputs).getTargetElements();

            // Don't check invokability
            if (relaxedMatchCondition) {
                // Remove only if there is no match at all
                if (matched.isEmpty()) {
                    it.remove();
                } else {
                    boolean isNew = usedServices.add(candidate);
                    if (!isNew)
                        it.remove();
                }
            } else {
                // Update the unmatchedInputs
                unmatchedInputs = Sets.newHashSet(Sets.difference(unmatchedInputs, matched));
                unmatchedInputMap.put(candidate, unmatchedInputs);
                // If there are no unmatched inputs, the service is invokable!
                if (!unmatchedInputs.isEmpty()) {
                    it.remove();
                } else {
                    // Invokable operation, check if it was used previously
                    boolean isNew = usedServices.add(candidate);
                    if (!isNew)
                        it.remove();
                }
            }
        }
        log.info("\t + [{}] operations selected for this level in {}", candidates.size(),
                levelTimer.toString());
        log.debug("\t\t Candidates: {}", candidates);
        // Collect the new outputs of the new candidates
        Set<E> nextOutputs = Operations.outputs(candidates);

        // Check unmatched outputs
        Set<E> matchedOutputs = matcher.partialMatch(Sets.union(newOutputs, nextOutputs), unmatchedOutputs)
                .getTargetElements();
        //Set<Resource> matchedOutputs = matcher.matched(newOutputs, unmatchedOutputs);
        // Update the unmatched outputs
        unmatchedOutputs = Sets.newHashSet(Sets.difference(unmatchedOutputs, matchedOutputs));

        // Update for the next iteration
        availableInputs.addAll(newOutputs);
        newOutputs = nextOutputs;

        // Add the discovered ops
        if (!candidates.isEmpty())
            leveledOps.add(candidates);

        log.debug("\t + Available inputs: {}, new outputs: {}", availableInputs.size(), newOutputs.size());
        // Stop condition. Stop if there are no more candidates and/or expected outputs are satisfied.
        stop = (checkExpectedOutputs) ? candidates.isEmpty() || unmatchedOutputs.isEmpty()
                : candidates.isEmpty();
        levelTimer.reset();
    } while (!stop);

    // Add the source and sink operations
    Source<E> sourceOp = new Source<E>(signature.getInputs());
    Sink<E> sinkOp = new Sink<E>(signature.getOutputs());
    leveledOps.add(0, Collections.<Operation<E>>singleton(sourceOp));
    leveledOps.add(leveledOps.size(), Collections.<Operation<E>>singleton(sinkOp));
    Stopwatch networkWatch = Stopwatch.createStarted();
    // Create a service match network with the discovered services
    DirectedAcyclicSMN<E, T> matchNetwork = new DirectedAcyclicSMN<E, T>(
            new HashLeveledServices<E>(leveledOps), this.matcher);
    log.info(" > Service match network computed in {}", networkWatch.stop().toString());
    log.info("Service Match Network created with {} levels (including source and sink) and {} operations.",
            leveledOps.size(), matchNetwork.listOperations().size());
    log.info("Forward Discovery done in {}", timer.toString());
    this.unmatchedInputMap = unmatchedInputMap;
    return matchNetwork;
}