Example usage for com.google.common.collect Sets difference

List of usage examples for com.google.common.collect Sets difference

Introduction

On this page you can find example usage for com.google.common.collect Sets.difference.

Prototype

public static <E> SetView<E> difference(final Set<E> set1, final Set<?> set2) 

Source Link

Document

Returns an unmodifiable view of the difference of two sets.

Usage

From source file:org.opendaylight.controller.netconf.impl.NetconfServerSessionNegotiatorFactory.java

/**
 * Validates that every requested base capability is supported by the server
 * and returns the set of capabilities to advertise.
 *
 * @param baseCapabilities requested base capability URNs
 * @return immutable set of capabilities, always including base 1.0
 * @throws IllegalArgumentException if any capability is not contained in
 *         {@code DEFAULT_BASE_CAPABILITIES}
 */
private ImmutableSet<String> validateBaseCapabilities(final Set<String> baseCapabilities) {
    // Anything outside the server's default base capabilities is rejected up front.
    final Sets.SetView<String> unsupported = Sets.difference(baseCapabilities, DEFAULT_BASE_CAPABILITIES);
    Preconditions.checkArgument(unsupported.isEmpty(),
            "Base capabilities that will be supported by netconf server have to be subset of %s, unknown base capabilities: %s",
            DEFAULT_BASE_CAPABILITIES, unsupported);

    // Base 1.0 capability is supported by default.
    return ImmutableSet.<String>builder()
            .addAll(baseCapabilities)
            .add(XmlNetconfConstants.URN_IETF_PARAMS_NETCONF_BASE_1_0)
            .build();
}

From source file:org.fenixedu.bennu.core.groups.UserGroup.java

/**
 * Returns this group without the members of {@code group}. When both groups
 * are {@link UserGroup}s the result is computed directly as a set difference;
 * otherwise the generic implementation from the superclass is used.
 */
@Override
public Group minus(Group group) {
    if (!(group instanceof UserGroup)) {
        return super.minus(group);
    }
    return UserGroup.of(Sets.difference(members(), ((UserGroup) group).members()));
}

From source file:org.eclipse.sw360.portal.tags.CompareAttachments.java

/**
 * Renders the attachment diff: attachments that were deleted, attachments
 * that were added, and a side-by-side comparison of attachments present in
 * both the added and deleted sets (i.e. modified attachments).
 */
private void renderAttachments(JspWriter jspWriter, Set<Attachment> currentAttachments,
        Set<Attachment> addedAttachments, Set<Attachment> deletedAttachments, String contextType,
        String contextId) throws JspException, IOException {

    Map<String, Attachment> currentById = getAttachmentsById(currentAttachments);
    Map<String, Attachment> addedById = getAttachmentsById(addedAttachments);
    Map<String, Attachment> deletedById = getAttachmentsById(deletedAttachments);

    // Ids present in both the added and deleted sets represent modifications,
    // not pure additions or deletions.
    Set<String> commonIds = Sets.intersection(deletedById.keySet(), addedById.keySet());

    Set<String> purelyAddedIds = Sets.difference(addedById.keySet(), commonIds);
    Set<String> purelyDeletedIds = Sets.difference(deletedById.keySet(), commonIds);
    // Drop deletions that were already applied in the database.
    purelyDeletedIds = Sets.intersection(purelyDeletedIds, currentById.keySet());

    renderAttachmentList(jspWriter, currentById, purelyDeletedIds, "Deleted", contextType, contextId);
    renderAttachmentList(jspWriter, addedById, purelyAddedIds, "Added", contextType, contextId);
    renderAttachmentComparison(jspWriter, currentById, deletedById, addedById, commonIds);
}

From source file:org.apache.giraph.utils.TaskIdsPermitsBarrier.java

/**
 * Waits until permits have been required the desired number of times and all
 * required permits are available.
 *
 * <p>Polls with {@code wait(MSEC_PERIOD)} so that progress can be reported
 * periodically while waiting. Resets {@code arrivedTaskIds} and
 * {@code waitingOnPermits} before returning so the barrier can be reused.
 *
 * @param expectedTaskIds set of task ids which we are waiting for permits from
 * @throws IllegalStateException if the waiting thread is interrupted
 */
public synchronized void waitForRequiredPermits(Set<Integer> expectedTaskIds) {
    while (arrivedTaskIds.size() < expectedTaskIds.size() || waitingOnPermits > 0) {
        try {
            wait(MSEC_PERIOD);
        } catch (InterruptedException e) {
            // Restore the interrupt flag and preserve the cause so callers can
            // still observe and diagnose the interruption.
            Thread.currentThread().interrupt();
            throw new IllegalStateException("waitForRequiredPermits: " + "InterruptedException occurred", e);
        }
        progressable.progress();
        if (LOG.isInfoEnabled()) {
            if (arrivedTaskIds.size() < expectedTaskIds.size()) {
                String logSuffix = "";
                // Only list the outstanding task ids when the list is small
                // enough to be readable in the log.
                if (expectedTaskIds.size() - arrivedTaskIds.size() <= MAX_TASK_IDS_TO_LOG) {
                    Sets.SetView<Integer> difference = Sets.difference(expectedTaskIds, arrivedTaskIds);
                    logSuffix = ", task ids: " + difference;
                }
                logger.info("waitForRequiredPermits: " + "Waiting for "
                        + (expectedTaskIds.size() - arrivedTaskIds.size())
                        + " more tasks to send their aggregator data" + logSuffix);
            } else {
                logger.info("waitForRequiredPermits: " + "Waiting for " + waitingOnPermits
                        + " more aggregator requests");
            }
        }
    }

    // Reset for the next time to use
    arrivedTaskIds.clear();
    waitingOnPermits = 0;
}

From source file:org.apache.james.jmap.model.MessageProperties.java

/**
 * If {@code MessageProperty.body} was requested, substitutes it with
 * {@code MessageProperty.textBody}; otherwise returns this instance unchanged.
 */
private MessageProperties selectBody() {
    ImmutableSet<MessageProperty> properties = buildOutputMessageProperties();
    if (!properties.contains(MessageProperty.body)) {
        return this;
    }
    // Swap body -> textBody: add textBody first, then strip body.
    Set<MessageProperty> withTextBody = Sets.union(properties, ImmutableSet.of(MessageProperty.textBody));
    return usingProperties(Sets.difference(withTextBody, ImmutableSet.of(MessageProperty.body)));
}

From source file:net.minecraftforge.fml.common.toposort.TopologicalSort.java

/**
 * Depth-first exploration of {@code node}, appending nodes to
 * {@code sortedResult} in dependency order (post-order DFS).
 *
 * @param node          the node to explore
 * @param graph         the dependency graph being sorted
 * @param sortedResult  accumulator receiving nodes in sorted order
 * @param visitedNodes  nodes entered on the current or a previous DFS path
 * @param expandedNodes nodes whose subtree has been fully processed
 * @throws ModSortingException if a cycle is detected in the graph
 */
public static <T> void explore(T node, DirectedGraph<T> graph, List<T> sortedResult, Set<T> visitedNodes,
        Set<T> expandedNodes) {
    // Have we been here before?
    if (visitedNodes.contains(node)) {
        // Visited AND expanded: this subtree is already done, nothing to do.
        if (expandedNodes.contains(node)) {
            return;
        }

        // Visited but not yet expanded: we re-entered a node that is still on
        // the current DFS path, i.e. the graph contains a cycle.
        FMLLog.severe("Mod Sorting failed.");
        FMLLog.severe("Visiting node %s", node);
        FMLLog.severe("Current sorted list : %s", sortedResult);
        FMLLog.severe("Visited set for this node : %s", visitedNodes);
        FMLLog.severe("Explored node set : %s", expandedNodes);
        // Nodes visited but never expanded are exactly those on the cycle path.
        SetView<T> cycleList = Sets.difference(visitedNodes, expandedNodes);
        FMLLog.severe("Likely cycle is in : %s", cycleList);
        throw new ModSortingException("There was a cycle detected in the input graph, sorting is not possible",
                node, cycleList);
    }

    // Visit this node
    visitedNodes.add(node);

    // Recursively explore inbound edges
    for (T inbound : graph.edgesFrom(node)) {
        explore(inbound, graph, sortedResult, visitedNodes, expandedNodes);
    }

    // Add ourselves now
    sortedResult.add(node);
    // And mark ourselves as explored
    expandedNodes.add(node);
}

From source file:org.eclipse.sirius.table.ui.tools.internal.editor.provider.DTableEditorUtil.java

/**
 * Updates the viewer columns according to the given {@link DTable}:
 * creates, deletes and updates column widths if needed.
 *
 * @param treeViewerManager
 *            the table viewer manager whose viewer is updated
 * @param dTable
 *            the {@link DTable} reflecting the expected viewer state
 */
public static void updateViewerColumns(AbstractDTableViewerManager treeViewerManager, DTable dTable) {
    DTableTreeViewer dTableTreeViewer = (DTableTreeViewer) treeViewerManager.getTreeViewer();
    TreeColumn[] treeColumns = dTableTreeViewer.getTree().getColumns();

    // Step 1: update header column width.
    // Widths are read via syncExec because SWT widgets must only be
    // accessed from the display (UI) thread.
    TreeColumn treeColumn = treeColumns[0];
    TreeColumnWidthQuery treeColumnWidthQuery = new TreeColumnWidthQuery(treeColumn);
    Display.getDefault().syncExec(treeColumnWidthQuery);
    int widgetWidth = treeColumnWidthQuery.getResult();
    if (dTable.getHeaderColumnWidth() != widgetWidth && dTable.getHeaderColumnWidth() > 0) {
        treeColumn.setWidth(dTable.getHeaderColumnWidth());
    }

    // Step 2: update other columns width; remember which DColumns the
    // viewer already shows so step 4 can detect the new ones.
    Set<DColumn> handledDColumns = Sets.newLinkedHashSet();
    for (int i = 1; i < treeColumns.length; i++) {
        treeColumn = treeColumns[i];
        DColumn dColumn = (DColumn) treeColumn.getData(DTableViewerManager.TABLE_COLUMN_DATA);

        if (dColumn != null && dTable.getColumns().contains(dColumn)) {
            handledDColumns.add(dColumn);
            treeColumnWidthQuery = new TreeColumnWidthQuery(treeColumn);
            Display.getDefault().syncExec(treeColumnWidthQuery);
            widgetWidth = treeColumnWidthQuery.getResult();
            // If the DColumn has a default width (0) then resizing at
            // opening should not impact the model
            if (dColumn.isVisible() && dColumn.getWidth() > 0 && dColumn.getWidth() != widgetWidth) {
                treeColumn.setWidth(dColumn.getWidth());
            }
        } else {
            // Step 3: handle deleted columns (widget columns whose DColumn
            // is no longer part of the DTable).
            ((DTableViewerManager) treeViewerManager).removeOldColumn(dColumn);

        }
    }
    // Step 4: handle added columns — DColumns present in the DTable but not
    // yet shown by the viewer; position is offset by 1 for the header column.
    for (DColumn newColumn : Sets.difference(Sets.newLinkedHashSet(dTable.getColumns()), handledDColumns)) {
        int position = dTable.getColumns().indexOf(newColumn) + 1;
        addNewColumn(treeViewerManager, position, newColumn);
    }

    // Step 5: update expanded elements
    dTableTreeViewer.setExpandedElements(TableHelper.getExpandedLines(dTable).toArray());
}

From source file:edu.udo.scaffoldhunter.plugins.dataimport.impl.sdf.SDFImportPluginResults.java

/**
 * Scans the given SDF file once, collecting every property name that occurs
 * on any molecule, tracking which properties look numeric, and counting the
 * molecules.
 *
 * @param arguments
 *            the import arguments; {@code arguments.getFilename()} names the
 *            SDF file to scan
 */
public SDFImportPluginResults(SDFImportPluginArguments arguments) {
    this.arguments = arguments;
    // Case-insensitive ordering so differently-cased spellings of the same
    // property name collapse into a single entry.
    this.sourceProperties = new TreeMap<String, PropertyDefinition>(String.CASE_INSENSITIVE_ORDER);
    this.numMolecules = 0;

    File sdfFile = new File(arguments.getFilename());
    IAtomContainer cur;
    // Properties for which at least one non-empty value failed to parse as a
    // finite double; everything else is considered "probably numeric".
    Set<String> notNumeric = Sets.newHashSet();
    IteratingMDLReader reader = null;
    try {
        reader = new IteratingMDLReader(new FileInputStream(sdfFile), SilentChemObjectBuilder.getInstance());
        while (reader.hasNext()) {
            // only properties required here, no need to configure molecule
            cur = reader.next();
            for (Entry<Object, Object> e : cur.getProperties().entrySet()) {
                if (e.getValue() == null) {
                    continue;
                }
                // Register the property the first time it is seen.
                if (!sourceProperties.containsKey(e.getKey())) {
                    sourceProperties.put((String) e.getKey(), null);
                }
                // Empty values carry no evidence about numericness.
                if (e.getValue().toString().isEmpty()) {
                    continue;
                }
                if (!notNumeric.contains(e.getKey())) {
                    boolean numeric = true;
                    try {
                        double d = Double.parseDouble((String) e.getValue());
                        // NaN and Infinity parse successfully but are not
                        // treated as numeric property values.
                        if (Double.isNaN(d) || Double.isInfinite(d)) {
                            numeric = false;
                        }
                    } catch (NumberFormatException ex) {
                        numeric = false;
                    }
                    if (!numeric)
                        notNumeric.add((String) e.getKey());
                }
            }
            numMolecules++;
        }
    } catch (FileNotFoundException e) {
        // Best-effort: a missing file yields an empty result (0 molecules,
        // no properties) instead of failing the import.
        e.printStackTrace();
    } finally {
        try {
            if (reader != null)
                reader.close();
        } catch (IOException ignored) {
            // Closing is best-effort; all data has already been read.
        }
    }
    probablyNumeric = Sets.difference(sourceProperties.keySet(), notNumeric);
}

From source file:com.progressiveaccess.cmlspeech.analysis.FunctionalGroupsFilter.java

/**
 * Removes every working set whose components are fully contained in an
 * earlier, already-kept set, leaving only the maximal functional groups.
 */
private void subsumeSubsets() {
    if (this.workingSets.isEmpty()) {
        return;
    }
    int keepIndex = 0;
    while (keepIndex < this.workingSets.size()) {
        final RichFunctionalGroup kept = this.workingSets.get(keepIndex);
        keepIndex++;
        // Walk backwards so removals do not disturb the remaining indices.
        for (int candidateIndex = this.workingSets.size() - 1; candidateIndex >= keepIndex; candidateIndex--) {
            final RichFunctionalGroup candidate = this.workingSets.get(candidateIndex);
            // candidate \ kept is empty  <=>  candidate is a subset of kept.
            if (Sets.difference(candidate.getComponents(), kept.getComponents()).isEmpty()) {
                this.workingSets.remove(candidate);
            }
        }
    }
}

From source file:com.palantir.atlasdb.cli.command.SweepCommand.java

/**
 * Runs sweep over a single table, all tables in a namespace, or all
 * non-hidden tables, printing progress and recording sweep statistics in
 * the sweep priority table.
 *
 * @param services the AtlasDB services to sweep against
 * @return 0 on success, 1 if the command-line options are invalid
 */
@Override
public int execute(final AtlasDbServices services) {
    SweepTaskRunner sweepRunner = services.getSweepTaskRunner();

    // Exactly one of --namespace, --table, --all must be given. Note that a
    // plain XOR of the three flags would incorrectly accept all three being
    // set at once (true ^ true ^ true == true), so count them explicitly.
    int selectedOptions = (namespace != null ? 1 : 0) + (table != null ? 1 : 0) + (sweepAllTables ? 1 : 0);
    if (selectedOptions != 1) {
        System.err.println("Specify one of --namespace, --table, or --all options.");
        return 1;
    }
    if ((namespace != null) && (row != null)) {
        System.err.println("Cannot specify a start row (" + row
                + ") when sweeping multiple tables (in namespace " + namespace + ")");
        return 1;
    }

    // Maps each table to sweep to the row the sweep should start from.
    Map<TableReference, Optional<byte[]>> tableToStartRow = Maps.newHashMap();

    if ((table != null)) {
        Optional<byte[]> startRow = Optional.of(new byte[0]);
        if (row != null) {
            startRow = Optional.of(decodeStartRow(row));
        }
        tableToStartRow.put(TableReference.createUnsafe(table), startRow);
    } else if (namespace != null) {
        Set<TableReference> tablesInNamespace = services.getKeyValueService().getAllTableNames().stream()
                .filter(tableRef -> tableRef.getNamespace().getName().equals(namespace))
                .collect(Collectors.toSet());
        for (TableReference table : tablesInNamespace) {
            tableToStartRow.put(table, Optional.of(new byte[0]));
        }
    } else if (sweepAllTables) {
        // Sweep everything except AtlasDB-internal (hidden) tables.
        tableToStartRow.putAll(Maps.asMap(Sets.difference(services.getKeyValueService().getAllTableNames(),
                AtlasDbConstants.hiddenTables), Functions.constant(Optional.of(new byte[0]))));
    }

    for (Map.Entry<TableReference, Optional<byte[]>> entry : tableToStartRow.entrySet()) {
        final TableReference table = entry.getKey();
        Optional<byte[]> startRow = entry.getValue();

        final AtomicLong cellsExamined = new AtomicLong();
        final AtomicLong cellsDeleted = new AtomicLong();

        // Sweep the table in batches; each run reports the next start row
        // until the table is exhausted (absent next start row).
        while (startRow.isPresent()) {
            Stopwatch watch = Stopwatch.createStarted();
            SweepResults results = sweepRunner.run(table, batchSize, startRow.get());
            System.out.println(String.format(
                    "Swept from %s to %s in table %s in %d ms, examined %d unique cells, deleted %d cells.",
                    encodeStartRow(startRow), encodeEndRow(results.getNextStartRow()), table,
                    watch.elapsed(TimeUnit.MILLISECONDS), results.getCellsExamined(),
                    results.getCellsDeleted()));
            startRow = results.getNextStartRow();
            cellsDeleted.addAndGet(results.getCellsDeleted());
            cellsExamined.addAndGet(results.getCellsExamined());
            maybeSleep();
        }

        // Record sweep statistics transactionally so the background sweeper
        // deprioritizes the freshly swept table.
        services.getTransactionManager().runTaskWithRetry((TxTask) t -> {
            SweepPriorityTable priorityTable = SweepTableFactory.of().getSweepPriorityTable(t);
            SweepPriorityTable.SweepPriorityRow row1 = SweepPriorityTable.SweepPriorityRow
                    .of(table.getQualifiedName());
            priorityTable.putWriteCount(row1, 0L);
            priorityTable.putCellsDeleted(row1, cellsDeleted.get());
            priorityTable.putCellsExamined(row1, cellsExamined.get());
            priorityTable.putLastSweepTime(row1, System.currentTimeMillis());

            System.out
                    .println(String.format("Finished sweeping %s, examined %d unique cells, deleted %d cells.",
                            table, cellsExamined.get(), cellsDeleted.get()));

            // Only compact when something was actually deleted.
            if (cellsDeleted.get() > 0) {
                Stopwatch watch = Stopwatch.createStarted();
                services.getKeyValueService().compactInternally(table);
                System.out.println(String.format("Finished performing compactInternally on %s in %d ms.", table,
                        watch.elapsed(TimeUnit.MILLISECONDS)));
            }
            return null;
        });
    }
    return 0;
}