Example usage for com.google.common.collect.Sets.difference

Introduction

On this page you can find example usage for com.google.common.collect.Sets.difference.

Prototype

public static <E> SetView<E> difference(final Set<E> set1, final Set<?> set2) 

Document

Returns an unmodifiable view of the difference of two sets.
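
The returned SetView is a live, unmodifiable view: it reflects later changes to the backing sets, and mutating it throws UnsupportedOperationException. Before the real-world examples below, here is a minimal sketch of that behavior (the set contents are invented for illustration):

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import java.util.HashSet;
import java.util.Set;

public class SetsDifferenceDemo {
    public static void main(String[] args) {
        Set<String> first = new HashSet<>(ImmutableSet.of("a", "b", "c"));
        Set<String> second = ImmutableSet.of("b", "c", "d");

        // Elements of first that are not in second.
        Sets.SetView<String> diff = Sets.difference(first, second);
        System.out.println(diff); // [a]

        // The view is live: later changes to the backing set show through.
        first.add("e");
        System.out.println(diff); // [a, e] (iteration order follows first)

        // The view itself is unmodifiable; take a snapshot if one is needed.
        Set<String> snapshot = diff.immutableCopy();
        System.out.println(snapshot);
    }
}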

Usage

From source file:com.wrmsr.wava.basic.BasicLoopInfo.java

public static Map<Name, Name> getLoopParents(SetMultimap<Name, Name> loopContents) {
    Map<Name, Name> loopParents = new HashMap<>();
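    // For each loop, collect the set of loops that contain it (its ancestors), seeded with the entry name.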
    Map<Name, Set<Name>> map = loopContents.keySet().stream()
            .collect(toHashMap(identity(), loop -> new HashSet<>()));
    for (Name cur : loopContents.keySet()) {
        map.get(cur).add(ENTRY_NAME);
        Set<Name> children = loopContents.get(cur);
        for (Name child : children) {
            if (!cur.equals(child) && loopContents.containsKey(child)) {
                map.get(child).add(cur);
            }
        }
    }
    Map<Name, Integer> loopDepths = map.entrySet().stream()
            .collect(toHashMap(entry -> entry.getKey(), entry -> entry.getValue().size()));
    loopDepths.put(ENTRY_NAME, 0);
    int maxDepth = loopDepths.values().stream().mapToInt(Integer::intValue).max().orElse(0);
    List<List<Name>> depthLoopsLists = IntStream.range(0, maxDepth + 1).boxed()
            .<List<Name>>map(i -> new ArrayList<>()).collect(toArrayList());
    loopDepths.forEach((loop, depth) -> depthLoopsLists.get(depth).add(loop));
    Set<Name> seen = new HashSet<>();
    for (int depth = 1; depth < depthLoopsLists.size(); ++depth) {
        for (Name loop : depthLoopsLists.get(depth)) {
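            // All shallower ancestors are already in 'seen', so the difference leaves exactly the immediate parent.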
            Name parent = getOnlyElement(Sets.difference(map.get(loop), seen));
            checkState(loopDepths.get(parent) == depth - 1);
            loopParents.put(loop, parent);
        }
        seen.addAll(depthLoopsLists.get(depth - 1));
    }
    checkState(loopContents.keySet().equals(loopParents.keySet()));
    return loopParents;
}

From source file:org.eclipse.sw360.portal.tags.DisplayProjectChanges.java

private void renderLinkedProjects(StringBuilder display, User user) {
    if (ensureSomethingTodoAndNoNullLinkedProjects()) {

        Set<String> changedProjectIds = Sets.intersection(additions.getLinkedProjects().keySet(),
                deletions.getLinkedProjects().keySet());
        Set<String> linkedProjectsInDb = nullToEmptyMap(actual.getLinkedProjects()).keySet();
        //keep only projects that are still in the database
        changedProjectIds = Sets.intersection(changedProjectIds, linkedProjectsInDb);

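        // Deleted project links that were not also re-added are genuine removals.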
        Set<String> removedProjectIds = Sets.difference(deletions.getLinkedProjects().keySet(),
                changedProjectIds);
        removedProjectIds = Sets.intersection(removedProjectIds, linkedProjectsInDb);

        Set<String> addedProjectIds = Sets.difference(additions.getLinkedProjects().keySet(),
                changedProjectIds);

        renderProjectLinkList(display, deletions.getLinkedProjects(), removedProjectIds,
                "Removed Project Links", user);
        renderProjectLinkList(display, additions.getLinkedProjects(), addedProjectIds, "Added Project Links",
                user);
        renderProjectLinkListCompare(display, actual.getLinkedProjects(), deletions.getLinkedProjects(),
                additions.getLinkedProjects(), changedProjectIds, user);
    }
}

From source file:com.zulily.omicron.scheduling.JobManager.java

/**
 * Updates the scheduled tasks and alert manager with any changes from the config or crontab
 *
 * @param configuration The more current global configuration instance
 * @param crontab       The more current crontab
 */
public void updateConfiguration(final Configuration configuration, final Crontab crontab) {
    checkNotNull(configuration, "configuration");
    checkNotNull(crontab, "crontab");
    checkNotNull(alertManager, "alertManager");

    this.alertManager.updateConfiguration(configuration);

    final HashSet<Job> result = Sets.newHashSet();

    final HashSet<Job> jobUpdates = Sets.newHashSet();

    for (final CrontabExpression crontabExpression : crontab.getCrontabExpressions()) {

        // If there are overrides in the crontab for this expression, get them and apply them
        final Configuration configurationOverride = crontab.getConfigurationOverrides()
                .get(crontabExpression.getLineNumber());

        final Job job = new Job(crontabExpression,
                substituteVariables(crontabExpression.getCommand(), crontab.getVariables()),
                configurationOverride == null ? configuration : configurationOverride);

        jobUpdates.add(job);
    }

    // This is a view containing old scheduled tasks that have been removed or
    // reconfigured
    final Sets.SetView<Job> oldJobs = Sets.difference(jobSet, jobUpdates);

    info("CRON UPDATE: {0} tasks no longer scheduled or out of date", String.valueOf(oldJobs.size()));

    // This is a view of scheduled tasks that will not be updated by the cron reload
    final Sets.SetView<Job> existingJobs = Sets.intersection(jobSet, jobUpdates);

    info("CRON UPDATE: {0} tasks unchanged", String.valueOf(existingJobs.size()));

    // This is a view of scheduled tasks that are new or have been changed
    final Sets.SetView<Job> newJobs = Sets.difference(jobUpdates, jobSet);

    info("CRON UPDATE: {0} tasks are new or updated", String.valueOf(newJobs.size()));

    // Add all new tasks
    // keep references to old tasks that are still running
    // and transfer instances that haven't changed
    result.addAll(newJobs);

    for (final Job job : jobSet) {

        if (oldJobs.contains(job) && job.isRunning()) {

            job.setActive(false);
            result.add(job);

            retiredJobs.add(job);
        }

        if (existingJobs.contains(job)) {

            if (!job.isActive()) {
                // Did someone re-add a task that was running and then removed?
                // For whatever reason, it's now set to run again so just re-activate the instance
                info("CRON UPDATE: Reactivating {0}", job.toString());
                job.setActive(true);
            }

            result.add(job);
        }
    }

    this.jobSet = result;
}

From source file:com.thoughtworks.go.server.service.AdminsConfigService.java

private BulkUpdateAdminsResult validateUsersAndRolesForBulkUpdate(List<String> usersToRemove,
        List<String> rolesToRemove, Set<Admin> existingAdmins) {
    Set<CaseInsensitiveString> existingAdminNames = existingAdmins.stream().map(Admin::getName)
            .collect(Collectors.toSet());
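    // Names requested for removal that are not current admins are flagged as invalid.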
    Sets.SetView<CaseInsensitiveString> invalidUsersToRemove = Sets.difference(caseInsensitive(usersToRemove),
            existingAdminNames);
    Sets.SetView<CaseInsensitiveString> invalidRolesToRemove = Sets.difference(caseInsensitive(rolesToRemove),
            existingAdminNames);
    BulkUpdateAdminsResult result = new BulkUpdateAdminsResult();
    if (invalidUsersToRemove.size() > 0) {
        result.setNonExistentUsers(invalidUsersToRemove);
        result.unprocessableEntity(
                "Update failed because some users or roles do not exist under super admins.");
    }
    if (invalidRolesToRemove.size() > 0) {
        result.setNonExistentRoles(invalidRolesToRemove);
        result.unprocessableEntity(
                "Update failed because some users or roles do not exist under super admins.");
    }
    return result;
}

From source file:org.apache.beam.runners.flink.FlinkJobInvocation.java

private <T extends FlinkPortablePipelineTranslator.TranslationContext> PipelineResult runPipelineWithTranslator(
        FlinkPortablePipelineTranslator<T> translator) throws Exception {
    LOG.info("Translating pipeline to Flink program.");

    // Don't let the fuser fuse any subcomponents of native transforms.
    // TODO(BEAM-6327): Remove the need for this.
    RunnerApi.Pipeline trimmedPipeline = makeKnownUrnsPrimitives(pipeline, Sets.difference(
            translator.knownUrns(), ImmutableSet.of(PTransformTranslation.ASSIGN_WINDOWS_TRANSFORM_URN)));

    // Fused pipeline proto.
    RunnerApi.Pipeline fusedPipeline = GreedyPipelineFuser.fuse(trimmedPipeline).toPipeline();
    JobInfo jobInfo = JobInfo.create(id, pipelineOptions.getJobName(), retrievalToken,
            PipelineOptionsTranslation.toProto(pipelineOptions));

    FlinkPortablePipelineTranslator.Executor executor = translator.translate(
            translator.createTranslationContext(jobInfo, pipelineOptions, confDir, filesToStage),
            fusedPipeline);
    final JobExecutionResult result = executor.execute(pipelineOptions.getJobName());

    return FlinkRunner.createPipelineResult(result, pipelineOptions);
}

From source file:org.sonar.server.computation.filemove.FileMoveDetectionStep.java

@Override
public void execute() {
    // do nothing if no files in db (first analysis)
    Snapshot baseProjectSnapshot = analysisMetadataHolder.getBaseProjectSnapshot();
    if (baseProjectSnapshot == null) {
        LOG.debug("First analysis. Do nothing.");
        return;
    }

    Map<String, DbComponent> dbFilesByKey = getDbFilesByKey(baseProjectSnapshot);
    if (dbFilesByKey.isEmpty()) {
        LOG.debug("Previous snapshot has no file. Do nothing.");
        return;
    }

    Map<String, Component> reportFilesByKey = getReportFilesByKey(this.rootHolder.getRoot());
    if (reportFilesByKey.isEmpty()) {
        LOG.debug("No files in report. Do nothing.");
        return;
    }

    Set<String> addedFileKeys = ImmutableSet
            .copyOf(Sets.difference(reportFilesByKey.keySet(), dbFilesByKey.keySet()));
    Set<String> removedFileKeys = ImmutableSet
            .copyOf(Sets.difference(dbFilesByKey.keySet(), reportFilesByKey.keySet()));

    // can't find matches if either the added or the removed file group is empty => abort
    if (addedFileKeys.isEmpty() || removedFileKeys.isEmpty()) {
        LOG.debug("Either no files added or no files removed. Do nothing.");
        return;
    }

    // retrieve file data from report
    Map<String, File> reportFileSourcesByKey = getReportFileSourcesByKey(reportFilesByKey, addedFileKeys);

    // compute score matrix
    ScoreMatrix scoreMatrix = computeScoreMatrix(dbFilesByKey, removedFileKeys, reportFileSourcesByKey);
    printIfDebug(scoreMatrix);

    // not a single match with score higher than MIN_REQUIRED_SCORE => abort
    if (scoreMatrix.getMaxScore() < MIN_REQUIRED_SCORE) {
        LOG.debug("max score in matrix is less than min required score (%s). Do nothing.", MIN_REQUIRED_SCORE);
        return;
    }

    MatchesByScore matchesByScore = MatchesByScore.create(scoreMatrix);

    ElectedMatches electedMatches = electMatches(removedFileKeys, reportFileSourcesByKey, matchesByScore);

    registerMatches(dbFilesByKey, reportFilesByKey, electedMatches);
}

From source file:org.apache.beam.runners.dataflow.worker.graph.InsertFetchAndFilterStreamingSideInputNodes.java

public MutableNetwork<Node, Edge> forNetwork(MutableNetwork<Node, Edge> network) {
    if (pipeline == null) {
        return network;
    }
    RehydratedComponents rehydratedComponents = RehydratedComponents.forComponents(pipeline.getComponents());

    for (ParallelInstructionNode node : ImmutableList
            .copyOf(Iterables.filter(network.nodes(), ParallelInstructionNode.class))) {
        // If this isn't a ParDo or doesn't execute in the SDK harness then we don't have
        // to worry about it.
        if (node.getParallelInstruction().getParDo() == null
                || !ExecutionLocation.SDK_HARNESS.equals(node.getExecutionLocation())) {
            continue;
        }

        ParDoInstruction parDoInstruction = node.getParallelInstruction().getParDo();
        CloudObject userFnSpec = CloudObject.fromSpec(parDoInstruction.getUserFn());
        String parDoPTransformId = getString(userFnSpec, PropertyNames.SERIALIZED_FN);

        // Skip ParDoInstruction nodes that contain payloads without side inputs.
        String userFnClassName = userFnSpec.getClassName();
        if ("CombineValuesFn".equals(userFnClassName) || "KeyedCombineFn".equals(userFnClassName)) {
            continue; // These nodes have CombinePayloads which have no side inputs.
        }

        RunnerApi.PTransform parDoPTransform = pipeline.getComponents()
                .getTransformsOrDefault(parDoPTransformId, null);

        // TODO: only the non-null branch should exist; for migration ease only
        if (parDoPTransform == null) {
            continue;
        }

        RunnerApi.ParDoPayload parDoPayload;
        try {
            parDoPayload = RunnerApi.ParDoPayload.parseFrom(parDoPTransform.getSpec().getPayload());
        } catch (InvalidProtocolBufferException exc) {
            throw new RuntimeException("ParDo did not have a ParDoPayload", exc);
        }

        // Skip any ParDo that doesn't have a side input.
        if (parDoPayload.getSideInputsMap().isEmpty()) {
            continue;
        }

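        // The main input is the only input of the ParDo that is not also declared as a side input.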
        String mainInputPCollectionLocalName = Iterables.getOnlyElement(Sets
                .difference(parDoPTransform.getInputsMap().keySet(), parDoPayload.getSideInputsMap().keySet()));

        RunnerApi.WindowingStrategy windowingStrategyProto = pipeline.getComponents()
                .getWindowingStrategiesOrThrow(pipeline.getComponents()
                        .getPcollectionsOrThrow(parDoPTransform.getInputsOrThrow(mainInputPCollectionLocalName))
                        .getWindowingStrategyId());

        WindowingStrategy windowingStrategy;
        try {
            windowingStrategy = WindowingStrategyTranslation.fromProto(windowingStrategyProto,
                    rehydratedComponents);
        } catch (InvalidProtocolBufferException e) {
            throw new IllegalStateException(
                    String.format("Unable to decode windowing strategy %s.", windowingStrategyProto), e);
        }

        // Gather all the side input window mapping fns which we need to request the SDK to map
        ImmutableMap.Builder<PCollectionView<?>, RunnerApi.SdkFunctionSpec> pCollectionViewsToWindowMapingsFns = ImmutableMap
                .builder();
        parDoPayload.getSideInputsMap().forEach((sideInputTag,
                sideInput) -> pCollectionViewsToWindowMapingsFns.put(RegisterNodeFunction
                        .transformSideInputForRunner(pipeline, parDoPTransform, sideInputTag, sideInput),
                        sideInput.getWindowMappingFn()));
        Node streamingSideInputWindowHandlerNode = FetchAndFilterStreamingSideInputsNode.create(
                windowingStrategy, pCollectionViewsToWindowMapingsFns.build(),
                NameContext.create(null, node.getParallelInstruction().getOriginalName(),
                        node.getParallelInstruction().getSystemName(),
                        node.getParallelInstruction().getName()));

        // Rewire the graph such that streaming side inputs ParDos are preceded by a
        // node which filters any side inputs that aren't ready and fetches any ready side inputs.
        Edge mainInput = Iterables.getOnlyElement(network.inEdges(node));
        InstructionOutputNode predecessor = (InstructionOutputNode) network.incidentNodes(mainInput).source();
        InstructionOutputNode predecessorCopy = InstructionOutputNode.create(predecessor.getInstructionOutput(),
                predecessor.getPcollectionId());
        network.removeEdge(mainInput);
        network.addNode(streamingSideInputWindowHandlerNode);
        network.addNode(predecessorCopy);
        network.addEdge(predecessor, streamingSideInputWindowHandlerNode, mainInput.clone());
        network.addEdge(streamingSideInputWindowHandlerNode, predecessorCopy, mainInput.clone());
        network.addEdge(predecessorCopy, node, mainInput.clone());
    }
    return network;
}

From source file:io.bazel.rules.closure.worker.ErrorReporter.java

private void finish() {
    if (suppress.isPresent()) {
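        // Suppress codes that were requested but never used, excluding codes that are never flagged as superfluous.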
        Set<String> superfluous = Sets.difference(suppress.get(), Sets.union(suppressed, NEVER_SUPERFLUOUS));
        if (!superfluous.isEmpty()) {
            report(SUPERFLUOUS_SUPPRESS_ERROR, "Superfluous suppress codes: " + joinWords(superfluous));
        }
    }
}

From source file:org.onosproject.net.flow.impl.FlowRuleDriverProvider.java

@Override
public void executeBatch(FlowRuleBatchOperation batch) {
    ImmutableList.Builder<FlowRule> toAdd = ImmutableList.builder();
    ImmutableList.Builder<FlowRule> toRemove = ImmutableList.builder();
    for (FlowRuleBatchEntry fbe : batch.getOperations()) {
        if (fbe.operator() == ADD || fbe.operator() == MODIFY) {
            toAdd.add(fbe.target());
        } else if (fbe.operator() == REMOVE) {
            toRemove.add(fbe.target());
        }
    }

    ImmutableList<FlowRule> rulesToAdd = toAdd.build();
    ImmutableList<FlowRule> rulesToRemove = toRemove.build();

    Collection<FlowRule> added = applyFlowRules(batch.deviceId(), rulesToAdd);
    Collection<FlowRule> removed = removeFlowRules(batch.deviceId(), rulesToRemove);

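    // Any rule that was requested but not actually applied or removed is reported as a failure.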
    Set<FlowRule> failedRules = Sets.union(Sets.difference(copyOf(rulesToAdd), copyOf(added)),
            Sets.difference(copyOf(rulesToRemove), copyOf(removed)));
    CompletedBatchOperation status = new CompletedBatchOperation(failedRules.isEmpty(), failedRules,
            batch.deviceId());
    providerService.batchOperationCompleted(batch.id(), status);
}

From source file:org.jclouds.scriptbuilder.ScriptBuilder.java

@VisibleForTesting
public static Map<String, String> resolveFunctionDependenciesForStatements(Map<String, String> knownFunctions,
        Iterable<Statement> statements, final OsFamily osFamily) {
    Builder<String, String> builder = ImmutableMap.builder();
    builder.putAll(knownFunctions);
    Set<String> dependentFunctions = ImmutableSet.copyOf(
            Iterables.concat(Iterables.transform(statements, new Function<Statement, Iterable<String>>() {
                @Override
                public Iterable<String> apply(Statement from) {
                    return from.functionDependencies(osFamily);
                }
            })));
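    // Resolve each dependent function that is not already known from its bundled resource.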
    for (String unresolved : Sets.difference(dependentFunctions, knownFunctions.keySet()))
        builder.put(unresolved, Utils.writeFunctionFromResource(unresolved, osFamily));
    return builder.build();
}