Example usage for java.util Set removeAll

List of usage examples for java.util Set removeAll

Introduction

On this page you can find usage examples for java.util.Set.removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).
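
Before the project examples, here is a minimal, standalone sketch (not taken from any of the projects below) of the basic contract: removeAll performs an in-place set difference and returns true if the set was modified as a result.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RemoveAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green", "blue"));

        // Remove every element that also appears in the argument collection.
        boolean changed = colors.removeAll(Arrays.asList("green", "blue", "yellow"));

        System.out.println(changed); // true -> the set was modified
        System.out.println(colors);  // only "red" remains
    }
}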

Usage

From source file:gov.nih.nci.caarray.web.plugins.OsgiConfigurationProvider.java

/**
 * Loads XML config as well as Convention config from a bundle. Limitation: Constants and Beans are ignored on XML
 * config.
 *
 * @param bundle bundle to load from
 */
@SuppressWarnings("PMD.ExcessiveMethodLength")
protected void loadConfigFromBundle(Bundle bundle) {
    final String bundleName = bundle.getSymbolicName();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Loading packages from bundle " + bundleName);
    }

    // init action context
    ActionContext ctx = ActionContext.getContext();
    if (ctx == null) {
        ctx = new ActionContext(new HashMap());
        ActionContext.setContext(ctx);
    }

    try {
        // the Convention plugin will use BundleClassLoaderInterface from the ActionContext to find resources
        // and load classes
        ctx.put(ClassLoaderInterface.CLASS_LOADER_INTERFACE, new BundleClassLoaderInterface());
        ctx.put(DefaultBundleAccessor.CURRENT_BUNDLE_NAME, bundleName);

        LOG.debug("Loading XML config from bundle " + bundleName);

        // XML config
        final BundlePackageLoader loader = new BundlePackageLoader();
        for (final PackageConfig pkg : loader.loadPackages(bundle, this.objectFactory,
                this.configuration.getPackageConfigs())) {
            this.configuration.addPackageConfig(pkg.getName(), pkg);
            this.bundleAccessor.addPackageFromBundle(bundle, pkg.getName());
        }

        // Convention
        // get the existing packages before reloading the provider (so we can figure out which packages are new)
        final Set<String> packagesBeforeLoading = new HashSet<>(this.configuration.getPackageConfigNames());

        final PackageProvider conventionPackageProvider = this.configuration.getContainer()
                .getInstance(PackageProvider.class, "convention.packageProvider");
        if (conventionPackageProvider != null) {
            LOG.debug("Loading Convention config from bundle " + bundleName);
            conventionPackageProvider.loadPackages();
        }

        final Set<String> packagesAfterLoading = new HashSet<>(this.configuration.getPackageConfigNames());
        packagesAfterLoading.removeAll(packagesBeforeLoading);
        if (!packagesAfterLoading.isEmpty()) {
            // add the new packages to the map of bundle -> package
            for (final String packageName : packagesAfterLoading) {
                this.bundleAccessor.addPackageFromBundle(bundle, packageName);
            }
        }

        if (this.configuration.getRuntimeConfiguration() != null) {
            // if there is a runtime config, it means that this method was called from a bundle start event
            // instead of the initial load; in that case, reload the config
            this.configuration.rebuildRuntimeConfiguration();
        }
    } finally {
        ctx.put(DefaultBundleAccessor.CURRENT_BUNDLE_NAME, null);
        ctx.put(ClassLoaderInterface.CLASS_LOADER_INTERFACE, null);
    }
}
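
The removeAll call above is a before/after diff: snapshot the package names, let the Convention provider load, then subtract the snapshot so only the newly registered names remain. A standalone sketch of the same pattern, with illustrative package names that are not part of the Struts or OSGi APIs:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class NewPackageDiff {
    public static void main(String[] args) {
        Set<String> before = new HashSet<>(Arrays.asList("pkg-a", "pkg-b"));

        // ... imagine a provider registers more packages here ...
        Set<String> after = new HashSet<>(Arrays.asList("pkg-a", "pkg-b", "pkg-c"));

        after.removeAll(before);       // keep only what the provider added
        System.out.println(after);     // [pkg-c]
    }
}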

From source file:com.teradata.benchto.driver.loader.BenchmarkLoader.java

public List<Benchmark> loadBenchmarks(String sequenceId) {
    try {
        List<Path> benchmarkFiles = findBenchmarkFiles();

        benchmarkFiles = benchmarkFiles.stream().filter(activeBenchmarks()).collect(toList());

        benchmarkFiles.stream().forEach(path -> LOGGER.info("Benchmark file to be read: {}", path));

        List<Benchmark> allBenchmarks = loadBenchmarks(sequenceId, benchmarkFiles);
        LOGGER.debug("All benchmarks: {}", allBenchmarks);

        List<Benchmark> includedBenchmarks = allBenchmarks.stream()
                .filter(new BenchmarkByActiveVariablesFilter(properties)).collect(toList());

        Set<Benchmark> excludedBenchmarks = newLinkedHashSet(allBenchmarks);
        excludedBenchmarks.removeAll(includedBenchmarks);

        String formatString = createFormatString(allBenchmarks);
        LOGGER.info("Excluded Benchmarks:");
        printFormattedBenchmarksInfo(formatString, excludedBenchmarks);

        fillUniqueBenchmarkNames(includedBenchmarks);

        List<Benchmark> freshBenchmarks = ImmutableList.of();
        if (properties.isFrequencyCheckEnabled()) {
            freshBenchmarks = filterFreshBenchmarks(includedBenchmarks);
            LOGGER.info("Recently tested benchmarks:");
            printFormattedBenchmarksInfo(formatString, freshBenchmarks);
        }

        LOGGER.info("Selected Benchmarks:");
        includedBenchmarks.removeAll(freshBenchmarks);
        printFormattedBenchmarksInfo(formatString, includedBenchmarks);

        checkState(allBenchmarks.size() == includedBenchmarks.size() + excludedBenchmarks.size()
                + freshBenchmarks.size());

        return includedBenchmarks;
    } catch (IOException e) {
        throw new BenchmarkExecutionException("Could not load benchmarks", e);
    }
}
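
Here removeAll splits allBenchmarks into disjoint groups (excluded, fresh, included), and the final checkState verifies that the groups add up to the whole. A small sketch of that partitioning idea, using made-up benchmark names:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class BenchmarkPartitionSketch {
    public static void main(String[] args) {
        List<String> all = Arrays.asList("b1", "b2", "b3", "b4");
        List<String> included = new ArrayList<>(Arrays.asList("b1", "b3"));

        // Everything that was not explicitly included is excluded.
        Set<String> excluded = new LinkedHashSet<>(all);
        excluded.removeAll(included);

        System.out.println(excluded);  // [b2, b4]
        System.out.println(all.size() == included.size() + excluded.size()); // true
    }
}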

From source file:com.yahoo.pulsar.broker.namespace.NamespaceServiceTest.java

@Test
public void testSplitAndOwnBundles() throws Exception {

    OwnershipCache MockOwnershipCache = spy(pulsar.getNamespaceService().getOwnershipCache());
    doNothing().when(MockOwnershipCache).disableOwnership(any(NamespaceBundle.class));
    Field ownership = NamespaceService.class.getDeclaredField("ownershipCache");
    ownership.setAccessible(true);
    ownership.set(pulsar.getNamespaceService(), MockOwnershipCache);
    NamespaceService namespaceService = pulsar.getNamespaceService();
    NamespaceName nsname = new NamespaceName("pulsar/global/ns1");
    DestinationName dn = DestinationName.get("persistent://pulsar/global/ns1/topic-1");
    NamespaceBundles bundles = namespaceService.getNamespaceBundleFactory().getBundles(nsname);
    NamespaceBundle originalBundle = bundles.findBundle(dn);

    // Split bundle and take ownership of split bundles
    CompletableFuture<Void> result = namespaceService.splitAndOwnBundle(originalBundle);

    try {
        result.get();
    } catch (Exception e) {
        // make sure: no failure
        fail("split bundle faild", e);
    }
    NamespaceBundleFactory bundleFactory = this.pulsar.getNamespaceService().getNamespaceBundleFactory();
    NamespaceBundles updatedNsBundles = bundleFactory.getBundles(nsname);

    // new updated bundles shouldn't be null
    assertNotNull(updatedNsBundles);
    List<NamespaceBundle> bundleList = updatedNsBundles.getBundles();
    assertNotNull(bundleList);

    NamespaceBundleFactory utilityFactory = NamespaceBundleFactory.createFactory(Hashing.crc32());

    // (1) validate bundleFactory-cache has newly split bundles and removed old parent bundle
    Pair<NamespaceBundles, List<NamespaceBundle>> splitBundles = splitBundles(utilityFactory, nsname, bundles,
            originalBundle);
    assertNotNull(splitBundles);
    Set<NamespaceBundle> splitBundleSet = new HashSet<>(splitBundles.getRight());
    splitBundleSet.removeAll(bundleList);
    assertTrue(splitBundleSet.isEmpty());

    // (2) validate LocalZookeeper policies updated with newly created split
    // bundles
    String path = joinPath(LOCAL_POLICIES_ROOT, nsname.toString());
    byte[] content = this.pulsar.getLocalZkCache().getZooKeeper().getData(path, null, new Stat());
    Policies policies = ObjectMapperFactory.getThreadLocal().readValue(content, Policies.class);
    NamespaceBundles localZkBundles = bundleFactory.getBundles(nsname, policies.bundles);
    assertTrue(updatedNsBundles.equals(localZkBundles));
    System.out.println(policies);

    // (3) validate ownership of new split bundles by local owner
    bundleList.stream().forEach(b -> {
        try {
            byte[] data = this.pulsar.getLocalZkCache().getZooKeeper().getData(ServiceUnitZkUtils.path(b), null,
                    new Stat());
            NamespaceEphemeralData node = ObjectMapperFactory.getThreadLocal().readValue(data,
                    NamespaceEphemeralData.class);
            Assert.assertEquals(node.getNativeUrl(), this.pulsar.getBrokerServiceUrl());
        } catch (Exception e) {
            fail("failed to setup ownership", e);
        }
    });

}
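
The removeAll/isEmpty pair in step (1) is a subset check: copy the expected split bundles, subtract the bundles actually present, and assert nothing is left over. A standalone sketch of the idiom (the bundle range names are invented):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SubsetCheckSketch {
    public static void main(String[] args) {
        List<String> expected = Arrays.asList("0x0000_0x8000", "0x8000_0xffff");
        List<String> actual = Arrays.asList("0x0000_0x8000", "0x8000_0xffff", "0x4000_0x8000");

        Set<String> missing = new HashSet<>(expected);
        missing.removeAll(actual);                 // whatever remains was not found

        System.out.println(missing.isEmpty());     // true -> every expected bundle is present
    }
}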

From source file:com.github.rinde.rinsim.central.RandomSolver.java

@Override
public ImmutableList<ImmutableList<Parcel>> solve(GlobalStateObject state) {
    checkArgument(!state.getVehicles().isEmpty(), "Need at least one vehicle.");
    final LinkedListMultimap<VehicleStateObject, Parcel> map = LinkedListMultimap.create();

    final Set<Parcel> available = newLinkedHashSet(state.getAvailableParcels());
    final Set<Parcel> destinations = newLinkedHashSet();
    for (final VehicleStateObject vso : state.getVehicles()) {
        destinations.addAll(vso.getDestination().asSet());
    }
    available.removeAll(destinations);

    // do random assignment of available parcels (each available parcel is added twice: once for pickup, once for delivery)
    for (final Parcel p : available) {
        final int index = randomGenerator.nextInt(state.getVehicles().size());
        map.put(state.getVehicles().get(index), p);
        map.put(state.getVehicles().get(index), p);
    }

    final ImmutableList.Builder<ImmutableList<Parcel>> builder = ImmutableList.builder();
    // insert contents, shuffle ordering, insert destination if applicable
    for (final VehicleStateObject vso : state.getVehicles()) {
        final List<Parcel> assigned = newArrayList(map.get(vso));
        final List<Parcel> conts = newArrayList(vso.getContents());
        conts.removeAll(vso.getDestination().asSet());
        assigned.addAll(conts);
        if (vso.getDestination().isPresent()
                && state.getAvailableParcels().contains(vso.getDestination().get())) {
            assigned.add(vso.getDestination().get());
        }
        Collections.shuffle(assigned, new RandomAdaptor(randomGenerator));
        if (vso.getDestination().isPresent()) {
            assigned.add(0, vso.getDestination().get());
        }
        builder.add(ImmutableList.copyOf(assigned));
    }
    return builder.build();
}
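
available.removeAll(destinations) drops the parcels a vehicle is already driving towards before the random assignment. Because available is a LinkedHashSet, the surviving elements keep their original iteration order, which keeps the assignment reproducible for a fixed random seed. A short sketch of that detail (parcel names are placeholders):

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;

public class ExcludeInProgressSketch {
    public static void main(String[] args) {
        Set<String> available = new LinkedHashSet<>(Arrays.asList("p1", "p2", "p3", "p4"));
        Set<String> destinations = new LinkedHashSet<>(Arrays.asList("p2"));

        available.removeAll(destinations);   // only truly unassigned parcels remain
        System.out.println(available);       // [p1, p3, p4], insertion order preserved
    }
}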

From source file:fungus.JungGraphObserver.java

public boolean execute() {
    if (CDState.getCycle() % period != 0)
        return false;

    MycoCast mycocast = (MycoCast) Network.get(0).getProtocol(mycocastPid);

    int bio = mycocast.countBiomass();
    int ext = mycocast.countExtending();
    int bra = mycocast.countBranching();
    int imm = mycocast.countImmobile();

    // Update vertices
    Set<MycoNode> activeNodes = new HashSet<MycoNode>();
    for (int i = 0; i < Network.size(); i++) {
        MycoNode n = (MycoNode) Network.get(i);
        activeNodes.add(n);
        HyphaData data = n.getHyphaData();
        //if (data.isBiomass()) { continue; }
        if (graph.containsVertex(n)) {
            graph.removeVertex(n);
        }
        if (!graph.containsVertex(n)) {
            graph.addVertex(n);
        }
    }
    Set<MycoNode> jungNodes = new HashSet<MycoNode>(graph.getVertices());
    jungNodes.removeAll(activeNodes);

    for (MycoNode n : jungNodes) {
        graph.removeVertex(n);
    }

    // Update edges
    for (int i = 0; i < Network.size(); i++) {
        MycoNode n = (MycoNode) Network.get(i);
        HyphaData data = n.getHyphaData();
        HyphaLink link = n.getHyphaLink();

        synchronized (graph) {

            // We now add in all links and tune out display in Visualizer
            java.util.List<MycoNode> neighbors = (java.util.List<MycoNode>) link.getNeighbors();

            //// Adding only links to hypha thins out links to biomass
            //    (java.util.List<MycoNode>) link.getHyphae();

            Collection<MycoNode> jungNeighbors = graph.getNeighbors(n);

            // Remove edges from Jung graph that are not in peersim graph
            for (MycoNode o : jungNeighbors) {
                if (!neighbors.contains(o)) {
                    MycoEdge edge = graph.findEdge(n, o);
                    while (edge != null) {
                        graph.removeEdge(edge);
                        edge = graph.findEdge(n, o);
                    }
                }
            }

            // Add missing edges to Jung graph that are in peersim graph
            for (MycoNode o : neighbors) {
                if (graph.findEdge(n, o) == null) {
                    MycoEdge edge = new MycoEdge();
                    graph.addEdge(edge, n, o, EdgeType.DIRECTED);
                }
            }
        }

        //log.finest("VERTICES: " + graph.getVertices());
        //log.finest("EDGES: " + graph.getEdges());
    }

    for (ChangeListener cl : changeListeners) {
        cl.stateChanged(new ChangeEvent(graph));
    }
    if (walking) {
        try {
            Thread.sleep(walkDelay);
        } catch (InterruptedException e) {
        }
        stepBlocked = false;
    }

    try {
        while (stepBlocked && !noBlock) {
            synchronized (JungGraphObserver.class) {
                JungGraphObserver.class.wait();
            }
        }
    } catch (InterruptedException e) {
        stepBlocked = true;
    }
    stepBlocked = true;
    //System.out.println(graph.toString());
    return false;
}
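
jungNodes.removeAll(activeNodes) computes the stale vertices: everything currently in the Jung graph that no longer exists in the PeerSim network, which is then removed vertex by vertex. A standalone sketch of that cleanup pattern, with plain strings standing in for MycoNode:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class StaleVertexCleanup {
    public static void main(String[] args) {
        Set<String> inGraph = new HashSet<>(Arrays.asList("n1", "n2", "n3"));
        Set<String> active = new HashSet<>(Arrays.asList("n1", "n3"));

        Set<String> stale = new HashSet<>(inGraph);
        stale.removeAll(active);             // vertices no longer present in the network

        inGraph.removeAll(stale);            // drop them from the graph's vertex set
        System.out.println(inGraph);         // n1 and n3 remain
    }
}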

From source file:modelinspector.collectors.MostFrequentWordsCollector.java

@Override
public Set<String> getResult() {
    //        System.out.println(getName());
    //        System.out.println(result.size() + " - " + result);

    Set<String> missing = new HashSet<String>();

    // Collect the top-X words first
    for (Entry<String, Integer> e : wordList.entrySet()) {
        if (e.getValue() <= cutoff) {
            missing.add(e.getKey());
        }
    }

    // Remove all the observed ones
    missing.removeAll(result);

    return missing;
    //        return result;
}

From source file:ru.anr.base.BaseParent.java

/**
 * Performs an expectation cycle for the specified number of seconds,
 * checking the condition on each iteration.
 *
 * @param secs
 *            The number of seconds
 * @param sleepTime
 *            Sleep time in milliseconds
 * @param logProgress
 *            true, if it is required to log the progress
 * @param callback
 *            The callback
 * @param args
 *            The arguments
 * @return true, if the number of attempts has been exceeded
 */
public static boolean waitCondition(int secs, int sleepTime, boolean logProgress, SleepCallback callback,
        Object... args) {

    int counter = 0;
    Set<Integer> s = new HashSet<>(PERCENTS);

    while (!callback.doAction(args)) {

        int tick = (100 * counter / (secs * 1000));
        List<Integer> r = filter(s, i -> i < tick);

        if (!r.isEmpty()) {
            if (logProgress) {
                log("Wait Progress: {} %", r.get(0));
            }
            s.removeAll(r);
        }
        counter += sleepTime;

        if (counter > (secs * 1000)) {
            break;
        }
        sleep(sleepTime);
    }
    return counter > (secs * 1000);
}
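
s holds the progress milestones still to be reported and r is the List of milestones already passed; since removeAll accepts any Collection<?>, the filtered List can be passed directly. A small standalone sketch of the same bookkeeping (PERCENTS and filter are helpers assumed to exist in BaseParent, so plain streams are used here instead):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class MilestoneSketch {
    public static void main(String[] args) {
        Set<Integer> milestones = new HashSet<>(Arrays.asList(25, 50, 75, 100));
        int progress = 60;

        List<Integer> passed = milestones.stream()
                .filter(m -> m < progress)
                .collect(Collectors.toList());

        milestones.removeAll(passed);        // a List argument is fine: removeAll takes Collection<?>
        System.out.println(milestones);      // 75 and 100 are still to be reported
    }
}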

From source file:com.espertech.esper.dataflow.ops.BeaconSource.java

public DataFlowOpInitializeResult initialize(DataFlowOpInitializateContext context) throws Exception {
    initialDelayMSec = (long) (initialDelay * 1000);
    periodDelayMSec = (long) (interval * 1000);

    if (context.getOutputPorts().size() != 1) {
        throw new IllegalArgumentException("BeaconSource operator requires one output stream but produces "
                + context.getOutputPorts().size() + " streams");
    }

    // Check if a type is declared
    DataFlowOpOutputPort port = context.getOutputPorts().get(0);
    if (port.getOptionalDeclaredType() != null && port.getOptionalDeclaredType().getEventType() != null) {
        EventType outputEventType = port.getOptionalDeclaredType().getEventType();
        produceEventBean = port.getOptionalDeclaredType() != null
                && !port.getOptionalDeclaredType().isUnderlying();

        // compile properties to populate
        Set<String> props = allProperties.keySet();
        props.removeAll(PARAMETER_PROPERTIES);
        WriteablePropertyDescriptor[] writables = setupProperties(props.toArray(new String[props.size()]),
                outputEventType, context.getStatementContext());
        manufacturer = context.getServicesContext().getEventAdapterService().getManufacturer(outputEventType,
                writables, context.getServicesContext().getEngineImportService(), false);

        int index = 0;
        evaluators = new ExprEvaluator[writables.length];
        for (WriteablePropertyDescriptor writeable : writables) {

            final Object providedProperty = allProperties.get(writeable.getPropertyName());
            if (providedProperty instanceof ExprNode) {
                ExprNode exprNode = (ExprNode) providedProperty;
                ExprNode validated = ExprNodeUtility.validateSimpleGetSubtree(exprNode,
                        context.getStatementContext(), null);
                final ExprEvaluator exprEvaluator = validated.getExprEvaluator();
                final TypeWidener widener = TypeWidenerFactory.getCheckPropertyAssignType(
                        validated.toExpressionString(), exprEvaluator.getType(), writeable.getType(),
                        writeable.getPropertyName());
                if (widener != null) {
                    evaluators[index] = new ExprEvaluator() {
                        public Object evaluate(EventBean[] eventsPerStream, boolean isNewData,
                                ExprEvaluatorContext context) {
                            Object value = exprEvaluator.evaluate(eventsPerStream, isNewData, context);
                            return widener.widen(value);
                        }

                        public Class getType() {
                            return null;
                        }
                    };
                } else {
                    evaluators[index] = exprEvaluator;
                }
            } else if (providedProperty == null) {
                evaluators[index] = new ExprEvaluator() {
                    public Object evaluate(EventBean[] eventsPerStream, boolean isNewData,
                            ExprEvaluatorContext context) {
                        return null;
                    }

                    public Class getType() {
                        return null;
                    }
                };
            } else {
                evaluators[index] = new ExprEvaluator() {
                    public Object evaluate(EventBean[] eventsPerStream, boolean isNewData,
                            ExprEvaluatorContext context) {
                        return providedProperty;
                    }

                    public Class getType() {
                        return providedProperty.getClass();
                    }
                };
            }
            index++;
        }

        return null; // no changing types
    }

    // No type has been declared, we can create one
    String anonymousTypeName = context.getDataflowName() + "-beacon";
    Map<String, Object> types = new LinkedHashMap<String, Object>();
    Set<String> props = allProperties.keySet();
    props.removeAll(PARAMETER_PROPERTIES);

    int count = 0;
    evaluators = new ExprEvaluator[props.size()];
    for (String propertyName : props) {
        ExprNode exprNode = (ExprNode) allProperties.get(propertyName);
        ExprNode validated = ExprNodeUtility.validateSimpleGetSubtree(exprNode, context.getStatementContext(),
                null);
        final Object value = validated.getExprEvaluator().evaluate(null, true,
                context.getAgentInstanceContext());
        if (value == null) {
            types.put(propertyName, null);
        } else {
            types.put(propertyName, value.getClass());
        }
        evaluators[count] = new ExprEvaluator() {
            public Object evaluate(EventBean[] eventsPerStream, boolean isNewData,
                    ExprEvaluatorContext context) {
                return value;
            }

            public Class getType() {
                return null;
            }
        };
        count++;
    }

    EventType type = context.getServicesContext().getEventAdapterService()
            .createAnonymousObjectArrayType(anonymousTypeName, types);
    return new DataFlowOpInitializeResult(new GraphTypeDesc[] { new GraphTypeDesc(false, true, type) });
}
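
One detail worth noting in both branches above: props is the live key-set view returned by allProperties.keySet(), so removeAll(PARAMETER_PROPERTIES) also removes those entries from the underlying map. That may well be intended here, but when the side effect is not wanted the keys should be copied first. A standalone illustration, independent of Esper's API:

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class KeySetViewRemoveAll {
    public static void main(String[] args) {
        Map<String, Object> all = new HashMap<>();
        all.put("iterations", 10);
        all.put("price", 42);

        // Pitfall: keySet() is a live view, so removeAll also drops entries from the map.
        Set<String> view = all.keySet();
        view.removeAll(Arrays.asList("iterations"));
        System.out.println(all);                       // {price=42}

        // Side-effect-free alternative: copy the keys before subtracting.
        all.put("iterations", 10);
        Set<String> copy = new HashSet<>(all.keySet());
        copy.removeAll(Arrays.asList("iterations"));
        System.out.println(all.keySet());              // both keys still present
    }
}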

From source file:com.google.i18n.addressinput.common.FormatInterpreter.java

private void applyFieldOrderOverrides(String regionCode, List<AddressField> fieldOrder) {
    List<AddressField> customFieldOrder = formOptions.getCustomFieldOrder(regionCode);
    if (customFieldOrder == null) {
        return;
    }

    // We can assert that fieldOrder and customFieldOrder contain no duplicates.
    // We know this by the construction above and in FormOptions but we still have to think
    // about fields in the custom ordering which aren't visible (the loop below will fail if
    // a non-visible field appears in the custom ordering). However in that case it's safe to
    // just ignore the extraneous field.
    Set<AddressField> nonVisibleCustomFields = EnumSet.copyOf(customFieldOrder);
    nonVisibleCustomFields.removeAll(fieldOrder);
    if (nonVisibleCustomFields.size() > 0) {
        // Local mutable copy to remove non visible fields - this shouldn't happen often.
        customFieldOrder = new ArrayList<AddressField>(customFieldOrder);
        customFieldOrder.removeAll(nonVisibleCustomFields);
    }
    // It is vital for this loop to work correctly that every element in customFieldOrder
    // appears in fieldOrder exactly once.
    for (int fieldIdx = 0, customIdx = 0; fieldIdx < fieldOrder.size(); fieldIdx++) {
        if (customFieldOrder.contains(fieldOrder.get(fieldIdx))) {
            fieldOrder.set(fieldIdx, customFieldOrder.get(customIdx++));
        }
    }
}
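
EnumSet.copyOf gives a compact bit-set representation for the enum, so removeAll here is an efficient enum set difference. A standalone sketch with a hypothetical Field enum standing in for libaddressinput's AddressField:

import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;

public class EnumSetDifferenceSketch {
    enum Field { NAME, STREET, CITY, POSTCODE }

    public static void main(String[] args) {
        List<Field> customOrder = Arrays.asList(Field.NAME, Field.POSTCODE, Field.CITY);
        List<Field> visible = Arrays.asList(Field.NAME, Field.CITY);

        Set<Field> nonVisible = EnumSet.copyOf(customOrder);
        nonVisible.removeAll(visible);       // fields in the custom order that are not visible
        System.out.println(nonVisible);      // [POSTCODE]
    }
}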

From source file:edu.rice.cs.bioinfo.programs.phylonet.algos.network.NetworkPseudoLikelihoodFromGTT.java

public static Map<String, double[]> computeTripleFrequenciesFromSingleGT(Tree gt,
        Map<String, String> allele2species) {
    Set<String> allAlleles = new HashSet<>();
    for (String allele : gt.getLeaves()) {
        allAlleles.add(allele);
    }

    Map<TNode, Set<String>> node2leaves = new HashMap<>();
    Map<String, double[]> triple2counts = new HashMap<>();
    for (TNode node : gt.postTraverse()) {
        Set<String> leavesUnder = new HashSet<>();
        node2leaves.put(node, leavesUnder);
        if (node.isLeaf()) {
            leavesUnder.add(node.getName());
        } else {
            List<Set<String>> childLeavesList = new ArrayList<>();
            for (TNode child : node.getChildren()) {
                Set<String> childLeaves = node2leaves.get(child);
                leavesUnder.addAll(childLeaves);
                childLeavesList.add(childLeaves);
            }

            allAlleles.removeAll(leavesUnder);

            for (int i = 0; i < childLeavesList.size(); i++) {
                Set<String> childLeaves1 = childLeavesList.get(i);
                for (int j = i + 1; j < childLeavesList.size(); j++) {
                    Set<String> childLeaves2 = childLeavesList.get(j);
                    for (String allele1 : childLeaves1) {
                        String species1 = allele2species.get(allele1);
                        for (String allele2 : childLeaves2) {
                            String species2 = allele2species.get(allele2);
                            if (!species1.equals(species2)) {
                                for (String allele3 : allAlleles) {
                                    String species3 = allele2species.get(allele3);
                                    if (!species1.equals(species3) && !species2.equals(species3)) {
                                        addHighestFrequency(species1, species2, species3, triple2counts);
                                    }
                                }
                            }
                        }
                    }
                    //non-binary node
                    for (int k = j + 1; k < childLeavesList.size(); k++) {
                        Set<String> childLeaves3 = childLeavesList.get(k);
                        for (String allele1 : childLeaves1) {
                            String species1 = allele2species.get(allele1);
                            for (String allele2 : childLeaves2) {
                                String species2 = allele2species.get(allele2);
                                if (!species1.equals(species2)) {
                                    for (String allele3 : childLeaves3) {
                                        String species3 = allele2species.get(allele3);
                                        if (!species1.equals(species3) && !species2.equals(species3)) {
                                            addEqualFrequency(species1, species2, species3, triple2counts);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }

            }

            allAlleles.addAll(leavesUnder);
        }

    }
    return triple2counts;
}
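
The removeAll/addAll pair around the nested loops temporarily excludes the alleles under the current node and restores them before moving on, avoiding a full copy of allAlleles at every node. A minimal sketch of that exclude-then-restore pattern, with made-up allele names:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class ExcludeThenRestoreSketch {
    public static void main(String[] args) {
        Set<String> allAlleles = new HashSet<>(Arrays.asList("a1", "a2", "a3", "a4"));
        Set<String> leavesUnder = new HashSet<>(Arrays.asList("a2", "a3"));

        allAlleles.removeAll(leavesUnder);     // work only with alleles outside the current subtree
        System.out.println(allAlleles);        // a1 and a4

        allAlleles.addAll(leavesUnder);        // restore the full set for the next node
        System.out.println(allAlleles.size()); // 4 again
    }
}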