Example usage for java.util Set removeAll

List of usage examples for java.util Set removeAll

Introduction

On this page you can find example usages of the java.util.Set.removeAll method.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).
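
A minimal, self-contained sketch of the behaviour (the class name and the sample values below are made up for illustration): removeAll performs an in-place set difference, keeping only the elements of the receiver that are not contained in the argument collection, and returns true if the set was changed by the call.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class RemoveAllDemo {
    public static void main(String[] args) {
        // Hypothetical sample data, chosen only to illustrate the behaviour.
        Set<String> colors = new HashSet<String>(Arrays.asList("red", "green", "blue"));
        List<String> toRemove = Arrays.asList("green", "blue", "yellow");

        // Removes every element of toRemove that is present in colors;
        // elements absent from the set (here "yellow") are simply ignored.
        boolean changed = colors.removeAll(toRemove);

        System.out.println(changed); // true, because the set was modified
        System.out.println(colors);  // [red]
    }
}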

Usage

From source file:edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateSubplanWithInputCardinalityOneRule.java

/**
 * Whether every input free variable has cardinality one.
 * @param opRef
 *            the operator to be checked (including its input operators)
 * @param freeVars
 *            variables to be checked for produced operators
 * @return true if every input variable has cardinality one; false otherwise.
 * @throws AlgebricksException
 */
private boolean isCardinalityOne(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> freeVars)
        throws AlgebricksException {
    Set<LogicalVariable> varsWithCardinalityOne = new ListSet<LogicalVariable>();
    Set<LogicalVariable> varsLiveAtUnnestAndJoin = new ListSet<LogicalVariable>();
    isCardinalityOne(opRef, freeVars, varsWithCardinalityOne, varsLiveAtUnnestAndJoin);
    varsWithCardinalityOne.removeAll(varsLiveAtUnnestAndJoin);
    return varsWithCardinalityOne.equals(freeVars);
}

From source file:com.googlecode.icegem.cacheutils.updater.UpdateTool.java

private void filterRegions(Set<Region<?, ?>> regionsSet) {
    Set<Region<?, ?>> childRegionsSet = new HashSet<Region<?, ?>>();
    for (Region<?, ?> region : regionsSet) {
        if (region.getParentRegion() != null) {
            childRegionsSet.add(region);
        }
    }

    regionsSet.removeAll(childRegionsSet);
}

From source file:com.autentia.wuija.widget.SelectMultipleLists.java

public void restrictValues(Collection<T> values) {
    final Set<T> allowedValues = new HashSet<T>(values);

    for (SelectManyListbox<T> list : selectionLists) {
        allowedValues.removeAll(list.getItems());
        list.retainAll(values);
    }

    getAllowedValuesList().clear();
    addAllToAllowed(allowedValues);
}

From source file:bdi4jade.core.Intention.java

/**
 * Dispatches a new plan to try to achieve the intention goal. It looks for
 * plans that can achieve the goal and have not already been tried, and then
 * starts the selected plan. If all possible plans have already been executed,
 * the intention is set to unachievable.
 */
private synchronized void dispatchPlan() {
    Map<Capability, Set<Plan>> options = new HashMap<>();

    if (owners.isEmpty()) {
        for (Capability capability : myAgent.getCapabilities()) {
            capability.addCandidatePlans(goal, options);
        }
    } else {
        for (Capability capability : owners) {
            capability.addCandidatePlans(goal, options);
        }
    }

    Iterator<Capability> it = options.keySet().iterator();
    while (it.hasNext()) {
        Set<Plan> plans = options.get(it.next());
        plans.removeAll(executedPlans);
        if (plans.isEmpty()) {
            it.remove();
        }
    }

    while (this.currentPlan == null && !options.isEmpty()) {
        Plan selectedPlan = myAgent.getPlanSelectionStrategy().selectPlan(goal, options);
        try {
            this.currentPlan = selectedPlan.createPlanBody();
            currentPlan.init(selectedPlan, this);
        } catch (PlanInstantiationException e) {
            log.error("Plan " + selectedPlan.getId() + " could not be instantiated.");
            e.printStackTrace();
            this.currentPlan = null;
            for (Set<Plan> plans : options.values()) {
                plans.remove(selectedPlan);
            }
        }
    }

    if (options.isEmpty()) {
        this.unachievable = true;
    } else {
        this.currentPlan.start();
    }
}

From source file:org.lieuofs.geo.territoire.biz.dao.EtatTerritoireFichierXmlDao.java

@Override
public Set<EtatTerritoirePersistant> rechercher(EtatTerritoireCritere critere) {
    Set<EtatTerritoirePersistant> ensemble = new HashSet<EtatTerritoirePersistant>(tous);
    if (Boolean.TRUE.equals(critere.getEstEtat()))
        ensemble = etats;
    if (Boolean.FALSE.equals(critere.getEstEtat()))
        ensemble.removeAll(etats);
    Set<EtatTerritoirePersistant> retour = new HashSet<EtatTerritoirePersistant>();
    for (EtatTerritoirePersistant etatTerr : ensemble) {
        if (accept(etatTerr, critere))
            retour.add(etatTerr);
    }
    return retour;
}

From source file:msi.gaml.operators.Graphs.java

@operator(value = "main_connected_component", type = IType.GRAPH, category = {
        IOperatorCategory.GRAPH }, concept = { IConcept.GRAPH, IConcept.NODE, IConcept.EDGE })
@doc(value = "returns the sub-graph corresponding to the main connected components of the graph", examples = {
        @example(value = "main_connected_component(my_graph)", isExecutable = false, equals = "the sub-graph corresponding to the main connected components of the graph", test = false) }, see = {
                "connected_components_of" })
public static IGraph ReduceToMainconnectedComponentOf(final IScope scope, final IGraph graph) {
    if (graph == null) {
        throw GamaRuntimeException
                .error("In the connected_components_of operator, the graph should not be null!", scope);
    }

    final IList<IList> cc = connectedComponentOf(scope, graph);
    final IGraph newGraph = (IGraph) graph.copy(scope);
    IList mainCC = null;
    int size = 0;
    for (final IList c : cc) {
        if (c.size() > size) {
            size = c.size();
            mainCC = c;
        }
    }
    if (mainCC != null) {
        final Set vs = graph.vertexSet();
        vs.removeAll(mainCC);
        for (final Object v : vs) {
            newGraph.removeVertex(v);
        }
    }

    return newGraph;
}

From source file:edu.utah.further.core.math.schedule.JobSchedulerGraphImpl.java

/**
 * Check that all job dependencies are fulfilled.
 *
 * @param v
 */
private void checkIfDependenciesAreSatisfied(final V v) {
    final Set<V> dependencies = CollectionUtil.newSet(Graphs.predecessorListOf(graph, v));
    if (!completedJobs.containsAll(dependencies)) {
        final Set<V> unfulfilledDependencies = CollectionUtil.newSet(dependencies);
        unfulfilledDependencies.removeAll(completedJobs);
        throw new IllegalStateException(
                "Cannot start job " + v + " due to unfulfilled dependencies " + unfulfilledDependencies
                        + ". Completed jobs: " + completedJobs + "; all dependencies: " + dependencies);
    }
}

From source file:org.syncope.core.rest.ReportTestITCase.java

@Test
public void executeAndExport() throws IOException {

    ReportTO reportTO = restTemplate.getForObject(BASE_URL + "report/read/{reportId}", ReportTO.class, 1);
    assertNotNull(reportTO);

    Set<Long> preExecIds = new HashSet<Long>();
    for (ReportExecTO exec : reportTO.getExecutions()) {
        preExecIds.add(exec.getId());
    }

    ReportExecTO execution = restTemplate.postForObject(BASE_URL + "report/execute/{reportId}", null,
            ReportExecTO.class, reportTO.getId());
    assertNotNull(execution);

    int i = 0;
    int maxit = 50;
    do {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
        }

        reportTO = restTemplate.getForObject(BASE_URL + "report/read/{reportId}", ReportTO.class, 1);

        i++;
    } while (preExecIds.size() == reportTO.getExecutions().size() && i < maxit);

    Set<Long> postExecIds = new HashSet<Long>();
    for (ReportExecTO exec : reportTO.getExecutions()) {
        postExecIds.add(exec.getId());
    }

    postExecIds.removeAll(preExecIds);
    assertEquals(1, postExecIds.size());

    // wait for report exec XML to be stored...
    try {
        Thread.sleep(3000);
    } catch (InterruptedException e) {
    }

    // Export
    // 1. XML (default)
    HttpGet getMethod = new HttpGet(BASE_URL + "report/execution/export/" + postExecIds.iterator().next());
    HttpResponse response = ((PreemptiveAuthHttpRequestFactory) restTemplate.getRequestFactory())
            .getHttpClient().execute(getMethod);
    assertEquals(200, response.getStatusLine().getStatusCode());

    String export = EntityUtils.toString(response.getEntity()).trim();
    assertNotNull(export);
    assertFalse(export.isEmpty());

    // 2. HTML
    getMethod = new HttpGet(
            BASE_URL + "report/execution/export/" + postExecIds.iterator().next() + "?fmt=HTML");
    response = ((PreemptiveAuthHttpRequestFactory) restTemplate.getRequestFactory()).getHttpClient()
            .execute(getMethod);
    assertEquals(200, response.getStatusLine().getStatusCode());

    export = EntityUtils.toString(response.getEntity()).trim();
    assertNotNull(export);
    assertFalse(export.isEmpty());

    // 3. PDF
    getMethod = new HttpGet(BASE_URL + "report/execution/export/" + postExecIds.iterator().next() + "?fmt=PDF");
    response = ((PreemptiveAuthHttpRequestFactory) restTemplate.getRequestFactory()).getHttpClient()
            .execute(getMethod);
    assertEquals(200, response.getStatusLine().getStatusCode());

    export = EntityUtils.toString(response.getEntity()).trim();
    assertNotNull(export);
    assertFalse(export.isEmpty());

    // 4. RTF
    getMethod = new HttpGet(BASE_URL + "report/execution/export/" + postExecIds.iterator().next() + "?fmt=RTF");
    response = ((PreemptiveAuthHttpRequestFactory) restTemplate.getRequestFactory()).getHttpClient()
            .execute(getMethod);
    assertEquals(200, response.getStatusLine().getStatusCode());

    export = EntityUtils.toString(response.getEntity()).trim();
    assertNotNull(export);
    assertFalse(export.isEmpty());
}

From source file:dk.netarkivet.harvester.indexserver.distribute.IndexRequestClient.java

/**
 * This method makes sure the actual caching of underlying data is done
 * using the index server. It will convert calls into an IndexRequestMessage
 * which is sent to the server. Both the Set&lt;Long&gt; of found jobs and
 * the side effect of caching the index are obtained through this
 * communication with the server. The resulting files will be unzipped into the cache dir.
 *
 * This method should not be called directly! Instead call cache() or
 * getIndex().
 *
 * @param jobSet The set of job IDs.
 * @return The set of found job IDs.
 * @throws ArgumentNotValid on null argument; or on wrong parameters in
 * replied message.
 * @throws IOFailure on trouble in communication or invalid reply types.
 * @throws IllegalState if message is not OK.
 * @see dk.netarkivet.harvester.indexserver.FileBasedCache#cache
 * @see dk.netarkivet.harvester.indexserver.FileBasedCache#getIndex
 */
protected Set<Long> cacheData(Set<Long> jobSet) throws IOFailure, IllegalState, ArgumentNotValid {
    ArgumentNotValid.checkNotNull(jobSet, "Set<Long> id");

    log.info("Requesting an index of type '" + this.requestType + "' for the jobs ["
            + StringUtils.conjoin(",", jobSet) + "]");
    // use locally defined ftp-server, if required
    RemoteFileSettings ftpSettings = null;

    if (useLocalFtpserver()) {
        log.debug("Requesting the use of the FTPserver defined locally.");
        ftpSettings = FTPRemoteFile.getRemoteFileSettings();
    }

    //Send request to server
    IndexRequestMessage irMsg = new IndexRequestMessage(requestType, jobSet, ftpSettings);
    log.debug("Waiting " + TimeUtils.readableTimeInterval(getIndexTimeout()) + " for the index");
    NetarkivetMessage msg = getSynchronizer().sendAndWaitForOneReply(irMsg, getIndexTimeout());

    checkMessageValid(jobSet, msg);
    IndexRequestMessage reply = (IndexRequestMessage) msg;

    Set<Long> foundJobs = reply.getFoundJobs();
    // Only if all jobs asked for were found will the result contain files.
    Set<Long> diffSet = new HashSet<Long>(jobSet);
    diffSet.removeAll(foundJobs);
    if (diffSet.size() == 0) {
        log.debug("Successfully received an index of type '" + this.requestType + "' for the jobs ["
                + StringUtils.conjoin(",", jobSet) + "]");
        try {
            if (reply.isIndexIsStoredInDirectory()) {
                gunzipToDir(reply.getResultFiles(), getCacheFile(jobSet));
            } else {
                unzipAndDeleteRemoteFile(reply.getResultFile(), getCacheFile(jobSet));
            }
        } catch (IOFailure e) {
            log.warn("IOFailure during unzipping of index", e);
            return new HashSet<Long>();
        }
    } else {
        log.debug("No index received. The following jobs were not found: " + StringUtils.conjoin(",", diffSet));
    }

    //Return the set of found jobs
    return foundJobs;
}

From source file:ch.systemsx.cisd.openbis.generic.server.business.importer.DatabaseInstanceImporter.java

private void checkMetaData(final SimpleDatabaseMetaData currentMetaData,
        final SimpleDatabaseMetaData metaData) {
    final String currentVersion = currentMetaData.getDatabaseVersion();
    final String importedVersion = metaData.getDatabaseVersion();
    if (currentVersion.equals(importedVersion) == false) {
        throw new UserFailureException("Version of current database is " + currentVersion
                + " which does not match the version of the database to be imported: " + importedVersion);
    }
    final Set<String> currentTables = getTableNames(currentMetaData);
    final Set<String> importedTables = getTableNames(metaData);
    if (currentTables.equals(importedTables) == false) {
        Set<String> missingTables = new HashSet<String>(currentTables);
        missingTables.removeAll(importedTables);
        if (missingTables.size() > 0) {
            throw new UserFailureException("Current database has tables " + missingTables
                    + "\n which do not exist in the database to be imported.");
        }
        Set<String> unknownTables = new HashSet<String>(importedTables);
        unknownTables.removeAll(currentTables);
        if (unknownTables.size() > 0) {
            throw new UserFailureException("Current database does not have tables " + unknownTables
                    + "\n which exist in the database to be imported.");
        }
    }
}