Example usage for java.util Collection remove

List of usage examples for java.util Collection remove

Introduction

On this page you can find example usages of java.util Collection remove.

Prototype

boolean remove(Object o);

Document

Removes a single instance of the specified element from this collection, if it is present (optional operation).
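
To make that contract concrete before the real-world usages below, here is a small self-contained sketch (illustrative only, not taken from any of the quoted sources): remove deletes at most one matching element, chosen via Object.equals, and returns true only if the collection changed.

import java.util.ArrayList;
import java.util.Collection;

public class CollectionRemoveDemo {
    public static void main(String[] args) {
        Collection<String> names = new ArrayList<String>();
        names.add("a");
        names.add("b");
        names.add("a"); // deliberate duplicate

        // Only the first matching "a" is removed; the duplicate survives.
        System.out.println(names.remove("a")); // true
        System.out.println(names);             // [b, a]

        // No matching element: the collection is unchanged.
        System.out.println(names.remove("z")); // false
    }
}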

Usage

From source file:org.jactr.core.module.procedural.six.DefaultProceduralModule6.java

protected IProduction removeProductionInternal(IProduction production) {
    ISymbolicProduction symProd = production.getSymbolicProduction();

    /*
     * we need all the chunktypes that this production matches against; this
     * info is used to accelerate conflict set assembly
     */
    Set<IChunkType> candidateChunkTypes = new HashSet<IChunkType>();
    /*
     * in some cases where no chunktype can be inferred, we just snag the buffer
     * name
     */
    Set<String> bufferNames = new HashSet<String>();
    Set<String> ambiguousBufferNames = new HashSet<String>();
    for (ICondition condition : symProd.getConditions())
        if (condition instanceof ChunkTypeCondition) {
            ChunkTypeCondition ctc = (ChunkTypeCondition) condition;
            IChunkType chunkType = ctc.getChunkType();

            bufferNames.add(ctc.getBufferName());

            if (chunkType != null)
                candidateChunkTypes.add(chunkType);
        } else if (condition instanceof ChunkCondition) {
            ChunkCondition cc = (ChunkCondition) condition;
            IChunkType chunkType = cc.getChunk().getSymbolicChunk().getChunkType();

            bufferNames.add(cc.getBufferName());

            if (chunkType != null)
                candidateChunkTypes.add(chunkType);
        } else if (condition instanceof AbstractBufferCondition) {
            String bufferName = ((AbstractBufferCondition) condition).getBufferName();

            if (condition instanceof VariableCondition)
                bufferNames.add(bufferName);

            /*
             * this will catch all queries and variable conditions. These are
             * production conditions from which we can't immediately determine the
             * chunktype of the buffer contents
             */
            ambiguousBufferNames.add(bufferName);
        }

    /*
     * figure out all the children of the chunktypes, since any production
     * that could fire for chunkTypeA could also fire for chunkTypeB when
     * chunkTypeB is a child of (derived from) chunkTypeA
     */
    Set<IChunkType> chunkTypesToProcess = new HashSet<IChunkType>(candidateChunkTypes);
    for (IChunkType chunkType : candidateChunkTypes)
        chunkTypesToProcess.addAll(chunkType.getSymbolicChunkType().getChildren());

    _readWriteLock.writeLock().lock();

    /*
     * get the production's unique name
     */
    String productionName = symProd.getName();

    /*
     * remove it from the name map
     */
    _allProductionsByName.remove(productionName.toLowerCase());

    /*
     * remove it from the chunktype maps
     */
    for (IChunkType chunkType : chunkTypesToProcess) {
        Collection<IProduction> productions = _allProductionsByChunkType.get(chunkType);
        if (productions != null)
            productions.remove(production);
    }

    /*
     * now for the ambiguous conditions
     */
    for (String bufferName : ambiguousBufferNames) {
        Collection<IProduction> productions = _ambiguousProductions.get(bufferName);
        if (productions != null)
            productions.remove(production);
    }

    _readWriteLock.writeLock().unlock();

    return production;
}

From source file:uk.ac.ebi.intact.editor.services.curate.interaction.InteractionEditorService.java

private void initialiseParticipants(InteractionEvidence parent, Collection<ParticipantEvidence> participants) {
    List<ParticipantEvidence> originalParticipants = new ArrayList<ParticipantEvidence>(participants);
    ParticipantEvidenceCloner participantCloner = new ParticipantEvidenceCloner();
    FeatureEvidenceCloner featureCloner = new FeatureEvidenceCloner();
    uk.ac.ebi.intact.editor.controller.curate.cloner.InteractorCloner interactorCloner = new uk.ac.ebi.intact.editor.controller.curate.cloner.InteractorCloner();
    for (ParticipantEvidence det : originalParticipants) {
        ParticipantEvidence p = initialiseParticipant(det, interactorCloner, featureCloner, participantCloner);
        if (p != det) {
            participants.remove(det);
            parent.addParticipant(p);
        }
    }
}
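
The copy taken in the first line of the method is what makes participants.remove(det) legal inside the loop: removing from a collection while iterating over that same collection throws ConcurrentModificationException for most implementations. A minimal sketch of the two common alternatives (illustrative names, not from the IntAct source):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class SafeRemovalDuringIteration {
    public static void main(String[] args) {
        List<String> items = new ArrayList<String>(Arrays.asList("keep", "drop", "keep", "drop"));

        // Alternative 1: iterate over a snapshot, mutate the original
        // (the approach used in initialiseParticipants above).
        for (String s : new ArrayList<String>(items)) {
            if (s.equals("drop")) {
                items.remove(s);
            }
        }
        System.out.println(items); // [keep, keep]

        // Alternative 2: remove through the iterator itself.
        items.add("drop");
        for (Iterator<String> it = items.iterator(); it.hasNext();) {
            if (it.next().equals("drop")) {
                it.remove();
            }
        }
        System.out.println(items); // [keep, keep]
    }
}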

From source file:test.edu.uci.ics.jung.graph.predicates.VertexPredicateTest.java

public void testEnforcesVertexPredicate() {
    Predicate p = new UserDatumVertexPredicate("key", "a");
    Collection predicates = g.getVertexConstraints();
    assertFalse(predicates.contains(p));
    try {
        predicates.add(p);
        fail("should not be able to add predicates to a non-empty graph");
    } catch (IllegalArgumentException iae) {
        // expected: constraints cannot be added to a non-empty graph
    }
    g.removeAllVertices();
    predicates.add(p);
    v1 = new SparseVertex();
    v2 = new SparseVertex();
    v3 = new SparseVertex();
    v1.addUserDatum("key", "a", UserData.SHARED);
    v2.addUserDatum("key", "a", UserData.SHARED);
    v3.addUserDatum("key", "a", UserData.SHARED);
    g.addVertex(v1);
    g.addVertex(v2);
    g.addVertex(v3);
    assertTrue(predicates.contains(p));
    try {
        Vertex v4 = new SparseVertex();
        g.addVertex(v4);
        fail(p.toString());
    } catch (IllegalArgumentException iae) {
        // expected: v4 lacks the user datum required by the predicate
    }
    Vertex v5 = new SparseVertex();
    v5.addUserDatum("key", "a", UserData.SHARED);
    g.addVertex(v5);

    assertTrue(predicates.remove(p));
    assertFalse(predicates.remove(p));
    assertTrue(PredicateUtils.satisfiesVertexConstraint(g, p));
}

From source file:org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.EnforceVariablesVisitor.java

@Override
public ILogicalOperator visitAssignOperator(AssignOperator op, Collection<LogicalVariable> varsToRecover)
        throws AlgebricksException {
    List<Mutable<ILogicalExpression>> assignedExprRefs = op.getExpressions();
    List<LogicalVariable> assignedVars = op.getVariables();

    // Maps assigning variables if assignment expressions are VariableReferenceExpressions.
    for (int index = 0; index < assignedVars.size(); ++index) {
        ILogicalExpression expr = assignedExprRefs.get(index).getValue();
        if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
            VariableReferenceExpression varExpr = (VariableReferenceExpression) expr;
            LogicalVariable sourceVar = varExpr.getVariableReference();
            updateVarMapping(sourceVar, assignedVars.get(index));
            varsToRecover.remove(sourceVar);
        }
    }
    return visitsInputs(op, varsToRecover);
}

From source file:org.geotools.data.shapefile.ShpFiles.java

/**
 * Unlocks a read lock. The url and requestor must be the same as the ones
 * used to obtain the lock.
 *
 * @param url
 *           url that was locked
 * @param requestor
 *           the class that requested the url
 */
public void unlockRead(URL url, FileReader requestor) {
    if (url == null) {
        throw new NullPointerException("url cannot be null");
    }
    if (requestor == null) {
        throw new NullPointerException("requestor cannot be null");
    }

    Collection<ShpFilesLocker> threadLockers = getCurrentThreadLockers();
    boolean removed = threadLockers.remove(new ShpFilesLocker(url, requestor));
    if (!removed) {
        throw new IllegalArgumentException("Expected requestor " + requestor
                + " to have locked the url but it does not hold the lock for the URL");
    }
    if (threadLockers.size() == 0)
        lockers.remove(Thread.currentThread());
    readWriteLock.readLock().unlock();
}
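
Note that threadLockers.remove(new ShpFilesLocker(url, requestor)) removes an instance other than the one originally added; that only works because ShpFilesLocker defines equality in terms of its url and requestor. A sketch of the pattern, with a hypothetical Locker class standing in for ShpFilesLocker:

import java.util.ArrayList;
import java.util.Collection;

public class EqualsBasedRemoval {
    // Hypothetical stand-in for ShpFilesLocker: equality is defined by the
    // (url, requestor) pair rather than by object identity.
    static final class Locker {
        final String url;
        final String requestor;

        Locker(String url, String requestor) {
            this.url = url;
            this.requestor = requestor;
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof Locker))
                return false;
            Locker other = (Locker) o;
            return url.equals(other.url) && requestor.equals(other.requestor);
        }

        @Override
        public int hashCode() {
            return 31 * url.hashCode() + requestor.hashCode();
        }
    }

    public static void main(String[] args) {
        Collection<Locker> lockers = new ArrayList<Locker>();
        lockers.add(new Locker("file:a.shp", "reader-1"));

        // A freshly constructed but equal instance removes the stored one.
        System.out.println(lockers.remove(new Locker("file:a.shp", "reader-1"))); // true
    }
}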

From source file:org.apache.cayenne.merge.DbMerger.java

public List<MergerToken> createMergeTokens(Collection<DbEntity> existing, Collection<DbEntity> loadedFromDb,
        DbLoaderConfiguration config) {
    Collection<DbEntity> dbEntitiesToDrop = new LinkedList<DbEntity>(loadedFromDb);

    List<MergerToken> tokens = new LinkedList<MergerToken>();
    for (DbEntity dbEntity : existing) {
        String tableName = dbEntity.getName();

        // look for table
        DbEntity detectedEntity = findDbEntity(loadedFromDb, tableName);
        if (detectedEntity == null) {
            tokens.add(factory.createCreateTableToDb(dbEntity));
            // TODO: does this work properly with createReverse?
            for (DbRelationship rel : dbEntity.getRelationships()) {
                tokens.add(factory.createAddRelationshipToDb(dbEntity, rel));
            }
            continue;
        }

        dbEntitiesToDrop.remove(detectedEntity);

        tokens.addAll(checkRelationshipsToDrop(dbEntity, detectedEntity));
        if (!config.isSkipRelationshipsLoading()) {
            tokens.addAll(checkRelationshipsToAdd(dbEntity, detectedEntity));
        }
        tokens.addAll(checkRows(dbEntity, detectedEntity));

        if (!config.isSkipPrimaryKeyLoading()) {
            MergerToken token = checkPrimaryKeyChange(dbEntity, detectedEntity);
            if (token != null) {
                tokens.add(token);
            }
        }
    }

    // drop table
    // TODO: support drop table. currently, too many tables are marked for
    // drop
    for (DbEntity e : dbEntitiesToDrop) {
        tokens.add(factory.createDropTableToDb(e));
        for (DbRelationship relationship : e.getRelationships()) {
            DbEntity detectedEntity = findDbEntity(existing, relationship.getTargetEntityName());
            if (detectedEntity != null) {
                tokens.add(factory.createDropRelationshipToDb(detectedEntity,
                        relationship.getReverseRelationship()));
            }
        }
    }

    return tokens;
}

From source file:org.nuxeo.ecm.webapp.filemanager.FileManageActionsBean.java

@Override
@WebRemote
public String removeUploadedFile(String fileName) {
    NxUploadedFile fileToDelete = null;

    // Retrieve only the real filename: IE stores the full path of the file
    // as the filename (e.g. Z:\\path\\to\\file)
    fileName = FilenameUtils.getName(fileName);
    Collection<NxUploadedFile> files = getUploadedFiles();
    if (files != null) {
        for (NxUploadedFile file : files) {
            String uploadedFileName = file.getName();
            if (fileName.equals(uploadedFileName)) {
                fileToDelete = file;
                break;
            }
        }
    }
    if (fileToDelete != null) {
        fileToDelete.getFile().delete();
        files.remove(fileToDelete);
        setUploadedFiles(files);
    }
    return "";
}

From source file:org.apache.flume.client.avro.TestReliableSpoolingFileEventReader.java

private void templateTestForLargeNumberOfFiles(ConsumeOrder order, Comparator<Long> comparator, int N)
        throws IOException {
    File dir = null;
    try {
        dir = new File("target/test/work/" + this.getClass().getSimpleName() + "_large");
        Files.createParentDirs(new File(dir, "dummy"));
        ReliableEventReader reader = new ReliableSpoolingFileEventReader.Builder().spoolDirectory(dir)
                .consumeOrder(order).build();
        Map<Long, List<String>> expected;
        if (comparator == null) {
            expected = new TreeMap<Long, List<String>>();
        } else {
            expected = new TreeMap<Long, List<String>>(comparator);
        }
        for (int i = 0; i < N; i++) {
            File f = new File(dir, "file-" + i);
            String data = "file-" + i;
            Files.write(data, f, Charsets.UTF_8);
            if (expected.containsKey(f.lastModified())) {
                expected.get(f.lastModified()).add(data);
            } else {
                expected.put(f.lastModified(), Lists.newArrayList(data));
            }
        }
        Collection<String> expectedList;
        if (order == ConsumeOrder.RANDOM) {
            expectedList = Sets.newHashSet();
        } else {
            expectedList = Lists.newArrayList();
        }
        for (Entry<Long, List<String>> entry : expected.entrySet()) {
            Collections.sort(entry.getValue());
            expectedList.addAll(entry.getValue());
        }
        for (int i = 0; i < N; i++) {
            List<Event> events = reader.readEvents(10);
            for (Event e : events) {
                if (order == ConsumeOrder.RANDOM) {
                    Assert.assertTrue(expectedList.remove(new String(e.getBody())));
                } else {
                    Assert.assertEquals(((ArrayList<String>) expectedList).get(0), new String(e.getBody()));
                    ((ArrayList<String>) expectedList).remove(0);
                }
            }
            reader.commit();
        }
    } finally {
        deleteDir(dir);
    }
}
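
The test above uses both remove overloads: expectedList.remove(new String(e.getBody())) resolves to Collection.remove(Object), while ((ArrayList<String>) expectedList).remove(0) resolves to List.remove(int). The distinction is easy to get wrong when the element type is Integer, as this small illustrative sketch shows:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class RemoveOverloads {
    public static void main(String[] args) {
        List<Integer> numbers = new ArrayList<Integer>(Arrays.asList(10, 20, 30));

        // List.remove(int) removes by position...
        numbers.remove(0);                   // removes the element at index 0 (10)
        System.out.println(numbers);         // [20, 30]

        // ...while Collection.remove(Object) removes by equality.
        numbers.remove(Integer.valueOf(30)); // removes the value 30
        System.out.println(numbers);         // [20]
    }
}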

From source file:org.openmicroscopy.shoola.env.data.views.calls.DMRefreshLoader.java

/**
 * Creates a {@link BatchCall} to retrieve the groups.
 *
 * @param nodes The map whose keys are the security contexts and whose values
 *              are the corresponding collections of data objects to reload.
 * @return The {@link BatchCall}.
 */
private BatchCall makeGroupsBatchCall(final Map<SecurityContext, List> nodes) {
    return new BatchCall("Loading groups: ") {
        public void doCall() throws Exception {
            AdminService svc = context.getAdminService();
            //Check if the user is an administrator
            Boolean admin = (Boolean) context.lookup(LookupNames.USER_ADMINISTRATOR);
            if (admin != null && admin.booleanValue()) {
                Iterator<Entry<SecurityContext, List>> i = nodes.entrySet().iterator();
                Entry<SecurityContext, List> e;
                SecurityContext ctx;
                while (i.hasNext()) {
                    e = i.next();
                    ctx = e.getKey();
                    List<GroupData> groups = svc.loadGroups(ctx, -1);
                    List<GroupData> r = new ArrayList<GroupData>();
                    List<Long> toRemove = new ArrayList<Long>();
                    List<GroupData> l;
                    List list = e.getValue();
                    Iterator j = list.iterator();
                    while (j.hasNext()) {
                        long groupID = (Long) j.next();
                        l = svc.loadGroups(ctx, groupID);
                        toRemove.add(groupID);
                        if (l.size() == 1)
                            r.add(l.get(0));
                    }
                    Iterator<GroupData> k = groups.iterator();
                    GroupData g;
                    while (k.hasNext()) {
                        g = (GroupData) k.next();
                        if (!toRemove.contains(g.getId()))
                            r.add(g);
                    }
                    results = r;
                }
            } else { // not an administrator; the user owns some groups
                Collection allgroups = getAllGroups();
                Collection groups = getGroupsLeaderOf();
                Iterator i = groups.iterator();
                GroupData group;
                SecurityContext ctx;
                List<GroupData> l = new ArrayList<GroupData>();
                while (i.hasNext()) {
                    group = (GroupData) i.next();
                    ctx = new SecurityContext(group.getId());
                    l.addAll(svc.loadGroups(ctx, group.getId()));
                    allgroups.remove(group);
                }
                Collection all = new ArrayList();
                all.addAll(l);
                all.addAll(allgroups);
                context.bind(LookupNames.USER_GROUP_DETAILS, all);
                List agents = (List) context.lookup(LookupNames.AGENTS);
                Iterator kk = agents.iterator();
                AgentInfo agentInfo;
                Registry reg;
                while (kk.hasNext()) {
                    agentInfo = (AgentInfo) kk.next();
                    if (agentInfo.isActive()) {
                        reg = agentInfo.getRegistry();
                        reg.bind(LookupNames.USER_GROUP_DETAILS, all);
                    }
                }
                results = l;
            }
        }
    };
}

From source file:org.geotools.data.shapefile.ShpFiles.java

/**
 * Unlocks a write lock. The requestor must have previously obtained the lock
 * for the url.
 * 
 * @param url
 *           url that was locked
 * @param requestor
 *           the class that requested the url
 */
public void unlockWrite(URL url, FileWriter requestor) {
    if (url == null) {
        throw new NullPointerException("url cannot be null");
    }
    if (requestor == null) {
        throw new NullPointerException("requestor cannot be null");
    }
    Collection<ShpFilesLocker> threadLockers = getCurrentThreadLockers();
    boolean removed = threadLockers.remove(new ShpFilesLocker(url, requestor));
    if (!removed) {
        throw new IllegalArgumentException("Expected requestor " + requestor
                + " to have locked the url but it does not hold the lock for the URL");
    }

    if (threadLockers.size() == 0) {
        lockers.remove(Thread.currentThread());
    } else {
        // get back read locks before giving up the write one
        regainReadLocks(threadLockers);
    }
    readWriteLock.writeLock().unlock();
}