Example usage for java.util Set removeAll

List of usage examples for java.util Set removeAll

Introduction

On this page you can find example usages of java.util Set removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).
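
The method performs a destructive set difference: it mutates the receiver in place and returns true only if the set actually changed as a result of the call. Before the real-world examples below, here is a minimal, self-contained sketch of that contract; the class name and values are hypothetical and chosen purely for illustration.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class RemoveAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green", "blue"));
        // "purple" is not in the set; removeAll simply ignores absent elements
        List<String> toRemove = Arrays.asList("green", "purple");

        // Destructive set difference: colors becomes colors minus toRemove
        boolean changed = colors.removeAll(toRemove);

        System.out.println(changed); // true, because "green" was removed
        System.out.println(colors);  // "red" and "blue" remain; HashSet iteration order is unspecified
    }
}

Several of the examples below rely on exactly this in-place difference, for instance reasonedSubClasses.removeAll(statedSubClasses) and old.removeAll(used).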

Usage

From source file:edu.mayo.informatics.lexgrid.convert.directConversions.owlapi.OwlApi2LG.java

protected void resolveSubClassOfRelations(AssociationSource source, OWLClass owlClass) {
    // Process parent-child (rdfs:subClassOf) relationships
    // Does this concept represent the root of a concept branch that should
    // be centrally linked to the top node for subclass traversal?
    if (isRootNode(owlClass)) {
        // always give the root node the default namespace
        AssociationTarget target = CreateUtils.createAssociationTarget(OwlApi2LGConstants.ROOT_CODE,
                getDefaultNameSpace());
        relateAssociationSourceTarget(assocManager.getSubClassOf(), source, target);

    }

    Set<OWLClass> statedSubClasses = new HashSet<OWLClass>();
    for (OWLSubClassOfAxiom ax : ontology.getSubClassAxiomsForSubClass(owlClass)) {
        relateAssocSourceWithOWLClassExpressionTarget(EntityTypes.CONCEPT, assocManager.getSubClassOf(), source,
                ax.getSuperClass(), ax);
        if (!ax.getSuperClass().isAnonymous()) {
            statedSubClasses.add(ax.getSuperClass().asOWLClass());
        }
    }

    //The reasoner's getSuperClasses call doesn't return anonymous classes, and the
    //ontology.getSubClassAxiomsForSubClass method misses superclasses that only the reasoner can infer,
    //so we add in the reasoned expressions here.
    Set<OWLClass> reasonedSubClasses = new HashSet<OWLClass>();
    Iterator<Node<OWLClass>> itr = reasoner.getSuperClasses(owlClass, true).getNodes().iterator();
    while (itr.hasNext()) {
        reasonedSubClasses.addAll(itr.next().getEntities());
    }
    reasonedSubClasses.removeAll(statedSubClasses);
    for (OWLClassExpression superClass : reasonedSubClasses) {
        relateAssocSourceWithOWLClassExpressionTarget(EntityTypes.CONCEPT, assocManager.getSubClassOf(), source,
                superClass, null);
    }

}

From source file:info.schnatterer.songbirdDbTools.commands.playlist.ExportPlaylistsCommand.java

/**
 * Exports all songbird playlists in a specific format to a specified destination folder.
 *
 * @param destinationFolder
 *            the folder to write the playlist to
 * @param playlistFormat
 *            desired format for the playlist (e.g. "m3u" or "pls")
 * @param playlistNames
 *            the names of the playlists to be exported. If <code>null</code> or empty, all playlists are exported.
 * @param useRelativePaths
 *            <code>true</code> tries to create relative paths from the playlist members to the playlist file
 * @param skipDynamicLists
 *            <code>true</code> skips dynamic playlists
 */
public void exportPlaylists(final String destinationFolder, final String playlistFormat,
        final List<String> playlistNames, final boolean useRelativePaths, final boolean skipDynamicLists) {

    // Check if playlist can be written to destination folder.
    try {
        checkDirectory(destinationFolder);
    } catch (Exception e) {
        logger.warn("Error writing playlist: " + e.getMessage(), e);
        return;
    }

    Set<String> playlistNamesSet = null;
    if (playlistNames != null && !playlistNames.isEmpty()) {
        playlistNamesSet = new HashSet<String>();
        playlistNamesSet.addAll(playlistNames);
    }

    Set<String> exportedLists = new HashSet<String>();

    try {
        List<SimpleMediaList> playlists = songbirdDb.getPlayLists(true, skipDynamicLists);
        for (SimpleMediaList simpleMediaList : playlists) {

            // Export Playlist
            String playlistName = simpleMediaList.getList().getProperty(Property.PROP_MEDIA_LIST_NAME);
            if (playlistName == null) {
                logger.warn("Found playlist with no name. Skipping list. " + simpleMediaList);
                continue;
            }
            if (playlistName.startsWith("&smart.defaultlist.")) {
                playlistName = playlistName.substring("&smart.defaultlist.".length());
            }
            if (playlistNamesSet != null && !playlistNamesSet.contains(playlistName)) {
                // Don't export playlist
                logger.debug("Skipping playlist \"" + playlistName + "\"");
                continue;
            }
            exportedLists.add(playlistName);
            try {
                List<String> omittedFiles = playlistExporter.export(playlistNameToFileName(playlistName),
                        getMemberPaths(simpleMediaList), destinationFolder, playlistFormat, useRelativePaths,
                        skipDynamicLists);
                String output = "Finished writing playlist " + playlistName;
                if (omittedFiles != null && omittedFiles.size() > 0) {
                    output += ". The following files were omitted because they did not exist: " + EOL;
                    output += StringUtils.join(omittedFiles, EOL);
                }
                logger.info(output);
            } catch (PlaylistExporterException e) {
                logger.warn("Error creating playlist: " + e.getMessage(), e);
            }
        }
        if (playlistNamesSet != null && !playlistNamesSet.isEmpty()) {
            playlistNamesSet.removeAll(exportedLists);
            if (!playlistNamesSet.isEmpty()) {
                logger.warn("The following playlist were not found in songbird database: "
                        + playlistNamesSet.toString());
            }
        }
    } catch (SQLException e) {
        /*
         * if the error message is "out of memory", it probably means no database file was found
         */
        logger.error("Error reading songbird database", e);
    }
}

From source file:com.parse.ParseObject.java

/**
 * This saves all of the objects and files reachable from the given object. It does its work in
 * multiple waves, saving as many as possible in each wave. If there's ever an error, it just
 * gives up and fails the resulting task.
 */
private static Task<Void> deepSaveAsync(final Object object, final String sessionToken) {
    Set<ParseObject> objects = new HashSet<>();
    Set<ParseFile> files = new HashSet<>();
    collectDirtyChildren(object, objects, files);

    // This has to happen separately from everything else because ParseUser.save() is
    // special-cased to work for lazy users, but new users can't be created by
    // ParseMultiCommand's regular save.
    Set<ParseUser> users = new HashSet<>();
    for (ParseObject o : objects) {
        if (o instanceof ParseUser) {
            ParseUser user = (ParseUser) o;
            if (user.isLazy()) {
                users.add((ParseUser) o);
            }
        }
    }
    objects.removeAll(users);

    // objects will need to wait for files to be complete since they may be nested children.
    final AtomicBoolean filesComplete = new AtomicBoolean(false);
    List<Task<Void>> tasks = new ArrayList<>();
    for (ParseFile file : files) {
        tasks.add(file.saveAsync(sessionToken, null, null));
    }
    Task<Void> filesTask = Task.whenAll(tasks).continueWith(new Continuation<Void, Void>() {
        @Override
        public Void then(Task<Void> task) throws Exception {
            filesComplete.set(true);
            return null;
        }
    });

    // objects will need to wait for users to be complete since they may be nested children.
    final AtomicBoolean usersComplete = new AtomicBoolean(false);
    tasks = new ArrayList<>();
    for (final ParseUser user : users) {
        tasks.add(user.saveAsync(sessionToken));
    }
    Task<Void> usersTask = Task.whenAll(tasks).continueWith(new Continuation<Void, Void>() {
        @Override
        public Void then(Task<Void> task) throws Exception {
            usersComplete.set(true);
            return null;
        }
    });

    final Capture<Set<ParseObject>> remaining = new Capture<>(objects);
    Task<Void> objectsTask = Task.forResult(null).continueWhile(new Callable<Boolean>() {
        @Override
        public Boolean call() throws Exception {
            return remaining.get().size() > 0;
        }
    }, new Continuation<Void, Task<Void>>() {
        @Override
        public Task<Void> then(Task<Void> task) throws Exception {
            // Partition the objects into two sets: those that can be saved immediately,
            // and those that rely on other objects to be created first.
            final List<ParseObject> current = new ArrayList<>();
            final Set<ParseObject> nextBatch = new HashSet<>();
            for (ParseObject obj : remaining.get()) {
                if (obj.canBeSerialized()) {
                    current.add(obj);
                } else {
                    nextBatch.add(obj);
                }
            }
            remaining.set(nextBatch);

            if (current.size() == 0 && filesComplete.get() && usersComplete.get()) {
                // We do cycle-detection when building the list of objects passed to this function, so
                // this should never get called. But we should check for it anyway, so that we get an
                // exception instead of an infinite loop.
                throw new RuntimeException("Unable to save a ParseObject with a relation to a cycle.");
            }

            // Package all save commands together
            if (current.size() == 0) {
                return Task.forResult(null);
            }

            return enqueueForAll(current, new Continuation<Void, Task<Void>>() {
                @Override
                public Task<Void> then(Task<Void> toAwait) throws Exception {
                    return saveAllAsync(current, sessionToken, toAwait);
                }
            });
        }
    });

    return Task.whenAll(Arrays.asList(filesTask, usersTask, objectsTask));
}

From source file:org.fhcrc.cpl.viewer.quant.gui.ProteinQuantSummaryFrame.java

/**
 * This gets called after selected events are loaded, before deferring control back to QuantReviewer. Here's
 * where we might mark events as Qurated good or bad, if the user has asked us to do that, based on the
 * algorithmic assessment.
 * todo: some of this stuff, particularly ID-ing events with no other support, should probably be made modular,
 * at least for clarity
 * @param shouldDispose whether to dispose this frame once post-load processing is complete
 */
public void postEventLoad(boolean shouldDispose) {
    if (shouldMarkAlgGoodAsGood)
        for (QuantEvent quantEvent : selectedQuantEvents) {
            if (quantEvent == null)
                System.err.println("************NULL EVENT!");
            if (quantEvent.getAlgorithmicAssessment() == null)
                System.err.println("NULL ASSESSMENT!");
            if (quantEvent.getAlgorithmicAssessment().isGood()) {
                quantEvent.setQuantCurationStatus(QuantEvent.CURATION_STATUS_GOOD);
                quantEvent.setComment("Auto-marked good because of algorithm");
            }
        }
    if (shouldMarkAlgBadAsBad)
        for (QuantEvent quantEvent : selectedQuantEvents)
            if (!quantEvent.getAlgorithmicAssessment().isGood()) {
                quantEvent.setQuantCurationStatus(QuantEvent.CURATION_STATUS_BAD);
                quantEvent.setComment("Auto-marked bad because of algorithm");
            }
    if (shouldMarkAlgBadAsBadIfOtherProteinSupport) {
        //Load a map from peptides to all the proteins that they help ID.  Hardcoded ProteinProphet threshold
        Map<String, Set<String>> peptideProteinMap = null;
        try {
            peptideProteinMap = ProteinUtilities.loadPeptideProteinMapFromProtXML(protXmlFile, 0.1f, true);
        } catch (Exception e) {
            errorMessage(
                    "ERROR!  Problem loading ProtXML file.  Failed to locate all other proteins from 'bad' events.  "
                            + "'bad' events will be left as unknown",
                    e);
            return;
        }

        //Build a list of proteins that only have bad selected events.  These are the ones that look like they
        //have no other support; we need to look for other support for them, outside the selected events.
        //To do this, we just populate two sets: proteins with good events, and proteins with bad events. When
        //we've looked at all the selected events, we remove all proteins from the "good" list from the "bad" list,
        //leaving us with only proteins with bad events and no good events.

        //This just keeps track of the fractions and scans of events we've already looked at, so we don't
        //waste time on them
        Map<String, List<Integer>> selectedEventFractionScansMap = new HashMap<String, List<Integer>>();

        //This will actually contain /all/ proteins with bad events (even if they have good events, too) until
        //the removeAll statement after the loop
        Set<String> proteinsWithOnlyBadEvents = new HashSet<String>();
        Set<String> proteinsWithGoodEvents = new HashSet<String>();

        //This just keeps track of which events are bad, since those are the ones we'll potentially need to update
        //later.  Convenience.
        List<QuantEvent> badSelectedQuantEvents = new ArrayList<QuantEvent>();
        for (QuantEvent quantEvent : selectedQuantEvents) {
            List<Integer> scansThisFraction = selectedEventFractionScansMap.get(quantEvent.getFraction());
            if (scansThisFraction == null) {
                scansThisFraction = new ArrayList<Integer>();
                selectedEventFractionScansMap.put(quantEvent.getFraction(), scansThisFraction);
            }

            scansThisFraction.add(quantEvent.getScan());
            //don't bother looking at subsumed QuantEvents, either.  There's no point -- if we found good ones,
            //we would remove them all, including this one, which would be bad.
            //todo: properly I /should/ look at subsumed QuantEvents, and if any is good, mark this one good.
            //Lots of time spent on that, though.
            if (quantEvent.getOtherEvents() != null && !quantEvent.getOtherEvents().isEmpty()) {
                for (QuantEvent subsumedEvent : quantEvent.getOtherEvents())
                    scansThisFraction.add(subsumedEvent.getScan());
            }

            //If good, add all proteins associated with this peptide to good set.  If bad, add them to
            //bad set and add this event to bad selected events list
            String peptide = quantEvent.getPeptide();
            if (quantEvent.getAlgorithmicAssessment().isGood()) {
                proteinsWithGoodEvents.addAll(peptideProteinMap.get(peptide));
            } else {
                badSelectedQuantEvents.add(quantEvent);
                proteinsWithOnlyBadEvents.addAll(peptideProteinMap.get(peptide));
            }
        }
        //Make the bad list into a bad-only list
        proteinsWithOnlyBadEvents.removeAll(proteinsWithGoodEvents);

        ApplicationContext.infoMessage(badSelectedQuantEvents.size() + " bad selected quant events.");
        ApplicationContext.infoMessage(
                proteinsWithOnlyBadEvents.size() + " selected proteins with only bad selected events");

        _log.debug("Proteins with only bad events: ");
        for (String protein : proteinsWithOnlyBadEvents)
            _log.debug("\t" + protein);

        setMessage("Examining all events for " + proteinsWithOnlyBadEvents.size()
                + " proteins with bad events...");
        PepXMLFeatureFileHandler.PepXMLFeatureSetIterator fsi = null;
        try {
            fsi = new PepXMLFeatureFileHandler.PepXMLFeatureSetIterator(pepXmlFile);
        } catch (Exception e) {
            errorMessage(
                    "ERROR!  Problem loading fractions from pepXML file.  Failed to locate all other proteins from 'bad' events.  "
                            + "'bad' events will be left as unknown",
                    e);
            return;
        }
        QuantEventAssessor eventAssessor = new QuantEventAssessor();
        int numFeaturesExamined = 0;
        while (fsi.hasNext()) {
            if (proteinsWithOnlyBadEvents.isEmpty()) {
                _log.debug("Stopping early: found good events for all proteins");
                break;
            }
            FeatureSet featureSet = fsi.next();
            String fraction = MS2ExtraInfoDef.getFeatureSetBaseName(featureSet);
            _log.debug("Processing fraction " + fraction + ".  " + proteinsWithOnlyBadEvents.size()
                    + " proteins remain");

            //scans we can ignore because they were on the selected list -- they've already been examined
            List<Integer> alreadySelectedScansThisFraction = selectedEventFractionScansMap.get(fraction);
            MSRun run = null;
            for (Feature feature : featureSet.getFeatures()) {
                //if, at any time, no more proteins to examine, stop immediately
                if (proteinsWithOnlyBadEvents.isEmpty())
                    break;
                //if no ratio, nothing to do
                if (!IsotopicLabelExtraInfoDef.hasRatio(feature))
                    continue;
                //if this was a selected event, no need to examine
                if (alreadySelectedScansThisFraction != null
                        && alreadySelectedScansThisFraction.contains(feature.getScan()))
                    continue;
                String peptide = MS2ExtraInfoDef.getFirstPeptide(feature);
                //This actually shouldn't be possible, but maybe with different probability tolerances....
                if (!peptideProteinMap.containsKey(peptide))
                    continue;
                //check if this peptide contributes to any of our bad-only proteins
                boolean hasBadProteins = false;
                for (String protein : peptideProteinMap.get(peptide))
                    if (proteinsWithOnlyBadEvents.contains(protein)) {
                        hasBadProteins = true;
                        break;
                    }
                if (!hasBadProteins)
                    continue;

                //OK, we actually have to load the run (unless we already have) and look at this feature
                if (run == null) {
                    try {
                        File featureSetFile = featureSet.getSourceFile();
                        if (MS2ExtraInfoDef.getFeatureSetBaseName(featureSet) != null)
                            featureSetFile = new File(
                                    MS2ExtraInfoDef.getFeatureSetBaseName(featureSet) + ".pep.xml");
                        File mzXmlFile = ViewerCommandModuleUtilities.findCorrespondingMzXmlFile(featureSetFile,
                                mzXmlDir);
                        ApplicationContext.infoMessage("Loading mzXml file " + mzXmlFile.getAbsolutePath());
                        run = MSRun.load(mzXmlFile.getAbsolutePath());
                        ApplicationContext.infoMessage("Loaded.");
                    } catch (IOException e) {
                        errorMessage(
                                "ERROR!  Problem loading mzXML file.  Failed to locate all other proteins from 'bad' events.  "
                                        + "'bad' events will be left as unknown",
                                e);
                        return;
                    }
                }
                //assess the feature.  If good, remove all its proteins from the bad-only list
                QuantEventAssessor.QuantEventAssessment assessment = eventAssessor.assessFeature(feature, run);
                if (assessment.isGood()) {
                    _log.debug("Found a good event for peptide " + peptide + ", removing proteins");
                    numFeaturesExamined++;
                    Set<String> proteinsThisPeptide = peptideProteinMap.get(peptide);
                    for (String protein : proteinsThisPeptide) {
                        if (proteinsWithOnlyBadEvents.contains(protein)) {
                            _log.debug("\tRemoving protein " + protein);
                            proteinsWithOnlyBadEvents.remove(protein);
                        }
                    }
                }
            }
        }
        ApplicationContext.infoMessage("Checked all fractions, examined " + numFeaturesExamined + " events.  "
                + proteinsWithOnlyBadEvents.size() + " proteins remain with no good events");
        if (_log.isDebugEnabled()) {
            for (String protein : proteinsWithOnlyBadEvents)
                _log.debug("\t" + protein);
        }

        //mark any of our algorithm-bad events bad if any of their proteins have good event support
        int numEventsMarkedBad = 0;
        for (QuantEvent quantEvent : badSelectedQuantEvents) {
            boolean hasBadProteins = false;
            for (String protein : peptideProteinMap.get(quantEvent.getPeptide())) {
                if (proteinsWithOnlyBadEvents.contains(protein)) {
                    hasBadProteins = true;
                    break;
                }
            }
            if (hasBadProteins)
                quantEvent.setComment("Not auto-marked: only quant event for this protein");
            else {
                quantEvent.setQuantCurationStatus(QuantEvent.CURATION_STATUS_BAD);
                quantEvent.setComment("Auto-marked bad: algorithm bad and good events exist for protein");
                numEventsMarkedBad++;
            }
        }
        ApplicationContext.infoMessage(numEventsMarkedBad + " out of " + badSelectedQuantEvents.size()
                + " marked bad because all their proteins had other good events");

        setMessage("Examined " + numFeaturesExamined + " events.");
    }

    if (shouldDispose)
        dispose();
}

From source file:com.jaspersoft.jasperserver.api.metadata.user.service.impl.UserAuthorityServiceImpl.java

/**
 * Ensure the external user has the right roles. Roles attached to the userDetails are the definitive list
 * of externally defined roles.
 *
 * @param externalRoles the set of roles granted by the external authentication source
 * @param user the user whose internal roles should be aligned
 */
protected void alignInternalAndExternalUser(Set externalRoles, User user) {

    final Predicate externallyDefinedRoles = new Predicate() {
        public boolean evaluate(Object input) {
            if (!(input instanceof Role)) {
                return false;
            }
            return ((Role) input).isExternallyDefined();
        }
    };

    Set currentRoles = user.getRoles();

    // a brand-new user has no roles yet; make sure such users get persisted
    boolean persistUserNeeded = (currentRoles.size() == 0);

    Collection currentExternalRoles = CollectionUtils.select(user.getRoles(), externallyDefinedRoles);
    if (log.isDebugEnabled()) {
        log.debug("Login of external User: " + user.getUsername());
        log.debug("Roles from authentication:\n" + roleCollectionToString(externalRoles));
        log.debug("Current roles from metadata:\n" + roleCollectionToString(user.getRoles()));
        log.debug("Current external roles for user from metadata: " + user.getUsername() + "\n"
                + roleCollectionToString(currentExternalRoles));
    }

    /*
     * If we have new external roles, we want to add them
     */
    Collection newExternalRoles = CollectionUtils.subtract(externalRoles, currentExternalRoles);

    if (newExternalRoles.size() > 0) {
        currentRoles.addAll(newExternalRoles);
        if (log.isWarnEnabled()) {
            log.warn("Added following external roles to: " + user.getUsername() + "\n"
                    + roleCollectionToString(newExternalRoles));
        }
        persistUserNeeded = true;
    }

    /*
     * If external roles have been removed, we need to remove them
     */
    Collection rolesNeedingRemoval = CollectionUtils.subtract(currentExternalRoles, externalRoles);

    if (rolesNeedingRemoval.size() > 0) {
        currentRoles.removeAll(rolesNeedingRemoval);
        if (log.isWarnEnabled()) {
            log.warn("Removed following external roles from: " + user.getUsername() + "\n"
                    + roleCollectionToString(rolesNeedingRemoval));
        }
        persistUserNeeded = true;
    }

    /*
     * If we have new default internal roles, we want to add them
     */
    Collection defaultInternalRolesToAdd = CollectionUtils.subtract(getNewDefaultInternalRoles(), currentRoles);

    if (defaultInternalRolesToAdd.size() > 0) {
        if (log.isDebugEnabled()) {
            log.debug("Default internal roles: " + roleCollectionToString(getNewDefaultInternalRoles()));
        }
        currentRoles.addAll(defaultInternalRolesToAdd);
        if (log.isWarnEnabled()) {
            log.warn("Added following new default internal roles to: " + user.getUsername() + "\n"
                    + roleCollectionToString(defaultInternalRolesToAdd));
        }
        persistUserNeeded = true;
    }

    if (persistUserNeeded) {
        user.setRoles(currentRoles);
        // persist user and roles
        doPutUser(new ExecutionContextImpl(), user);
        if (log.isWarnEnabled()) {
            log.warn("Updated user: " + user.getUsername() + ". Roles are now:\n"
                    + roleCollectionToString(currentRoles));
        }
    }

}

From source file:hudson.model.Hudson.java

/**
 * Updates {@link #computers} by using {@link #getSlaves()}.
 *
 * <p>//ww  w . ja v  a 2s . c o  m
 * This method tries to reuse existing {@link Computer} objects
 * so that we won't upset {@link Executor}s running in it.
 */
private void updateComputerList() throws IOException {
    synchronized (updateComputerLock) { // just so that we don't have two threads updating the computer list at the same time
        Map<String, Computer> byName = new HashMap<String, Computer>();
        for (Computer c : computers.values()) {
            if (c.getNode() == null)
                continue; // this computer is gone
            byName.put(c.getNode().getNodeName(), c);
        }

        Set<Computer> old = new HashSet<Computer>(computers.values());
        Set<Computer> used = new HashSet<Computer>();

        updateComputer(this, byName, used);
        for (Node s : getNodes())
            updateComputer(s, byName, used);

        // find out what computers are removed, and kill off all executors.
        // when all executors exit, it will be removed from the computers map.
        // so don't remove too quickly
        old.removeAll(used);
        for (Computer c : old) {
            c.kill();
        }
    }
    getQueue().scheduleMaintenance();
}

From source file:com.datatorrent.stram.plan.physical.PhysicalPlanTest.java

/**
 * MxN partitioning. When source and sink of a stream are partitioned, a
 * separate unifier is created container local with each downstream partition.
 */
@Test
public void testSingleFinalMxNPartitioning() {

    LogicalPlan dag = new LogicalPlan();

    TestGeneratorInputOperator o1 = dag.addOperator("o1", TestGeneratorInputOperator.class);
    dag.setAttribute(o1, OperatorContext.PARTITIONER, new StatelessPartitioner<TestGeneratorInputOperator>(2));
    dag.setAttribute(o1, OperatorContext.STATS_LISTENERS,
            Lists.newArrayList((StatsListener) new PartitioningTest.PartitionLoadWatch()));
    dag.setOutputPortAttribute(o1.outport, PortContext.UNIFIER_SINGLE_FINAL, true);
    OperatorMeta o1Meta = dag.getMeta(o1);

    GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
    dag.setAttribute(o2, OperatorContext.PARTITIONER, new StatelessPartitioner<TestGeneratorInputOperator>(3));
    dag.setAttribute(o2, OperatorContext.STATS_LISTENERS,
            Arrays.asList(new StatsListener[] { new PartitioningTest.PartitionLoadWatch() }));
    OperatorMeta o2Meta = dag.getMeta(o2);

    dag.addStream("o1.outport1", o1.outport, o2.inport1);

    int maxContainers = 10;
    dag.setAttribute(LogicalPlan.CONTAINERS_MAX_COUNT, maxContainers);

    TestPlanContext ctx = new TestPlanContext();
    dag.setAttribute(OperatorContext.STORAGE_AGENT, ctx);

    PhysicalPlan plan = new PhysicalPlan(dag, ctx);
    Assert.assertEquals("number of containers", 6, plan.getContainers().size());

    List<PTOperator> inputOperators = new ArrayList<PTOperator>();
    for (int i = 0; i < 2; i++) {
        PTContainer container = plan.getContainers().get(i);
        Assert.assertEquals("number operators " + container, 1, container.getOperators().size());
        Assert.assertEquals("operators " + container, o1Meta.getName(),
                container.getOperators().get(0).getOperatorMeta().getName());
        inputOperators.add(container.getOperators().get(0));
    }

    PTOperator inputUnifier = null;
    {
        PTContainer container = plan.getContainers().get(2);
        Assert.assertEquals("number operators " + container, 1, container.getOperators().size());
        PTOperator pUnifier = container.getOperators().get(0);
        Assert.assertEquals("operators " + container, o1Meta.getMeta(o1.outport).getUnifierMeta().getName(),
                pUnifier.getOperatorMeta().getName());
        Assert.assertTrue("single unifier " + pUnifier, pUnifier.isUnifier());
        Assert.assertEquals("" + pUnifier, 2, pUnifier.getInputs().size());
        for (int inputIndex = 0; inputIndex < pUnifier.getInputs().size(); inputIndex++) {
            PTInput input = pUnifier.getInputs().get(inputIndex);
            Assert.assertEquals("source port name " + pUnifier, "outport", input.source.portName);
            Assert.assertEquals("" + pUnifier, inputOperators.get(inputIndex), input.source.source);
            Assert.assertEquals("partition keys " + input.partitions, null, input.partitions);
        }
        Assert.assertEquals("number outputs " + pUnifier, 1, pUnifier.getOutputs().size());
        PTOutput output = pUnifier.getOutputs().get(0);
        Assert.assertEquals("number inputs " + output, 3, output.sinks.size());
        for (int inputIndex = 0; inputIndex < output.sinks.size(); ++inputIndex) {
            Assert.assertEquals("output sink " + output, o2Meta.getName(),
                    output.sinks.get(inputIndex).target.getName());
            Assert.assertEquals("destination port name " + output, GenericTestOperator.IPORT1,
                    output.sinks.get(inputIndex).portName);
        }
        inputUnifier = pUnifier;
    }

    List<Integer> partitionKeySizes = new ArrayList<Integer>();
    for (int i = 3; i < 6; i++) {
        PTContainer container = plan.getContainers().get(i);
        Assert.assertEquals("number operators " + container, 1, container.getOperators().size());
        Assert.assertEquals("operators " + container, o2Meta.getName(),
                container.getOperators().get(0).getOperatorMeta().getName());

        PTOperator operator = container.getOperators().get(0);
        Assert.assertEquals("operators " + container, o2Meta.getName(), operator.getOperatorMeta().getName());
        Assert.assertEquals("number inputs " + operator, 1, operator.getInputs().size());
        PTInput input = operator.getInputs().get(0);
        Assert.assertEquals("" + operator, inputUnifier, input.source.source);
        Assert.assertNotNull("input partitions " + operator, input.partitions);
        partitionKeySizes.add(input.partitions.partitions.size());
    }

    Assert.assertEquals("input partition sizes count", 3, partitionKeySizes.size());
    Collections.sort(partitionKeySizes);
    Assert.assertEquals("input partition sizes", Arrays.asList(1, 1, 2), partitionKeySizes);

    // Test Dynamic change
    // for M x N partition
    // scale down N from 3 to 2 and then from 2 to 1
    for (int i = 0; i < 2; i++) {
        List<PTOperator> ptos = plan.getOperators(o2Meta);
        Set<PTOperator> expUndeploy = Sets.newHashSet(ptos);
        for (PTOperator ptOperator : ptos) {
            expUndeploy.add(ptOperator);
            PartitioningTest.PartitionLoadWatch.put(ptOperator, -1);
            plan.onStatusUpdate(ptOperator);
        }
        ctx.backupRequests = 0;
        ctx.events.remove(0).run();
        Assert.assertEquals("single unifier ", 1, plan.getMergeOperators(o1Meta).size());
        Set<PTOperator> expDeploy = Sets.newHashSet(plan.getOperators(o2Meta));
        // The unifier and o2 operators are expected to be deployed because of partition key changes
        for (PTOperator ptOperator : plan.getOperators(o2Meta)) {
            expDeploy.add(ptOperator);
        }
        // Scaling N from 3 to 2 drops the container count from 5 to 4, but from 2 to 1 the count stays
        // the same because the single unifier is not inline with a single operator partition.
        Assert.assertEquals("number of containers", 5 - i, plan.getContainers().size());
        Assert.assertEquals("number of operators", 2 - i, plan.getOperators(o2Meta).size());
        Assert.assertEquals("undeployed operators " + ctx.undeploy, expUndeploy, ctx.undeploy);
        Assert.assertEquals("deployed operators " + ctx.deploy, expDeploy, ctx.deploy);
    }

    // scale up N from 1 to 2 and then from 2 to 3
    for (int i = 0; i < 2; i++) {

        List<PTOperator> unChangedOps = new LinkedList<PTOperator>(plan.getOperators(o2Meta));
        PTOperator o2p1 = unChangedOps.remove(0);
        Set<PTOperator> expUndeploy = Sets.newHashSet(o2p1);

        PartitioningTest.PartitionLoadWatch.put(o2p1, 1);

        plan.onStatusUpdate(o2p1);
        Assert.assertEquals("repartition event", 1, ctx.events.size());
        ctx.backupRequests = 0;
        ctx.events.remove(0).run();

        Assert.assertEquals("single unifier ", 1, plan.getMergeOperators(o1Meta).size());
        Assert.assertEquals("N partitions after scale up " + o2Meta, 2 + i, plan.getOperators(o2Meta).size());

        for (PTOperator o : plan.getOperators(o2Meta)) {
            Assert.assertNotNull(o.container);
            Assert.assertEquals("number operators ", 1, o.container.getOperators().size());
        }
        Set<PTOperator> expDeploy = Sets.newHashSet(plan.getOperators(o2Meta));
        expDeploy.removeAll(unChangedOps);
        Assert.assertEquals("number of containers", 5 + i, plan.getContainers().size());
        Assert.assertEquals("undeployed operators" + ctx.undeploy, expUndeploy, ctx.undeploy);
        Assert.assertEquals("deployed operators" + ctx.deploy, expDeploy, ctx.deploy);

    }

    // scale down M to 1
    {
        Set<PTOperator> expUndeploy = Sets.newHashSet();
        Set<PTOperator> expDeploy = Sets.newHashSet();
        expUndeploy.addAll(plan.getMergeOperators(o1Meta));
        for (PTOperator o2p : plan.getOperators(o2Meta)) {
            expUndeploy.add(o2p);
            expDeploy.add(o2p);
        }

        for (PTOperator o1p : plan.getOperators(o1Meta)) {
            expUndeploy.add(o1p);
            PartitioningTest.PartitionLoadWatch.put(o1p, -1);
            plan.onStatusUpdate(o1p);
        }

        Assert.assertEquals("repartition event", 1, ctx.events.size());
        ctx.events.remove(0).run();

        Assert.assertEquals("M partitions after scale down " + o1Meta, 1, plan.getOperators(o1Meta).size());
        expUndeploy.removeAll(plan.getOperators(o1Meta));

        Assert.assertEquals("undeploy", expUndeploy, ctx.undeploy);
        Assert.assertEquals("deploy", expDeploy, ctx.deploy);
    }

    // scale up M to 2
    Assert.assertEquals("M partitions " + o1Meta, 1, plan.getOperators(o1Meta).size());
    {
        Set<PTOperator> expUndeploy = Sets.newHashSet();
        Set<PTOperator> expDeploy = Sets.newHashSet();
        for (PTOperator o1p : plan.getOperators(o1Meta)) {
            expUndeploy.add(o1p);
            PartitioningTest.PartitionLoadWatch.put(o1p, 1);
            plan.onStatusUpdate(o1p);
        }

        Assert.assertEquals("repartition event", 1, ctx.events.size());
        ctx.events.remove(0).run();

        Assert.assertEquals("M partitions after scale up " + o1Meta, 2, plan.getOperators(o1Meta).size());
        expDeploy.addAll(plan.getOperators(o1Meta));
        expDeploy.addAll(plan.getMergeOperators(o1Meta));
        for (PTOperator o2p : plan.getOperators(o2Meta)) {
            expUndeploy.add(o2p);
            expDeploy.add(o2p);
            Assert.assertNotNull(o2p.container);
            Assert.assertEquals("number operators ", 1, o2p.container.getOperators().size());
        }
        Assert.assertEquals("undeploy", expUndeploy, ctx.undeploy);
        Assert.assertEquals("deploy", expDeploy, ctx.deploy);
    }

}

From source file:org.alfresco.repo.domain.node.AbstractNodeDAOImpl.java

@Override
public boolean removeNodeAspects(Long nodeId, Set<QName> aspectQNames) {
    if (aspectQNames.size() == 0) {
        return false;
    }
    // Get the current aspects
    Set<QName> existingAspectQNames = getNodeAspects(nodeId);

    // Collate the new set of aspects so that touch works correctly against cm:auditable
    Set<QName> newAspectQNames = new HashSet<QName>(existingAspectQNames);
    newAspectQNames.removeAll(aspectQNames);

    // Touch the node; all caches are fine
    touchNode(nodeId, null, newAspectQNames, false, false, false);

    // Now remove each aspect
    Set<Long> aspectQNameIdsToRemove = qnameDAO.convertQNamesToIds(aspectQNames, false);
    int deleteCount = deleteNodeAspects(nodeId, aspectQNameIdsToRemove);
    if (deleteCount == 0) {
        return false;
    }

    // Handle sys:aspect_root
    if (aspectQNames.contains(ContentModel.ASPECT_ROOT)) {
        // invalidate root nodes cache for the store
        StoreRef storeRef = getNodeNotNull(nodeId, false).getStore().getStoreRef();
        allRootNodesCache.remove(storeRef);
        // Touch the node; parent assocs need invalidation
        touchNode(nodeId, null, newAspectQNames, false, false, true);
    } else {
        // Touch the node; all caches are fine
        touchNode(nodeId, null, newAspectQNames, false, false, false);
    }

    // Manually update the cache
    setNodeAspectsCached(nodeId, newAspectQNames);

    // Done
    return deleteCount > 0;
}

From source file:com.znsx.cms.service.impl.TmDeviceManagerImpl.java

@Override
public Playlist savePlaylist(String folderId, String playlistId, String playlistName, Short type,
        List<Element> items, String cmsSize) throws BusinessException {
    Playlist playlist = null;
    // Update an existing playlist
    if (StringUtils.isNotBlank(playlistId)) {
        playlist = playlistDAO.findById(playlistId);
        playlist.setName(playlistName);
        playlist.setType(type);
        playlist.setCmsSize(cmsSize);
        // Track the IDs of the items present in the XML request
        List<String> itemList = new ArrayList<String>();
        Set<PlayItem> itemSet = playlist.getItems();
        for (Element item : items) {
            PlayItem playItem = null;
            // Existing item: update it in place
            if (StringUtils.isNotBlank(item.getAttributeValue("Id"))) {
                Iterator<PlayItem> it = itemSet.iterator();
                while (it.hasNext()) {
                    playItem = it.next();
                    if (playItem.getId().equals(item.getAttributeValue("Id"))) {
                        playItem.setColor(item.getAttributeValue("Color"));
                        playItem.setContent(item.getAttributeValue("Content"));
                        playItem.setDuration(ElementUtil.getInteger(item, "Duration"));
                        playItem.setFont(item.getAttributeValue("Font"));
                        playItem.setSize(item.getAttributeValue("Size"));
                        playItem.setWordSpace(ElementUtil.getShort(item, "Space"));
                        playItem.setX(item.getAttributeValue("X"));
                        playItem.setY(item.getAttributeValue("Y"));
                        playItem.setType(ElementUtil.getShort(item, "Type"));
                        break;
                    }
                    continue;
                }
            }
            // New item: create it, save it and attach it to the playlist
            else {
                playItem = new PlayItem();
                playItem.setColor(item.getAttributeValue("Color"));
                playItem.setContent(item.getAttributeValue("Content"));
                playItem.setDuration(ElementUtil.getInteger(item, "Duration"));
                playItem.setFont(item.getAttributeValue("Font"));
                playItem.setSize(item.getAttributeValue("Size"));
                playItem.setWordSpace(ElementUtil.getShort(item, "Space"));
                playItem.setX(item.getAttributeValue("X"));
                playItem.setY(item.getAttributeValue("Y"));
                playItem.setType(ElementUtil.getShort(item, "Type"));
                playItem.setPlaylist(playlist);
                playItemDAO.save(playItem);
                itemSet.add(playItem);
            }
            // Record this XML item's ID
            itemList.add(playItem.getId());
        }
        // Delete PlayItems whose IDs no longer appear in the XML
        List<PlayItem> removeList = new LinkedList<PlayItem>();
        Iterator<PlayItem> it = itemSet.iterator();
        while (it.hasNext()) {
            PlayItem playItem = it.next();
            if (!itemList.contains(playItem.getId())) {
                removeList.add(playItem);
                playItemDAO.delete(playItem);
            }
        }
        itemSet.removeAll(removeList);
    }
    // Create a new playlist
    else {
        playlist = new Playlist();
        playlist.setName(playlistName);
        playlist.setType(type);
        playlist.setCmsSize(cmsSize);
        // Attach the new playlist to its parent folder
        PlaylistFolder folder = folderDAO.findById(folderId);
        folder.getPlaylists().add(playlist);
        playlist.setFolder(folder);
        playlistDAO.save(playlist);
        for (Element item : items) {
            PlayItem playItem = new PlayItem();
            playItem.setColor(item.getAttributeValue("Color"));
            playItem.setContent(item.getAttributeValue("Content"));
            playItem.setDuration(ElementUtil.getInteger(item, "Duration"));
            playItem.setFont(item.getAttributeValue("Font"));
            playItem.setSize(item.getAttributeValue("Size"));
            playItem.setWordSpace(ElementUtil.getShort(item, "Space"));
            playItem.setX(item.getAttributeValue("X"));
            playItem.setY(item.getAttributeValue("Y"));
            playItem.setType(ElementUtil.getShort(item, "Type"));
            playItem.setPlaylist(playlist);
            playItemDAO.save(playItem);
            playlist.getItems().add(playItem);
        }
    }

    return playlist;
}