Example usage for java.util Collections EMPTY_SET

List of usage examples for java.util Collections EMPTY_SET

Introduction

In this page you can find the example usage for java.util Collections EMPTY_SET.

Prototype

Set EMPTY_SET

To view the source code for java.util Collections EMPTY_SET.

Click Source Link

Document

The empty set (immutable).

Usage

From source file:org.apache.openjpa.enhance.Reflection.java

/**
 * Gets all public field names of the given Class.
 *   //from   w ww  . j a  v  a 2s .  c o  m
 */
public static Set<String> getPublicFieldNames(Class c) {
    if (c == null)
        return Collections.EMPTY_SET;
    Field[] fields = c.getFields();
    if (fields == null || fields.length == 0)
        return Collections.EMPTY_SET;
    Set<String> result = new TreeSet<String>();
    for (Field f : fields) {
        if (canReflect(f))
            result.add(f.getName());
    }
    return result;
}

From source file:uk.ac.ebi.intact.dbupdate.prot.actions.impl.DuplicatesFixerImpl.java

/**
 * Merge the duplicates, the interactions are moved (not the cross references as they will be deleted)
 * @param duplicates//from   ww  w .ja  va  2s .c  o  m
 */
protected Protein merge(List<Protein> duplicates,
        Map<String, Collection<Component>> proteinsNeedingPartialMerge, DuplicatesFoundEvent evt,
        boolean isSequenceChanged) {
    ProteinUpdateProcessorConfig config = ProteinUpdateContext.getInstance().getConfig();
    ProteinUpdateErrorFactory errorfactory = config.getErrorFactory();

    DaoFactory factory = evt.getDataContext().getDaoFactory();

    // calculate the original protein (the oldest is kept as original)
    Protein originalProt = calculateOriginalProtein(duplicates);
    // set the protein kept from the merge
    evt.setReferenceProtein(originalProt);

    // the merge can be done without looking at the sequence of the duplicates
    if (!isSequenceChanged) {
        // move the interactions from the rest of proteins to the original
        for (Protein duplicate : duplicates) {

            // don't process the original protein with itself
            if (!duplicate.getAc().equals(originalProt.getAc())) {

                // move the interactions
                Set<String> movedInteractions = ProteinTools.moveInteractionsBetweenProteins(originalProt,
                        duplicate, evt.getDataContext(), (ProteinUpdateProcessor) evt.getSource(),
                        evt.getPrimaryUniprotAc());

                // report the interactions to move
                reportMovedInteraction(duplicate, movedInteractions, evt);

                // add the intact secondary references
                Collection<InteractorXref> addedXRef = ProteinTools.addIntactSecondaryReferences(originalProt,
                        duplicate, factory);

                // update the protein transcripts if necessary
                Collection<String> updatedTranscripts = ProteinTools.updateProteinTranscripts(factory,
                        originalProt, duplicate);

                evt.getMovedXrefs().put(duplicate.getAc(), addedXRef);
                evt.getUpdatedTranscripts().put(duplicate.getAc(), updatedTranscripts);

                // the duplicate will be deleted
                //factory.getProteinDao().update((ProteinImpl) duplicate);

                // and delete the duplicate if no active instances are attached to it
                if (duplicate.getActiveInstances().isEmpty()) {
                    ProteinEvent protEvt = new ProteinEvent(evt.getSource(), evt.getDataContext(), duplicate,
                            "Duplicate of " + originalProt.getAc());
                    protEvt.setUniprotIdentity(evt.getPrimaryUniprotAc());
                    deleteProtein(protEvt);
                } else {
                    throw new ProcessorException("The duplicate " + duplicate.getAc() + " still have "
                            + duplicate.getActiveInstances().size() + " active instances and should not.");
                }
            }
        }
    }
    // before merging, we need to check the feature conflicts because the sequence needs to be updated
    else {
        // even if the ranges were not shifted, the sequence has been updated
        evt.setHasShiftedRanges(true);
        ProteinUpdateProcessor processor = (ProteinUpdateProcessor) evt.getSource();

        // move the interactions from the rest of proteins to the original
        for (Protein duplicate : duplicates) {
            // sequence of the duplicate
            String sequence = duplicate.getSequence();

            // don't process the original protein with itself
            if (!duplicate.getAc().equals(originalProt.getAc())) {

                // we have feature conflicts for this protein which cannot be merged and becomes deprecated
                if (proteinsNeedingPartialMerge.containsKey(duplicate.getAc())) {
                    ProteinUpdateError impossibleMerge = errorfactory.createImpossibleMergeError(
                            duplicate.getAc(), originalProt.getAc(), evt.getPrimaryUniprotAc(),
                            "the duplicated protein has "
                                    + proteinsNeedingPartialMerge.get(duplicate.getAc()).size()
                                    + " components with range conflicts. The protein is now deprecated.");
                    processor.fireOnProcessErrorFound(new UpdateErrorEvent(processor, evt.getDataContext(),
                            impossibleMerge, duplicate, evt.getPrimaryUniprotAc()));

                    // add no-uniprot-update and caution
                    Collection<Annotation> addedAnnotations = addAnnotationsForBadParticipant(duplicate,
                            originalProt.getAc(), factory);
                    // components to let on the current protein
                    Collection<Component> componentToFix = proteinsNeedingPartialMerge.get(duplicate.getAc());
                    // components without conflicts to move on the original protein
                    Collection<Component> componentToMove = CollectionUtils
                            .subtract(duplicate.getActiveInstances(), componentToFix);

                    Set<String> movedInteractions = Collections.EMPTY_SET;
                    // move components without conflicts
                    if (!componentToMove.isEmpty()) {
                        movedInteractions = ComponentTools.moveComponents(originalProt, duplicate,
                                evt.getDataContext(), processor, componentToMove, evt.getPrimaryUniprotAc());
                    }

                    // report the interactions to move before moving them
                    reportMovedInteraction(duplicate, movedInteractions, evt);

                    evt.getAddedAnnotations().put(duplicate.getAc(), addedAnnotations);

                    // the sequence is not updated because of range conflicts
                    //double relativeConservation = computesRequenceConservation(sequence, evt.getUniprotSequence());
                    // if the sequence in uniprot is different than the one of the duplicate, need to update the sequence and shift the ranges
                    //processor.fireOnProteinSequenceChanged(new ProteinSequenceChangeEvent(processor, evt.getDataContext(), duplicate, sequence, evt.getPrimaryUniprotAc(), evt.getUniprotSequence(), evt.getUniprotCrc64(), relativeConservation));

                    // update duplicate which will be kept because of range conflicts
                    factory.getProteinDao().update((ProteinImpl) duplicate);
                }
                // we don't have feature conflicts, we can merge the proteins normally
                else {

                    // move the interactions
                    Set<String> movedInteractions = ProteinTools.moveInteractionsBetweenProteins(originalProt,
                            duplicate, evt.getDataContext(), processor, evt.getPrimaryUniprotAc());

                    // report the interactions to move before moving them
                    reportMovedInteraction(duplicate, movedInteractions, evt);

                    // the duplicate will be deleted, add intact secondary references
                    Collection<InteractorXref> addedXRef = ProteinTools
                            .addIntactSecondaryReferences(originalProt, duplicate, factory);
                    evt.getMovedXrefs().put(duplicate.getAc(), addedXRef);

                    // if the sequence in uniprot is different than the one of the duplicate, need to update the sequence and shift the ranges
                    if (ProteinTools.isSequenceChanged(sequence, evt.getUniprotSequence())) {
                        double relativeConservation = computesRequenceConservation(sequence,
                                evt.getUniprotSequence());
                        processor.fireOnProteinSequenceChanged(new ProteinSequenceChangeEvent(processor,
                                evt.getDataContext(), duplicate, sequence, evt.getPrimaryUniprotAc(),
                                evt.getUniprotSequence(), evt.getUniprotCrc64(), relativeConservation));
                    }
                }

                // update isoforms and feature chains
                Collection<String> updatedTranscripts = ProteinTools.updateProteinTranscripts(factory,
                        originalProt, duplicate);

                evt.getUpdatedTranscripts().put(duplicate.getAc(), updatedTranscripts);

                // and delete the duplicate if no active instances are still attached to it
                if (duplicate.getActiveInstances().isEmpty()) {
                    ProteinEvent protEvt = new ProteinEvent(evt.getSource(), evt.getDataContext(), duplicate,
                            "Duplicate of " + originalProt.getAc());
                    protEvt.setUniprotIdentity(evt.getPrimaryUniprotAc());
                    deleteProtein(protEvt);
                } else {
                    log.trace("The duplicate " + duplicate.getAc() + " still have "
                            + duplicate.getActiveInstances().size()
                            + " active instances and cannot be deleted.");
                }
            }
        }
    }

    return originalProt;
}

From source file:it.units.malelab.ege.util.DUMapper.java

private static double[][][] getNeatData3(String baseDir, String fileNamePattern, int generations)
        throws IOException {
    List<List<Map<Integer, Multimap<Integer, Integer>>>> data = new ArrayList<>();
    Map<Integer, String> nodeTypesMap = new HashMap<>();
    for (int g = 0; g < generations; g++) {
        List<Map<Integer, Multimap<Integer, Integer>>> currentPopulation = new ArrayList<>();
        BufferedReader reader = Files.newBufferedReader(
                FileSystems.getDefault().getPath(baseDir, String.format(fileNamePattern, g + 1)));
        String line;/*  w  ww . ja v  a  2  s  .  co m*/
        boolean isInPopulation = false;
        Map<Integer, Multimap<Integer, Integer>> currentIndividual = null;
        while ((line = reader.readLine()) != null) {
            if (line.equals("[NEAT-POPULATION:SPECIES]")) {
                isInPopulation = true;
                continue;
            }
            if (!isInPopulation) {
                continue;
            }
            if (line.startsWith("\"g\"")) {
                if (currentIndividual != null) {
                    //save current individual
                    currentPopulation.add(currentIndividual);
                }
                currentIndividual = new HashMap<>();
            }
            if (line.startsWith("\"n\"")) {
                String[] pieces = line.split(",");
                nodeTypesMap.put(Integer.parseInt(pieces[4]), pieces[3].replaceAll("\"", ""));
                currentIndividual.put(Integer.parseInt(pieces[4]), (Multimap) HashMultimap.create());
            } else if (line.startsWith("\"l\"")) {
                String[] pieces = line.split(",");
                int from = Integer.parseInt(pieces[3]);
                int to = Integer.parseInt(pieces[4]);
                if (currentIndividual.get(from) == null) {
                    currentIndividual.put(from, (Multimap) HashMultimap.create());
                }
                if (currentIndividual.get(to) == null) {
                    currentIndividual.put(to, (Multimap) HashMultimap.create());
                }
                currentIndividual.get(from).put(1, to);
                currentIndividual.get(to).put(-1, from);
            }
        }
        reader.close();
        data.add(currentPopulation);
    }
    //build node innovation numbers
    String[] nodeTypes = new String[] { "i", "b", "h", "o" };
    List<Integer> nodeINs = new ArrayList<>();
    for (String nodeType : nodeTypes) {
        List<Integer> typeNodeINs = new ArrayList<>();
        for (Integer in : nodeTypesMap.keySet()) {
            if (nodeTypesMap.get(in).equals(nodeType)) {
                typeNodeINs.add(in);
            }
        }
        Collections.sort(typeNodeINs);
        nodeINs.addAll(typeNodeINs);
    }
    //populate arrays
    double[][] usages = new double[generations][];
    double[][] diversities = new double[generations][];
    for (int g = 0; g < generations; g++) {
        usages[g] = new double[nodeINs.size()];
        diversities[g] = new double[nodeINs.size()];
        List<Map<Integer, Multimap<Integer, Integer>>> currentPopulation = data.get(g);
        //populate usages, diversities
        int i = 0;
        for (int nodeIN : nodeINs) {
            double[] localUsages = new double[currentPopulation.size()];
            Multiset<Set<Integer>> froms = HashMultiset.create();
            Multiset<Set<Integer>> tos = HashMultiset.create();
            int c = 0;
            for (Map<Integer, Multimap<Integer, Integer>> currentIndividual : currentPopulation) {
                if (nodeTypesMap.get(nodeIN).equals("i") || nodeTypesMap.get(nodeIN).equals("b")) {
                    if (currentIndividual.containsKey(nodeIN)) {
                        localUsages[c] = currentIndividual.get(nodeIN).get(1).isEmpty() ? 0 : 1;
                        tos.add(new HashSet<>(currentIndividual.get(nodeIN).get(1)));
                    } else {
                        tos.add(Collections.EMPTY_SET);
                    }
                } else if (nodeTypesMap.get(nodeIN).equals("h")) {
                    if (currentIndividual.containsKey(nodeIN)) {
                        localUsages[c] = (currentIndividual.get(nodeIN).get(-1).isEmpty() ? 0 : 0.5)
                                + (currentIndividual.get(nodeIN).get(1).isEmpty() ? 0 : 0.5);
                        tos.add(new HashSet<>(currentIndividual.get(nodeIN).get(1)));
                        froms.add(new HashSet<>(currentIndividual.get(nodeIN).get(-1)));
                    } else {
                        tos.add(Collections.EMPTY_SET);
                        froms.add(Collections.EMPTY_SET);
                    }
                } else if (nodeTypesMap.get(nodeIN).equals("o")) {
                    if (currentIndividual.containsKey(nodeIN)) {
                        localUsages[c] = currentIndividual.get(nodeIN).get(-1).isEmpty() ? 0 : 1;
                        froms.add(new HashSet<>(currentIndividual.get(nodeIN).get(-1)));
                    } else {
                        froms.add(Collections.EMPTY_SET);
                    }
                }
                c = c + 1;
            }
            usages[g][i] = StatUtils.mean(localUsages);
            if (nodeTypesMap.get(nodeIN).equals("i") || nodeTypesMap.get(nodeIN).equals("b")) {
                diversities[g][i] = Utils.multisetDiversity(tos, tos.elementSet());
            } else if (nodeTypesMap.get(nodeIN).equals("h")) {
                diversities[g][i] = Utils.multisetDiversity(tos, tos.elementSet()) / 2
                        + Utils.multisetDiversity(froms, tos.elementSet()) / 2;
            } else if (nodeTypesMap.get(nodeIN).equals("o")) {
                diversities[g][i] = Utils.multisetDiversity(froms, tos.elementSet());
            }
            i = i + 1;
        }
    }
    return new double[][][] { diversities, usages };
}

From source file:org.apache.openjpa.enhance.Reflection.java

/**
 * Gets values of all field f the given class such that f exactly 
 * match the given modifiers and are of given type (Object implies any type)
 * unless f is annotated as {@link Reflectable}. 
 *   /*www.  j av  a 2s.  c om*/
 */
public static <T> Set<T> getFieldValues(Class c, int mods, Class<T> t) {
    if (c == null)
        return Collections.EMPTY_SET;
    Field[] fields = c.getFields();
    if (fields == null || fields.length == 0)
        return Collections.EMPTY_SET;
    Set<T> result = new TreeSet<T>();
    for (Field f : fields) {
        if (mods == f.getModifiers() && (t == Object.class || t.isAssignableFrom(f.getType()))
                && canReflect(f)) {
            try {
                result.add((T) f.get(null));
            } catch (IllegalArgumentException e) {
            } catch (IllegalAccessException e) {
            }
        }
    }
    return result;
}

From source file:com.senseidb.svc.impl.HttpRestSenseiServiceImpl.java

private static Set<SenseiSystemInfo.SenseiFacetInfo> convertFacetInfos(JSONArray array) throws JSONException {
    if (array == null || array.length() == 0)
        return Collections.EMPTY_SET;

    Set<SenseiSystemInfo.SenseiFacetInfo> infos = new HashSet<SenseiSystemInfo.SenseiFacetInfo>(array.length());
    for (int i = 0; i < array.length(); ++i) {
        JSONObject info = array.getJSONObject(i);
        SenseiSystemInfo.SenseiFacetInfo facetInfo = new SenseiSystemInfo.SenseiFacetInfo(
                info.getString(SenseiSearchServletParams.PARAM_SYSINFO_FACETS_NAME));
        facetInfo.setRunTime(info.optBoolean(SenseiSearchServletParams.PARAM_SYSINFO_FACETS_RUNTIME));
        facetInfo.setProps(convertJsonToStringMap(
                info.optJSONObject(SenseiSearchServletParams.PARAM_SYSINFO_FACETS_PROPS)));

        infos.add(facetInfo);/*  w  ww .ja  v a 2s.  c  o  m*/
    }

    return infos;
}

From source file:com.redhat.rhn.manager.kickstart.KickstartScheduleCommand.java

/**
 * @param prereqAction the prerequisite for this action
 *
 * @return Returns the KickstartAction//from ww w . ja va 2 s.  c  o  m
 */
public Action scheduleKickstartAction(Action prereqAction) {

    // We will schedule the kickstart action against the host server, since the host
    // server is the liason for the target server.
    Set fileList = Collections.EMPTY_SET;

    if (!isCobblerOnly()) {
        fileList = ksdata.getPreserveFileLists();
    }
    String server = this.getKickstartServerName();
    if (this.getProxyHost() != null) {
        server = this.getProxyHost();
    }
    KickstartAction ksAction = ActionManager.scheduleKickstartAction(fileList, this.getUser(),
            this.getHostServer(), this.getScheduleDate(), this.getExtraOptions(), server);

    if (prereqAction != null) {
        ksAction.setPrerequisite(prereqAction);
    }
    if (!isDhcp) {
        ksAction.getKickstartActionDetails().setStaticDevice(networkInterface);
    }

    return ksAction;
}

From source file:de.hybris.platform.commercefacades.customergroups.CustomerGroupFacadeIntegrationTest.java

private UserModel registerUser(final String login)
        throws UnknownIdentifierException, IllegalArgumentException, DuplicateUidException {
    final BaseSiteModel site = new BaseSiteModel();
    site.setName(TEST_SITE, Locale.ENGLISH);
    site.setUid(TEST_SITE);/*  ww  w  .  j a v a  2 s. c  o m*/
    site.setChannel(SiteChannel.B2C);
    baseSiteService.setCurrentBaseSite(site, false);

    try {
        userService.getTitleForCode("mr");
    } catch (UnknownIdentifierException e) {
        final TitleModel titleModel = new TitleModel();
        titleModel.setCode("mr");
        modelService.save(titleModel);
    }

    final RegisterData registerData = new RegisterData();
    registerData.setFirstName("firstName");
    registerData.setLastName("lastName");
    registerData.setLogin(login);
    registerData.setPassword("***");
    registerData.setTitleCode("mr");

    customerFacade.register(registerData);
    final UserModel user = userService.getUserForUID(login);
    // clear groups since JaloIntiDefaultsInterceptor is setting customergroup for customer type
    user.setGroups(Collections.EMPTY_SET);
    modelService.save(user);
    return user;
}

From source file:org.cloudfoundry.identity.uaa.oauth.UaaTokenServices.java

@Override
public OAuth2AccessToken createAccessToken(OAuth2Authentication authentication) throws AuthenticationException {

    String userId = null;//  w w  w. j a va2s  . co m
    Date userAuthenticationTime = null;
    UaaUser user = null;
    boolean wasIdTokenRequestedThroughAuthCodeScopeParameter = false;
    Collection<GrantedAuthority> clientScopes = null;
    Set<String> authenticationMethods = null;
    Set<String> authNContextClassRef = null;
    // Clients should really by different kinds of users
    if (authentication.isClientOnly()) {
        ClientDetails client = clientDetailsService.loadClientByClientId(authentication.getName());
        clientScopes = client.getAuthorities();
    } else {
        userId = getUserId(authentication);
        user = userDatabase.retrieveUserById(userId);
        if (authentication.getUserAuthentication() instanceof UaaAuthentication) {
            userAuthenticationTime = new Date(
                    ((UaaAuthentication) authentication.getUserAuthentication()).getAuthenticatedTime());
            authenticationMethods = ((UaaAuthentication) authentication.getUserAuthentication())
                    .getAuthenticationMethods();
            authNContextClassRef = ((UaaAuthentication) authentication.getUserAuthentication())
                    .getAuthContextClassRef();
        }
    }

    ClientDetails client = clientDetailsService
            .loadClientByClientId(authentication.getOAuth2Request().getClientId());
    String clientSecretForHash = client.getClientSecret();
    if (clientSecretForHash != null && clientSecretForHash.split(" ").length > 1) {
        clientSecretForHash = clientSecretForHash.split(" ")[1];
    }
    String revocableHashSignature = UaaTokenUtils.getRevocableTokenSignature(client, clientSecretForHash, user);

    String tokenId = generateUniqueTokenId();
    String refreshTokenId = generateUniqueTokenId() + REFRESH_TOKEN_SUFFIX;

    boolean opaque = opaqueTokenRequired(authentication);
    boolean accessTokenRevocable = opaque
            || IdentityZoneHolder.get().getConfig().getTokenPolicy().isJwtRevocable();
    boolean refreshTokenRevocable = accessTokenRevocable || TokenConstants.TokenFormat.OPAQUE.getStringValue()
            .equals(IdentityZoneHolder.get().getConfig().getTokenPolicy().getRefreshTokenFormat());

    OAuth2RefreshToken refreshToken = createRefreshToken(refreshTokenId, authentication, revocableHashSignature,
            refreshTokenRevocable);

    String clientId = authentication.getOAuth2Request().getClientId();
    Set<String> userScopes = authentication.getOAuth2Request().getScope();
    String grantType = authentication.getOAuth2Request().getRequestParameters().get("grant_type");

    Set<String> modifiableUserScopes = new LinkedHashSet<>(userScopes);

    Set<String> externalGroupsForIdToken = Collections.EMPTY_SET;
    Map<String, List<String>> userAttributesForIdToken = Collections.EMPTY_MAP;
    if (authentication.getUserAuthentication() instanceof UaaAuthentication) {
        externalGroupsForIdToken = ((UaaAuthentication) authentication.getUserAuthentication())
                .getExternalGroups();
        userAttributesForIdToken = ((UaaAuthentication) authentication.getUserAuthentication())
                .getUserAttributes();
    }

    String nonce = authentication.getOAuth2Request().getRequestParameters().get(NONCE);

    Map<String, String> additionalAuthorizationAttributes = getAdditionalAuthorizationAttributes(
            authentication.getOAuth2Request().getRequestParameters().get("authorities"));

    if ("authorization_code"
            .equals(authentication.getOAuth2Request().getRequestParameters().get(OAuth2Utils.GRANT_TYPE))
            && "code".equals(
                    authentication.getOAuth2Request().getRequestParameters().get(OAuth2Utils.RESPONSE_TYPE))
            && authentication.getOAuth2Request().getRequestParameters().get(OAuth2Utils.SCOPE) != null
            && authentication.getOAuth2Request().getRequestParameters().get(OAuth2Utils.SCOPE)
                    .contains("openid")) {
        wasIdTokenRequestedThroughAuthCodeScopeParameter = true;
    }

    int zoneAccessTokenValidity = getZoneAccessTokenValidity();

    Integer validity = client.getAccessTokenValiditySeconds();
    Set<String> responseTypes = extractResponseTypes(authentication);

    Map<String, String> externalAttributes = null;
    if (uaaTokenEnhancer != null) {
        externalAttributes = uaaTokenEnhancer.getExternalAttributes(authentication);
    }

    CompositeAccessToken accessToken = createAccessToken(tokenId, userId, user, userAuthenticationTime,
            validity != null ? validity.intValue() : zoneAccessTokenValidity, clientScopes,
            modifiableUserScopes, clientId, authentication.getOAuth2Request().getResourceIds(), grantType,
            refreshToken != null ? refreshToken.getValue() : null, nonce, additionalAuthorizationAttributes,
            externalAttributes, responseTypes, revocableHashSignature,
            wasIdTokenRequestedThroughAuthCodeScopeParameter, externalGroupsForIdToken,
            userAttributesForIdToken, accessTokenRevocable, authenticationMethods, authNContextClassRef);

    return persistRevocableToken(tokenId, refreshTokenId, accessToken, refreshToken, clientId, userId, opaque,
            accessTokenRevocable);
}

From source file:com.phoenixst.plexus.DefaultGraph.java

public Collection nodes(Predicate nodePredicate) {
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(instanceString + ".nodes( " + nodePredicate + " )");
    }/*from   w w  w  .java 2 s.c  o m*/

    if (nodePredicate == null || nodePredicate == TruePredicate.INSTANCE) {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("  " + instanceString + ".nodes() returning all nodes");
        }
        return nodeCollection;

    } else if (nodePredicate == FalsePredicate.INSTANCE) {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("  " + instanceString + ".nodes() returning empty set");
        }
        return Collections.EMPTY_SET;

    } else if (nodePredicate instanceof EqualPredicate) {
        Object testNode = ((EqualPredicate) nodePredicate).getTestObject();
        if (!containsNode(testNode)) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("  " + instanceString + ".nodes() returning empty set");
            }
            return Collections.EMPTY_SET;
        }
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("  " + instanceString + ".nodes() returning singleton");
        }
        return new SingletonNodeCollection(this, testNode);

    } else {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("  " + instanceString + ".nodes() returning general filtered collection");
        }
        return new FilteredCollection(nodeCollection, nodePredicate);
    }
}

From source file:org.apache.cocoon.components.treeprocessor.sitemap.SitemapLanguage.java

/**
 * Split a list of space/comma separated labels into a Collection
 *
 * @return the collection of labels (may be empty, nut never null)
 *///from  w w w  . j  ava  2  s  .  c o  m
private static Collection splitLabels(String labels) {
    if (labels == null) {
        return Collections.EMPTY_SET;
    }
    return Arrays.asList(StringUtils.split(labels, ", \t\n\r"));
}