Example usage for java.util EnumSet noneOf

List of usage examples for java.util EnumSet noneOf

Introduction

On this page you can find example usages of java.util.EnumSet.noneOf.

Prototype

public static <E extends Enum<E>> EnumSet<E> noneOf(Class<E> elementType) 

Document

Creates an empty enum set with the specified element type.

Usage

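Before the project excerpts below, here is a minimal, self-contained sketch of how noneOf is typically used. The Day enum and the class name are hypothetical, introduced only for illustration:

import java.util.EnumSet;

public class EnumSetNoneOfExample {

    // Hypothetical enum used only for this sketch.
    enum Day { MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY }

    public static void main(String[] args) {
        // noneOf creates an empty set typed to the Day enum.
        EnumSet<Day> workDays = EnumSet.noneOf(Day.class);

        // The set starts out empty and can be populated like any other Set.
        workDays.add(Day.MONDAY);
        workDays.add(Day.FRIDAY);

        System.out.println(workDays); // prints [MONDAY, FRIDAY]
    }
}
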
From source file:org.opencb.opencga.catalog.auth.authorization.CatalogAuthorizationManager.java

private DiseasePanelAclEntry resolveDiseasePanelPermissions(long studyId, String userId, String groupId,
        Map<String, DiseasePanelAclEntry> userAclMap) throws CatalogException {
    if (userId.equals(ANONYMOUS)) {
        if (userAclMap.containsKey(userId)) {
            return userAclMap.get(userId);
        } else {
            return transformStudyAclToDiseasePanelAcl(getStudyAclBelonging(studyId, userId, groupId));
        }
    }

    // Registered user
    EnumSet<DiseasePanelAclEntry.DiseasePanelPermissions> permissions = EnumSet
            .noneOf(DiseasePanelAclEntry.DiseasePanelPermissions.class);
    boolean flagPermissionFound = false;

    if (userAclMap.containsKey(userId)) {
        permissions.addAll(userAclMap.get(userId).getPermissions());
        flagPermissionFound = true;
    }
    if (StringUtils.isNotEmpty(groupId) && userAclMap.containsKey(groupId)) {
        permissions.addAll(userAclMap.get(groupId).getPermissions());
        flagPermissionFound = true;
    }
    if (userAclMap.containsKey(ANONYMOUS)) {
        permissions.addAll(userAclMap.get(ANONYMOUS).getPermissions());
        flagPermissionFound = true;
    }
    if (userAclMap.containsKey(OTHER_USERS_ID)) {
        permissions.addAll(userAclMap.get(OTHER_USERS_ID).getPermissions());
        flagPermissionFound = true;
    }

    if (flagPermissionFound) {
        return new DiseasePanelAclEntry(userId, permissions);
    } else {
        return transformStudyAclToDiseasePanelAcl(getStudyAclBelonging(studyId, userId, groupId));
    }
}

From source file:org.jenkinsci.maven.plugins.hpi.AbstractHpiMojo.java

/**
 * Is dynamic loading supported?
 *
 * Returns false if the answer is known to be "no", null if there are some extensions
 * whose dynamic loadability is unknown, and true if everything is known to be dynamically loadable.
 */
protected Boolean isSupportDynamicLoading() throws IOException {
    URLClassLoader cl = new URLClassLoader(
            new URL[] { new File(project.getBuild().getOutputDirectory()).toURI().toURL() },
            getClass().getClassLoader());

    EnumSet<YesNoMaybe> e = EnumSet.noneOf(YesNoMaybe.class);
    for (IndexItem<Extension, Object> i : Index.load(Extension.class, Object.class, cl)) {
        e.add(i.annotation().dynamicLoadable());
    }

    if (e.contains(YesNoMaybe.NO))
        return false;
    if (e.contains(YesNoMaybe.MAYBE))
        return null;
    return true;
}

From source file:org.apache.hadoop.corona.NodeManager.java

/**
 * Returns true if a new node has been added, false otherwise.
 * @param clusterNodeInfo the node that is heartbeating
 * @return true if this is a new node that has been added, false otherwise
 */
public boolean heartbeat(ClusterNodeInfo clusterNodeInfo) throws DisallowedNode {
    ClusterNode node = nameToNode.get(clusterNodeInfo.name);
    if (!canAllowNode(clusterNodeInfo.getAddress().getHost())) {
        if (node != null) {
            node.heartbeat(clusterNodeInfo);
        } else {
            throw new DisallowedNode(clusterNodeInfo.getAddress().getHost());
        }
        return false;
    }
    boolean newNode = false;
    Map<ResourceType, String> currentResources = clusterNodeInfo.getResourceInfos();
    if (currentResources == null) {
        currentResources = new EnumMap<ResourceType, String>(ResourceType.class);
    }

    if (node == null) {
        LOG.info("Adding node with heartbeat: " + clusterNodeInfo.toString());
        node = new ClusterNode(clusterNodeInfo, topologyCache.getNode(clusterNodeInfo.address.host),
                cpuToResourcePartitioning);
        addNode(node, currentResources);
        newNode = true;
    }

    node.heartbeat(clusterNodeInfo);

    boolean appsChanged = false;
    Map<ResourceType, String> prevResources = nameToApps.get(clusterNodeInfo.name);
    Set<ResourceType> deletedApps = null;
    for (Map.Entry<ResourceType, String> entry : prevResources.entrySet()) {
        String newAppInfo = currentResources.get(entry.getKey());
        String oldAppInfo = entry.getValue();
        if (newAppInfo == null || !newAppInfo.equals(oldAppInfo)) {
            if (deletedApps == null) {
                deletedApps = EnumSet.noneOf(ResourceType.class);
            }
            deletedApps.add(entry.getKey());
            appsChanged = true;
        }
    }
    Map<ResourceType, String> addedApps = null;
    for (Map.Entry<ResourceType, String> entry : currentResources.entrySet()) {
        String newAppInfo = entry.getValue();
        String oldAppInfo = prevResources.get(entry.getKey());
        if (oldAppInfo == null || !oldAppInfo.equals(newAppInfo)) {
            if (addedApps == null) {
                addedApps = new EnumMap<ResourceType, String>(ResourceType.class);
            }
            addedApps.put(entry.getKey(), entry.getValue());
            appsChanged = true;
        }
    }
    if (deletedApps != null) {
        for (ResourceType deleted : deletedApps) {
            clusterManager.nodeAppRemoved(clusterNodeInfo.name, deleted);
        }
    }
    if (addedApps != null) {
        for (Map.Entry<ResourceType, String> added : addedApps.entrySet()) {
            addAppToNode(node, added.getKey(), added.getValue());
        }
    }

    updateRunnability(node);
    return newNode || appsChanged;
}

From source file:com.contrastsecurity.ide.eclipse.ui.internal.views.VulnerabilitiesView.java

private EnumSet<RuleSeverity> getSelectedSeveritiesFromEclipsePreferences() {
    EnumSet<RuleSeverity> severities = EnumSet.noneOf(RuleSeverity.class);
    if (prefs.getBoolean(Constants.SEVERITY_LEVEL_NOTE, false)) {
        severities.add(RuleSeverity.NOTE);
    }
    if (prefs.getBoolean(Constants.SEVERITY_LEVEL_LOW, false)) {
        severities.add(RuleSeverity.LOW);
    }
    if (prefs.getBoolean(Constants.SEVERITY_LEVEL_MEDIUM, false)) {
        severities.add(RuleSeverity.MEDIUM);
    }
    if (prefs.getBoolean(Constants.SEVERITY_LEVEL_HIGH, false)) {
        severities.add(RuleSeverity.HIGH);
    }
    if (prefs.getBoolean(Constants.SEVERITY_LEVEL_CRITICAL, false)) {
        severities.add(RuleSeverity.CRITICAL);
    }
    return severities;
}

From source file:org.apache.hadoop.tools.mapred.TestCopyMapper.java

private void testPreserveUserGroupImpl(boolean preserve) {
    try {

        deleteState();
        createSourceData();
        changeUserGroup("Michael", "Corleone");

        FileSystem fs = cluster.getFileSystem();
        CopyMapper copyMapper = new CopyMapper();
        StubContext stubContext = new StubContext(getConfiguration(), null, 0);
        Mapper<Text, CopyListingFileStatus, Text, Text>.Context context = stubContext.getContext();

        Configuration configuration = context.getConfiguration();
        EnumSet<DistCpOptions.FileAttribute> fileAttributes = EnumSet.noneOf(DistCpOptions.FileAttribute.class);
        if (preserve) {
            fileAttributes.add(DistCpOptions.FileAttribute.USER);
            fileAttributes.add(DistCpOptions.FileAttribute.GROUP);
            fileAttributes.add(DistCpOptions.FileAttribute.PERMISSION);
        }

        configuration.set(DistCpOptionSwitch.PRESERVE_STATUS.getConfigLabel(),
                DistCpUtils.packAttributes(fileAttributes));
        copyMapper.setup(context);

        for (Path path : pathList) {
            final FileStatus fileStatus = fs.getFileStatus(path);
            copyMapper.map(new Text(DistCpUtils.getRelativePath(new Path(SOURCE_PATH), path)),
                    new CopyListingFileStatus(fileStatus), context);
        }

        // Check that the user/group attributes are preserved
        // (only) as necessary.
        for (Path path : pathList) {
            final Path targetPath = new Path(path.toString().replaceAll(SOURCE_PATH, TARGET_PATH));
            final FileStatus source = fs.getFileStatus(path);
            final FileStatus target = fs.getFileStatus(targetPath);
            if (!source.isDirectory()) {
                Assert.assertTrue(!preserve || source.getOwner().equals(target.getOwner()));
                Assert.assertTrue(!preserve || source.getGroup().equals(target.getGroup()));
                Assert.assertTrue(!preserve || source.getPermission().equals(target.getPermission()));
                Assert.assertTrue(preserve || !source.getOwner().equals(target.getOwner()));
                Assert.assertTrue(preserve || !source.getGroup().equals(target.getGroup()));
                Assert.assertTrue(preserve || !source.getPermission().equals(target.getPermission()));
                Assert.assertTrue(source.isDirectory() || source.getReplication() != target.getReplication());
            }
        }
    } catch (Exception e) {
        Assert.assertTrue("Unexpected exception: " + e.getMessage(), false);
        e.printStackTrace();
    }
}

From source file:com.google.bitcoin.core.Wallet.java

/**
 * This method is used by a {@link Peer} to find out if a transaction that has been announced is interesting,
 * that is, whether we should bother downloading its dependencies and exploring the transaction to decide how
 * risky it is. If this method returns true then {@link Wallet#receivePending(Transaction, java.util.List)}
 * will soon be called with the transaction's dependencies as well.
 */
public boolean isPendingTransactionRelevant(Transaction tx) throws ScriptException {
    lock.lock();
    try {
        // Ignore it if we already know about this transaction. Receiving a pending transaction never moves it
        // between pools.
        log.info("!!!! isPendingTransactionRelevant START " + tx.getHashAsString());
        EnumSet<Pool> containingPools = getContainingPools(tx);
        if (!containingPools.equals(EnumSet.noneOf(Pool.class))) {
            log.debug("Received tx we already saw in a block or created ourselves: " + tx.getHashAsString());
            return false;
        }
        log.info("!!!! isPendingTransactionRelevant NOT IN POOLS " + tx.getHashAsString());

        // We only care about transactions that:
        //   - Send us coins
        //   - Spend our coins
        if (!isTransactionRelevant(tx)) {
            return false;
        }
        log.info("!!!! isPendingTransactionRelevant IS RELEVANT " + tx.getHashAsString());

        if (isTransactionRisky(tx, null) && !acceptRiskyTransactions) {
            log.warn(
                    "Received transaction {} with a lock time of {}, but not configured to accept these, discarding",
                    tx.getHashAsString(), tx.getLockTime());
            return false;
        }
        log.debug("Saw relevant pending transaction " + tx.toString());
        log.info("!!!! isPendingTransactionRelevant NOT RISKY " + tx.getHashAsString());

        return true;
    } finally {
        lock.unlock();
    }
}

From source file:org.codice.ddf.spatial.ogc.csw.catalog.source.CswSource.java

protected void configureCswSource() {
    detailLevels = EnumSet.noneOf(ElementSetType.class);

    capabilities = getCapabilities();

    if (null != capabilities) {
        cswVersion = capabilities.getVersion();
        if (CswConstants.VERSION_2_0_1.equals(cswVersion)) {
            remoteCsw.setCsw201();
        }
        if (capabilities.getFilterCapabilities() == null) {
            return;
        }

        readGetRecordsOperation(capabilities);

        loadContentTypes();
        LOGGER.debug("{}: {}", cswSourceConfiguration.getId(), capabilities.toString());
    } else {
        LOGGER.error("{}: CSW Server did not return any capabilities.", cswSourceConfiguration.getId());
    }
}

From source file:net.sf.jabref.gui.openoffice.OOBibBase.java

private void insertFullReferenceAtCursor(XTextCursor cursor, Map<BibEntry, BibDatabase> entries,
        OOBibStyle style, String parFormat) throws UndefinedParagraphFormatException, IllegalArgumentException,
        UnknownPropertyException, PropertyVetoException, WrappedTargetException {
    Map<BibEntry, BibDatabase> correctEntries;
    // If we don't have numbered entries, we need to sort the entries before adding them:
    if (style.isSortByPosition()) {
        // Use the received map directly
        correctEntries = entries;
    } else {
        // Sort map
        Map<BibEntry, BibDatabase> newMap = new TreeMap<>(entryComparator);
        newMap.putAll(entries);
        correctEntries = newMap;
    }
    int number = 1;
    for (Map.Entry<BibEntry, BibDatabase> entry : correctEntries.entrySet()) {
        if (entry.getKey() instanceof UndefinedBibtexEntry) {
            continue;
        }
        OOUtil.insertParagraphBreak(text, cursor);
        if (style.isNumberEntries()) {
            int minGroupingCount = style.getIntCitProperty(OOBibStyle.MINIMUM_GROUPING_COUNT);
            OOUtil.insertTextAtCurrentLocation(text, cursor,
                    style.getNumCitationMarker(Collections.singletonList(number++), minGroupingCount, true),
                    EnumSet.noneOf(OOUtil.Formatting.class));
        }
        Layout layout = style.getReferenceFormat(entry.getKey().getType());
        layout.setPostFormatter(POSTFORMATTER);
        OOUtil.insertFullReferenceAtCurrentLocation(text, cursor, layout, parFormat, entry.getKey(),
                entry.getValue(), uniquefiers.get(entry.getKey().getCiteKey()));
    }

}

From source file:org.apache.openjpa.persistence.XMLPersistenceMetaDataParser.java

/**
 * Lazily parse cascades.
 */
protected boolean startCascade(Object tag, Attributes attrs) throws SAXException {
    if (!isMetaDataMode())
        return false;

    Set<CascadeType> cascades = null;
    if (currentElement() instanceof FieldMetaData) {
        if (_cascades == null)
            _cascades = EnumSet.noneOf(CascadeType.class);
        cascades = _cascades;
    } else {
        if (_pkgCascades == null)
            _pkgCascades = EnumSet.noneOf(CascadeType.class);
        cascades = _pkgCascades;
    }
    boolean all = ELEM_CASCADE_ALL == tag;
    if (all || ELEM_CASCADE_PER == tag)
        cascades.add(PERSIST);
    if (all || ELEM_CASCADE_REM == tag)
        cascades.add(REMOVE);
    if (all || ELEM_CASCADE_MER == tag)
        cascades.add(MERGE);
    if (all || ELEM_CASCADE_REF == tag)
        cascades.add(REFRESH);
    if (all || ELEM_CASCADE_DET == tag)
        cascades.add(DETACH);
    return true;
}

From source file:org.jahia.services.search.facets.SimpleJahiaJcrFacets.java

/**
 * @deprecated Use getFacetRangeCounts which is more generalized
 */
@Deprecated
public void getFacetDateCounts(String dateFacet, NamedList<Object> resOuter)
        throws IOException, ParseException, RepositoryException, JahiaException {

    parseParams(FacetParams.FACET_DATE, dateFacet);
    String f = facetValue;

    final NamedList<Object> resInner = new SimpleOrderedMap<Object>();
    String fieldName = StringUtils.substringBeforeLast(f, PROPNAME_INDEX_SEPARATOR);
    ExtendedPropertyDefinition epd = NodeTypeRegistry.getInstance()
            .getNodeType(params.get("f." + f + ".facet.nodetype")).getPropertyDefinition(fieldName);
    String fieldNameInIndex = getFieldNameInIndex(f, fieldName, epd, params.getFieldParam(f, "facet.locale"));
    String prefix = params.getFieldParam(f, FacetParams.FACET_PREFIX);
    DateField ft = StringUtils.isEmpty(prefix) ? JahiaQueryParser.DATE_TYPE : JahiaQueryParser.JR_DATE_TYPE;
    final SchemaField sf = new SchemaField(fieldNameInIndex, ft);

    // TODO: Should we use the key now ?
    //    resOuter.add(key, resInner);
    resOuter.add(fieldName + PROPNAME_INDEX_SEPARATOR + fieldNameInIndex, resInner);

    if (!(epd.getRequiredType() == PropertyType.DATE)) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                "Can not date facet on a field which is not a DateField: " + f);
    }
    Integer minCount = params.getFieldInt(f, FacetParams.FACET_MINCOUNT);
    if (minCount == null) {
        Boolean zeros = params.getFieldBool(f, FacetParams.FACET_ZEROS);
        // mincount = (zeros!=null && zeros) ? 0 : 1;
        minCount = (zeros != null && !zeros) ? 1 : 0;
        // current default is to include zeros.
    }

    final String startS = required.getFieldParam(f, FacetParams.FACET_DATE_START);
    final Date start;
    try {
        start = JahiaQueryParser.DATE_TYPE.parseMath(NOW, startS);
    } catch (SolrException e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                "date facet 'start' is not a valid Date string: " + startS, e);
    }
    final String endS = required.getFieldParam(f, FacetParams.FACET_DATE_END);
    Date end; // not final, the FACET_DATE_HARD_END handling below may change this
    try {
        end = JahiaQueryParser.DATE_TYPE.parseMath(NOW, endS);
    } catch (SolrException e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                "date facet 'end' is not a valid Date string: " + endS, e);
    }

    if (end.before(start)) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                "date facet 'end' comes before 'start': " + endS + " < " + startS);
    }

    final String gap = required.getFieldParam(f, FacetParams.FACET_DATE_GAP);
    final DateMathParser dmp = new DateMathParser(DateField.UTC, Locale.US);
    dmp.setNow(NOW);

    String[] iStrs = params.getFieldParams(f, FacetParams.FACET_DATE_INCLUDE);
    // Legacy support for default of [lower,upper,edge] for date faceting
    // this is not handled by FacetRangeInclude.parseParam because
    // range faceting has different defaults
    final EnumSet<FacetRangeInclude> include = (null == iStrs || 0 == iStrs.length)
            ? EnumSet.of(FacetRangeInclude.LOWER, FacetRangeInclude.UPPER, FacetRangeInclude.EDGE)
            : FacetRangeInclude.parseParam(iStrs);

    try {
        Date low = start;
        while (low.before(end)) {
            dmp.setNow(low);
            String label = JahiaQueryParser.DATE_TYPE.toExternal(low);

            Date high = dmp.parseMath(gap);
            if (end.before(high)) {
                if (params.getFieldBool(f, FacetParams.FACET_DATE_HARD_END, false)) {
                    high = end;
                } else {
                    end = high;
                }
            }
            if (high.before(low)) {
                throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                        "date facet infinite loop (is gap negative?)");
            }
            final boolean includeLower = (include.contains(FacetRangeInclude.LOWER)
                    || (include.contains(FacetRangeInclude.EDGE) && low.equals(start)));
            final boolean includeUpper = (include.contains(FacetRangeInclude.UPPER)
                    || (include.contains(FacetRangeInclude.EDGE) && high.equals(end)));

            Query rangeQuery = getRangeQuery(ft, null, sf, prefix, low, high, includeLower, includeUpper);

            int count = rangeCount(rangeQuery);
            if (count >= minCount) {
                // TODO: Can we use just label here ?                  
                resInner.add(label + PROPNAME_INDEX_SEPARATOR + rangeQuery.toString(), count);
            }
            low = high;
        }
    } catch (java.text.ParseException e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                "date facet 'gap' is not a valid Date Math string: " + gap, e);
    }

    // explicitly return the gap and end so all the counts are meaningful
    resInner.add("gap", gap);
    resInner.add("start", start);
    resInner.add("end", end);

    final String[] othersP = params.getFieldParams(f, FacetParams.FACET_DATE_OTHER);
    if (null != othersP && 0 < othersP.length) {
        final Set<FacetDateOther> others = EnumSet.noneOf(FacetDateOther.class);

        for (final String o : othersP) {
            others.add(FacetDateOther.get(o));
        }

        // no matter what other values are listed, we don't do
        // anything if "none" is specified.
        if (!others.contains(FacetDateOther.NONE)) {
            boolean all = others.contains(FacetDateOther.ALL);

            if (all || others.contains(FacetDateOther.BEFORE)) {
                Query rangeQuery = getRangeQuery(ft, null, sf, prefix, null, start, false,
                        (include.contains(FacetRangeInclude.OUTER)
                                || (!(include.contains(FacetRangeInclude.LOWER)
                                        || include.contains(FacetRangeInclude.EDGE)))));
                int count = rangeCount(rangeQuery);
                if (count >= minCount) {
                    resInner.add(
                            FacetDateOther.BEFORE.toString() + PROPNAME_INDEX_SEPARATOR + rangeQuery.toString(),
                            count);
                }
            }
            if (all || others.contains(FacetDateOther.AFTER)) {
                Query rangeQuery = getRangeQuery(ft, null, sf, prefix, end, null,
                        (include.contains(FacetRangeInclude.OUTER)
                                || (!(include.contains(FacetRangeInclude.UPPER)
                                        || include.contains(FacetRangeInclude.EDGE)))),
                        false);
                int count = rangeCount(rangeQuery);
                if (count >= minCount) {
                    resInner.add(
                            FacetDateOther.AFTER.toString() + PROPNAME_INDEX_SEPARATOR + rangeQuery.toString(),
                            count);
                }
            }
            if (all || others.contains(FacetDateOther.BETWEEN)) {
                Query rangeQuery = getRangeQuery(ft, null, sf, prefix, start, end,
                        (include.contains(FacetRangeInclude.LOWER) || include.contains(FacetRangeInclude.EDGE)),
                        (include.contains(FacetRangeInclude.UPPER)
                                || include.contains(FacetRangeInclude.EDGE)));
                int count = rangeCount(rangeQuery);
                if (count >= minCount) {
                    resInner.add(FacetDateOther.BETWEEN.toString() + PROPNAME_INDEX_SEPARATOR
                            + rangeQuery.toString(), count);
                }
            }
        }
    }
}