List of usage examples for java.util EnumSet noneOf
public static <E extends Enum<E>> EnumSet<E> noneOf(Class<E> elementType)
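A minimal, self-contained sketch of the call before the project examples below (the Direction enum, class name, and variable names are invented purely for illustration): noneOf creates an empty, mutable EnumSet bound to the given enum class, which is then filled with add or addAll, the accumulator pattern most of the examples below rely on.

import java.util.EnumSet;

public class EnumSetNoneOfSketch {

    // Hypothetical enum used only for this illustration.
    enum Direction { NORTH, SOUTH, EAST, WEST }

    public static void main(String[] args) {
        // noneOf returns an empty, mutable set typed to Direction.
        EnumSet<Direction> visited = EnumSet.noneOf(Direction.class);
        System.out.println(visited.isEmpty()); // true

        // Elements are added later, typically inside conditional or parsing logic.
        visited.add(Direction.NORTH);
        visited.add(Direction.EAST);
        System.out.println(visited); // [NORTH, EAST]
    }
}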
From source file:org.trnltk.morphology.lexicon.ImmutableRootGenerator.java
private HashSet<ImmutableRoot> generateModifiedRootNodes(final Lexeme lexeme) {
    final Set<LexemeAttribute> lexemeAttributes = lexeme.getAttributes();

    if (lexemeAttributes.contains(LexemeAttribute.Special))
        return this.handleSpecialRoots(lexeme);

    if (lexemeAttributes.contains(LexemeAttribute.EndsWithAyn)) { // kind of hack, didn't like it :(
        // if the word ends with Ayn
        // create roots with that attribute, and without that attribute
        // when creating with that attribute, add a VowelStart expectation
        final HashSet<ImmutableRoot> immutableRoots = new HashSet<ImmutableRoot>();

        final EnumSet<LexemeAttribute> lexemeAttributesWithoutAyn = EnumSet.copyOf(lexemeAttributes);
        lexemeAttributesWithoutAyn.remove(LexemeAttribute.EndsWithAyn);
        final Lexeme lexemeWithoutAttrEndsWithAyn = new ImmutableLexeme(lexeme.getLemma(), lexeme.getLemmaRoot(),
                lexeme.getPrimaryPos(), lexeme.getSecondaryPos(), Sets.immutableEnumSet(lexemeAttributesWithoutAyn));
        final HashSet<ImmutableRoot> rootsWithoutAynApplied = this
                .generateModifiedRootNodes(lexemeWithoutAttrEndsWithAyn);
        immutableRoots.addAll(rootsWithoutAynApplied);

        for (ImmutableRoot immutableRoot : rootsWithoutAynApplied) {
            final ImmutableSet<PhoneticAttribute> phoneticAttributesWithoutAynApplied = immutableRoot
                    .getPhoneticAttributes();
            final HashSet<PhoneticAttribute> phoneticAttributesWithAynApplied = Sets
                    .newHashSet(phoneticAttributesWithoutAynApplied);
            phoneticAttributesWithAynApplied.remove(PhoneticAttribute.LastLetterVowel);
            phoneticAttributesWithAynApplied.add(PhoneticAttribute.LastLetterConsonant);
            final ImmutableRoot immutableRootWithAynApplied = new ImmutableRoot(immutableRoot.getSequence(),
                    immutableRoot.getLexeme(), Sets.immutableEnumSet(phoneticAttributesWithAynApplied),
                    Sets.immutableEnumSet(PhoneticExpectation.VowelStart));
            immutableRoots.add(immutableRootWithAynApplied);
        }

        // just before returning, set correct lexeme again
        final HashSet<ImmutableRoot> immutableRootsWithCorrectLexemeAttr = new HashSet<ImmutableRoot>();
        for (ImmutableRoot immutableRoot : immutableRoots) {
            immutableRootsWithCorrectLexemeAttr.add(new ImmutableRoot(immutableRoot.getSequence(), lexeme,
                    immutableRoot.getPhoneticAttributes(), immutableRoot.getPhoneticExpectations()));
        }

        return immutableRootsWithCorrectLexemeAttr;
    }

    final String lemmaRoot = lexeme.getLemmaRoot();
    String modifiedRootStr = lexeme.getLemmaRoot();

    final EnumSet<PhoneticAttribute> originalPhoneticAttrs = phoneticsAnalyzer
            .calculatePhoneticAttributes(lexeme.getLemmaRoot(), null);
    final EnumSet<PhoneticAttribute> modifiedPhoneticAttrs = phoneticsAnalyzer
            .calculatePhoneticAttributes(lexeme.getLemmaRoot(), null);

    final EnumSet<PhoneticExpectation> originalPhoneticExpectations = EnumSet.noneOf(PhoneticExpectation.class);
    final EnumSet<PhoneticExpectation> modifiedPhoneticExpectations = EnumSet.noneOf(PhoneticExpectation.class);

    if (CollectionUtils.containsAny(lexemeAttributes,
            Sets.immutableEnumSet(LexemeAttribute.Voicing, LexemeAttribute.VoicingOpt))) {
        final TurkicLetter lastLetter = TurkishAlphabet
                .getLetter(modifiedRootStr.charAt(modifiedRootStr.length() - 1));
        final TurkicLetter voicedLastLetter = lemmaRoot.endsWith("nk") ? TurkishAlphabet.L_g
                : TurkishAlphabet.voice(lastLetter);
        Validate.notNull(voicedLastLetter);
        modifiedRootStr = modifiedRootStr.substring(0, modifiedRootStr.length() - 1)
                + voicedLastLetter.charValue();

        modifiedPhoneticAttrs.remove(PhoneticAttribute.LastLetterVoicelessStop);

        if (!lexemeAttributes.contains(LexemeAttribute.VoicingOpt)) {
            originalPhoneticExpectations.add(PhoneticExpectation.ConsonantStart);
        }

        modifiedPhoneticExpectations.add(PhoneticExpectation.VowelStart);
    }

    if (lexemeAttributes.contains(LexemeAttribute.Doubling)) {
        modifiedRootStr += modifiedRootStr.charAt(modifiedRootStr.length() - 1);
        originalPhoneticExpectations.add(PhoneticExpectation.ConsonantStart);
        modifiedPhoneticExpectations.add(PhoneticExpectation.VowelStart);
    }

    if (lexemeAttributes.contains(LexemeAttribute.LastVowelDrop)) {
        modifiedRootStr = modifiedRootStr.substring(0, modifiedRootStr.length() - 2)
                + modifiedRootStr.charAt(modifiedRootStr.length() - 1);
        if (!PrimaryPos.Verb.equals(lexeme.getPrimaryPos()))
            originalPhoneticExpectations.add(PhoneticExpectation.ConsonantStart);

        modifiedPhoneticExpectations.add(PhoneticExpectation.VowelStart);
    }

    if (lexemeAttributes.contains(LexemeAttribute.InverseHarmony)) {
        originalPhoneticAttrs.add(PhoneticAttribute.LastVowelFrontal);
        originalPhoneticAttrs.remove(PhoneticAttribute.LastVowelBack);
        modifiedPhoneticAttrs.add(PhoneticAttribute.LastVowelFrontal);
        modifiedPhoneticAttrs.remove(PhoneticAttribute.LastVowelBack);
    }

    if (lexemeAttributes.contains(LexemeAttribute.ProgressiveVowelDrop)) {
        modifiedRootStr = modifiedRootStr.substring(0, modifiedRootStr.length() - 1);
        if (this.hasVowel(modifiedRootStr)) {
            modifiedPhoneticAttrs.clear();
            modifiedPhoneticAttrs.addAll(phoneticsAnalyzer.calculatePhoneticAttributes(modifiedRootStr, null));
        }
        modifiedPhoneticExpectations.add(PhoneticExpectation.VowelStart);
    }

    ImmutableRoot originalRoot = new ImmutableRoot(lexeme.getLemmaRoot(), lexeme,
            Sets.immutableEnumSet(originalPhoneticAttrs), Sets.immutableEnumSet(originalPhoneticExpectations));
    ImmutableRoot modifiedRoot = new ImmutableRoot(modifiedRootStr, lexeme,
            Sets.immutableEnumSet(modifiedPhoneticAttrs), Sets.immutableEnumSet(modifiedPhoneticExpectations));

    if (originalRoot.equals(modifiedRoot))
        return Sets.newHashSet(originalRoot);
    else
        return Sets.newHashSet(originalRoot, modifiedRoot);
}
From source file:org.apache.accumulo.shell.commands.FateCommand.java
@Override
public int execute(final String fullCommand, final CommandLine cl, final Shell shellState)
        throws ParseException, KeeperException, InterruptedException, IOException {
    Instance instance = shellState.getInstance();

    String[] args = cl.getArgs();
    if (args.length <= 0) {
        throw new ParseException("Must provide a command to execute");
    }
    String cmd = args[0];
    boolean failedCommand = false;

    AdminUtil<FateCommand> admin = new AdminUtil<FateCommand>(false);

    String path = ZooUtil.getRoot(instance) + Constants.ZFATE;
    String masterPath = ZooUtil.getRoot(instance) + Constants.ZMASTER_LOCK;
    IZooReaderWriter zk = getZooReaderWriter(shellState.getInstance(), cl.getOptionValue(secretOption.getOpt()));
    ZooStore<FateCommand> zs = new ZooStore<FateCommand>(path, zk);

    if ("fail".equals(cmd)) {
        if (args.length <= 1) {
            throw new ParseException("Must provide transaction ID");
        }
        for (int i = 1; i < args.length; i++) {
            if (!admin.prepFail(zs, zk, masterPath, args[i])) {
                System.out.printf("Could not fail transaction: %s%n", args[i]);
                failedCommand = true;
            }
        }
    } else if ("delete".equals(cmd)) {
        if (args.length <= 1) {
            throw new ParseException("Must provide transaction ID");
        }
        for (int i = 1; i < args.length; i++) {
            if (admin.prepDelete(zs, zk, masterPath, args[i])) {
                admin.deleteLocks(zs, zk, ZooUtil.getRoot(instance) + Constants.ZTABLE_LOCKS, args[i]);
            } else {
                System.out.printf("Could not delete transaction: %s%n", args[i]);
                failedCommand = true;
            }
        }
    } else if ("list".equals(cmd) || "print".equals(cmd)) {
        // Parse transaction ID filters for print display
        Set<Long> filterTxid = null;
        if (args.length >= 2) {
            filterTxid = new HashSet<Long>(args.length);
            for (int i = 1; i < args.length; i++) {
                try {
                    Long val = Long.parseLong(args[i], 16);
                    filterTxid.add(val);
                } catch (NumberFormatException nfe) {
                    // Failed to parse, will exit instead of displaying everything since
                    // the intention was to potentially filter some data
                    System.out.printf("Invalid transaction ID format: %s%n", args[i]);
                    return 1;
                }
            }
        }

        // Parse TStatus filters for print display
        EnumSet<TStatus> filterStatus = null;
        if (cl.hasOption(statusOption.getOpt())) {
            filterStatus = EnumSet.noneOf(TStatus.class);
            String[] tstat = cl.getOptionValues(statusOption.getOpt());
            for (int i = 0; i < tstat.length; i++) {
                try {
                    filterStatus.add(TStatus.valueOf(tstat[i]));
                } catch (IllegalArgumentException iae) {
                    System.out.printf("Invalid transaction status name: %s%n", tstat[i]);
                    return 1;
                }
            }
        }

        StringBuilder buf = new StringBuilder(8096);
        Formatter fmt = new Formatter(buf);
        admin.print(zs, zk, ZooUtil.getRoot(instance) + Constants.ZTABLE_LOCKS, fmt, filterTxid, filterStatus);
        shellState.printLines(Collections.singletonList(buf.toString()).iterator(),
                !cl.hasOption(disablePaginationOpt.getOpt()));
    } else if ("dump".equals(cmd)) {
        List<Long> txids;

        if (args.length == 1) {
            txids = zs.list();
        } else {
            txids = new ArrayList<>();
            for (int i = 1; i < args.length; i++) {
                txids.add(Long.parseLong(args[i], 16));
            }
        }

        Gson gson = new GsonBuilder().registerTypeAdapter(ReadOnlyRepo.class, new InterfaceSerializer<>())
                .registerTypeAdapter(Repo.class, new InterfaceSerializer<>())
                .registerTypeAdapter(byte[].class, new ByteArraySerializer()).setPrettyPrinting().create();

        List<FateStack> txStacks = new ArrayList<>();
        for (Long txid : txids) {
            List<ReadOnlyRepo<FateCommand>> repoStack = zs.getStack(txid);
            txStacks.add(new FateStack(txid, repoStack));
        }

        System.out.println(gson.toJson(txStacks));
    } else {
        throw new ParseException("Invalid command option");
    }

    return failedCommand ? 1 : 0;
}
From source file:org.apache.sentry.policy.indexer.TestIndexerAuthorizationProviderGeneralCases.java
@Test
public void testAnalyst() throws Exception {
    Set<IndexerModelAction> writeOnly = EnumSet.of(IndexerModelAction.WRITE);
    doTestAuthProviderOnIndexer(SUB_ANALYST, IND_PURCHASES, writeOnly);

    Set<IndexerModelAction> allActions = EnumSet.allOf(IndexerModelAction.class);
    doTestAuthProviderOnIndexer(SUB_ANALYST, IND_ANALYST1, allActions);
    doTestAuthProviderOnIndexer(SUB_ANALYST, IND_JRANALYST1, allActions);

    Set<IndexerModelAction> readWriteOnly = EnumSet.of(READ, WRITE);
    doTestAuthProviderOnIndexer(SUB_ANALYST, IND_TMP, readWriteOnly);

    Set<IndexerModelAction> noActions = EnumSet.noneOf(IndexerModelAction.class);
    doTestAuthProviderOnIndexer(SUB_ANALYST, IND_PURCHASES_PARTIAL, noActions);
}
From source file:com.l2jfree.gameserver.model.restriction.ObjectRestrictions.java
/**
 * Adds a restriction on startup
 *
 * @param objId
 * @param restriction
 */
private void addRestriction(Integer objId, AvailableRestriction restriction) {
    EnumSet<AvailableRestriction> set = _restrictionList.get(objId);

    if (set == null)
        _restrictionList.put(objId, set = EnumSet.noneOf(AvailableRestriction.class));

    if (set.add(restriction)) {
        final L2Player player = L2World.getInstance().findPlayer(objId);

        if (player != null)
            restriction.activatedOn(player);
    }
}
From source file:org.apache.hadoop.tools.util.TestDistCpUtils.java
@Test
public void testPreserveNothingOnDirectory() throws IOException {
    FileSystem fs = FileSystem.get(config);
    EnumSet<FileAttribute> attributes = EnumSet.noneOf(FileAttribute.class);

    Path dst = new Path("/tmp/abc");
    Path src = new Path("/tmp/src");

    createDirectory(fs, src);
    createDirectory(fs, dst);

    fs.setPermission(src, fullPerm);
    fs.setOwner(src, "somebody", "somebody-group");
    fs.setTimes(src, 0, 0);

    fs.setPermission(dst, noPerm);
    fs.setOwner(dst, "nobody", "nobody-group");
    fs.setTimes(dst, 100, 100);

    CopyListingFileStatus srcStatus = new CopyListingFileStatus(fs.getFileStatus(src));

    DistCpUtils.preserve(fs, dst, srcStatus, attributes, false);

    CopyListingFileStatus dstStatus = new CopyListingFileStatus(fs.getFileStatus(dst));

    // FileStatus.equals only compares path field, must explicitly compare all fields
    Assert.assertFalse(srcStatus.getPermission().equals(dstStatus.getPermission()));
    Assert.assertFalse(srcStatus.getOwner().equals(dstStatus.getOwner()));
    Assert.assertFalse(srcStatus.getGroup().equals(dstStatus.getGroup()));
    Assert.assertTrue(dstStatus.getAccessTime() == 100);
    Assert.assertTrue(dstStatus.getModificationTime() == 100);
    Assert.assertTrue(dstStatus.getReplication() == 0);
}
From source file:com.zimbra.cs.fb.RemoteFreeBusyProvider.java
@Override
public Set<MailItem.Type> registerForItemTypes() {
    // Register for no item types: noneOf yields an empty, type-safe set instead of null.
    return EnumSet.noneOf(MailItem.Type.class);
}
From source file:org.apache.hadoop.hdfs.server.datanode.TestFsDatasetCacheRevocation.java
/**
 * Test that when we have an uncache request, and the client refuses to release
 * the replica for a long time, we will un-mlock it.
 */
@Test(timeout = 120000)
public void testRevocation() throws Exception {
    assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS);
    BlockReaderTestUtil.enableHdfsCachingTracing();
    BlockReaderTestUtil.enableShortCircuitShmTracing();
    Configuration conf = getDefaultConf();
    // Set a really short revocation timeout.
    conf.setLong(DFSConfigKeys.DFS_DATANODE_CACHE_REVOCATION_TIMEOUT_MS, 250L);
    // Poll very often
    conf.setLong(DFSConfigKeys.DFS_DATANODE_CACHE_REVOCATION_POLLING_MS, 2L);
    MiniDFSCluster cluster = null;
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    cluster.waitActive();
    DistributedFileSystem dfs = cluster.getFileSystem();

    // Create and cache a file.
    final String TEST_FILE = "/test_file2";
    DFSTestUtil.createFile(dfs, new Path(TEST_FILE), BLOCK_SIZE, (short) 1, 0xcafe);
    dfs.addCachePool(new CachePoolInfo("pool"));
    long cacheDirectiveId = dfs.addCacheDirective(new CacheDirectiveInfo.Builder().setPool("pool")
            .setPath(new Path(TEST_FILE)).setReplication((short) 1).build());
    FsDatasetSpi<?> fsd = cluster.getDataNodes().get(0).getFSDataset();
    DFSTestUtil.verifyExpectedCacheUsage(BLOCK_SIZE, 1, fsd);

    // Mmap the file.
    FSDataInputStream in = dfs.open(new Path(TEST_FILE));
    ByteBuffer buf = in.read(null, BLOCK_SIZE, EnumSet.noneOf(ReadOption.class));

    // Attempt to uncache file. The file should get uncached.
    LOG.info("removing cache directive {}", cacheDirectiveId);
    dfs.removeCacheDirective(cacheDirectiveId);
    LOG.info("finished removing cache directive {}", cacheDirectiveId);
    Thread.sleep(1000);
    DFSTestUtil.verifyExpectedCacheUsage(0, 0, fsd);

    // Cleanup
    in.releaseBuffer(buf);
    in.close();
    cluster.shutdown();
}
From source file:org.apache.sentry.policy.search.TestSearchAuthorizationProviderGeneralCases.java
@Test
public void testAnalyst() throws Exception {
    Set<SearchModelAction> updateOnly = EnumSet.of(SearchModelAction.UPDATE);
    doTestAuthProviderOnCollection(SUB_ANALYST, COLL_PURCHASES, updateOnly);

    Set<SearchModelAction> allActions = EnumSet.allOf(SearchModelAction.class);
    doTestAuthProviderOnCollection(SUB_ANALYST, COLL_ANALYST1, allActions);
    doTestAuthProviderOnCollection(SUB_ANALYST, COLL_JRANALYST1, allActions);

    Set<SearchModelAction> queryUpdateOnly = EnumSet.of(QUERY, UPDATE);
    doTestAuthProviderOnCollection(SUB_ANALYST, COLL_TMP, queryUpdateOnly);

    Set<SearchModelAction> noActions = EnumSet.noneOf(SearchModelAction.class);
    doTestAuthProviderOnCollection(SUB_ANALYST, COLL_PURCHASES_PARTIAL, noActions);
}
From source file:org.rhq.enterprise.server.perspective.activator.ActivatorHelper.java
public static boolean initGlobalActivators(GlobalActivatorsType rawActivators, List<Activator<?>> activators) {
    if (rawActivators == null) {
        return false;
    }

    // Let our super class init the "common" activators.
    boolean debugMode = initCommonActivators(rawActivators, activators);

    List<InventoryActivatorType> rawInventoryActivators = rawActivators.getInventory();
    for (InventoryActivatorType rawInventoryActivator : rawInventoryActivators) {
        List<ResourceType> rawResourceConditions = rawInventoryActivator.getResource();
        List<ResourceConditionSet> resourceConditionSets = new ArrayList<ResourceConditionSet>(
                rawResourceConditions.size());
        for (ResourceType rawResourceCondition : rawResourceConditions) {
            List<ResourcePermissionActivatorType> rawPermissions = rawResourceCondition.getResourcePermission();
            EnumSet<Permission> permissions = EnumSet.noneOf(Permission.class);
            for (ResourcePermissionActivatorType rawPermission : rawPermissions) {
                String rawName = rawPermission.getName().toString();
                Permission permission = Permission.valueOf(rawName.toUpperCase(Locale.US));
                permissions.add(permission);
            }

            List<TraitActivatorType> rawTraits = rawResourceCondition.getTrait();
            Map<String, Pattern> traits = new HashMap<String, Pattern>();
            for (TraitActivatorType rawTraitActivator : rawTraits) {
                String name = rawTraitActivator.getName();
                String value = rawTraitActivator.getValue();
                traits.put(name, Pattern.compile(value));
            }

            ResourceConditionSet resourceConditionSet = new ResourceConditionSet(
                    rawResourceCondition.getPlugin(), rawResourceCondition.getType(), permissions, traits);
            resourceConditionSets.add(resourceConditionSet);
        }
        InventoryActivator resourceTypeActivator = new InventoryActivator(resourceConditionSets);
        activators.add(resourceTypeActivator);
    }

    return debugMode;
}
From source file:com.netflix.genie.web.controllers.ClusterRestController.java
/**
 * Get cluster config based on user params. If empty strings are passed in,
 * they are treated as nulls (not false).
 *
 * @param name          cluster name (can be a pattern)
 * @param statuses      valid types - Types.ClusterStatus
 * @param tags          tags for the cluster
 * @param minUpdateTime min time when cluster configuration was updated
 * @param maxUpdateTime max time when cluster configuration was updated
 * @param page          The page to get
 * @param assembler     The paged resources assembler to use
 * @return the Clusters found matching the criteria
 * @throws GenieException For any error
 */
@GetMapping(produces = MediaTypes.HAL_JSON_VALUE)
@ResponseStatus(HttpStatus.OK)
public PagedResources<ClusterResource> getClusters(
        @RequestParam(value = "name", required = false) final String name,
        @RequestParam(value = "status", required = false) final Set<String> statuses,
        @RequestParam(value = "tag", required = false) final Set<String> tags,
        @RequestParam(value = "minUpdateTime", required = false) final Long minUpdateTime,
        @RequestParam(value = "maxUpdateTime", required = false) final Long maxUpdateTime,
        @PageableDefault(size = 64, sort = { "updated" }, direction = Sort.Direction.DESC) final Pageable page,
        final PagedResourcesAssembler<Cluster> assembler) throws GenieException {
    log.debug("Called [name | statuses | tags | minUpdateTime | maxUpdateTime | page]");
    log.debug("{} | {} | {} | {} | {} | {}", name, statuses, tags, minUpdateTime, maxUpdateTime, page);

    // Create this conversion internal in case someone uses lower case by accident?
    Set<ClusterStatus> enumStatuses = null;
    if (statuses != null) {
        enumStatuses = EnumSet.noneOf(ClusterStatus.class);
        for (final String status : statuses) {
            enumStatuses.add(ClusterStatus.parse(status));
        }
    }

    // Build the self link which will be used for the next, previous, etc links
    final Link self = ControllerLinkBuilder.linkTo(ControllerLinkBuilder.methodOn(ClusterRestController.class)
            .getClusters(name, statuses, tags, minUpdateTime, maxUpdateTime, page, assembler)).withSelfRel();

    return assembler.toResource(
            this.clusterService.getClusters(name, enumStatuses, tags,
                    minUpdateTime == null ? null : new Date(minUpdateTime),
                    maxUpdateTime == null ? null : new Date(maxUpdateTime), page),
            this.clusterResourceAssembler, self);
}