List of usage examples for java.util EnumSet noneOf
public static <E extends Enum<E>> EnumSet<E> noneOf(Class<E> elementType)
From source file:com.msopentech.thali.CouchDBListener.BogusRequestAuthorization.java
/**
 * Bogus (explicitly test-grade) authorization check: a request is authorized iff the RSA
 * public key at the root of the client's TLS certificate chain matches the key document
 * stored in the local key database under an ID derived from that key.
 *
 * @param manager       CouchDB Lite manager used to look up the key database
 * @param urlConnection the incoming (presumed TLS) connection to authorize
 * @return true if the request is allowed, false otherwise (every failure path also calls
 *         insecureConnection(urlConnection) to flag the rejection)
 */
@Override
public boolean Authorize(Manager manager, URLConnection urlConnection) {
    List<String> pathSegments = Router.splitPath(urlConnection.getURL());
    // Everything is legal with the key database, gives you the warm fuzzies, doesn't it?
    if (pathSegments.size() == 0 || pathSegments.get(0).equals(KeyDatabaseName)) {
        return true;
    }
    Database keyDatabase = null;
    try {
        keyDatabase = manager.getExistingDatabase(KeyDatabaseName);
    } catch (CouchbaseLiteException e) {
        // getExistingDatabase is expected to return null (not throw) for a missing DB.
        Log.e(tag, "If the DB doesn't exist we should have gotten null, not an exception. So something went wrong.", e);
    }
    // No database? Then no one is authorized.
    if (keyDatabase == null) {
        insecureConnection(urlConnection);
        return false;
    }
    // Pull the peer's certificate chain; an unverified peer means no client cert was presented.
    javax.security.cert.X509Certificate[] certChain;
    try {
        certChain = urlConnection.getSSLSession().getPeerCertificateChain();
    } catch (SSLPeerUnverifiedException e) {
        insecureConnection(urlConnection);
        return false;
    }
    if (certChain.length == 0) {
        insecureConnection(urlConnection);
        return false;
    }
    // The last element of the chain is treated as the identity key; only RSA is supported.
    PublicKey publicKey = certChain[certChain.length - 1].getPublicKey();
    if ((publicKey instanceof java.security.interfaces.RSAPublicKey) == false) {
        insecureConnection(urlConnection);
        return false;
    }
    java.security.interfaces.RSAPublicKey rsaPublicKey = (java.security.interfaces.RSAPublicKey) publicKey;
    // Derive the document ID from the RSA key and require exactly one revision of it.
    String keyId = BogusAuthorizeCouchDocument.generateRsaKeyId(rsaPublicKey);
    RevisionList revisionList = keyDatabase.getAllRevisionsOfDocumentID(keyId, true);
    if (revisionList.size() != 1) {
        insecureConnection(urlConnection);
        return false;
    }
    EnumSet<Database.TDContentOptions> tdContentOptions = EnumSet.noneOf(Database.TDContentOptions.class);
    RevisionInternal revision = keyDatabase.getDocumentWithIDAndRev(keyId,
            revisionList.getAllRevIds().get(revisionList.getAllRevIds().size() - 1), tdContentOptions);
    // Looking up the doc by the RSA derived key ID but then doing a security check by comparing
    // the values in the document leads to a potential denial of service attack where someone
    // figures out how to get an ID that matches someone else's but attached to a different key.
    // In theory this is impossible since our key ID fully encodes the public key's value. So if
    // we really believed that then just matching on the ID should be enough. When we write the
    // real code we'll have to model this more carefully.
    ObjectMapper mapper = new ObjectMapper();
    try {
        BogusAuthorizeCouchDocument keyClassForTests = mapper.readValue(revision.getJson(),
                BogusAuthorizeCouchDocument.class);
        try {
            // Compare the presented key against the key recorded in the document.
            if (new ThaliPublicKeyComparer(publicKey)
                    .KeysEqual(keyClassForTests.generatePublicKey()) == false) {
                insecureConnection(urlConnection);
                return false;
            }
            return true;
        } catch (Exception e) {
            // A 500 would be better
            insecureConnection(urlConnection);
            return false;
        }
    } catch (IOException e) {
        // Stored document isn't parseable JSON for our type; treat as unauthorized.
        insecureConnection(urlConnection);
        return false;
    }
}
From source file:org.trnltk.morphology.contextless.rootfinder.BruteForceVerbRootFinder.java
@Override @SuppressWarnings({ "UnnecessaryLocalVariable", "ConstantConditions" }) public Collection<DynamicRoot> findRootsForPartialInput(TurkishSequence partialInput, TurkishSequence wholeSurface) {/*from ww w . j a v a 2 s.com*/ final TurkishChar lastVowel = partialInput.getLastVowel(); final TurkishSequence rootSeq = partialInput; final TurkishSequence lemmaSeq = rootSeq; final TurkishSequence lemmaRootSeq = lemmaSeq; final PrimaryPos primaryPos = PrimaryPos.Verb; final SecondaryPos secondaryPos = null; final EnumSet<LexemeAttribute> lexemeAttributes = EnumSet.noneOf(LexemeAttribute.class); final DynamicLexeme lexeme = new DynamicLexeme(lemmaSeq.getUnderlyingString(), lemmaRootSeq.getUnderlyingString(), primaryPos, secondaryPos, lexemeAttributes); final EnumSet<PhoneticExpectation> phoneticExpectations = EnumSet.noneOf(PhoneticExpectation.class); final EnumSet<PhoneticAttribute> phoneticAttributes = phoneticsAnalyzer .calculatePhoneticAttributes(partialInput, lexemeAttributes); final DynamicRoot noAttrRoot = new DynamicRoot(rootSeq, lexeme, phoneticAttributes, phoneticExpectations); this.setLexemeAndPhoneticAttributes(Arrays.asList(noAttrRoot)); this.setLemma(Arrays.asList(noAttrRoot)); final TurkishChar lastChar = partialInput.getLastChar(); final TurkicLetter lastLetter = lastChar.getLetter(); final boolean partialSurfaceCanBeRootOfAVerb = this.seemsLikeAValidVerbRoot(partialInput); if (wholeSurface.equals(partialInput)) return partialSurfaceCanBeRootOfAVerb ? 
Arrays.asList(noAttrRoot) : Collections.<DynamicRoot>emptyList(); final TurkishChar firstCharAfterPartialInput = wholeSurface.charAt(partialInput.length()); final TurkicLetter firstLetterAfterPartialInput = firstCharAfterPartialInput.getLetter(); final String wholeSurfaceStr = wholeSurface.getUnderlyingString(); final String partialInputStr = partialInput.getUnderlyingString(); final boolean mightHaveProgressiveVowelDrop = !lastLetter.isVowel() && strStartsWithAnyAdditionOfStr( wholeSurfaceStr, partialInputStr, Arrays.asList("iyor", "yor", "uyor", "yor")); final boolean mightHaveAorist_A = !lastLetter.isVowel() && strStartsWithAnyAdditionOfStr(wholeSurfaceStr, partialInputStr, Arrays.asList("ar", "er")); // no Aorist_I for -ur, -r final boolean mightHaveAorist_I = !lastLetter.isVowel() && strStartsWithAnyAdditionOfStr(wholeSurfaceStr, partialInputStr, Arrays.asList("r", "ir")); // for other letters, no voicing in verbs. {git+er->gider} vs {yapar, aar, diker} final boolean voicingMightHaveHappened = lastLetter.equals(TurkishAlphabet.L_d) && firstLetterAfterPartialInput.isVowel(); final Set<DynamicRoot> possibleProgressiveVowelDropRoots = mightHaveProgressiveVowelDrop ? this.getProgressiveDropRoots(noAttrRoot, lastVowel) : new HashSet<DynamicRoot>(); final Set<DynamicRoot> possibleAorist_A_Roots = mightHaveAorist_A ? this.getAorist_A_Roots(noAttrRoot) : new HashSet<DynamicRoot>(); final Set<DynamicRoot> possibleAorist_I_Roots = mightHaveAorist_I ? 
this.getAorist_I_Roots(noAttrRoot) : new HashSet<DynamicRoot>(); final Set<DynamicRoot> possibleCausativeRoots = this.getPossibleCausativeRoots(lastLetter, partialInput, wholeSurface, noAttrRoot); final Set<DynamicRoot> possiblePassiveRoots = this.getPossiblePassiveRoots(lastLetter, partialInput, wholeSurface, noAttrRoot); if (voicingMightHaveHappened) { Function<DynamicRoot, DynamicRoot> voicingRootFunction = new Function<DynamicRoot, DynamicRoot>() { @Override public DynamicRoot apply(DynamicRoot input) { return getPossibleVoicingRoot(input); } }; final Collection<DynamicRoot> possibleProgressiveVowelDropRoots_voicing = Collections2 .transform(ImmutableSet.copyOf(possibleProgressiveVowelDropRoots), voicingRootFunction); possibleProgressiveVowelDropRoots.addAll(possibleProgressiveVowelDropRoots_voicing); final Collection<DynamicRoot> possibleAorist_A_Roots_voicing = Collections2 .transform(ImmutableSet.copyOf(possibleAorist_A_Roots), voicingRootFunction); possibleAorist_A_Roots.addAll(possibleAorist_A_Roots_voicing); final Collection<DynamicRoot> possibleAorist_I_Roots_voicing = Collections2 .transform(ImmutableSet.copyOf(possibleAorist_I_Roots), voicingRootFunction); possibleAorist_A_Roots.addAll(possibleAorist_I_Roots_voicing); final Collection<DynamicRoot> possibleCausativeRoots_voicing = Collections2 .transform(ImmutableSet.copyOf(possibleCausativeRoots), voicingRootFunction); possibleCausativeRoots.addAll(possibleCausativeRoots_voicing); final Collection<DynamicRoot> possiblePassiveRoots_voicing = Collections2 .transform(ImmutableSet.copyOf(possiblePassiveRoots), voicingRootFunction); possiblePassiveRoots.addAll(possiblePassiveRoots_voicing); } final HashSet<DynamicRoot> generatedRoots = new HashSet<DynamicRoot>(); generatedRoots.add(noAttrRoot); if (voicingMightHaveHappened) generatedRoots.add(this.getPossibleVoicingRoot(noAttrRoot)); generatedRoots.addAll(possibleProgressiveVowelDropRoots); generatedRoots.addAll(possibleAorist_A_Roots); 
generatedRoots.addAll(possibleAorist_I_Roots); generatedRoots.addAll(possibleCausativeRoots); generatedRoots.addAll(possiblePassiveRoots); this.setLexemeAndPhoneticAttributes(generatedRoots); this.setLemma(generatedRoots); return Collections2.filter(generatedRoots, new Predicate<DynamicRoot>() { @Override public boolean apply(DynamicRoot input) { return seemsLikeAValidVerbRoot(new TurkishSequence(input.getLexeme().getLemmaRoot())); } }); }
From source file:org.apache.hadoop.hive.ql.metadata.PerFileFormatMetadataRestrictionPreEventListener.java
public PerFileFormatMetadataRestrictionPreEventListener(Configuration config) throws HiveException { super(config); // By default. metadataRestrictions_per_fileFormat.put(IOConstants.AVROFILE.toLowerCase(), EnumSet.of(MetadataOperationType.ALTER_TABLE_MODIFY_COLUMNS)); // Scan conf for operation-restriction settings. Map<String, String> restrictionsFromConfig = config .getValByRegex(CONFIG_PREFIX_METADATA_RESTRICTIONS + "(.*)"); for (Map.Entry<String, String> entry : restrictionsFromConfig.entrySet()) { String fileFormat = entry.getKey().replaceAll(CONFIG_PREFIX_METADATA_RESTRICTIONS, "").toLowerCase(); EnumSet<MetadataOperationType> restriction = EnumSet.noneOf(MetadataOperationType.class); for (String opType : entry.getValue().split(",")) { try { LOG.info("Adding restriction on " + opType + " for file-format " + fileFormat); restriction.add(MetadataOperationType.valueOf(opType)); } catch (IllegalArgumentException exception) { LOG.error("Invalid MetadataOperationType: " + opType, exception); }//from ww w. j a v a2 s. c o m } metadataRestrictions_per_fileFormat.put(fileFormat, restriction); LOG.info("Final metadata restrictions on file-format:" + fileFormat + " are: " + metadataRestrictions_per_fileFormat.get(fileFormat)); } // Scan conf for permitted-serdes per fileformat. Map<String, String> permittedSerDeSettings = config.getValByRegex(CONFIG_PREFIX_PERMITTED_SERDES + "(.*)"); for (Map.Entry<String, String> entry : permittedSerDeSettings.entrySet()) { String fileFormat = entry.getKey().replaceAll(CONFIG_PREFIX_PERMITTED_SERDES, "").toLowerCase(); List<String> permittedSerDeList = Lists.newArrayList(); for (String serDeClass : entry.getValue().split(",")) { permittedSerDeList.add(serDeClass.trim()); } permitted_serDe_classes_per_fileFormat.put(fileFormat, permittedSerDeList); LOG.info("Final metadata restrictions on file-format:" + fileFormat + " are: " + permitted_serDe_classes_per_fileFormat.get(fileFormat)); } }
From source file:ru.codeinside.adm.parser.EmployeeFixtureParser.java
private Set<Role> parseRoles(Splitter groupSplitter, int lineNumber, String rolesString) { Set<String> roleNames = Sets.newTreeSet(groupSplitter.split(rolesString)); Set<Role> roles = EnumSet.noneOf(Role.class); for (String roleName : roleNames) { try {//w ww. j ava 2 s .c om roles.add(Role.valueOf(roleName)); } catch (Exception e) { throw new IllegalStateException( "??? ( ?:" + lineNumber + "):" + roleName); } } return roles; }
From source file:org.apache.hcatalog.security.HdfsAuthorizationProvider.java
protected EnumSet<FsAction> getFsActions(Privilege[] privs, Path path) { EnumSet<FsAction> actions = EnumSet.noneOf(FsAction.class); if (privs == null) { return actions; }/*from w w w . j av a 2 s .c om*/ for (Privilege priv : privs) { actions.add(getFsAction(priv, path)); } return actions; }
From source file:com.netflix.genie.core.services.impl.LocalJobRunnerUnitTests.java
/**
 * Test the submitJob method to check cluster/command info updated for jobs and exception if
 * workflow executor returns false.
 *
 * <p>task2 is stubbed to throw IOException, so submitJob is expected to fail; on the way out
 * the test asserts that the job directory and a non-empty init-failure marker file were
 * created, then rethrows so the expected GenieServerException check fires.
 *
 * @throws GenieException If there is any problem.
 * @throws IOException    when there is any IO problem
 */
@SuppressWarnings("unchecked")
@Test(expected = GenieServerException.class)
public void testSubmitJob() throws GenieException, IOException {
    // Only ACTIVE commands are considered valid for the job.
    final Set<CommandStatus> enumStatuses = EnumSet.noneOf(CommandStatus.class);
    enumStatuses.add(CommandStatus.ACTIVE);
    final String placeholder = UUID.randomUUID().toString();
    final String app1 = UUID.randomUUID().toString();
    final String app2 = UUID.randomUUID().toString();
    final String app3 = UUID.randomUUID().toString();
    // Applications deliberately listed out of request order (app3, app1, app2 below).
    final List<Application> applications = Lists.newArrayList(
            new Application.Builder(placeholder, placeholder, placeholder, ApplicationStatus.ACTIVE)
                    .withId(app3).build(),
            new Application.Builder(placeholder, placeholder, placeholder, ApplicationStatus.ACTIVE)
                    .withId(app1).build(),
            new Application.Builder(placeholder, placeholder, placeholder, ApplicationStatus.ACTIVE)
                    .withId(app2).build());
    final JobRequest jobRequest = new JobRequest.Builder(JOB_1_NAME, USER, VERSION, null, null, null)
            .withId(JOB_1_ID).withApplications(Lists.newArrayList(app3, app1, app2)).build();
    final Cluster cluster = new Cluster.Builder(CLUSTER_NAME, USER, VERSION, ClusterStatus.UP)
            .withId(CLUSTER_ID).build();
    final Command command = new Command.Builder(COMMAND_NAME, USER, VERSION, CommandStatus.ACTIVE, "foo", 5000L)
            .withId(COMMAND_ID).build();
    final int memory = 2438;
    // Force the workflow's second task to fail so submitJob takes the error path.
    Mockito.doThrow(new IOException("something bad")).when(this.task2).executeTask(Mockito.anyMap());
    try {
        this.jobSubmitterService.submitJob(jobRequest, cluster, command, applications, memory);
    } catch (Throwable t) {
        // Failure handling must still have created the job dir and written a non-empty
        // init-failure message file before the exception propagated.
        final File jobDirectory = new File(tmpFolder, JOB_1_ID);
        Assert.assertTrue(jobDirectory.exists());
        final File initFailureFile = new File(jobDirectory,
                JobConstants.GENIE_INIT_FAILURE_MESSAGE_FILE_NAME);
        Assert.assertTrue(initFailureFile.exists());
        Assert.assertTrue(initFailureFile.length() > 0);
        // Rethrow so @Test(expected = GenieServerException.class) still verifies the type.
        throw t;
    }
}
From source file:org.wso2.carbon.cassandra.server.CarbonCassandraAuthorizer.java
/**
 * Authorize the given user for performing actions on the given resource
 *
 * @param authenticatedUser <code>AuthenticatedUser</code> instance
 * @param resource Cassandra's resource such as cf, keyspace
 * @return A set of <code>Permission</code> the given user allowed for the given resource
 * @see #authorize(org.apache.cassandra.auth.AuthenticatedUser, org.apache.cassandra.auth.IResource)
 */
public Set<Permission> authorize(AuthenticatedUser authenticatedUser, IResource resource) {
    // Map the Cassandra resource onto a Carbon registry resource path.
    String resourcePath = null;
    if (resource instanceof DataResource) {
        resourcePath = resource.getName();
    } else {
        resourcePath = getResourcePath(resource);
    }
    // NOTE(review): File.separator is platform-dependent ('\\' on Windows); registry paths
    // presumably always use '/' — TODO confirm this listener only runs on platforms where
    // File.separator is '/'.
    resourcePath = AuthUtils.RESOURCE_PATH_PREFIX + File.separator + resourcePath;
    String rootPath = AuthUtils.RESOURCE_PATH_PREFIX + File.separator + DataResource.root().getName();
    // Anything outside the managed root gets no permissions at all.
    if (!resourcePath.startsWith(rootPath)) {
        return Permission.NONE;
    }
    try {
        // Resolve the user's tenant and switch the thread-local Carbon context into it;
        // the finally block below always unwinds this tenant flow.
        String user = authenticatedUser.getName();
        String domainName = MultitenantUtils.getTenantDomain(user);
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext cc = PrivilegedCarbonContext.getThreadLocalCarbonContext();
        if (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(domainName)) {
            cc.setTenantDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME);
            cc.setTenantId(MultitenantConstants.SUPER_TENANT_ID);
        } else {
            UserRealmService realmService = CassandraServerDataHolder.getInstance().getRealmService();
            int tenantID = realmService.getTenantManager().getTenantId(domainName);
            cc.setTenantDomain(domainName);
            cc.setTenantId(tenantID);
        }
        UserRealm userRealm = getRealmForTenant(domainName);
        AuthorizationManager authorizationManager = userRealm.getAuthorizationManager();
        String tenantLessUsername = MultitenantUtils.getTenantAwareUsername(user);
        // Probe every known Cassandra action; each authorized action becomes a Permission.
        EnumSet<Permission> permissions = EnumSet.noneOf(Permission.class);
        for (String action : Action.ALL_ACTIONS_ARRAY) {
            try {
                boolean isAuthorized = authorizationManager.isUserAuthorized(tenantLessUsername,
                        resourcePath, action);
                if (isAuthorized) {
                    permissions.add(AuthUtils.getCassandraPermission(action));
                }
            } catch (UserStoreException ex) {
                // A failing check for one action is logged and skipped; remaining actions
                // are still evaluated.
                log.error(ex.getMessage(), ex);
            }
        }
        if (permissions.isEmpty()) {
            return Permission.NONE;
        }
        return permissions;
    } catch (UserStoreException e) {
        // Tenant/realm resolution failed entirely: deny everything.
        log.error("Error during authorizing a user for a resource" + resourcePath, e);
        return Permission.NONE;
    } finally {
        PrivilegedCarbonContext.endTenantFlow();
    }
}
From source file:org.openscada.da.server.spring.tools.csv.CSVLoader.java
private void createItem(final ItemEntry entry, final String sourceName) { final EnumSet<IODirection> io = EnumSet.noneOf(IODirection.class); if (entry.isReadable()) { io.add(IODirection.INPUT);//from ww w . ja va 2 s . c om } if (entry.isWritable()) { io.add(IODirection.OUTPUT); } final Map<String, Variant> attributes = new HashMap<String, Variant>(); attributes.put("description", Variant.valueOf(entry.getDescription())); attributes.put("loader.csv.source", Variant.valueOf(sourceName)); attributes.put("initialValue", Variant.valueOf(entry.getInitialValue())); final CSVDataItem item = new CSVDataItem(this.hive, this.itemPrefix + entry.getId(), io); injectItem(item, attributes); // create and inject the controller item attributes.put("loader.csv.controllerFor", Variant.valueOf(this.itemPrefix + entry.getId())); final CSVControllerDataItem controllerItem = new CSVControllerDataItem(item, this.executor); Loader.injectItem(this.hive, this._controllerStorages, controllerItem, attributes); // set the initial value try { controllerItem.startWriteValue(entry.getInitialValue(), null).get(); } catch (final Throwable e) { logger.warn("Failed to set initial value: " + entry.getInitialValue(), e); } }
From source file:ch.cyberduck.core.Local.java
@Override public EnumSet<Type> getType() { final EnumSet<Type> set = EnumSet.noneOf(Type.class); if (this.isFile()) { set.add(Type.file);//from w w w.j a va 2s . c o m } if (this.isDirectory()) { set.add(Type.directory); } if (this.isVolume()) { set.add(Type.volume); } if (this.isSymbolicLink()) { set.add(Type.symboliclink); } return set; }