List of usage examples for java.util.Collections.singleton
public static <T> Set<T> singleton(T o)
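Collections.singleton wraps a single object in an immutable Set: the returned set always contains exactly one element, and mutating calls such as add or remove throw UnsupportedOperationException. Below is a minimal sketch of that behavior (the class name SingletonDemo and the sample values are illustrative only, not taken from the examples that follow):

import java.util.Collections;
import java.util.Set;

public class SingletonDemo {
    public static void main(String[] args) {
        // An immutable set holding exactly one element
        Set<String> admins = Collections.singleton("admin");
        System.out.println(admins.size());            // 1
        System.out.println(admins.contains("admin")); // true

        // The returned set cannot be modified
        try {
            admins.add("guest");
        } catch (UnsupportedOperationException expected) {
            System.out.println("singleton sets are immutable");
        }
    }
}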
From source file:com.manydesigns.portofino.shiro.AbstractPortofinoRealm.java
/**
 * {@inheritDoc}
 * <p>This default implementation handles built-in groups (all, anonymous, registered, etc.), delegating
 * to loadAuthorizationInfo method the actual loading of application-specific groups.</p>
 *
 * @return
 */
public AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principals) {
    Object principal = principals.getPrimaryPrincipal();
    Set<String> groups = new HashSet<String>();
    groups.add(SecurityLogic.getAllGroup(portofinoConfiguration));
    if (principal == null) {
        groups.add(SecurityLogic.getAnonymousGroup(portofinoConfiguration));
    } else if (principal instanceof Serializable) {
        groups.add(SecurityLogic.getRegisteredGroup(portofinoConfiguration));
        groups.addAll(loadAuthorizationInfo((Serializable) principal));
    } else {
        throw new AuthorizationException("Invalid principal: " + principal);
    }
    SimpleAuthorizationInfo info = new SimpleAuthorizationInfo(groups);
    if (groups.contains(SecurityLogic.getAdministratorsGroup(portofinoConfiguration))) {
        info.addStringPermission("*");
    }
    Permission permission = new GroupPermission(groups);
    // Expose the single permission as an immutable one-element set
    info.setObjectPermissions(Collections.singleton(permission));
    return info;
}
From source file:org.openmrs.module.casereport.web.rest.v1_0.controller.CaseReportControllerTest.java
@Test
public void shouldAddTheTriggerToAnExistingQueueItemForThePatient() throws Exception {
    long initialCount = getAllCount();
    CaseReport existingReport = service.getCaseReportByPatient(Context.getPatientService().getPatient(6));
    int initialTriggerCount = existingReport.getReportTriggers().size();
    assertEquals(initialCount, Util.getResultsSize(deserialize(handle(newGetRequest(getURI())))));
    SimpleObject reportQueueItem = new SimpleObject();
    reportQueueItem.add("patient", existingReport.getPatient().getUuid());
    SimpleObject trigger = new SimpleObject();
    trigger.add("name", "HIV Patient Died");
    reportQueueItem.add("reportTriggers", Collections.singleton(trigger));
    SimpleObject updatedReport = deserialize(handle(newPostRequest(getURI(), reportQueueItem)));
    assertEquals(initialCount, Util.getResultsSize(deserialize(handle(newGetRequest(getURI())))));
    assertEquals(++initialTriggerCount, ((List) Util.getByPath(updatedReport, "reportTriggers")).size());
}
From source file:com.mastfrog.parameters.processor.Processor.java
@Override
public Set<String> getSupportedAnnotationTypes() {
    return Collections.singleton("com.mastfrog.parameters.Params");
}
From source file:io.gravitee.management.repository.plugins.RepositoryPluginHandler.java
@Override
public void handle(Plugin plugin) {
    try {
        ClassLoader classloader = pluginClassLoaderFactory.getOrCreateClassLoader(plugin,
                this.getClass().getClassLoader());
        final Class<?> repositoryClass = classloader.loadClass(plugin.clazz());
        LOGGER.info("Register a new repository: {} [{}]", plugin.id(), plugin.clazz());
        Assert.isAssignable(Repository.class, repositoryClass);
        Repository repository = createInstance((Class<Repository>) repositoryClass);
        Collection<Scope> scopes = scopeByRepositoryType.getOrDefault(repository.type(), Collections.EMPTY_LIST);
        for (Scope scope : scopes) {
            if (!repositories.containsKey(scope)) {
                // Not yet loaded, let's mount the repository in application context
                try {
                    ApplicationContext applicationContext = pluginContextFactory
                            .create(new AnnotationBasedPluginContextConfigurer(plugin) {
                                @Override
                                public Set<Class<?>> configurations() {
                                    return Collections.singleton(repository.configuration(scope));
                                }
                            });
                    registerRepositoryDefinitions(repository, applicationContext);
                    repositories.put(scope, repository);
                } catch (Exception iae) {
                    LOGGER.error("Unexpected error while creating context for repository instance", iae);
                    pluginContextFactory.remove(plugin);
                }
            } else {
                LOGGER.warn("Repository scope {} already loaded by {}", scope, repositories.get(scope));
            }
        }
    } catch (Exception iae) {
        LOGGER.error("Unexpected error while create repository instance", iae);
    }
}
From source file:podd.util.WebappInitialisationUtilUnitTest.java
private void deleteFCObjects(String pattern) throws RepositoryObjectHandlingException {
    final Condition condition = new Condition("pid", ComparisonOperator.has, pattern);
    final CloseableIterator<ObjectFields> iterator = repoUtil.searchForObjectFields(20,
            Collections.singleton("pid"), condition);
    try {
        while (iterator.hasNext()) {
            repoUtil.ensureDeleted(iterator.next().getPid());
        }
    } finally {
        iterator.close();
    }
}
From source file:com.rapidminer.operator.preprocessing.normalization.Normalization.java
@Override
protected Collection<AttributeMetaData> modifyAttributeMetaData(ExampleSetMetaData emd, AttributeMetaData amd)
        throws UndefinedParameterError {
    if (amd.isNumerical()) {
        amd.setType(Ontology.REAL);
        int method = getParameterAsInt(PARAMETER_NORMALIZATION_METHOD);
        NormalizationMethod normalizationMethod = METHODS.get(method);
        return normalizationMethod.modifyAttributeMetaData(emd, amd, getExampleSetInputPort(), this);
    }
    return Collections.singleton(amd);
}
From source file:com.hortonworks.streamline.streams.runtime.splitjoin.SplitJoinTest.java
@Test
public void testSplitJoinProcessorsWithRuleHavingStreams() throws Exception {
    String[] outputStreams = { "stream-1", "stream-2", "stream-3" };
    SplitJoinRule splitRule = new SplitJoinRule("split", new SplitAction(), Sets.newHashSet(outputStreams));
    SplitJoinRule joinRule = new SplitJoinRule("join", new JoinAction(), Collections.singleton("output-stream"));
    runSplitJoin(splitRule, joinRule);
}
From source file:de.hybris.platform.commerceservices.setup.impl.DefaultSetupSyncJobServiceTest.java
@Test
public void testAssignDependantSyncJob() {
    setupSyncJobService.assignDependentSyncJobs(TEST_PRODUCT_CATALOG,
            Collections.singleton(TEST_CONTENT_CATALOG)); // perform assign
    final CatalogVersionSyncJobModel modelExample = new CatalogVersionSyncJobModel();
    modelExample.setCode(PRODUCT_CATALOG_SYNC_JOB);
    final CatalogVersionSyncJobModel productCatalogSyncJob = flexibleSearchService.getModelByExample(modelExample);
    assertNotNull(productCatalogSyncJob);
    assertNotNull(PRODUCT_CATALOG_SYNC_JOB + " should have a dependent sync jobs ",
            productCatalogSyncJob.getDependentSyncJobs());
    assertEquals(PRODUCT_CATALOG_SYNC_JOB + " should have one dependent sync jobs ", 1,
            productCatalogSyncJob.getDependentSyncJobs().size());
    assertEquals(CONTENT_CATALOG_SYNC_JOB,
            productCatalogSyncJob.getDependentSyncJobs().iterator().next().getCode());
}
From source file:com.wavemaker.tools.compiler.ProjectCompiler.java
public String compile(final Project project) {
    try {
        copyRuntimeServiceFiles(project.getWebAppRootFolder(), project.getClassOutputFolder());
        JavaCompiler compiler = new WaveMakerJavaCompiler();
        StandardJavaFileManager standardFileManager = compiler.getStandardFileManager(null, null, null);
        standardFileManager.setLocation(StandardLocation.CLASS_PATH, getStandardClassPath());
        ResourceJavaFileManager projectFileManager = new ResourceJavaFileManager(standardFileManager);
        projectFileManager.setLocation(StandardLocation.SOURCE_PATH, project.getSourceFolders());
        // The single output folder is passed as a one-element set
        projectFileManager.setLocation(StandardLocation.CLASS_OUTPUT,
                Collections.singleton(project.getClassOutputFolder()));
        projectFileManager.setLocation(StandardLocation.CLASS_PATH, getClasspath(project));
        copyResources(project);
        // List only source files (Kind.SOURCE) on the source path
        Iterable<JavaFileObject> compilationUnits = projectFileManager.list(StandardLocation.SOURCE_PATH, "",
                Collections.singleton(Kind.SOURCE), true);
        StringWriter compilerOutput = new StringWriter();
        CompilationTask compilationTask = compiler.getTask(compilerOutput, projectFileManager, null,
                getCompilerOptions(project), null, compilationUnits);
        ServiceDefProcessor serviceDefProcessor = configure(new ServiceDefProcessor(), projectFileManager);
        ServiceConfigurationProcessor serviceConfigurationProcessor = configure(
                new ServiceConfigurationProcessor(), projectFileManager);
        compilationTask.setProcessors(Arrays.asList(serviceConfigurationProcessor, serviceDefProcessor));
        if (!compilationTask.call()) {
            throw new WMRuntimeException("Compile failed with output:\n\n" + compilerOutput.toString());
        }
        return compilerOutput.toString();
    } catch (IOException e) {
        throw new WMRuntimeException("Unable to compile " + project.getProjectName(), e);
    }
}
From source file:com.yimidida.shards.ShardedConfiguration.java
public ShardedSqlSessionFactory buildShardedSessionFactory() {
    Map<SqlSessionFactory, Set<ShardId>> sqlSessionFactories = new HashMap<SqlSessionFactory, Set<ShardId>>();
    for (ShardConfiguration config : shardConfigs) {
        // populatePrototypeWithVariableProperties(config);
        // get the shardId from the shard-specific config
        Integer shardId = config.getShardId();
        if (shardId == null) {
            final String msg = "Attempt to build a ShardedSessionFactory using a "
                    + "ShardConfiguration that has a null shard id.";
            log.fatal(msg);
            throw new NullPointerException(msg);
        }
        Set<ShardId> virtualShardIds;
        if (virtualShardToShardMap.isEmpty()) {
            // simple case, virtual and physical are the same
            virtualShardIds = Collections.singleton(new ShardId(shardId));
        } else {
            // get the set of shard ids that are mapped to the physical shard
            // described by this config
            virtualShardIds = shardToVirtualShardIdMap.get(shardId);
        }
        // sqlSessionFactories.put(buildSessionFactory(), virtualShardIds);
        sqlSessionFactories.put(config.getSqlSessionFactory(), virtualShardIds);
    }
    // final boolean doFullCrossShardRelationshipChecking = PropertiesHelper.getBoolean(
    //         ShardedEnvironment.CHECK_ALL_ASSOCIATED_OBJECTS_FOR_DIFFERENT_SHARDS,
    //         prototypeConfiguration.getProperties(), true);
    return new ShardedSqlSessionFactoryImpl(sqlSessionFactories, shardStrategyFactory, idGenerator);
}