List of usage examples for java.util.concurrent.CopyOnWriteArraySet
public CopyOnWriteArraySet()
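Before the examples taken from real projects below, here is a minimal, self-contained sketch of the no-argument constructor in isolation. The TemperatureAlerts class and Listener interface are invented purely for illustration and do not come from any of the source files on this page. The sketch shows the pattern most of the examples share: build the set once with new CopyOnWriteArraySet<>(), register and unregister listeners from any thread, and iterate over a snapshot so notification never fails with ConcurrentModificationException.

import java.util.concurrent.CopyOnWriteArraySet;

// Hypothetical listener registry; names are illustrative only.
public class TemperatureAlerts {

    interface Listener {
        void onReading(double celsius);
    }

    // Created with the no-arg constructor; safe to publish to many threads.
    private final CopyOnWriteArraySet<Listener> listeners = new CopyOnWriteArraySet<>();

    public void addListener(Listener l) {
        listeners.add(l); // duplicates are ignored, as with any Set
    }

    public void removeListener(Listener l) {
        listeners.remove(l);
    }

    public void publish(double celsius) {
        // Iteration works on an immutable snapshot, so concurrent add/remove
        // during notification cannot throw ConcurrentModificationException.
        for (Listener l : listeners) {
            l.onReading(celsius);
        }
    }

    public static void main(String[] args) {
        TemperatureAlerts alerts = new TemperatureAlerts();
        alerts.addListener(c -> System.out.println("reading: " + c));
        alerts.publish(21.5);
    }
}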
From source file:org.rifidi.edge.configuration.DefaultConfigurationImpl.java
/**
 * Constructor.
 *
 * @param context
 * @param serviceID
 * @param factoryID
 * @param attributes
 * @param sessionDTOs
 */
public DefaultConfigurationImpl(final String serviceID, final String factoryID, final AttributeList attributes,
        final NotifierService notifierService, final JMXService jmxService, Set<SessionDTO> sessionDTOs) {
    this.notifierService = notifierService;
    this.nameToProperty = new HashMap<String, Property>();
    this.nameToOperation = new HashMap<String, Operation>();
    this.factoryID = factoryID;
    this.serviceID = serviceID;
    this.attributes = (AttributeList) attributes.clone();
    disableAutoStart = false;
    for (Object o : this.attributes) {
        Attribute att = (Attribute) o;
        String name = att.getName();
        if (name.equals("DisableAutoStart")) {
            // This is the 'override' autostart, so we will default to true
            // unless it is explicitly set to false
            if (att.getValue() != null && ((String) att.getValue()).equalsIgnoreCase("true")) {
                disableAutoStart = true;
            }
            break;
        }
    }
    this.listeners = new CopyOnWriteArraySet<AttributesChangedListener>();
    this.target = new AtomicReference<RifidiService>(null);
    this.jmxService = jmxService;
    this.sessionDTOs = new ConcurrentHashMap<SessionDTO, String>();
    if (sessionDTOs != null) {
        for (SessionDTO dto : sessionDTOs) {
            this.sessionDTOs.put(dto, "-1");
        }
    }
}
From source file:com.espertech.esper.core.service.StatementLifecycleSvcImpl.java
/**
 * Ctor.
 * @param epServiceProvider is the engine instance to hand to statement-aware listeners
 * @param services is engine services
 */
public StatementLifecycleSvcImpl(EPServiceProvider epServiceProvider, EPServicesContext services) {
    this.services = services;
    this.epServiceProvider = (EPServiceProviderSPI) epServiceProvider;

    // lock for starting and stopping statements
    this.eventProcessingRWLock = services.getEventProcessingRWLock();

    this.stmtIdToDescMap = new HashMap<String, EPStatementDesc>();
    this.stmtNameToStmtMap = new HashMap<String, EPStatement>();
    this.stmtNameToIdMap = new LinkedHashMap<String, String>();

    observers = new CopyOnWriteArraySet<StatementLifecycleObserver>();
}
From source file:org.rifidi.edge.core.configuration.services.ConfigurationServiceImpl.java
/**
 * Load the configuration. Not thread safe.
 *
 * @return
 */
private ConcurrentHashMap<String, Set<DefaultConfigurationImpl>> loadConfig() {
    ConcurrentHashMap<String, Set<DefaultConfigurationImpl>> ret = new ConcurrentHashMap<String, Set<DefaultConfigurationImpl>>();
    ConfigurationStore store;
    try {
        store = (ConfigurationStore) jaxbContext.createUnmarshaller().unmarshal(persistanceResource.getFile());
    } catch (IOException e) {
        logger.error(e);
        return ret;
    } catch (JAXBException e) {
        logger.error(e);
        return ret;
    }
    if (store.getServices() != null) {
        for (ServiceStore service : store.getServices()) {
            if (ret.get(service.getFactoryID()) == null) {
                ret.put(service.getFactoryID(), new CopyOnWriteArraySet<DefaultConfigurationImpl>());
            }
            AttributeList attributes = new AttributeList();
            // get all properties
            for (String key : service.getAttributes().keySet()) {
                // factoryid is already processed
                if (Constants.FACTORYID.equals(key)) {
                    continue;
                }
                // type is already processed
                if (Constants.FACTORY_TYPE.equals(key)) {
                    continue;
                }
                attributes.add(new Attribute(key, service.getAttributes().get(key)));
            }
            if (!checkName(service.getServiceID())) {
                continue;
            }
            ret.get(service.getFactoryID()).add(createAndRegisterConfiguration(service.getServiceID(),
                    service.getFactoryID(), attributes, service.getSessionDTOs()));
            serviceNames.add(service.getServiceID());
        }
    }
    return ret;
}
From source file:org.rifidi.edge.configuration.ConfigurationServiceImpl.java
/**
 * Load the configuration. Not thread safe.
 *
 * @return
 */
private ConcurrentHashMap<String, Set<DefaultConfigurationImpl>> loadConfig() {
    ConcurrentHashMap<String, Set<DefaultConfigurationImpl>> ret = new ConcurrentHashMap<String, Set<DefaultConfigurationImpl>>();
    ConfigurationStore store;
    try {
        store = (ConfigurationStore) jaxbContext.createUnmarshaller().unmarshal(persistanceResource.getFile());
    } catch (IOException e) {
        logger.error("Error loading config/rifidi.xml, no configuration loaded");
        return ret;
    } catch (JAXBException e) {
        logger.error("Exception loading config/rifidi.xml or file not found, no configuration loaded");
        return ret;
    }
    if (store.getServices() != null) {
        for (ServiceStore service : store.getServices()) {
            if (ret.get(service.getFactoryID()) == null) {
                ret.put(service.getFactoryID(), new CopyOnWriteArraySet<DefaultConfigurationImpl>());
            }
            AttributeList attributes = new AttributeList();
            // get all properties
            for (String key : service.getAttributes().keySet()) {
                // factoryid is already processed
                if (Constants.FACTORYID.equals(key)) {
                    continue;
                }
                // type is already processed
                if (Constants.FACTORY_TYPE.equals(key)) {
                    continue;
                }
                attributes.add(new Attribute(key, service.getAttributes().get(key)));
            }
            if (!checkName(service.getServiceID())) {
                continue;
            }
            ret.get(service.getFactoryID()).add(createAndRegisterConfiguration(service.getServiceID(),
                    service.getFactoryID(), attributes, service.getSessionDTOs()));
            serviceNames.add(service.getServiceID());
        }
    }
    return ret;
}
From source file:org.red5.server.scope.Scope.java
/**
 * Creates a scope
 */
@ConstructorProperties(value = { "" })
public Scope() {
    super(null, ScopeType.UNDEFINED, null, false);
    children = new ConcurrentScopeSet();
    clients = new CopyOnWriteArraySet<IClient>();
}
From source file:org.red5.server.scope.Scope.java
/**
 * Creates a scope using a Builder
 *
 * @param builder
 *            Builder
 */
@ConstructorProperties({ "builder" })
public Scope(Builder builder) {
    super(builder.parent, builder.type, builder.name, builder.persistent);
    children = new ConcurrentScopeSet();
    clients = new CopyOnWriteArraySet<IClient>();
}
From source file:org.springframework.ide.eclipse.beans.core.internal.model.BeansJavaConfig.java
public BeansJavaConfig(IBeansProject project, IType configClass, String configClassName, Type type) {
    super(project, BeansConfigFactory.JAVA_CONFIG_TYPE + configClassName, type);

    this.configClass = configClass;
    this.configClassName = configClassName;

    modificationTimestamp = IResource.NULL_STAMP;

    if (this.configClass != null) {
        IResource resource = this.configClass.getResource();
        if (resource != null && resource instanceof IFile) {
            file = (IFile) resource;
        } else {
            IClassFile classFile = configClass.getClassFile();
            PackageFragment pkg = (PackageFragment) configClass.getPackageFragment();
            IPackageFragmentRoot root = (IPackageFragmentRoot) pkg.getParent();
            if (root.isArchive()) {
                IPath zipPath = root.getPath();
                String classFileName = classFile.getElementName();
                String path = Util.concatWith(pkg.names, classFileName, '/');
                file = new ExternalFile(zipPath.toFile(), path, project.getProject());
            }
        }
    }

    if (file == null || !file.exists()) {
        modificationTimestamp = IResource.NULL_STAMP;
        String msg = "Beans Java config class '" + configClassName + "' not accessible";
        problems = new CopyOnWriteArraySet<ValidationProblem>();
        problems.add(new ValidationProblem(IMarker.SEVERITY_ERROR, msg, file, -1));
    } else {
        modificationTimestamp = file.getModificationStamp();
        try {
            file.setSessionProperty(IBeansConfig.CONFIG_FILE_TAG, IBeansConfig.CONFIG_FILE_TAG_VALUE);
        } catch (CoreException e) {
            BeansCorePlugin.log(new Status(IStatus.WARNING, BeansCorePlugin.PLUGIN_ID,
                    String.format("Error occured while tagging config file '%s'", file.getFullPath()), e));
        }
    }
}
From source file:org.elasticsearch.client.sniff.SnifferTests.java
/**
 * Test multiple sniffing rounds by mocking the {@link Scheduler} as well as the {@link HostsSniffer}.
 * Simulates the ordinary behaviour of {@link Sniffer} when sniffing on failure is not enabled.
 * The {@link CountingHostsSniffer} doesn't make any network connection but may throw exception or return no hosts, which makes
 * it possible to verify that errors are properly handled and don't affect subsequent runs and their scheduling.
 * The {@link Scheduler} implementation submits rather than scheduling tasks, meaning that it doesn't respect the requested sniff
 * delays while allowing to assert that the requested delays for each requested run and the following one are the expected values.
 */
public void testOrdinarySniffRounds() throws Exception {
    final long sniffInterval = randomLongBetween(1, Long.MAX_VALUE);
    long sniffAfterFailureDelay = randomLongBetween(1, Long.MAX_VALUE);
    RestClient restClient = mock(RestClient.class);
    CountingHostsSniffer hostsSniffer = new CountingHostsSniffer();
    final int iters = randomIntBetween(30, 100);
    final Set<Future<?>> futures = new CopyOnWriteArraySet<>();
    final CountDownLatch completionLatch = new CountDownLatch(1);
    final AtomicInteger runs = new AtomicInteger(iters);
    final ExecutorService executor = Executors.newSingleThreadExecutor();
    final AtomicReference<Future<?>> lastFuture = new AtomicReference<>();
    final AtomicReference<Sniffer.Task> lastTask = new AtomicReference<>();
    Scheduler scheduler = new Scheduler() {
        @Override
        public Future<?> schedule(Sniffer.Task task, long delayMillis) {
            assertEquals(sniffInterval, task.nextTaskDelay);
            int numberOfRuns = runs.getAndDecrement();
            if (numberOfRuns == iters) {
                // the first call is to schedule the first sniff round from the Sniffer constructor, with delay 0
                assertEquals(0L, delayMillis);
                assertEquals(sniffInterval, task.nextTaskDelay);
            } else {
                // all of the subsequent times "schedule" is called with delay set to the configured sniff interval
                assertEquals(sniffInterval, delayMillis);
                assertEquals(sniffInterval, task.nextTaskDelay);
                if (numberOfRuns == 0) {
                    completionLatch.countDown();
                    return null;
                }
            }
            // we submit rather than scheduling to make the test quick and not depend on time
            Future<?> future = executor.submit(task);
            futures.add(future);
            if (numberOfRuns == 1) {
                lastFuture.set(future);
                lastTask.set(task);
            }
            return future;
        }

        @Override
        public void shutdown() {
            // the executor is closed externally, shutdown is tested separately
        }
    };
    try {
        new Sniffer(restClient, hostsSniffer, scheduler, sniffInterval, sniffAfterFailureDelay);
        assertTrue("timeout waiting for sniffing rounds to be completed",
                completionLatch.await(1000, TimeUnit.MILLISECONDS));
        assertEquals(iters, futures.size());
        // the last future is the only one that may not be completed yet, as the count down happens
        // while scheduling the next round which is still part of the execution of the runnable itself.
        assertTrue(lastTask.get().hasStarted());
        lastFuture.get().get();
        for (Future<?> future : futures) {
            assertTrue(future.isDone());
            future.get();
        }
    } finally {
        executor.shutdown();
        assertTrue(executor.awaitTermination(1000, TimeUnit.MILLISECONDS));
    }
    int totalRuns = hostsSniffer.runs.get();
    assertEquals(iters, totalRuns);
    int setHostsRuns = totalRuns - hostsSniffer.failures.get() - hostsSniffer.emptyList.get();
    verify(restClient, times(setHostsRuns)).setHosts(Matchers.<HttpHost>anyVararg());
    verifyNoMoreInteractions(restClient);
}
From source file:com.agileapes.couteau.maven.mojo.AbstractPluginExecutor.java
/**
 * This method will fetch project resource objects
 */
private void fetchProjectResources() {
    final Set<Class> classes = getClasses();
    final Set<Resource> resources = getResources();
    projectResources = new CopyOnWriteArraySet<ProjectResource>();
    projectResources.addAll(with(classes).transform(new Transformer<Class, ProjectResource>() {
        @Override
        public ProjectResource map(Class input) {
            return new ProjectResource(input);
        }
    }).list());
    projectResources.addAll(with(resources).transform(new Transformer<Resource, ProjectResource>() {
        @Override
        public ProjectResource map(Resource input) {
            return new ProjectResource(input);
        }
    }).list());
}
From source file:org.solmix.runtime.support.spring.SpringConfigurer.java
/**
 * {@inheritDoc}
 *
 * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)
 */
@Override
public void setApplicationContext(ApplicationContext ac) throws BeansException {
    appContexts = new CopyOnWriteArraySet<ApplicationContext>();
    addApplicationContext(ac);
    this.beanFactory = ac.getAutowireCapableBeanFactory();
    super.setBeanFactory(this.beanFactory);
}