List of usage examples for java.util.Collections.synchronizedSet
public static <T> Set<T> synchronizedSet(Set<T> s)
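Before the project-specific examples below, here is a minimal, self-contained sketch of the typical usage pattern (the class and variable names are illustrative, not taken from any of the examples): individual operations such as add() and contains() are made thread-safe by the wrapper, but iteration is not atomic and, as the JDK documentation requires, must be synchronized manually on the returned set.

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class SynchronizedSetSketch {
    public static void main(String[] args) {
        // Wrap a plain HashSet; each individual call on "names" synchronizes on the wrapper.
        Set<String> names = Collections.synchronizedSet(new HashSet<String>());
        names.add("alice");
        names.add("bob");

        // Iteration is NOT atomic: hold the wrapper's monitor for the whole loop.
        synchronized (names) {
            for (String name : names) {
                System.out.println(name);
            }
        }
    }
}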
From source file:org.marketcetera.photon.strategy.engine.AbstractStrategyEngineConnection.java
@Override
public void refresh() throws Exception {
    Set<ModuleURN> deployedModules = Sets.newHashSet(getDeployed());
    final Set<DeployedStrategy> removedStrategies = Collections
            .synchronizedSet(Sets.<DeployedStrategy>newHashSet());
    /*
     * Iterate visible strategies, refresh ones that should be there and
     * mark for removal those that should not.
     */
    for (DeployedStrategy deployedStrategy : getEngine().getDeployedStrategies()) {
        if (deployedModules.contains(deployedStrategy.getUrn())) {
            /*
             * This may theoretically fail if the strategy was deleted since
             * the call to getDeployed() above. The user would just have to
             * refresh again to recover.
             */
            internalRefresh(deployedStrategy);
            deployedModules.remove(deployedStrategy.getUrn());
        } else {
            removedStrategies.add(deployedStrategy);
        }
    }
    /*
     * Remove the ones that should not be there.
     */
    ExceptionUtils.launderedGet(getGUIExecutor().submit(new Runnable() {
        @Override
        public void run() {
            /*
             * Cannot call getDeployedStrategies().removeAll(removedStrategies)
             * due to Eclipse bug. See note on
             * StrategyEngine#getDeployedStrategies().
             */
            EList<DeployedStrategy> deployedStrategies = getEngine().getDeployedStrategies();
            for (DeployedStrategy toRemove : removedStrategies) {
                deployedStrategies.remove(toRemove);
            }
        }
    }));
    /*
     * Now add new ones.
     */
    for (ModuleURN missing : deployedModules) {
        internalDeploy(null, missing);
    }
}
From source file:libepg.epg.section.descriptor.servicedescriptor.SERVICE_ID.java
private SERVICE_ID(String serviceType, Integer serviceId, Integer... serviceIds) {
    this.serviceType = serviceType;
    if ((this.serviceType == null) || (this.serviceType.equals(""))) {
        throw new IllegalArgumentException("Service type must not be null or empty.");
    }
    List<Integer> t = new ArrayList<>();
    if (serviceId != null) {
        t.add(serviceId);
    } else {
        throw new NullPointerException("Service ID must not be null.");
    }
    if (serviceIds != null) {
        t.addAll(Arrays.asList(serviceIds));
    }
    Range<Integer> r = Range.between(0x0, 0xFF);
    for (Integer i : t) {
        if (!r.contains(i)) {
            MessageFormat msg = new MessageFormat("Service ID is out of range. ID={0}");
            Object[] parameters = { Integer.toHexString(i) };
            throw new IllegalArgumentException(msg.format(parameters));
        }
    }
    Set<Integer> temp = Collections.synchronizedSet(new HashSet<Integer>());
    temp.addAll(t);
    this.serviceIds = Collections.unmodifiableSet(temp);
}
From source file:libepg.epg.section.descriptor.DESCRIPTOR_TAG.java
private DESCRIPTOR_TAG(String tagName, Class<? extends Descriptor> dataType, Integer tag, Integer... tags) {
    this.tagName = tagName;
    if ((this.tagName == null) || ("".equals(this.tagName))) {
        throw new IllegalArgumentException("Tag name must not be null or empty.");
    }
    List<Integer> t = new ArrayList<>();
    if (tag != null) {
        t.add(tag);
    } else {
        throw new NullPointerException("Tag must not be null.");
    }
    if (tags != null) {
        t.addAll(Arrays.asList(tags));
    }
    Range<Integer> r = Range.between(0x0, 0xFF);
    for (Integer i : t) {
        if (!r.contains(i)) {
            MessageFormat msg = new MessageFormat("Tag is out of range. tag={0}");
            Object[] parameters = { Integer.toHexString(i) };
            throw new IllegalArgumentException(msg.format(parameters));
        }
    }
    Set<Integer> temp = Collections.synchronizedSet(new HashSet<Integer>());
    temp.addAll(t);
    this.tags = Collections.unmodifiableSet(temp);
    this.dataType = dataType;
}
From source file:com.rivbird.guichecker.bitmapfun.ImageCache.java
/**
 * Initialize the cache, providing all parameters.
 *
 * @param cacheParams The cache parameters to initialize the cache
 */
private void init(ImageCacheParams cacheParams) {
    mCacheParams = cacheParams;

    // BEGIN_INCLUDE(init_memory_cache)
    // Set up memory cache
    if (mCacheParams.memoryCacheEnabled) {
        if (BuildConfig.DEBUG) {
            Log.d(TAG, "Memory cache created (size = " + mCacheParams.memCacheSize + ")");
        }

        // If we're running on Honeycomb or newer, create a set of reusable
        // bitmaps that can be populated into the inBitmap field of
        // BitmapFactory.Options. Note that the set is of SoftReferences which
        // will actually not be very effective due to the garbage collector
        // being aggressive clearing Soft/WeakReferences. A better approach
        // would be to use strongly referenced bitmaps, however this would
        // require some balancing of memory usage between this set and the
        // bitmap LruCache. It would also require knowledge of the expected
        // size of the bitmaps. From Honeycomb to JellyBean the size would need
        // to be precise, from KitKat onward the size would just need to be the
        // upper bound (due to changes in how inBitmap can re-use bitmaps).
        if (Utils.hasHoneycomb()) {
            mReusableBitmaps = Collections.synchronizedSet(new HashSet<SoftReference<Bitmap>>());
        }

        mMemoryCache = new LruCache<String, BitmapDrawable>(mCacheParams.memCacheSize) {

            /**
             * Notify the removed entry that it is no longer being cached
             */
            @Override
            protected void entryRemoved(boolean evicted, String key, BitmapDrawable oldValue,
                    BitmapDrawable newValue) {
                if (RecyclingBitmapDrawable.class.isInstance(oldValue)) {
                    // The removed entry is a recycling drawable, so notify it
                    // that it has been removed from the memory cache
                    ((RecyclingBitmapDrawable) oldValue).setIsCached(false);
                } else {
                    // The removed entry is a standard BitmapDrawable
                    if (Utils.hasHoneycomb()) {
                        // We're running on Honeycomb or later, so add the bitmap
                        // to a SoftReference set for possible use with inBitmap later
                        mReusableBitmaps.add(new SoftReference<Bitmap>(oldValue.getBitmap()));
                    }
                }
            }

            /**
             * Measure item size in kilobytes rather than units, which is more
             * practical for a bitmap cache
             */
            @Override
            protected int sizeOf(String key, BitmapDrawable value) {
                final int bitmapSize = getBitmapSize(value) / 1024;
                return bitmapSize == 0 ? 1 : bitmapSize;
            }
        };
    }
    // END_INCLUDE(init_memory_cache)

    // By default the disk cache is not initialized here as it should be
    // initialized on a separate thread due to disk access.
    if (cacheParams.initDiskCacheOnCreate) {
        // Set up disk cache
        initDiskCache();
    }
}
From source file:org.apache.spark.network.RpcIntegrationSuite.java
private RpcResult sendRpcWithStream(String... streams) throws Exception {
    TransportClient client = clientFactory.createClient(TestUtils.getLocalHost(), server.getPort());
    final Semaphore sem = new Semaphore(0);
    RpcResult res = new RpcResult();
    res.successMessages = Collections.synchronizedSet(new HashSet<String>());
    res.errorMessages = Collections.synchronizedSet(new HashSet<String>());

    for (String stream : streams) {
        int idx = stream.lastIndexOf('/');
        ManagedBuffer meta = new NioManagedBuffer(JavaUtils.stringToBytes(stream));
        String streamName = (idx == -1) ? stream : stream.substring(idx + 1);
        ManagedBuffer data = testData.openStream(conf, streamName);
        client.uploadStream(meta, data, new RpcStreamCallback(stream, res, sem));
    }

    if (!sem.tryAcquire(streams.length, 5, TimeUnit.SECONDS)) {
        fail("Timeout getting response from the server");
    }

    streamCallbacks.values().forEach(streamCallback -> {
        try {
            streamCallback.verify();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    });
    client.close();
    return res;
}
From source file:org.pentaho.big.data.impl.vfs.hdfs.nc.NamedClusterProvider.java
private void addCacheEntry(FileName rootName, FileSystem fs) throws FileSystemException {
    addFileSystem(getFileSystemKey(rootName.toString(), fs.getFileSystemOptions()), fs);
    String embeddedMetastoreKey = getEmbeddedMetastoreKey(fs.getFileSystemOptions());
    Set<FileSystem> fsSet = cacheEntries.get(embeddedMetastoreKey);
    if (fsSet == null) {
        fsSet = Collections.synchronizedSet(new HashSet<FileSystem>());
        cacheEntries.put(embeddedMetastoreKey, fsSet);
    }
    fsSet.add(fs);
}
From source file:libepg.epg.section.TABLE_ID.java
private TABLE_ID(String tableName, MAX_SECTION_LENGTH maxSectionLength, Class<? extends SectionBody> dataType,
        Integer tableID, Integer... tableIDs) {
    this.tableName = tableName;
    if ((this.tableName == null) || ("".equals(this.tableName))) {
        throw new IllegalArgumentException("Table name must not be null or empty.");
    }
    List<Integer> t = new ArrayList<>();
    if (tableID != null) {
        t.add(tableID);
    } else {
        throw new NullPointerException("Table ID must not be null.");
    }
    if (tableIDs != null) {
        t.addAll(Arrays.asList(tableIDs));
    }
    Range<Integer> r = Range.between(0x0, 0xFF);
    for (Integer i : t) {
        if (!r.contains(i)) {
            MessageFormat msg = new MessageFormat("Table ID is out of range. ID={0}");
            Object[] parameters = { Integer.toHexString(i) };
            throw new IllegalArgumentException(msg.format(parameters));
        }
    }
    Set<Integer> temp = Collections.synchronizedSet(new HashSet<Integer>());
    temp.addAll(t);
    this.tableIDs = Collections.unmodifiableSet(temp);
    this.dataType = dataType;
    this.maxSectionLength = maxSectionLength;
}
From source file:eu.itesla_project.modules.validation.OverloadValidationTool.java
@Override
public void run(CommandLine line) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    String rulesDbName = line.hasOption("rules-db-name") ? line.getOptionValue("rules-db-name")
            : OfflineConfig.DEFAULT_RULES_DB_NAME;
    RulesDbClientFactory rulesDbClientFactory = config.getRulesDbClientFactoryClass().newInstance();
    String caseFormat = line.getOptionValue("case-format");
    Path caseDir = Paths.get(line.getOptionValue("case-dir"));
    String workflowId = line.getOptionValue("workflow");
    Path outputDir = Paths.get(line.getOptionValue("output-dir"));
    double purityThreshold = line.hasOption("purity-threshold")
            ? Double.parseDouble(line.getOptionValue("purity-threshold"))
            : DEFAULT_PURITY_THRESHOLD;
    ContingenciesAndActionsDatabaseClient contingencyDb = config.getContingencyDbClientFactoryClass()
            .newInstance().create();
    LoadFlowFactory loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();
    try (ComputationManager computationManager = new LocalComputationManager();
            RulesDbClient rulesDb = rulesDbClientFactory.create(rulesDbName)) {
        Importer importer = Importers.getImporter(caseFormat, computationManager);
        if (importer == null) {
            throw new RuntimeException("Format " + caseFormat + " not supported");
        }
        Map<String, Map<String, OverloadStatus>> statusPerContingencyPerCase = Collections
                .synchronizedMap(new TreeMap<>());
        Set<String> contingencyIds = Collections.synchronizedSet(new LinkedHashSet<>());
        Importers.importAll(caseDir, importer, true, network -> {
            try {
                List<Contingency> contingencies = contingencyDb.getContingencies(network);
                contingencyIds
                        .addAll(contingencies.stream().map(Contingency::getId).collect(Collectors.toList()));
                System.out.println("running security analysis...");
                SecurityAnalysis securityAnalysis = new SecurityAnalysisImpl(network, computationManager,
                        loadFlowFactory);
                SecurityAnalysisResult securityAnalysisResult = securityAnalysis
                        .runAsync(network1 -> contingencies).join();
                System.out.println("checking rules...");
                Map<String, Map<SecurityIndexType, SecurityRuleCheckStatus>> offlineRuleCheckPerContingency = SecurityRuleUtil
                        .checkRules(network, rulesDb, workflowId, RuleAttributeSet.MONTE_CARLO,
                                EnumSet.of(SecurityIndexType.TSO_OVERLOAD), null, purityThreshold);
                Map<String, OverloadStatus> statusPerContingency = new HashMap<>();
                for (PostContingencyResult postContingencyResult : securityAnalysisResult
                        .getPostContingencyResults()) {
                    Contingency contingency = postContingencyResult.getContingency();
                    boolean lfOk = postContingencyResult.isComputationOk()
                            && postContingencyResult.getLimitViolations().isEmpty();
                    Map<SecurityIndexType, SecurityRuleCheckStatus> offlineRuleCheck = offlineRuleCheckPerContingency
                            .get(contingency.getId());
                    boolean offlineRuleOk = offlineRuleCheck != null && offlineRuleCheck
                            .get(SecurityIndexType.TSO_OVERLOAD) == SecurityRuleCheckStatus.OK;
                    statusPerContingency.put(contingency.getId(), new OverloadStatus(offlineRuleOk, lfOk));
                }
                statusPerContingencyPerCase.put(network.getId(), statusPerContingency);
            } catch (Exception e) {
                LOGGER.error(e.toString(), e);
            }
        }, dataSource -> System.out.println("loading case " + dataSource.getBaseName() + " ..."));
        writeCsv(contingencyIds, statusPerContingencyPerCase, outputDir);
    }
}
From source file:org.opensilk.cast.manager.MediaCastManager.java
private MediaCastManager(Context context, String applicationId, String dataNamespace) {
    super(context, applicationId);
    LOGD(TAG, "MediaCastManager is instantiated");
    mVideoConsumers = Collections.synchronizedSet(new HashSet<IMediaCastConsumer>());
    mDataNamespace = dataNamespace;
    if (null != mDataNamespace) {
        Utils.saveStringToPreference(mContext, PREFS_KEY_CAST_CUSTOM_DATA_NAMESPACE, dataNamespace);
    }
}
From source file:com.silverwrist.dynamo.app.ApplicationContainer.java
/**
 * Creates the application container.
 *
 * @param config_file A reference to the Dynamo XML configuration file.
 * @param substrate A reference to the {@link com.silverwrist.dynamo.iface.ApplicationSubstrate ApplicationSubstrate}
 *                  object, which provides certain services and object references to the
 *                  <CODE>ApplicationContainer</CODE>. (Usually, this will be specific to the Dynamo application
 *                  type, for example, Web application.)
 * @exception com.silverwrist.dynamo.except.ConfigException If there is an error in the configuration which will
 *                  not allow Dynamo to be initialized.
 */
public ApplicationContainer(File config_file, ApplicationSubstrate substrate) throws ConfigException {
    if (logger.isDebugEnabled())
        logger.debug("new ApplicationContainer - config file is " + config_file.getAbsolutePath());
    substrate.initialize();
    m_substrate = substrate;
    m_refs = 1;
    m_app_sm = new ApplicationServiceManager();
    m_pss = new PropertySerializationSupport();
    m_known_sessions = Collections.synchronizedSet(new HashSet());
    XMLLoader loader = XMLLoader.get();

    // Initialize the init services and runtime services with defaults.
    m_app_sm.addInitService(ResourceProvider.class, (ResourceProvider) this);
    m_app_sm.addInitService(ResourceProviderManager.class, (ResourceProviderManager) this);
    m_app_sm.addInitService(RendererRegistration.class, (RendererRegistration) this);
    m_app_sm.addInitService(ObjectProvider.class, (ObjectProvider) this);
    m_app_sm.addInitService(EventListenerRegistration.class, (EventListenerRegistration) this);
    m_app_sm.addInitService(OutputObjectFilterRegistration.class, (OutputObjectFilterRegistration) this);
    m_app_sm.addInitService(PropertySerializer.class, (PropertySerializer) m_pss);
    m_app_sm.addInitService(PropertySerializerRegistration.class, (PropertySerializerRegistration) m_pss);
    m_app_sm.addInitService(PostDynamicUpdate.class, (PostDynamicUpdate) this);
    m_app_sm.addInitService(RequestPreprocessorRegistration.class, (RequestPreprocessorRegistration) this);
    m_app_sm.addInitService(ExceptionTranslatorRegistration.class, (ExceptionTranslatorRegistration) this);
    m_app_sm.addInitService(FinalStageRegistration.class, (FinalStageRegistration) this);
    m_app_sm.addRuntimeService(ResourceProvider.class, (ResourceProvider) this);
    m_app_sm.addRuntimeService(ObjectProvider.class, (ObjectProvider) this);
    m_app_sm.addRuntimeService(EventListenerRegistration.class, (EventListenerRegistration) this);
    m_app_sm.addRuntimeService(PropertySerializer.class, (PropertySerializer) m_pss);
    m_app_sm.addRuntimeService(PostDynamicUpdate.class, (PostDynamicUpdate) this);
    m_app_sm.addRuntimeService(RenderImmediate.class, (RenderImmediate) this);
    m_app_sm.addOutputService(ResourceProvider.class, (ResourceProvider) this);
    m_app_sm.addOutputService(ObjectProvider.class, (ObjectProvider) this);
    m_app_sm.addOutputService(QueryRenderer.class, (QueryRenderer) this);

    // Create initialization services interface object.
    ServiceProvider init_svcs = m_app_sm.createInitServices();
    try { // load the configuration file
        Document config_doc = loader.load(config_file, false);
        Element root = loader.getRootElement(config_doc, "configuration");

        // get the <control/> element and process it
        Element control = loader.getSubElement(root, "control");
        processControlSection(control);
        m_shutdown_list.addFirst(m_background);
        m_app_sm.addInitService(BackgroundScheduler.class, m_background);
        m_app_sm.addRuntimeService(BackgroundScheduler.class, m_background);

        // initialize some default renderers
        m_shutdown_list.addFirst(registerRenderer(DataItem.class, new DataItemRenderer()));
        m_shutdown_list.addFirst(registerRenderer(java.util.List.class, new ListRenderer()));

        // initialize the scripting engine
        m_script_ctrl = new ScriptController();
        m_script_ctrl.initialize(control, init_svcs);
        m_shutdown_list.addFirst(m_script_ctrl);

        // add the scripting engine's services so they can be used by external components
        m_app_sm.addInitService(ScriptEngineConfig.class, m_script_ctrl);
        m_app_sm.addRuntimeService(ScriptExecute.class, m_script_ctrl);

        // get all database connection configurations
        List l = loader.getMatchingSubElements(root, "dbconnection");
        Iterator it = l.iterator();
        Element elt;
        while (it.hasNext()) { // get each element in turn
            elt = (Element) (it.next());
            DBConnectionPool pool = (DBConnectionPool) createNamedObject(elt, init_svcs, DBConnectionPool.class,
                    "no.notDBPool");
            m_connections.put(pool.getName(), pool);
        } // end while

        m_app_sm.addInitService(HookServiceProviders.class, m_app_sm);

        // Sort the "object" definitions by priority order to determine in what order to instantiate them.
        l = loader.getMatchingSubElements(root, "object");
        if (!(l.isEmpty())) { // copy elements into binary heap
            it = l.iterator();
            BinaryHeap prioheap = new BinaryHeap(l.size());
            while (it.hasNext()) { // sort elements by priority
                elt = (Element) (it.next());
                prioheap.insert(new HeapContents(elt));
            } // end while
            while (prioheap.size() > 0) { // now remove and instantiate the elements
                HeapContents hc = (HeapContents) (prioheap.remove());
                NamedObject nobj = createNamedObject(hc.getElement(), init_svcs, null, null);
                m_objects.put(nobj.getName(), nobj);
            } // end while
        } // end if

        // Find the application definition and initialize the application.
        elt = loader.getSubElement(root, "application");
        m_application = (Application) createNamedObject(elt, init_svcs, Application.class, "no.notApp");
    } // end try
    catch (IOException e) { // unable to read config file - send back a ConfigException
        logger.fatal("ApplicationContainer config read failed", e);
        ConfigException ce = new ConfigException(ApplicationContainer.class, "ApplicationContainerMessages",
                "creation.ioError", e);
        ce.setParameter(0, config_file.getAbsolutePath());
        throw ce;
    } // end catch
    catch (XMLLoadException e) { // XML loader failed - send back a ConfigException
        logger.fatal("ApplicationContainer config load failed", e);
        throw new ConfigException(e);
    } // end catch

    // Create the "server identity" string.
    StringBuffer buf = new StringBuffer();
    String app_id = m_application.getIdentity();
    if (app_id != null)
        buf.append(app_id).append(' ');
    buf.append("Dynamo/").append(DynamoVersion.VERSION);
    m_identity = buf.toString();
    logger.info("Server: " + m_identity);

    try { // Call the "final stage" initialization hooks.
        logger.info(m_final_stage_inits.size() + " final-stage init hook(s) to call");
        while (!(m_final_stage_inits.isEmpty())) { // call the hooks in FIFO order
            FinalStageInitHook hook = (FinalStageInitHook) (m_final_stage_inits.removeFirst());
            hook.initialize(m_application, init_svcs);
        } // end while
        m_final_stage_inits = null; // done with this list
    } // end try
    catch (DynamoException e) { // final-stage initialization failed - post an error
        ConfigException ce = new ConfigException(ApplicationContainer.class, "ApplicationContainerMessages",
                "finalinit.fail", e);
        ce.setParameter(0, e.getMessage());
        throw ce;
    } // end catch

    // Fire the "application initialized" events.
    ApplicationListener[] listeners = (ApplicationListener[]) (m_application_listeners
            .toArray(APP_LISTENER_TEMPLATE));
    if (listeners.length > 0) { // call the event handlers...
        if (logger.isDebugEnabled())
            logger.debug("ApplicationContainer: " + listeners.length + " init handler(s) to call");
        ApplicationEventRequest req = new ApplicationEventRequest(m_app_sm.createRuntimeServices(), true);
        ApplicationEvent evt = new ApplicationEvent(req, m_application);
        for (int i = 0; i < listeners.length; i++)
            listeners[i].applicationInitialized(evt);
    } // end if

    if (logger.isDebugEnabled())
        logger.debug("ApplicationContainer initialization done");
}