List of usage examples for the java.util.concurrent.CopyOnWriteArrayList constructor:
public CopyOnWriteArrayList()
From source file:de.ingrid.iplug.csw.dsc.cache.impl.AbstractUpdateStrategy.java
/**
 * Fetch all records that satisfy the given filter using GetRecords requests,
 * return their ids and optionally put the records into the cache.
 *
 * @note This method guarantees to query the server without a constraint if the
 *       provided filter set is empty (a null filter is inserted as placeholder).
 *
 * @param client The CSWClient to use
 * @param elementSetName The ElementSetName of the records to fetch
 * @param filterSet The filter set used to select the records
 * @param doCache Determines whether to cache the record or not
 * @return A list of ids of the fetched records
 * @throws Exception if the initial GetRecords request for a filter fails
 */
protected List<String> fetchRecords(CSWClient client, ElementSetName elementSetName, Set<Document> filterSet,
        boolean doCache) throws Exception {
    CSWFactory factory = client.getFactory();
    Log log = this.getLog();

    // if the filter set is empty, we add a null at least.
    // this causes execution of the iteration below, but
    // will not add a constraint definition to the request
    if (filterSet == null)
        filterSet = new HashSet<Document>();
    if (filterSet.size() == 0)
        filterSet.add(null);

    // variables for complete fetch process
    // int numTotal = 0;
    List<String> fetchedRecordIds = new CopyOnWriteArrayList<String>();

    // iterate over all filters
    int filterIndex = 1;
    for (Document filter : filterSet) {
        if (log.isDebugEnabled())
            log.debug("Processing filter " + filterIndex + ": "
                    + StringUtils.nodeToString(filter).replace("\n", "") + ".");

        // variables for current fetch process (current filter)
        int numRecordsTotal = 0;
        int numRecordsFetched = 0;
        List<String> currentFetchedRecordIds = new ArrayList<String>();

        // create the query for this filter; paging starts at position 1
        CSWQuery query = factory.createQuery();
        query.setConstraint(filter);
        query.setResultType(ResultType.RESULTS);
        query.setElementSetName(elementSetName);
        query.setMaxRecords(this.recordsPerCall);
        query.setStartPosition(1);

        // do first request to learn the total number of matching records
        CSWSearchResult result = client.getRecords(query);
        numRecordsFetched += result.getNumberOfRecords();
        numRecordsTotal = result.getNumberOfRecordsTotal();
        if (log.isInfoEnabled())
            log.info(numRecordsTotal + " record(s) from filter " + filterIndex + ":");

        if (numRecordsTotal > 0) {
            if (log.isInfoEnabled()) {
                log.info("\nPARAMETERS OF FETCHING PROCESS:" + "\nrecords per chunk (request): " + recordsPerCall
                        + "\ngeneral pause between requesting next chunk (msec): " + requestPause
                        + "\nnum retries per chunk: " + cswConfig.numRetriesPerRequest
                        + "\npause between retries (msec): " + cswConfig.timeBetweenRetries
                        + "\nmax number of lost chunks: " + cswConfig.maxNumSkippedRequests);
            }

            // process the records of the first chunk
            currentFetchedRecordIds.addAll(processResult(result, doCache));

            int numSkippedRequests = 0;
            String logLostRecordChunks = "";
            int numLostRecords = 0;

            // page through the remaining chunks until all records are fetched
            while (numRecordsFetched < numRecordsTotal) {
                if (cswConfig.maxNumSkippedRequests > -1) {
                    // fetching should end when a maximum number of failures (in a row) is reached.
                    if (numSkippedRequests > cswConfig.maxNumSkippedRequests) {
                        log.error("Problems fetching records. Total number of skipped requests reached ("
                                + cswConfig.maxNumSkippedRequests
                                + " requests without results). We end fetching process for this filter.");
                        statusProvider.addState("ERROR_FETCH", "Error during fetch, since more than "
                                + cswConfig.maxNumSkippedRequests + " records have been skipped.",
                                Classification.ERROR);
                        break;
                    }
                }

                // generic pause between requests, set via spring
                Thread.sleep(this.requestPause);

                String logCurrRecordChunk = "";
                try {
                    // prepare next request
                    // Just for safety: get number of last fetched records from last result,
                    // if we have a result and records.
                    int numLastFetch = query.getMaxRecords();
                    if (result != null && (result.getNumberOfRecords() > 0)) {
                        numLastFetch = result.getNumberOfRecords();
                    }
                    numRecordsFetched += numLastFetch;
                    statusProvider.addState("FETCH",
                            "Fetching record " + (numRecordsFetched - numLastFetch + 1) + "-" + numRecordsFetched
                                    + " / " + numRecordsTotal + " from " + client.getFactory().getServiceUrl());
                    query.setStartPosition(query.getStartPosition() + numLastFetch);

                    // for logging below
                    logCurrRecordChunk = "" + query.getStartPosition() + " - "
                            + (query.getStartPosition() + query.getMaxRecords());

                    // do next request, if problems retry with increasing pause in between
                    int numRetries = 0;
                    while (true) {
                        try {
                            result = null;
                            result = client.getRecords(query);
                            break;
                        } catch (Exception e) {
                            if (numRetries == cswConfig.numRetriesPerRequest) {
                                log.error("Retried " + numRetries + " times ! We skip records "
                                        + logCurrRecordChunk, e);
                                break;
                            }
                            numRetries++;
                            // back off linearly: retry number times configured base pause
                            int timeBetweenRetry = numRetries * cswConfig.timeBetweenRetries;
                            log.error("Error fetching records " + logCurrRecordChunk + ". We retry " + numRetries
                                    + ". time after " + timeBetweenRetry + " msec !", e);
                            Thread.sleep(timeBetweenRetry);
                        }
                    }

                    // process
                    if (result == null || result.getNumberOfRecords() == 0) {
                        // no result from this query, we count the failures to check whether
                        // fetching process should be ended !
                        numSkippedRequests++;
                        numLostRecords += query.getMaxRecords();
                        logLostRecordChunks += logCurrRecordChunk + "\n";
                    } else {
                        currentFetchedRecordIds.addAll(processResult(result, doCache));
                    }
                } catch (Exception e) {
                    // processing failure of a single chunk is logged but does not abort the loop
                    statusProvider.addState("ERROR_FETCH_PROCESS",
                            "Error during processing record: " + logCurrRecordChunk, Classification.ERROR);
                    log.error("Error processing records " + logCurrRecordChunk);
                    log.error(ExceptionUtils.getStackTrace(e));
                }
            }

            if (numLostRecords > 0) {
                statusProvider.addState("ERROR_FETCH_PROCESS",
                        "Error during fetching of record: " + logLostRecordChunks, Classification.ERROR);
                log.error("\nWe had failed GetRecords requests !!!" + "\nThe following " + numLostRecords
                        + " records were NOT fetched and are \"lost\":" + "\n" + logLostRecordChunks);
            }
        }

        // collect record ids
        fetchedRecordIds.addAll(currentFetchedRecordIds);
        // numTotal += currentFetchedRecordIds.size();
        filterIndex++;
    }
    return fetchedRecordIds;
}
From source file:core.com.qiniu.AmazonWebServiceClient.java
/**
 * Constructs a new AmazonWebServiceClient object using the specified
 * configuration, HTTP client and request metric collector.
 *
 * @param clientConfiguration The client configuration for this client.
 */
@Deprecated
protected AmazonWebServiceClient(ClientConfiguration clientConfiguration, HttpClient httpClient,
        RequestMetricCollector requestMetricCollector) {
    // an empty, thread-safe handler list; handlers may be registered later
    requestHandler2s = new CopyOnWriteArrayList<RequestHandler2>();
    this.clientConfiguration = clientConfiguration;
    client = new AmazonHttpClient(clientConfiguration, httpClient, requestMetricCollector);
}
From source file:com.neophob.sematrix.core.glue.Collector.java
/**
 * Instantiates a new collector with empty visual/output mappings and an
 * initialized shuffler-select controller.
 */
private Collector() {
    // thread-safe lists: visuals and output mappings are read concurrently
    this.allVisuals = new CopyOnWriteArrayList<Visual>();
    this.ioMapping = new CopyOnWriteArrayList<OutputMapping>();

    this.nrOfScreens = 0;
    this.selectedPreset = 0;
    this.initialized = false;
    this.presets = PresetSettings.initializePresetSettings(NR_OF_PRESET_SLOTS);

    this.pixelControllerShufflerSelect = new PixelControllerShufflerSelect();
    this.pixelControllerShufflerSelect.initAll();
}
From source file:org.wso2.carbon.event.input.adaptor.websocket.local.WebsocketLocalEventAdaptorType.java
/**
 * Registers a listener for a websocket topic and returns a generated subscription id.
 * The listener registry is a three-level map: tenant id -> adaptor name -> topic ->
 * list of listeners. Each level is created lazily and race-safely via putIfAbsent:
 * if another thread won the race, the winner's map/list is re-read and used instead.
 */
@Override
public String subscribe(InputEventAdaptorMessageConfiguration inputEventAdaptorMessageConfiguration,
        InputEventAdaptorListener inputEventAdaptorListener,
        InputEventAdaptorConfiguration inputEventAdaptorConfiguration, AxisConfiguration axisConfiguration) {
    String subscriptionId = UUID.randomUUID().toString();
    int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
    String topic = inputEventAdaptorMessageConfiguration.getInputMessageProperties()
            .get(WebsocketLocalEventAdaptorConstants.ADAPTOR_MESSAGE_TOPIC);

    // level 1: per-tenant map, created lazily
    ConcurrentHashMap<String, ConcurrentHashMap<String, CopyOnWriteArrayList<WebsocketAdaptorListener>>> tenantSpecificListenerMap = inputEventAdaptorListenerMap
            .get(tenantId);
    if (tenantSpecificListenerMap == null) {
        tenantSpecificListenerMap = new ConcurrentHashMap<String, ConcurrentHashMap<String, CopyOnWriteArrayList<WebsocketAdaptorListener>>>();
        // non-null return means another thread inserted first; use its map
        if (null != inputEventAdaptorListenerMap.putIfAbsent(tenantId, tenantSpecificListenerMap)) {
            tenantSpecificListenerMap = inputEventAdaptorListenerMap.get(tenantId);
        }
    }

    // level 2: per-adaptor map, created lazily
    ConcurrentHashMap<String, CopyOnWriteArrayList<WebsocketAdaptorListener>> adaptorSpecificListeners = tenantSpecificListenerMap
            .get(inputEventAdaptorConfiguration.getName());
    if (adaptorSpecificListeners == null) {
        adaptorSpecificListeners = new ConcurrentHashMap<String, CopyOnWriteArrayList<WebsocketAdaptorListener>>();
        if (null != tenantSpecificListenerMap.putIfAbsent(inputEventAdaptorConfiguration.getName(),
                adaptorSpecificListeners)) {
            adaptorSpecificListeners = tenantSpecificListenerMap.get(inputEventAdaptorConfiguration.getName());
        }
    }

    // level 3: per-topic listener list, created lazily
    CopyOnWriteArrayList<WebsocketAdaptorListener> topicSpecificListeners = adaptorSpecificListeners.get(topic);
    if (topicSpecificListeners == null) {
        topicSpecificListeners = new CopyOnWriteArrayList<WebsocketAdaptorListener>();
        if (null != adaptorSpecificListeners.putIfAbsent(topic, topicSpecificListeners)) {
            topicSpecificListeners = adaptorSpecificListeners.get(topic);
        }
    }

    topicSpecificListeners
            .add(new WebsocketAdaptorListener(subscriptionId, inputEventAdaptorListener, tenantId));
    return subscriptionId;
}
From source file:com.googlecode.android_scripting.facade.ui.UiFacade.java
/**
 * Creates the UI facade, wiring it to the hosting service's task executor and
 * event facade, with empty context/options menus.
 */
public UiFacade(FacadeManager manager) {
    super(manager);
    mService = manager.getService();
    mEventFacade = manager.getReceiver(EventFacade.class);
    mTaskQueue = ((BaseApplication) mService.getApplication()).getTaskExecutor();

    // thread-safe menu item lists; menus can be mutated while being rendered
    mContextMenuItems = new CopyOnWriteArrayList<UiMenuItem>();
    mOptionsMenuItems = new CopyOnWriteArrayList<UiMenuItem>();

    // no menu changes pending yet
    mMenuUpdated = new AtomicBoolean(false);
}
From source file:org.apache.cayenne.access.DataDomain.java
private void init(String name) { this.filters = new CopyOnWriteArrayList<DataChannelFilter>(); this.nodesByDataMapName = new ConcurrentHashMap<String, DataNode>(); this.nodes = new ConcurrentHashMap<String, DataNode>(); // properties are read-only, so no need for concurrent map, or any specific map // for that matter this.properties = Collections.EMPTY_MAP; setName(name);//from w ww . jav a 2s. com }
From source file:org.red5.server.stream.NoSyncServerStream.java
/**
 * Constructs a new server stream in the uninitialized state with an empty,
 * thread-safe playlist and the default playlist controller.
 */
public NoSyncServerStream() {
    state = State.UNINIT;
    items = new CopyOnWriteArrayList<IPlayItem>();
    defaultController = new SimplePlaylistController();
}
From source file:org.springframework.web.servlet.support.DefaultFlashMapManager.java
/** * Retrieve all FlashMap instances from the current HTTP session. * If {@code allowCreate} is "true" and no flash maps exist yet, a new list * is created and stored as a session attribute. * @param request the current request/*from w ww.j ava2 s .c o m*/ * @param allowCreate whether to create the session if necessary * @return a List to add FlashMap instances to or {@code null} * assuming {@code allowCreate} is "false". */ @SuppressWarnings("unchecked") protected List<FlashMap> retrieveFlashMaps(HttpServletRequest request, boolean allowCreate) { HttpSession session = request.getSession(allowCreate); if (session == null) { return null; } List<FlashMap> allFlashMaps = (List<FlashMap>) session.getAttribute(FLASH_MAPS_SESSION_ATTRIBUTE); if (allFlashMaps == null && allowCreate) { synchronized (this) { allFlashMaps = (List<FlashMap>) session.getAttribute(FLASH_MAPS_SESSION_ATTRIBUTE); if (allFlashMaps == null) { allFlashMaps = new CopyOnWriteArrayList<FlashMap>(); session.setAttribute(FLASH_MAPS_SESSION_ATTRIBUTE, allFlashMaps); } } } return allFlashMaps; }
From source file:voldemort.store.cachestore.impl.CacheStore.java
/**
 * Creates a cache store rooted at the given path, opening channel file 0 and,
 * if a second index file exists on disk, rolling over to channel file 1.
 * Only curIndex values 0 and 1 are supported.
 */
public CacheStore(String path, BlockSize blockSize, int curIndex, String filename, boolean delayWrite, int mode) {
    this.delayWrite = delayWrite;
    if (delayWrite) {
        // queue feeding the background write-back thread
        delayWriteQueue = new LinkedBlockingQueue<DelayBlock>(QUEUE_SIZE);
    }
    this.mode = mode;
    checkPath(path);
    this.path = path;
    this.blockSize = blockSize;
    this.curIndex = curIndex;
    this.overflow = new AtomicLong(0);
    this.list = new CopyOnWriteArrayList<ChannelStore>();
    this.namePrefix = filename;
    // map capacity is derived from the existing store file on disk
    this.map = new ConcurrentHashMap(findMapSize(getPath(path) + filename));
    //check to see if more file 0 / 1
    if (curIndex <= 1) {
        list.add(0, open(getPath(path) + filename + curIndex, map, false, 0, delayWrite, mode));
        deleted += list.get(0).getDeleted();
        if (ChannelStore.isChannelExist(getPath(path) + filename + 1 + ".ndx")) {
            // a second index file exists: close channel 0 and continue on channel 1
            list.get(0).close(delayWriteQueue);
            list.add(1, open(getPath(path) + filename + 1, map, false, 1, delayWrite, mode));
            this.curIndex = 1;
            deleted += list.get(1).getDeleted();
        }
    } else {
        throw new StoreException("not support for index " + curIndex + " > 1 ");
    }
    //if ( delayWrite ) new WriteBackThread().start();
    serverState = State.Active;
    //init();
}
From source file:com.clavain.munin.MuninNode.java
/**
 * Will load the plugin list from munin-node.
 *
 * Opens a socket to the node (directly or via the configured relay host),
 * optionally verifies the muninmxauth password, requests the plugin list and
 * then each plugin's "config" output, parsing it into MuninPlugin/MuninGraph
 * objects stored via setLoadedPlugins().
 *
 * @return true on success, false on auth failure or any I/O/parsing error
 */
public boolean loadPlugins() {
    setLoadedPlugins(new CopyOnWriteArrayList<MuninPlugin>());
    String l_lastProceeded = "";
    try {
        Socket cs = new Socket();
        cs.setKeepAlive(false);
        cs.setSoLinger(true, 0);
        cs.setReuseAddress(true);
        cs.setSoTimeout(com.clavain.muninmxcd.socketTimeout);
        // connect via relay host if configured ("unset" means direct connect)
        if (!str_via.equals("unset")) {
            cs.connect(new InetSocketAddress(this.getStr_via(), this.getPort()),
                    com.clavain.muninmxcd.socketTimeout);
        } else {
            cs.connect(new InetSocketAddress(this.getHostname(), this.getPort()),
                    com.clavain.muninmxcd.socketTimeout);
        }
        // register socket for the external socket-killer watchdog, if enabled
        if (p.getProperty("kill.sockets").equals("true")) {
            SocketCheck sc = new SocketCheck(cs, getUnixtime());
            sc.setHostname(this.getHostname());
            com.clavain.muninmxcd.v_sockets.add(sc);
        }
        PrintStream os = new PrintStream(cs.getOutputStream());
        BufferedReader in = new BufferedReader(new InputStreamReader(cs.getInputStream()));
        String s = in.readLine();
        if (s != null) {
            // Set version
            os.println("version");
            Thread.sleep(150);
            s = in.readLine();
            String version = s.substring(s.indexOf(":") + 1, s.length()).trim();
            this.str_muninVersion = version;
            if (authpw != null) {
                // if authpw is set, verify
                if (!authpw.trim().equals("")) {
                    os.println("config muninmxauth");
                    Thread.sleep(150);
                    String apw = in.readLine();
                    s = in.readLine();
                    if (!apw.trim().equals(this.getAuthpw())) {
                        logger.error("Invalid muninmxauth password for host: " + this.getHostname());
                        cs.close();
                        return false;
                    }
                }
            }
            // check anyway if muninmxauth plugin is present
            else {
                os.println("config muninmxauth");
                Thread.sleep(100);
                String apw = in.readLine();
                if (!apw.trim().equals("# Unknown service")) {
                    logger.error(
                            "no auth password given, but muninmxauth plugin present on " + this.getHostname());
                    cs.close();
                    return false;
                }
                s = in.readLine();
            }
            // get list of available plugins
            if (str_via.equals("unset")) {
                os.println("list");
            } else {
                os.println("list " + str_hostname);
            }
            Thread.sleep(250);
            s = in.readLine();
            // if response is empty and host is not via, do a list $hostname
            if (s.trim().equals("") && str_via.equals("unset")) {
                logger.info("Plugin Response Empty on " + this.getHostname()
                        + " trying to load with list $hostname");
                os.println("list " + this.getHostname());
                Thread.sleep(250);
                s = in.readLine();
            }
            String l_tmp;
            // plugin names are returned space-separated on one line
            StringTokenizer l_st = new StringTokenizer(s, " ");
            // create plugin
            MuninPlugin l_mp = new MuninPlugin();
            // negative support
            ArrayList<String> tmp_negatives = new ArrayList<String>();
            while (l_st.hasMoreTokens()) {
                String l_strPlugin = l_st.nextToken();
                // check for track_pkg and muninmx essentials
                if (l_strPlugin.equals("muninmx_trackpkg")) {
                    this.setTrack_pkg(true);
                    continue;
                }
                // got essentials?
                if (l_strPlugin.equals("muninmx_essentials")) {
                    this.setEssentials(true);
                    continue;
                }
                if (isPluginIgnored(l_strPlugin.toUpperCase())) {
                    continue;
                }
                l_mp.setPluginName(l_strPlugin);
                os.println("config " + l_strPlugin);
                // create graphs for plugin
                int l_iGraphsFound = 0;
                int l_iTmp = 0;
                MuninGraph l_mg = new MuninGraph();
                l_mg.setQueryInterval(this.getQueryInterval());
                // read the config block line by line; "." terminates it
                while ((l_tmp = in.readLine()) != null) {
                    if (l_tmp.startsWith(".")) {
                        break;
                    }
                    // collect graphs only for plugin
                    String l_strName;
                    String l_strType;
                    String l_strValue;
                    // per-datasource lines look like "<name>.<type> <value>";
                    // everything else (graph_* headers, noise) is handled below
                    if (!l_tmp.contains("graph_") && !l_tmp.trim().equals("") && !l_tmp.contains("host_name")
                            && !l_tmp.contains("multigraph") && !l_tmp.trim().equals("graph no")
                            && !l_tmp.trim().equals("# Bad exit")
                            && !l_tmp.trim().contains("info Currently our peer")
                            && !l_tmp.trim().startsWith("#")
                            && !l_tmp.trim().contains("Bonding interface errors")) {
                        l_lastProceeded = l_tmp;
                        l_strName = l_tmp.substring(0, l_tmp.indexOf("."));
                        l_strType = l_tmp.substring(l_tmp.indexOf(".") + 1, l_tmp.indexOf(" "));
                        l_strValue = l_tmp.substring(l_tmp.indexOf(" ") + 1, l_tmp.length());
                        //System.err.println("Name: " + l_strName + " Type: " + l_strType + " Value: " + l_strValue);
                        if (l_strType.equals("label")) {
                            // a new "label" starts the next graph; flush the previous one
                            l_iTmp++;
                            if (l_iTmp > 1) {
                                l_mp.addGraph(l_mg);
                                l_mg = new MuninGraph();
                                l_mg.setQueryInterval(this.getQueryInterval());
                            }
                            l_mg.setGraphName(l_strName);
                            l_mg.setGraphLabel(l_strValue);
                        } else if (l_strType.equals("draw")) {
                            l_mg.setGraphDraw(l_strValue);
                        } else if (l_strType.equals("type")) {
                            l_mg.setGraphType(l_strValue);
                        } else if (l_strType.equals("info")) {
                            l_mg.setGraphInfo(l_strValue);
                        } else if (l_strType.equals("negative")) {
                            // add to temporary negative list to set negatives later
                            tmp_negatives.add(l_strValue);
                        }
                        //System.out.println(l_strName);
                        //System.out.println(l_strType);
                        //System.out.println(l_strValue);
                    } else {
                        // set plugin title
                        if (l_tmp.contains("graph_title")) {
                            l_mp.setPluginTitle(l_tmp.substring(12, l_tmp.length()));
                        }
                        // set plugin info, if any
                        if (l_tmp.contains("graph_info")) {
                            l_mp.setPluginInfo(l_tmp.substring(11, l_tmp.length()));
                        }
                        // set graph category
                        if (l_tmp.contains("graph_category")) {
                            l_mp.setPluginCategory(l_tmp.substring(15, l_tmp.length()));
                        }
                        // set graph vlabel
                        if (l_tmp.contains("graph_vlabel")) {
                            l_mp.setPluginLabel(l_tmp.substring(13, l_tmp.length()));
                        }
                        // set plugin title
                        if (l_tmp.contains("graph_mxdraw")) {
                            l_mp.setStr_LineMode(l_tmp.substring(13, l_tmp.length()));
                        }
                    }
                }
                // add to pluginlist
                l_mp.addGraph(l_mg);
                // mark graphs listed as "negative" datasources
                Iterator it = l_mp.getGraphs().iterator();
                while (it.hasNext()) {
                    MuninGraph l_mpNg = (MuninGraph) it.next();
                    if (tmp_negatives.contains(l_mpNg.getGraphName())) {
                        l_mpNg.setNegative(true);
                    }
                }
                // add plugin if it got valid graphs and add nodeid (req. for alerts)
                if (l_mp.getGraphs().size() > 0) {
                    l_mp.set_NodeId(this.getNode_id());
                    getLoadedPlugins().add(l_mp);
                }
                // flush temporary negatives
                tmp_negatives.clear();
                l_mp = null;
                l_mp = new MuninPlugin();
                //String l_strGraphTitle = s.substring(s.indexOf("graph_title") + 11,s.length());
                //System.out.println(" - " + l_strGraphTitle);
            }
            cs.close();
            in.close();
            os.close();
            last_plugin_load = getUnixtime();
            //System.out.println(s);
        } else {
            // no banner line received: treat as connectivity failure
            cs.close();
            in.close();
            os.close();
            logger.warn("Error loading plugins on " + str_hostname + " (" + this.getNode_id()
                    + "). Check connectivity or munin-node");
        }
        /* for (MuninPlugin l_mn : getLoadedPlugins()) { i_GraphCount = i_GraphCount + l_mn.getGraphs().size(); logger.debug(l_mn.getGraphs().size() + " graphs found for plugin: " + l_mn.getPluginName().toUpperCase() + " on node: " + this.getNodename()); }*/
    } catch (Exception ex) {
        logger.error("Error loading plugins on " + str_hostname + " (" + this.getNode_id() + ") : "
                + ex.getMessage());
        ex.printStackTrace();
        return false;
    }
    return true;
}