List of usage examples for java.util.Collections.synchronizedList
public static <T> List<T> synchronizedList(List<T> list)
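The method wraps the supplied list in a view whose individual calls (add, get, size, ...) are synchronized on the returned list; iteration, however, must still be guarded manually by the caller, as the JDK javadoc requires. A minimal sketch of that pattern (the class and variable names below are illustrative, not taken from any of the projects listed here):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class SynchronizedListSketch {
        public static void main(String[] args) {
            // Wrap a plain ArrayList in a synchronized view; all access should go through "names".
            List<String> names = Collections.synchronizedList(new ArrayList<String>());

            names.add("alice"); // individual calls are synchronized on the list itself
            names.add("bob");

            // Iteration is NOT atomic: hold the list's own monitor while iterating.
            synchronized (names) {
                for (String name : names) {
                    System.out.println(name);
                }
            }
        }
    }

The examples below show the same wrapping call used for shared state in production code and for result collection in multi-threaded tests.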
From source file:com.medlog.webservice.vo.PatientVO.java
    /**
     * @param medicationList the medicationList to set
     */
    public void setMedicationList(List<MedicationVO> medicationList) {
        this.medicationList = Collections.synchronizedList(medicationList);
    }
From source file:jp.co.acroquest.endosnipe.perfdoctor.rule.RuleManager.java
    /**
     * Initializes the rule manager's internal state.
     */
    private void initialize() {
        this.ruleSetMap_ = new HashMap<String, RuleSetDef>();
        this.ruleSetConfigMap_ = loadConfigurations();
        this.activeRuleSetId_ = loadActiveRuleSetId();
        this.dirtyRuleSetIds_ = new HashSet<String>();
        this.removeList_ = Collections.synchronizedList(new ArrayList<RuleSetConfig>());
    }
From source file:org.mqnaas.core.impl.BindingManagement.java
    public BindingManagement() {
        boundCapabilities = Collections.synchronizedList(new ArrayList<CapabilityInstance>());
        applications = Collections.synchronizedList(new ArrayList<ApplicationNode>());
        knownCapabilities = new HashSet<Class<? extends ICapability>>();
        knownApplications = new HashSet<Class<? extends IApplication>>();
    }
From source file:org.piwik.sdk.dispatcher.DispatcherTest.java
    @Test
    public void testForceDispatch() throws Exception {
        List<Packet> dryRunData = Collections.synchronizedList(new ArrayList<Packet>());
        mDispatcher.setDryRunTarget(dryRunData);
        mDispatcher.setDispatchInterval(-1L);

        final int threadCount = 10;
        final int queryCount = 10;
        final List<String> createdEvents = Collections.synchronizedList(new ArrayList<String>());
        launchTestThreads(mApiUrl, mDispatcher, threadCount, queryCount, createdEvents);
        Thread.sleep(500);
        assertEquals(threadCount * queryCount, createdEvents.size());
        assertEquals(0, dryRunData.size());

        mDispatcher.forceDispatch();
        checkForMIAs(threadCount * queryCount, createdEvents, dryRunData);
    }
From source file:org.apache.tajo.worker.TaskImpl.java
    public void initPlan() throws IOException {
        plan = LogicalNodeDeserializer.deserialize(queryContext, context.getEvalContext(), request.getPlan());
        updateDescsForScanNodes(NodeType.SCAN);
        updateDescsForScanNodes(NodeType.PARTITIONS_SCAN);
        updateDescsForScanNodes(NodeType.INDEX_SCAN);

        interQuery = request.getProto().getInterQuery();
        if (interQuery) {
            context.setInterQuery();
            this.shuffleType = context.getDataChannel().getShuffleType();

            if (shuffleType == ShuffleType.RANGE_SHUFFLE) {
                SortNode sortNode = PlannerUtil.findTopNode(plan, NodeType.SORT);
                this.finalSchema = PlannerUtil.sortSpecsToSchema(sortNode.getSortKeys());
                this.sortComp = new BaseTupleComparator(finalSchema, sortNode.getSortKeys());
            }
        } else {
            Path outFilePath = ((FileTablespace) TablespaceManager.get(queryContext.getStagingDir().toUri()))
                    .getAppenderFilePath(getId(), queryContext.getStagingDir());
            LOG.info("Output File Path: " + outFilePath);
            context.setOutputPath(outFilePath);
        }

        this.localChunks = Collections.synchronizedList(new ArrayList<>());
        this.remoteChunks = Collections.synchronizedList(new ArrayList<>());

        LOG.info(String.format(
                "* Task %s is initialized. InterQuery: %b, Shuffle: %s, Fragments: %d, Fetches:%d, Local dir: %s",
                request.getId(), interQuery, shuffleType, request.getFragments().size(),
                request.getFetches().size(), taskDir));

        if (LOG.isDebugEnabled()) {
            for (FetchProto f : request.getFetches()) {
                LOG.debug("Table Id: " + f.getName() + ", Simple URIs: "
                        + Repartitioner.createSimpleURIs(maxUrlLength, f));
            }
        }

        if (LOG.isDebugEnabled()) {
            LOG.debug("* plan:\n");
            LOG.debug(plan.toString());
        }
    }
From source file:com.gargoylesoftware.htmlunit.WebClientWaitForBackgroundJobsTest.java
    /**
     * When waitForBackgroundJavaScriptStartingBefore is called while a job is being executed, it has
     * to wait for this job to finish, even if clearXXX has been called for it.
     * @throws Exception if the test fails
     */
    @Test
    @Tries(3)
    public void waitCalledDuringJobExecution() throws Exception {
        final String html = "<html>\n"
            + "<head>\n"
            + "  <title>test</title>\n"
            + "  <script>\n"
            + "    var intervalId;\n"
            + "    function test() {\n"
            + "      intervalId = setTimeout(doWork, 100);\n"
            + "    }\n"
            + "    function doWork() {\n"
            + "      clearTimeout(intervalId);\n"
            + "      // waitForBackgroundJavaScriptStartingBefore should be called when JS execution is here\n"
            + "      var request = " + XHRInstantiation_ + ";\n"
            + "      request.open('GET', 'wait', false);\n"
            + "      request.send('');\n"
            + "      alert('end work');\n"
            + "    }\n"
            + "  </script>\n"
            + "</head>\n"
            + "<body onload='test()'>\n"
            + "</body>\n"
            + "</html>";

        final ThreadSynchronizer threadSynchronizer = new ThreadSynchronizer();
        final MockWebConnection webConnection = new MockWebConnection() {
            @Override
            public WebResponse getResponse(final WebRequest request) throws IOException {
                if (request.getUrl().toExternalForm().endsWith("/wait")) {
                    threadSynchronizer.waitForState("just before waitForBackgroundJavaScriptStartingBefore");
                    threadSynchronizer.sleep(400); // main thread needs to be able to process the next instruction
                }
                return super.getResponse(request);
            }
        };
        webConnection.setResponse(URL_FIRST, html);
        webConnection.setDefaultResponse("");

        final WebClient client = getWebClient();
        client.setWebConnection(webConnection);

        final List<String> collectedAlerts = Collections.synchronizedList(new ArrayList<String>());
        client.setAlertHandler(new CollectingAlertHandler(collectedAlerts));

        final HtmlPage page = client.getPage(URL_FIRST);
        final JavaScriptJobManager jobManager = page.getEnclosingWindow().getJobManager();
        assertNotNull(jobManager);
        assertEquals(1, jobManager.getJobCount());

        startTimedTest();
        threadSynchronizer.setState("just before waitForBackgroundJavaScriptStartingBefore");
        assertEquals(0, client.waitForBackgroundJavaScriptStartingBefore(20_000));
        assertMaxTestRunTime(600);
        assertEquals(0, jobManager.getJobCount());

        final String[] expectedAlerts = { "end work" };
        assertEquals(expectedAlerts, collectedAlerts);
    }
From source file:org.apache.syncope.core.persistence.jpa.dao.JPAAnySearchDAO.java
    @Override
    @SuppressWarnings("unchecked")
    protected <T extends Any<?>> List<T> doSearch(final Set<String> adminRealms, final SearchCond cond,
            final int page, final int itemsPerPage, final List<OrderByClause> orderBy, final AnyTypeKind kind) {

        try {
            List<Object> parameters = Collections.synchronizedList(new ArrayList<>());

            SearchSupport svs = new SearchSupport(kind);

            Pair<String, Set<String>> filter = getAdminRealmsFilter(adminRealms, svs, parameters);

            // 1. get the query string from the search condition
            StringBuilder queryString = getQuery(buildEffectiveCond(cond, filter.getRight()), parameters, svs);

            // 2. take into account realms and ordering
            OrderBySupport obs = parseOrderBy(kind, svs, orderBy);
            if (queryString.charAt(0) == '(') {
                queryString.insert(0, buildSelect(obs));
                queryString.append(buildWhere(svs, obs));
            } else {
                queryString.insert(0, buildSelect(obs).append('('));
                queryString.append(')').append(buildWhere(svs, obs));
            }
            queryString.append(filter.getLeft()).append(buildOrderBy(obs));

            // 3. prepare the search query
            Query query = entityManager().createNativeQuery(queryString.toString());

            // 4. page starts from 1, while setFirstResult() starts from 0
            query.setFirstResult(itemsPerPage * (page <= 0 ? 0 : page - 1));
            if (itemsPerPage >= 0) {
                query.setMaxResults(itemsPerPage);
            }

            // 5. populate the search query with parameter values
            fillWithParameters(query, parameters);

            // 6. prepare the result (avoiding duplicates)
            return buildResult(query.getResultList(), kind);
        } catch (Exception e) {
            LOG.error("While searching for {}", kind, e);
        }

        return Collections.emptyList();
    }
From source file:org.springframework.integration.jms.SubscribableJmsChannelTests.java
    @Test
    public void queueName() throws Exception {
        final CountDownLatch latch = new CountDownLatch(2);
        final List<Message<?>> receivedList1 = Collections.synchronizedList(new ArrayList<Message<?>>());
        MessageHandler handler1 = new MessageHandler() {
            public void handleMessage(Message<?> message) {
                receivedList1.add(message);
                latch.countDown();
            }
        };
        final List<Message<?>> receivedList2 = Collections.synchronizedList(new ArrayList<Message<?>>());
        MessageHandler handler2 = new MessageHandler() {
            public void handleMessage(Message<?> message) {
                receivedList2.add(message);
                latch.countDown();
            }
        };
        JmsChannelFactoryBean factoryBean = new JmsChannelFactoryBean(true);
        factoryBean.setConnectionFactory(this.connectionFactory);
        factoryBean.setDestinationName("dynamicQueue");
        factoryBean.setPubSubDomain(false);
        factoryBean.afterPropertiesSet();
        SubscribableJmsChannel channel = (SubscribableJmsChannel) factoryBean.getObject();
        channel.afterPropertiesSet();
        channel.start();
        channel.subscribe(handler1);
        channel.subscribe(handler2);
        channel.send(new GenericMessage<String>("foo"));
        channel.send(new GenericMessage<String>("bar"));
        assertTrue("Countdown latch should have counted down to 0 but was " + latch.getCount(),
                latch.await(TIMEOUT, TimeUnit.MILLISECONDS));
        assertEquals(1, receivedList1.size());
        assertNotNull(receivedList1.get(0));
        assertEquals("foo", receivedList1.get(0).getPayload());
        assertEquals(1, receivedList2.size());
        assertNotNull(receivedList2.get(0));
        assertEquals("bar", receivedList2.get(0).getPayload());
        channel.stop();
    }
From source file:org.apache.hadoop.hdfs.server.namenode.NNStorage.java
    /**
     * Construct the NNStorage.
     * @param conf Namenode configuration.
     * @param imageDirs Directories the image can be stored in.
     * @param editsDirs Directories the editlog can be stored in.
     * @throws IOException if any directories are inaccessible.
     */
    public NNStorage(Configuration conf, Collection<URI> imageDirs, Collection<URI> editsDirs,
            Map<URI, NNStorageLocation> locationMap) throws IOException {
        super(NodeType.NAME_NODE);

        storageDirs = Collections.synchronizedList(new ArrayList<StorageDirectory>());

        // this may modify the editsDirs, so copy before passing in
        setStorageDirectories(imageDirs, new ArrayList<URI>(editsDirs), locationMap);
        this.conf = conf;
    }
From source file:org.openhab.binding.plugwise.internal.Stick.java
    /**
     * Initialize this device and open the serial port
     *
     * @throws PlugwiseInitializationException if the port can not be opened
     */
    @SuppressWarnings("rawtypes")
    private void initialize() throws PlugwiseInitializationException {

        // Flush the device cache
        if (this.plugwiseDeviceCache != null) {
            plugwiseDeviceCache = Collections.synchronizedList(new ArrayList<PlugwiseDevice>());
        }

        // parse ports and, if the default port is found, initialize the reader
        Enumeration portList = CommPortIdentifier.getPortIdentifiers();
        while (portList.hasMoreElements()) {
            CommPortIdentifier id = (CommPortIdentifier) portList.nextElement();
            if (id.getPortType() == CommPortIdentifier.PORT_SERIAL) {
                if (id.getName().equals(port)) {
                    logger.debug("Serial port '{}' has been found.", port);
                    portId = id;
                }
            }
        }

        if (portId != null) {
            // initialize serial port
            try {
                serialPort = (SerialPort) portId.open("openHAB", 2000);
            } catch (PortInUseException e) {
                throw new PlugwiseInitializationException(e);
            }

            try {
                serialPort.addEventListener(this);
            } catch (TooManyListenersException e) {
                throw new PlugwiseInitializationException(e);
            }

            // activate the DATA_AVAILABLE notifier
            serialPort.notifyOnDataAvailable(true);

            try {
                // set port parameters
                serialPort.setSerialPortParams(115200, SerialPort.DATABITS_8, SerialPort.STOPBITS_1,
                        SerialPort.PARITY_NONE);
            } catch (UnsupportedCommOperationException e) {
                throw new PlugwiseInitializationException(e);
            }

            try {
                // get the output stream
                outputChannel = Channels.newChannel(serialPort.getOutputStream());
            } catch (IOException e) {
                throw new PlugwiseInitializationException(e);
            }
        } else {
            StringBuilder sb = new StringBuilder();
            portList = CommPortIdentifier.getPortIdentifiers();
            while (portList.hasMoreElements()) {
                CommPortIdentifier id = (CommPortIdentifier) portList.nextElement();
                if (id.getPortType() == CommPortIdentifier.PORT_SERIAL) {
                    sb.append(id.getName() + "\n");
                }
            }
            throw new PlugwiseInitializationException(
                    "Serial port '" + port + "' could not be found. Available ports are:\n" + sb.toString());
        }

        // set up the Quartz jobs
        Scheduler sched = null;
        try {
            sched = StdSchedulerFactory.getDefaultScheduler();
        } catch (SchedulerException e) {
            logger.error("Error getting a reference to the Quartz Scheduler");
        }

        JobDataMap map = new JobDataMap();
        map.put("Stick", this);

        JobDetail job = newJob(SendJob.class).withIdentity("Send-0", "Plugwise").usingJobData(map).build();
        Trigger trigger = newTrigger().withIdentity("Send-0", "Plugwise").startNow().build();

        try {
            sched.getListenerManager().addJobListener(
                    new SendJobListener("JobListener-" + job.getKey().toString()),
                    KeyMatcher.keyEquals(job.getKey()));
        } catch (SchedulerException e1) {
            logger.error("An exception occurred while attaching a Quartz Send Job Listener");
        }

        try {
            sched.scheduleJob(job, trigger);
        } catch (SchedulerException e) {
            logger.error("Error scheduling a job with the Quartz Scheduler");
        }

        map = new JobDataMap();
        map.put("Stick", this);

        job = newJob(ProcessMessageJob.class).withIdentity("ProcessMessage", "Plugwise").usingJobData(map).build();
        trigger = newTrigger().withIdentity("ProcessMessage", "Plugwise").startNow()
                .withSchedule(simpleSchedule().repeatForever().withIntervalInMilliseconds(50)).build();

        try {
            sched.scheduleJob(job, trigger);
        } catch (SchedulerException e) {
            logger.error("Error scheduling a job with the Quartz Scheduler");
        }

        // initialise the Stick
        initialised = true;
        InitialiseRequestMessage message = new InitialiseRequestMessage();
        sendMessage(message);
    }