List of usage examples for java.util.concurrent.ConcurrentHashMap.put
public V put(K key, V value)
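Before the project listings, a minimal self-contained sketch of the put contract (not taken from any of the sources below): put associates the value with the key, returns the previous mapping or null if the key was absent, and, unlike HashMap, rejects null keys and null values.

import java.util.concurrent.ConcurrentHashMap;

public class PutDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> counts = new ConcurrentHashMap<>();
        // put returns null when the key was not mapped before
        Integer previous = counts.put("requests", 1);
        System.out.println(previous);                 // null
        // ... and the old value when the key was already mapped
        previous = counts.put("requests", 2);
        System.out.println(previous);                 // 1
        System.out.println(counts.get("requests"));   // 2
        // counts.put("requests", null) would throw NullPointerException:
        // ConcurrentHashMap does not allow null keys or values.
    }
}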
From source file:com.taobao.tddl.common.util.TDDLMBeanServer.java
private String getId(String name, String idPrefix) {
    ConcurrentHashMap<String, AtomicLong> subMap = idMap.get(name);
    if (null == subMap) {
        lock.lock();
        try {
            subMap = idMap.get(name);
            if (null == subMap) {
                subMap = new ConcurrentHashMap<String, AtomicLong>();
                idMap.put(name, subMap);
            }
        } finally {
            lock.unlock();
        }
    }
    AtomicLong indexValue = subMap.get(idPrefix);
    if (null == indexValue) {
        lock.lock();
        try {
            indexValue = subMap.get(idPrefix);
            if (null == indexValue) {
                indexValue = new AtomicLong(0);
                subMap.put(idPrefix, indexValue);
            }
        } finally {
            lock.unlock();
        }
    }
    long value = indexValue.incrementAndGet();
    String result = idPrefix + "-" + value;
    return result;
}
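The example above guards first-time insertion with an external lock and a double check before calling put. On Java 8+ the same check-then-put pattern can usually be collapsed into computeIfAbsent, which performs the insert atomically inside the map. A minimal sketch of that alternative, using a hypothetical IdGenerator class rather than the TDDL code itself:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

public class IdGenerator {
    private final ConcurrentHashMap<String, ConcurrentHashMap<String, AtomicLong>> idMap =
            new ConcurrentHashMap<>();

    public String getId(String name, String idPrefix) {
        // computeIfAbsent atomically creates the nested map and the counter on first use,
        // replacing the external lock and the double-checked get/put shown above
        ConcurrentHashMap<String, AtomicLong> subMap =
                idMap.computeIfAbsent(name, k -> new ConcurrentHashMap<>());
        AtomicLong indexValue = subMap.computeIfAbsent(idPrefix, k -> new AtomicLong(0));
        return idPrefix + "-" + indexValue.incrementAndGet();
    }
}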
From source file:com.github.jackygurui.vertxredissonrepository.repository.SaveAndSearchAndGetCallInConcurrentTest.java
@Test
public void test2SaveAndSearchAndGetCallIn(TestContext context) throws Exception {
    Async async = context.async();
    JsonNode source = JsonLoader.fromResource("/CallIn.json");
    int records = 1000;
    AtomicLong total = new AtomicLong(0);
    ConcurrentHashMap<JsonObject, String> m = new ConcurrentHashMap<>();
    Stream<JsonObject> stream = IntStream.rangeClosed(0, records).mapToObj(e -> {
        JsonObject clone = new JsonObject(Json.encode(source));
        Long number = Long.parseLong(clone.getString("phoneNumber")) + e;
        clone.put("phoneNumber", number + "");
        Long callTime = clone.getLong("callTime") + e;
        clone.put("callTime", callTime);
        return clone;
    });
    StopWatch sw = new StopWatch();
    sw.start();
    stream.parallel().forEach(e -> {
        org.simondean.vertx.async.Async.waterfall().<String>task(t -> {
            callInRepository.create(Json.encode(e), t);
        }).<List<CallIn>>task((id, t) -> {
            m.put(e, id);
            AtomicLong idc = new AtomicLong(0);
            org.simondean.vertx.async.Async.retry().<List<CallIn>>task(tt -> {
                callInRepository.searchIndexByScoreAndGet("callTime", e.getDouble("callTime"),
                        e.getDouble("callTime"), 0, 1, ttt -> {
                            logger.info("id = " + id + " | retry count: " + idc.incrementAndGet());
                            tt.handle(ttt.succeeded() && ttt.result() != null && !ttt.result().isEmpty()
                                    ? Future.succeededFuture(ttt.result())
                                    : Future.failedFuture(ttt.cause()));
                        });
            }).times(100000).run(t);
        }).run(r -> {
            context.assertTrue(r.succeeded());
            if (r.succeeded()) {
                context.assertFalse(r.result().isEmpty());
                context.assertEquals(1, r.result().size());
                CallIn ci = r.result().iterator().next();
                context.assertNotNull(ci);
                logger.info(Json.encode(ci));
                CallIn cii = Json.decodeValue(e.put("id", m.get(e)).encode(), CallIn.class);
                context.assertEquals(Json.encode(cii), Json.encode(ci));
            }
            long t;
            if ((t = total.incrementAndGet()) == records) {
                sw.stop();
                logger.info("time to concurrently save and search and get " + records
                        + " call in records: " + sw.getTime());
                async.complete();
            } else {
                logger.info("t = " + t);
            }
        });
    });
}
From source file:ddf.catalog.source.opensearch.TestOpenSearchSource.java
private ConcurrentHashMap<String, String> createMapFor(List<NameValuePair> pairs) {
    ConcurrentHashMap<String, String> map = new ConcurrentHashMap<String, String>();
    for (NameValuePair pair : pairs) {
        map.put(pair.getName(), pair.getValue());
    }
    return map;
}
From source file:org.apache.storm.daemon.nimbus.NimbusUtils.java
@SuppressWarnings("rawtypes") @ClojureClass(className = "backtype.storm.daemon.nimbus#update-heartbeats!") public static void updateHeartbeats(NimbusData nimbus, String stormId, Set<ExecutorInfo> allExecutors, Assignment existingAssignment) throws Exception { LOG.debug("Updating heartbeats for {}:{}", stormId, allExecutors.toString()); StormClusterState stormClusterState = nimbus.getStormClusterState(); Map<ExecutorInfo, ExecutorHeartbeat> executorBeats = stormClusterState.executorBeats(stormId, existingAssignment.getExecutorToNodeport()); Map<ExecutorInfo, ExecutorCache> cache = nimbus.getExecutorHeartbeatsCache().get(stormId); Map conf = nimbus.getConf();// w ww. j a va 2s . com int taskTimeOutSecs = CoreUtil.parseInt(conf.get(Config.NIMBUS_TASK_TIMEOUT_SECS), 30); Map<ExecutorInfo, ExecutorCache> newCache = updateHeartbeatCache(cache, executorBeats, allExecutors, taskTimeOutSecs); ConcurrentHashMap<String, Map<ExecutorInfo, ExecutorCache>> executorCache = nimbus .getExecutorHeartbeatsCache(); executorCache.put(stormId, newCache); nimbus.setExecutorHeartbeatsCache(executorCache); }
From source file:org.wso2.carbon.event.output.adaptor.jms.JMSEventAdaptorType.java
private PublisherDetails initPublisher(OutputEventAdaptorConfiguration outputEventAdaptorConfiguration,
        ConcurrentHashMap<String, PublisherDetails> topicEventSender, String topicName,
        Map<String, String> messageConfig) {
    PublisherDetails publisherDetails;
    Hashtable<String, String> adaptorProperties = new Hashtable<String, String>();
    adaptorProperties.putAll(outputEventAdaptorConfiguration.getOutputProperties());
    JMSConnectionFactory jmsConnectionFactory = new JMSConnectionFactory(adaptorProperties,
            outputEventAdaptorConfiguration.getName());
    JMSMessageSender jmsMessageSender = new JMSMessageSender(jmsConnectionFactory, messageConfig);
    publisherDetails = new PublisherDetails(jmsConnectionFactory, jmsMessageSender);
    topicEventSender.put(topicName, publisherDetails);
    return publisherDetails;
}
From source file:com.thoughtworks.go.server.service.PipelineConfigServicePerformanceTest.java
private void run(Runnable runnable, int numberOfRequests, final ConcurrentHashMap<String, Boolean> results)
        throws InterruptedException {
    Boolean finalResult = true;
    LOGGER.info("Tests start now!");
    final ArrayList<Thread> threads = new ArrayList<>();
    for (int i = 0; i < numberOfRequests; i++) {
        Thread t = new Thread(runnable, "pipeline" + i);
        threads.add(t);
    }
    for (Thread t : threads) {
        Thread.sleep(1000 * (new Random().nextInt(3) + 1));
        t.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
            public void uncaughtException(Thread t, Throwable e) {
                LOGGER.error("Exception " + e + " from thread " + t);
                results.put(t.getName(), false);
            }
        });
        t.start();
    }
    for (Thread t : threads) {
        int i = threads.indexOf(t);
        if (i == (numberOfRequests - 1)) {
            // takeHeapDump(dumpDir, i);
        }
        t.join();
    }
    for (String threadId : results.keySet()) {
        finalResult = results.get(threadId) && finalResult;
    }
    assertThat(finalResult, is(true));
}
From source file:net.yacy.kelondro.util.FileUtils.java
public static ConcurrentHashMap<String, String> table(final Iterator<String> li) {
    String line;
    final ConcurrentHashMap<String, String> props = new ConcurrentHashMap<String, String>();
    while (li.hasNext()) {
        int pos = 0;
        line = li.next().trim();
        if (!line.isEmpty() && line.charAt(0) == '#') {
            continue; // exclude comments
        }
        do {
            // search for unescaped =
            pos = line.indexOf('=', pos + 1);
        } while (pos > 0 && line.charAt(pos - 1) == '\\');
        if (pos > 0) try {
            String key = StringUtils.replaceEach(line.substring(0, pos).trim(),
                    escaped_strings_in, unescaped_strings_out);
            String value = StringUtils.replaceEach(line.substring(pos + 1).trim(),
                    escaped_strings_in, unescaped_strings_out);
            //System.out.println("key = " + key + ", value = " + value);
            props.put(key, value);
        } catch (final IndexOutOfBoundsException e) {
            ConcurrentLog.logException(e);
        }
    }
    return props;
}
From source file:org.wso2.carbon.event.input.adaptor.http.HTTPEventAdaptorType.java
public String subscribe(InputEventAdaptorMessageConfiguration inputEventAdaptorMessageConfiguration,
        InputEventAdaptorListener inputEventAdaptorListener,
        InputEventAdaptorConfiguration inputEventAdaptorConfiguration,
        AxisConfiguration axisConfiguration) {
    String subscriptionId = UUID.randomUUID().toString();
    String topic = inputEventAdaptorMessageConfiguration.getInputMessageProperties()
            .get(HTTPEventAdaptorConstants.ADAPTOR_MESSAGE_TOPIC);
    int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
    ConcurrentHashMap<String, ConcurrentHashMap<String, ArrayList<HTTPAdaptorListener>>> tenantSpecificListenerMap =
            inputEventAdaptorListenerMap.get(tenantId);
    if (tenantSpecificListenerMap == null) {
        tenantSpecificListenerMap = new ConcurrentHashMap<String, ConcurrentHashMap<String, ArrayList<HTTPAdaptorListener>>>();
        inputEventAdaptorListenerMap.put(tenantId, tenantSpecificListenerMap);
    }
    ConcurrentHashMap<String, ArrayList<HTTPAdaptorListener>> adaptorSpecificListeners =
            tenantSpecificListenerMap.get(inputEventAdaptorConfiguration.getName());
    if (adaptorSpecificListeners == null) {
        adaptorSpecificListeners = new ConcurrentHashMap<String, ArrayList<HTTPAdaptorListener>>();
        if (null != tenantSpecificListenerMap.put(inputEventAdaptorConfiguration.getName(),
                adaptorSpecificListeners)) {
            adaptorSpecificListeners = tenantSpecificListenerMap.get(inputEventAdaptorConfiguration.getName());
        }
    }
    ArrayList<HTTPAdaptorListener> topicSpecificListeners = adaptorSpecificListeners.get(topic);
    ArrayList<HTTPAdaptorListener> newTopicSpecificListeners;
    if (topicSpecificListeners == null || topicSpecificListeners.size() == 0) {
        HTTPEventAdaptorServiceDS.registerDynamicEndpoint(inputEventAdaptorConfiguration.getName(), topic,
                tenantId);
        newTopicSpecificListeners = new ArrayList<HTTPAdaptorListener>();
    } else {
        newTopicSpecificListeners = new ArrayList<HTTPAdaptorListener>(topicSpecificListeners);
    }
    newTopicSpecificListeners.add(new HTTPAdaptorListener(subscriptionId, inputEventAdaptorListener, tenantId));
    adaptorSpecificListeners.put(topic, newTopicSpecificListeners);
    return subscriptionId;
}
From source file:com.thoughtworks.go.server.service.PipelineConfigServicePerformanceTest.java
@Test
public void performanceTestForDeletePipeline() throws Exception {
    setupPipelines(numberOfRequests);
    final ConcurrentHashMap<String, Boolean> results = new ConcurrentHashMap<>();
    run(new Runnable() {
        @Override
        public void run() {
            PipelineConfig pipelineConfig = goConfigService.getConfigForEditing()
                    .pipelineConfigByName(new CaseInsensitiveString(Thread.currentThread().getName()));
            pipelineConfig.add(new StageConfig(new CaseInsensitiveString("additional_stage"),
                    new JobConfigs(new JobConfig(new CaseInsensitiveString("addtn_job")))));
            PerfTimer updateTimer = PerfTimer.start("Saving pipelineConfig : " + pipelineConfig.name());
            pipelineConfigService.deletePipelineConfig(user, pipelineConfig, result);
            updateTimer.stop();
            results.put(Thread.currentThread().getName(), result.isSuccessful());
            if (!result.isSuccessful()) {
                LOGGER.error(result.toString());
                LOGGER.error("Errors on pipeline" + Thread.currentThread().getName() + " are : "
                        + StringUtils.join(getAllErrors(pipelineConfig), ", "));
            }
        }
    }, numberOfRequests, results);
}
From source file:com.taobao.diamond.server.service.GroupService.java
/**
 * Maps an address (ip) to a dataId and group.
 *
 * @param address
 * @param dataId
 * @param group
 */
@Deprecated
public boolean addAddress2GroupMapping(String address, String dataId, String group) {
    synchronized (this) {
        if (this.addressGroupCache.containsKey(address)) {
            ConcurrentHashMap<String, GroupInfo> subMap = this.addressGroupCache.get(address);
            if (subMap != null && subMap.containsKey(dataId))
                return false;
        }
        ConcurrentHashMap<String, GroupInfo> dataIdGroupMap = this.addressGroupCache.get(address);
        if (dataIdGroupMap == null) {
            dataIdGroupMap = new ConcurrentHashMap<String, GroupInfo>();
            ConcurrentHashMap<String, GroupInfo> oldMap =
                    this.addressGroupCache.putIfAbsent(address, dataIdGroupMap);
            if (oldMap != null) {
                dataIdGroupMap = oldMap;
            }
        }
        GroupInfo groupInfo = new GroupInfo(address, dataId, group);
        this.persistService.addGroupInfo(groupInfo);
        // re-read the persisted record so the cached GroupInfo carries its generated id
        groupInfo = this.persistService.findGroupInfoByAddressDataId(address, dataId);
        dataIdGroupMap.put(dataId, groupInfo);
    }
    this.notifyService.notifyGroupChanged();
    return true;
}
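The example above relies on the idiom putIfAbsent: it either installs the freshly created nested map and returns null, or leaves the existing map in place and returns it, so every thread ends up working on the same instance. A standalone sketch of that idiom with hypothetical names, not part of the Diamond code:

import java.util.concurrent.ConcurrentHashMap;

public class NestedMapIdiom {
    public static void main(String[] args) {
        ConcurrentHashMap<String, ConcurrentHashMap<String, String>> cache = new ConcurrentHashMap<>();

        ConcurrentHashMap<String, String> fresh = new ConcurrentHashMap<>();
        // putIfAbsent returns null if it installed `fresh`, otherwise the map already present
        ConcurrentHashMap<String, String> old = cache.putIfAbsent("10.0.0.1", fresh);
        ConcurrentHashMap<String, String> subMap = (old != null) ? old : fresh;

        // plain put is then safe for the inner update
        subMap.put("dataId", "group");
        System.out.println(cache);
    }
}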