Example usage for java.util.concurrent ConcurrentHashMap ConcurrentHashMap

Introduction

This page collects usage examples for the no-argument constructor of java.util.concurrent.ConcurrentHashMap, ConcurrentHashMap().

Prototype

public ConcurrentHashMap() 

Document

Creates a new, empty map with the default initial table size (16).
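
As a quick standalone illustration (a minimal sketch, not taken from any of the projects below), the no-argument constructor yields an empty, thread-safe map that several threads can update without external locking:

import java.util.concurrent.ConcurrentHashMap;

public class CounterExample {
    public static void main(String[] args) throws InterruptedException {
        // Empty map with the default initial table size (16).
        ConcurrentHashMap<String, Integer> hits = new ConcurrentHashMap<>();

        Runnable task = () -> {
            for (int i = 0; i < 1_000; i++) {
                // merge() applies the update atomically, so concurrent
                // increments from both threads are never lost.
                hits.merge("page", 1, Integer::sum);
            }
        };

        Thread t1 = new Thread(task);
        Thread t2 = new Thread(task);
        t1.start();
        t2.start();
        t1.join();
        t2.join();

        System.out.println(hits.get("page")); // 2000
    }
}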

Usage

From source file:io.pravega.controller.eventProcessor.impl.SerializedRequestHandlerTest.java

@Test(timeout = 10000)
public void testProcessEvent() throws InterruptedException, ExecutionException {
    final ConcurrentHashMap<String, List<Integer>> orderOfProcessing = new ConcurrentHashMap<>();

    SerializedRequestHandler<TestEvent> requestHandler = new SerializedRequestHandler<TestEvent>(
            executorService()) {
        @Override
        public CompletableFuture<Void> processEvent(TestEvent event) {
            orderOfProcessing.compute(event.getKey(), (x, y) -> {
                if (y == null) {
                    y = new ArrayList<>();
                }
                y.add(event.getNumber());
                return y;
            });
            return event.getFuture();
        }
    };

    List<Pair<TestEvent, CompletableFuture<Void>>> stream1Queue = requestHandler
            .getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertNull(stream1Queue);
    // post 3 work items for stream1
    TestEvent s1e1 = new TestEvent("scope", "stream1", 1);
    CompletableFuture<Void> s1p1 = requestHandler.process(s1e1);
    TestEvent s1e2 = new TestEvent("scope", "stream1", 2);
    CompletableFuture<Void> s1p2 = requestHandler.process(s1e2);
    TestEvent s1e3 = new TestEvent("scope", "stream1", 3);
    CompletableFuture<Void> s1p3 = requestHandler.process(s1e3);

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertTrue(stream1Queue.size() >= 2);
    assertTrue(stream1Queue.stream().noneMatch(x -> x.getRight().isDone()));
    List<Integer> collect = stream1Queue.stream().map(x -> x.getLeft().getNumber())
            .collect(Collectors.toList());
    assertTrue(collect.indexOf(2) < collect.indexOf(3));

    s1e3.complete();

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));

    // verify that no processing is complete
    assertTrue(stream1Queue.size() >= 2);
    assertTrue(stream1Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream1Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.indexOf(2) < collect.indexOf(3));

    // post 3 work items for stream2
    TestEvent s2e1 = new TestEvent("scope", "stream2", 1);
    CompletableFuture<Void> s2p1 = requestHandler.process(s2e1);
    TestEvent s2e2 = new TestEvent("scope", "stream2", 2);
    CompletableFuture<Void> s2p2 = requestHandler.process(s2e2);
    TestEvent s2e3 = new TestEvent("scope", "stream2", 3);
    CompletableFuture<Void> s2p3 = requestHandler.process(s2e3);

    List<Pair<TestEvent, CompletableFuture<Void>>> stream2Queue = requestHandler
            .getEventQueueForKey(getKeyForStream("scope", "stream2"));
    assertTrue(stream2Queue.size() >= 2);
    assertTrue(stream2Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream2Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.indexOf(2) < collect.indexOf(3));

    s1e1.complete();
    Futures.await(s1p1);

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertTrue(stream1Queue.size() >= 1);
    assertTrue(stream1Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream1Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.contains(3));

    // now make sure that work for the two streams runs concurrently
    s2e1.complete();
    Futures.await(s2p1);

    stream2Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream2"));
    assertTrue(stream2Queue.size() >= 1);
    assertTrue(stream2Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream2Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.contains(3));

    // now complete all processing
    s2e2.complete();
    Futures.await(s2p2);

    s2e3.complete();

    s1e2.complete();
    Futures.await(s1p2);

    Futures.await(s1p3);
    Futures.await(s2p3);

    assertTrue(
            orderOfProcessing.get(s1e1.getKey()).get(0) == 1 && orderOfProcessing.get(s1e1.getKey()).get(1) == 2
                    && orderOfProcessing.get(s1e1.getKey()).get(2) == 3);
    assertTrue(
            orderOfProcessing.get(s2e1.getKey()).get(0) == 1 && orderOfProcessing.get(s2e1.getKey()).get(1) == 2
                    && orderOfProcessing.get(s2e1.getKey()).get(2) == 3);

    Futures.loop(() -> requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1")) == null,
            () -> CompletableFuture.completedFuture(null), executorService());
    Futures.loop(() -> requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream2")) == null,
            () -> CompletableFuture.completedFuture(null), executorService());

    // now that we have drained all the work from the processor,
    // post new work for stream1
    TestEvent s1e4 = new TestEvent("scope", "stream1", 4);
    CompletableFuture<Void> s1p4 = requestHandler.process(s1e4);

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertNotNull(stream1Queue);

    s1e4.complete();
    Futures.await(s1p4);

    assertTrue(orderOfProcessing.get(s1e1.getKey()).get(3) == 4);
}
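
A note on the compute call in processEvent above: the remapping function is applied atomically for its key, which is what keeps each per-key list consistent under concurrent processing. The shorter computeIfAbsent(key, k -> new ArrayList<>()).add(...) idiom would not be equivalent, because the add happens outside the map's atomic update; a safe variant (a sketch, not part of the test) synchronizes the list itself:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

public class PerKeyListExample {
    public static void main(String[] args) {
        ConcurrentHashMap<String, List<Integer>> byKey = new ConcurrentHashMap<>();

        // computeIfAbsent creates the list atomically, once per key; the
        // synchronized wrapper makes the subsequent add safe across threads.
        byKey.computeIfAbsent("stream1", k -> Collections.synchronizedList(new ArrayList<>()))
                .add(1);

        System.out.println(byKey.get("stream1")); // [1]
    }
}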

From source file:org.apache.servicemix.nmr.management.ManagementEndpointRegistry.java

public ManagementEndpointRegistry() {
    endpoints = new ConcurrentHashMap<String, ManagedEndpoint>();
}

From source file:com.cisco.oss.foundation.message.AbstractHornetQConcurrentMessageHandler.java

public AbstractHornetQConcurrentMessageHandler(String consumerName, MessageIdentifier messageIdentifier) {
    super(consumerName);
    this.messageIdentifier = messageIdentifier;
    onWorkIdentifierMap = new ConcurrentHashMap<String, Object>();
}

From source file:com.cisco.oss.foundation.message.AbstractRabbitMQConcurrentMessageHandler.java

public AbstractRabbitMQConcurrentMessageHandler(String consumerName, MessageIdentifier messageIdentifier) {
    super(consumerName);
    this.messageIdentifier = messageIdentifier;
    onWorkIdentifierMap = new ConcurrentHashMap<String, Object>();
}

From source file:com.kerkr.edu.cache.CookieUtils.java

/**
 * Construct a persistent cookie store.
 */
public CookieUtils(Context context) {
    cookiePrefs = context.getSharedPreferences(COOKIE_PREFS, Context.MODE_PRIVATE);
    cookies = new ConcurrentHashMap<String, Cookie>();

    // Load any previously stored cookies into the store
    String storedCookieNames = cookiePrefs.getString(COOKIE_NAME_STORE, null);
    if (storedCookieNames != null) {
        String[] cookieNames = TextUtils.split(storedCookieNames, ",");
        for (String name : cookieNames) {
            String encodedCookie = cookiePrefs.getString(COOKIE_NAME_PREFIX + name, null);
            if (encodedCookie != null) {
                Cookie decodedCookie = decodeCookie(encodedCookie);
                if (decodedCookie != null) {
                    cookies.put(name, decodedCookie);
                }
            }
        }

        // Clear out expired cookies
        clearExpired(new Date());
    }
}
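
The constructor ends by purging expired cookies. A minimal sketch of what such a clearExpired method could look like (hypothetical, not shown in the source; it assumes the Cookie type exposes an expiresAt() timestamp) relies on ConcurrentHashMap's weakly consistent iterators, which allow removal during iteration without a ConcurrentModificationException:

// Hypothetical helper: drops every cookie that expired before the given date.
// (A full implementation would also remove the persisted copy from cookiePrefs.)
private void clearExpired(Date now) {
    for (Map.Entry<String, Cookie> entry : cookies.entrySet()) {
        if (entry.getValue().expiresAt() < now.getTime()) {
            // Safe while iterating: ConcurrentHashMap iterators are weakly
            // consistent and never throw ConcurrentModificationException.
            cookies.remove(entry.getKey());
        }
    }
}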

From source file:com.l2jfree.gameserver.util.DynamicExtension.java

/**
 * Create an instance of DynamicExtension. This is done by the GameServer
 * according to altsettings.properties.
 */
private DynamicExtension() {
    _getters = new ConcurrentHashMap<String, ExtensionFunction>();
    _setters = new ConcurrentHashMap<String, ExtensionFunction>();
    initExtensions();
}

From source file:com.clustercontrol.repository.action.GetNodeList.java

/**
 * Returns the list of all nodes registered with the given manager.
 *
 * @param managerName the manager name
 * @return the list of node information, or null if the call failed
 */
public List<NodeInfo> getAll(String managerName) {

    List<NodeInfo> records = null;
    Map<String, String> errorMsgs = new ConcurrentHashMap<>();

    try {
        RepositoryEndpointWrapper wrapper = RepositoryEndpointWrapper.getWrapper(managerName);
        records = wrapper.getNodeListAll();
    } catch (InvalidRole_Exception e) {
        errorMsgs.put(managerName, Messages.getString("message.accesscontrol.16"));
    } catch (Exception e) {
        m_log.warn("getAll(), " + e.getMessage(), e);
        errorMsgs.put(managerName, Messages.getString("message.hinemos.failure.unexpected") + ", "
                + HinemosMessage.replace(e.getMessage()));
    }

    // show collected error messages, if any
    if (0 < errorMsgs.size()) {
        UIManager.showMessageBox(errorMsgs, true);
    }
    return records;
}

From source file:be.agiv.security.client.ClientProxySelector.java

/**
 * Main constructor. Delegates unknown host requests to the given default
 * proxy selector.
 * 
 * @param defaultProxySelector
 *            the default proxy selector.
 */
public ClientProxySelector(ProxySelector defaultProxySelector) {
    this.defaultProxySelector = defaultProxySelector;
    this.proxies = new ConcurrentHashMap<String, Proxy>();
}
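
A possible way to wire this selector in (a hypothetical usage sketch; only the constructor above comes from the source) is to wrap the JVM-wide default selector and install the AGIV one in its place, so unknown hosts keep falling through to the original:

import java.net.ProxySelector;

// Delegate unknown hosts to the previous default, as the constructor documents.
ProxySelector original = ProxySelector.getDefault();
ProxySelector.setDefault(new ClientProxySelector(original));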

From source file:com.qwazr.crawler.web.manager.CurrentSessionImpl.java

CurrentSessionImpl(WebCrawlDefinition crawlDefinition, String name, TimeTracker timeTracker) {
    this.crawlDefinition = crawlDefinition;
    this.timeTracker = timeTracker;
    this.name = name;
    abort = new AtomicBoolean(false);
    this.variables = new ConcurrentHashMap<>();
    if (crawlDefinition.variables != null)
        for (Map.Entry<String, String> entry : crawlDefinition.variables.entrySet())
            if (entry.getKey() != null && entry.getValue() != null)
                this.variables.put(entry.getKey(), entry.getValue());
}
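
The null checks above are not just defensive style: unlike HashMap, ConcurrentHashMap rejects null keys and null values, so copying a map that may contain nulls has to filter them first. A small standalone illustration (not from the source):

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class NullFilterExample {
    public static void main(String[] args) {
        Map<String, String> source = new HashMap<>();
        source.put("lang", "en");
        source.put("broken", null);

        // new ConcurrentHashMap<>(source) would throw NullPointerException,
        // so copy entry by entry and skip nulls, as the constructor above does.
        ConcurrentHashMap<String, String> copy = new ConcurrentHashMap<>();
        source.forEach((k, v) -> {
            if (k != null && v != null) {
                copy.put(k, v);
            }
        });

        System.out.println(copy); // {lang=en}
    }
}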

From source file:com.ushahidi.swiftriver.core.rules.RulesUpdateQueueConsumerTest.java

@Before
public void setUp() {
    dropRulesMap = new ConcurrentHashMap<Long, List<Object>>();
    rulesRegistry = mock(RulesRegistry.class);

    rulesUpdateQueueConsumer = new RulesUpdateQueueConsumer();
    rulesUpdateQueueConsumer.setDropRulesMap(dropRulesMap);
    rulesUpdateQueueConsumer.setRulesRegistry(rulesRegistry);
    rulesUpdateQueueConsumer.setObjectMapper(objectMapper);
}