Example usage for java.util.concurrent ConcurrentHashMap put

Introduction

On this page you can find usage examples for java.util.concurrent ConcurrentHashMap put.

Prototype

public V put(K key, V value) 

Document

Maps the specified key to the specified value in this table. Neither the key nor the value can be null. Returns the previous value associated with the key, or null if there was no mapping for the key.
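
A minimal standalone sketch of that contract (illustrative, not taken from the projects below):

import java.util.concurrent.ConcurrentHashMap;

public class PutContractExample {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> counts = new ConcurrentHashMap<>();

        // put returns null when the key was previously absent...
        System.out.println(counts.put("alpha", 1));   // null

        // ...and the previous value when the key was already mapped.
        System.out.println(counts.put("alpha", 2));   // 1

        // Null keys and null values are rejected:
        // counts.put(null, 3) or counts.put("beta", null) throws NullPointerException.
    }
}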

Usage

From source file:org.wso2.andes.server.cassandra.OnflightMessageTracker.java

/**
 * Track that this message is buffered. Return true if eligible to buffer
 *
 * @param slot                 the slot the message is being read from
 * @param andesMessageMetadata metadata to buffer
 * @return eligibility to buffer
 */
public boolean addMessageToBufferingTracker(Slot slot, AndesMessageMetadata andesMessageMetadata) {
    long messageID = andesMessageMetadata.getMessageID();
    boolean isOKToBuffer;
    if (log.isDebugEnabled()) {
        log.debug("Buffering message id = " + messageID + " slot = " + slot.toString());
    }
    ConcurrentHashMap<Long, MsgData> messagesOfSlot = messageBufferingTracker.get(slot);
    if (messagesOfSlot == null) {
        messagesOfSlot = new ConcurrentHashMap<Long, MsgData>();
        messageBufferingTracker.put(slot, messagesOfSlot);
    }
    MsgData trackingData = messagesOfSlot.get(messageID);
    if (trackingData == null) {
        trackingData = new MsgData(messageID, slot, slot.getDestinationOfMessagesInSlot(),
                System.currentTimeMillis(), andesMessageMetadata.getExpirationTime(), MessageStatus.BUFFERED,
                andesMessageMetadata.getArrivalTime());
        msgId2MsgData.put(messageID, trackingData);
        messagesOfSlot.put(messageID, trackingData);
        isOKToBuffer = true;
    } else {
        if (log.isDebugEnabled()) {
            log.debug("Buffering rejected message id = " + messageID);
        }
        isOKToBuffer = false;
    }
    return isOKToBuffer;
}
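
The example above checks messageBufferingTracker.get(slot) for null and then calls put, which is not atomic: two threads that see the same new slot can each install their own inner map, and one of them is silently lost. A minimal sketch (not the project's code; key and value types are simplified stand-ins for Slot and MsgData) of the same registration done atomically, assuming Java 8+:

import java.util.concurrent.ConcurrentHashMap;

public class NestedTrackerSketch {
    // Outer map: slot id -> (message id -> tracking state).
    private final ConcurrentHashMap<String, ConcurrentHashMap<Long, String>> tracker =
            new ConcurrentHashMap<>();

    public boolean addMessageToBufferingTracker(String slotId, long messageId) {
        // computeIfAbsent creates the inner map at most once per slot,
        // even when several threads race on a brand-new slot.
        ConcurrentHashMap<Long, String> messagesOfSlot =
                tracker.computeIfAbsent(slotId, k -> new ConcurrentHashMap<>());

        // putIfAbsent returns null only for the thread that actually inserted
        // the entry, which answers "is it OK to buffer" in a single atomic step.
        return messagesOfSlot.putIfAbsent(messageId, "BUFFERED") == null;
    }

    public static void main(String[] args) {
        NestedTrackerSketch sketch = new NestedTrackerSketch();
        System.out.println(sketch.addMessageToBufferingTracker("slot-1", 42L)); // true
        System.out.println(sketch.addMessageToBufferingTracker("slot-1", 42L)); // false
    }
}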

From source file:org.wso2.carbon.databridge.streamdefn.registry.datastore.RegistryStreamDefinitionStore.java

public Collection<StreamDefinition> getAllStreamDefinitionsFromStore(int tenantId) {
    ConcurrentHashMap<String, StreamDefinition> map = new ConcurrentHashMap<String, StreamDefinition>();

    try {
        UserRegistry registry = ServiceHolder.getRegistryService().getGovernanceSystemRegistry(tenantId);

        if (!registry.resourceExists(RegistryStreamDefinitionStoreUtil.getStreamDefinitionStorePath())) {
            registry.put(RegistryStreamDefinitionStoreUtil.getStreamDefinitionStorePath(),
                    registry.newCollection());
        } else {
            org.wso2.carbon.registry.core.Collection collection = (org.wso2.carbon.registry.core.Collection) registry
                    .get(RegistryStreamDefinitionStoreUtil.getStreamDefinitionStorePath());
            for (String streamNameCollection : collection.getChildren()) {

                org.wso2.carbon.registry.core.Collection innerCollection = (org.wso2.carbon.registry.core.Collection) registry
                        .get(streamNameCollection);
                for (String streamVersionCollection : innerCollection.getChildren()) {

                    Resource resource = (Resource) registry.get(streamVersionCollection);
                    try {
                        StreamDefinition streamDefinition = EventDefinitionConverterUtils
                                .convertFromJson(RegistryUtils.decodeBytes((byte[]) resource.getContent()));
                        map.put(streamDefinition.getStreamId(), streamDefinition);
                    } catch (Throwable e) {
                        log.error("Error in retrieving streamDefinition from the resource at "
                                + resource.getPath(), e);
                    }
                }
            }
        }

    } catch (RegistryException e) {
        log.error("Error in retrieving streamDefinitions from the registry", e);
    }

    return map.values();
}

From source file:org.dataconservancy.dcs.integration.main.UpdateDepositIT.java

@Test
public void testConcurrentUpdatesToDifferentLineages() throws Exception {
    // Attempt to update different lineages simultaneously from a number of
    // threads. All updates should succeed.

    DcsDeliverableUnit[] predecessors = new DcsDeliverableUnit[NUM_CONCURRENT_DEPOSITS];

    for (int i = 0; i < predecessors.length; i++) {
        SIP sip = new SIP();

        sip.addVersionWithNewFile(null, "v11");

        Dcp result = deposit(sip);

        assertNotNull(result);
        assertEquals(1, result.getDeliverableUnits().size());

        final DcsDeliverableUnit du = result.getDeliverableUnits().iterator().next();

        assertNotNull(du.getLineageId());

        predecessors[i] = du;
    }

    Thread[] threads = new Thread[NUM_CONCURRENT_DEPOSITS];

    final ConcurrentHashMap<String, String> success = new ConcurrentHashMap<String, String>();

    for (int i = 0; i < threads.length; i++) {
        final String thread_name = "" + i;
        final DcsDeliverableUnit pred = predecessors[i];

        threads[i] = new Thread(new Runnable() {
            public void run() {
                SIP sip2 = new SIP();
                final String title = "v2 " + thread_name;

                try {
                    sip2.addVersion(pred.getId(), title);
                    Dcp result2 = deposit(sip2);

                    assertNotNull(result2);
                    assertEquals(1, result2.getDeliverableUnits().size());
                    DcsDeliverableUnit du2 = result2.getDeliverableUnits().iterator().next();
                    assertNotNull(du2.getLineageId());

                    assertEquals(pred.getLineageId(), du2.getLineageId());

                    success.put(du2.getId(), du2.getLineageId());
                } catch (Exception e) {
                    StringWriter sw = new StringWriter();
                    e.printStackTrace(new PrintWriter(sw));
                    fail("SIP deposit (with predecessor [" + pred.getId() + "] and title [" + title
                            + "]) failed: " + sw.toString());
                }
            }
        }, thread_name);
    }

    for (int i = 0; i < threads.length; i++) {
        threads[i].start();
    }

    for (int i = 0; i < threads.length; i++) {
        threads[i].join();
    }

    assertEquals(NUM_CONCURRENT_DEPOSITS, success.size());
}

From source file:org.dataconservancy.dcs.integration.main.UpdateDepositIT.java

@Test
public void testConcurrentUpdatesToSameLineage() throws Exception {
    SIP sip1 = new SIP();

    sip1.addVersionWithNewFile(null, "v11");

    Dcp result1 = deposit(sip1);

    assertNotNull(result1);
    assertEquals(1, result1.getDeliverableUnits().size());

    final DcsDeliverableUnit du1 = result1.getDeliverableUnits().iterator().next();

    assertNotNull(du1.getLineageId());

    // Attempt to update the du from sip1 simultaneously from a number of
    // threads. Only one update should succeed.

    Thread[] threads = new Thread[NUM_CONCURRENT_DEPOSITS];

    final ConcurrentHashMap<String, String> success = new ConcurrentHashMap<String, String>();

    for (int i = 0; i < threads.length; i++) {
        final String thread_name = "" + i;

        threads[i] = new Thread(new Runnable() {
            public void run() {
                SIP sip2 = new SIP();

                try {
                    sip2.addVersion(du1.getId(), "v2 " + thread_name);
                    Dcp result2 = deposit(sip2);

                    if (result2 == null) {
                        // Another update is in progress or succeeded
                    } else {
                        assertEquals(1, result2.getDeliverableUnits().size());
                        DcsDeliverableUnit du2 = result2.getDeliverableUnits().iterator().next();
                        assertNotNull(du2.getLineageId());
                        assertEquals(du1.getLineageId(), du2.getLineageId());

                        success.put(du2.getId(), du2.getLineageId());
                    }
                } catch (Exception e) {
                    StringWriter sw = new StringWriter();
                    e.printStackTrace(new PrintWriter(sw));
                    log.info("Deposit failed: " + sw.toString());
                }
            }
        }, thread_name);
    }

    for (int i = 0; i < threads.length; i++) {
        threads[i].start();
    }

    for (int i = 0; i < threads.length; i++) {
        threads[i].join();
    }

    assertEquals(1, success.size());
}
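
Both tests use the ConcurrentHashMap only as a thread-safe collector of successful deposit ids: put can be called from many threads without extra locking, and size() after join() counts the successes. When the values are never read back, a ConcurrentHashMap-backed set is an alternative; a small sketch, assuming Java 8+ (the ids are illustrative):

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

public class SuccessSetSketch {
    public static void main(String[] args) throws InterruptedException {
        // Set view backed by a ConcurrentHashMap; add() is thread-safe like put().
        Set<String> success = ConcurrentHashMap.newKeySet();

        Thread t1 = new Thread(() -> success.add("deposit-1"));
        Thread t2 = new Thread(() -> success.add("deposit-2"));
        t1.start();
        t2.start();
        t1.join();
        t2.join();

        System.out.println(success.size()); // 2
    }
}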

From source file:org.apache.oodt.cas.catalog.struct.impl.index.DataSourceIndex.java

/**
 * {@inheritDoc}
 */
public List<TermBucket> getBuckets(TransactionId<?> transactionId) throws QueryServiceException {
    Connection conn = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
        ConcurrentHashMap<String, TermBucket> termBuckets = new ConcurrentHashMap<String, TermBucket>();
        conn = this.dataSource.getConnection();
        stmt = conn.createStatement();
        rs = stmt.executeQuery(
                "SELECT bucket_name,term_name,term_value FROM transaction_terms WHERE transaction_id = '"
                        + transactionId + "'");
        while (rs.next()) {
            String bucketName = rs.getString("bucket_name");
            String termName = rs.getString("term_name");
            String termValue = rs.getString("term_value");
            TermBucket bucket = termBuckets.get(bucketName);
            if (bucket == null) {
                bucket = new TermBucket(bucketName);
            }
            Term term = new Term(termName, Collections
                    .singletonList((this.useUTF8 ? URLDecoder.decode(termValue, "UTF8") : termValue)));
            bucket.addTerm(term);
            termBuckets.put(bucketName, bucket);
        }
        return new Vector<TermBucket>(termBuckets.values());
    } catch (Exception e) {
        throw new QueryServiceException(
                "Failed to get term buckets for transaction id '" + transactionId + "' : " + e.getMessage(), e);
    } finally {
        // Close in reverse order of acquisition; failures during cleanup are ignored.
        try {
            rs.close();
        } catch (Exception ignored) {
        }
        try {
            stmt.close();
        } catch (Exception ignored) {
        }
        try {
            conn.close();
        } catch (Exception ignored) {
        }
    }
}

From source file:com.alibaba.napoli.metamorphosis.client.consumer.ConsumerZooKeeper.java

protected ZKLoadRebalanceListener registerConsumerInternal(final ZKLoadRebalanceListener loadBalanceListener)
        throws UnknownHostException, InterruptedException, Exception {
    final ZKGroupDirs dirs = this.metaZookeeper.new ZKGroupDirs(loadBalanceListener.consumerConfig.getGroup());

    final String topicString = this.getTopicsString(loadBalanceListener.topicSubcriberRegistry);

    if (this.zkClient == null) {
        // zkClient is null: stop and reset any existing fetcher first
        loadBalanceListener.fetchManager.stopFetchRunner();
        loadBalanceListener.fetchManager.resetFetchState();
        // Without ZooKeeper, register the configured partition for each topic locally and fetch directly
        for (final String topic : loadBalanceListener.topicSubcriberRegistry.keySet()) {
            final SubscriberInfo subInfo = loadBalanceListener.topicSubcriberRegistry.get(topic);
            ConcurrentHashMap<Partition, TopicPartitionRegInfo> topicPartRegInfoMap = loadBalanceListener.topicRegistry
                    .get(topic);
            if (topicPartRegInfoMap == null) {
                topicPartRegInfoMap = new ConcurrentHashMap<Partition, TopicPartitionRegInfo>();
                loadBalanceListener.topicRegistry.put(topic, topicPartRegInfoMap);
            }
            final Partition partition = new Partition(loadBalanceListener.consumerConfig.getPartition());
            final long offset = loadBalanceListener.consumerConfig.getOffset();
            final TopicPartitionRegInfo regInfo = new TopicPartitionRegInfo(topic, partition, offset);
            topicPartRegInfoMap.put(partition, regInfo);
            loadBalanceListener.fetchManager.addFetchRequest(
                    new FetchRequest(new Broker(0, loadBalanceListener.consumerConfig.getServerUrl()), 0L,
                            regInfo, subInfo.getMaxSize()));
        }
        loadBalanceListener.fetchManager.startFetchRunner();
    } else {

        // Register this consumer id under the group's consumer registry path
        ZkUtils.createEphemeralPathExpectConflict(this.zkClient,
                dirs.consumerRegistryDir + "/" + loadBalanceListener.consumerIdString, topicString);
        // Watch for changes to the group's consumer list
        this.zkClient.subscribeChildChanges(dirs.consumerRegistryDir, loadBalanceListener);

        // For each subscribed topic, watch partition changes and topic read-permission changes
        for (final String topic : loadBalanceListener.topicSubcriberRegistry.keySet()) {
            final String partitionPath = this.metaZookeeper.brokerTopicsPath + "/" + topic;
            ZkUtils.makeSurePersistentPathExists(this.zkClient, partitionPath);
            this.zkClient.subscribeChildChanges(partitionPath, loadBalanceListener);
            ZKTopicRightDirs rightDirs = this.metaZookeeper.new ZKTopicRightDirs(topic);
            this.zkClient.subscribeChildChanges(rightDirs.infoTopicReadRightDir, loadBalanceListener);

        }
        this.zkClient.subscribeStateChanges(new ZKSessionExpireListenner(loadBalanceListener));

        // Trigger an initial rebalance
        loadBalanceListener.syncedRebalance(false);
    }
    return loadBalanceListener;
}

From source file:com.app.server.WarDeployer.java

public void init(Vector serviceList, ServerConfig serverConfig, MBeanServer mbeanServer) {
    try {
        this.serviceList = serviceList;
        this.serverConfig = serverConfig;
        this.mbeanServer = mbeanServer;
        this.scanDirectory = serverConfig.getDeploydirectory();
        DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {

            protected void loadRules() {

                try {
                    loadXMLRules(new InputSource(new FileInputStream("./config/executorservices-config.xml")));
                } catch (Exception e) {
                    log.error("Could not load config xml rules ./config/executorservices-config.xml", e);
                    //e.printStackTrace();
                }

            }
        });
        serverdigester = serverdigesterLoader.newDigester();
    } catch (Exception e1) {
        log.error("Could not create digester for executorservices-config.xml", e1);
        //e1.printStackTrace();
    }
    try {
        DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {

            protected void loadRules() {

                try {
                    loadXMLRules(new InputSource(new FileInputStream("./config/messagingclass-rules.xml")));
                } catch (FileNotFoundException e) {
                    log.error("Could not load config xml rules ./config/messagingclass-rules.xml", e);
                    //e.printStackTrace();
                }

            }
        });
        messagedigester = serverdigesterLoader.newDigester();
        DigesterLoader messagingdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {

            protected void loadRules() {
                try {
                    loadXMLRules(new InputSource(new FileInputStream("./config/messagingconfig-rules.xml")));
                } catch (Exception e) {
                    log.error("Could not load xml config file ./config/messagingconfig-rules.xml", e);
                    e.printStackTrace();
                }

            }
        });
        Digester messagingdigester = messagingdigesterLoader.newDigester();
        messagingElem = (MessagingElem) messagingdigester
                .parse(new InputSource(new FileInputStream("./config/messaging.xml")));
        synchronized (messagingElem) {
            ConcurrentHashMap randomQueue = messagingElem.randomQueue;
            Set<String> randomQueueSet = randomQueue.keySet();
            Iterator<String> ite = randomQueueSet.iterator();
            while (ite.hasNext()) {
                Queue queue = (Queue) randomQueue.get(ite.next());
                ConcurrentHashMap randomqueuemap = (ConcurrentHashMap) messagingClassMap.get("RandomQueue");
                if (randomqueuemap == null) {
                    randomqueuemap = new ConcurrentHashMap();
                    messagingClassMap.put("RandomQueue", randomqueuemap);
                }
                CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) randomqueuemap
                        .get(queue.getQueuename());
                if (randomqueuelist == null)
                    randomqueuemap.put(queue.getQueuename(), new CopyOnWriteArrayList());
            }

            ConcurrentHashMap roundrobinQueue = messagingElem.roundrobinQueue;
            Set<String> roundrobinQueueSet = roundrobinQueue.keySet();
            ite = roundrobinQueueSet.iterator();
            while (ite.hasNext()) {
                Queue queue = (Queue) roundrobinQueue.get(ite.next());
                ConcurrentHashMap roundrobinqueuemap = (ConcurrentHashMap) messagingClassMap
                        .get("RoundRobinQueue");
                if (roundrobinqueuemap == null) {
                    roundrobinqueuemap = new ConcurrentHashMap();
                    messagingClassMap.put("RoundRobinQueue", roundrobinqueuemap);
                }
                CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) roundrobinqueuemap
                        .get(queue.getQueuename());
                if (randomqueuelist == null)
                    roundrobinqueuemap.put(queue.getQueuename(), new CopyOnWriteArrayList());
            }

            ConcurrentHashMap topicMap = messagingElem.topicMap;
            Set<String> topicSet = topicMap.keySet();
            Iterator<String> iter = topicSet.iterator();
            while (iter.hasNext()) {
                Topic topic = (Topic) topicMap.get(iter.next());
                ConcurrentHashMap topicmap = (ConcurrentHashMap) messagingClassMap.get("Topic");
                if (topicmap == null) {
                    topicmap = new ConcurrentHashMap();
                    messagingClassMap.put("Topic", topicmap);
                }
                CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) topicmap
                        .get(topic.getTopicname());
                if (randomqueuelist == null)
                    topicmap.put(topic.getTopicname(), new CopyOnWriteArrayList());
            }
            //log.info(messagingClassMap);
        }
    } catch (Exception e1) {
        log.error("", e1);
        //e1.printStackTrace();
    }

    try {
        DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {

            protected void loadRules() {

                try {
                    loadXMLRules(new InputSource(new FileInputStream("./config/webxml-rules.xml")));
                } catch (FileNotFoundException e) {
                    log.error("Could not load xml config rules ./config/webxml-rules.xml", e);
                    //e.printStackTrace();
                }

            }
        });
        webxmldigester = serverdigesterLoader.newDigester();
    } catch (Exception ex) {
        log.error("Could not create web.xml digester", ex);
        // ex.printStackTrace();
    }
    log.info("initialized");
}
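
The init method above repeats the same get / null-check / put sequence for three raw nested maps (RandomQueue, RoundRobinQueue, Topic). A sketch of what one of those registrations could look like with typed maps and chained computeIfAbsent, assuming Java 8+ (the queue name is illustrative):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;

public class MessagingMapSketch {
    public static void main(String[] args) {
        // Typed stand-in for messagingClassMap: class name -> (queue name -> subscriber list).
        ConcurrentHashMap<String, ConcurrentHashMap<String, CopyOnWriteArrayList<String>>> messagingClassMap =
                new ConcurrentHashMap<>();

        // One chained call replaces the get / null-check / put blocks above.
        messagingClassMap
                .computeIfAbsent("RandomQueue", k -> new ConcurrentHashMap<>())
                .computeIfAbsent("orders", k -> new CopyOnWriteArrayList<>());

        System.out.println(messagingClassMap); // {RandomQueue={orders=[]}}
    }
}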

From source file:org.wso2.carbon.event.input.adaptor.soap.SoapEventAdaptorType.java

@Override
public String subscribe(InputEventAdaptorMessageConfiguration inputEventAdaptorMessageConfiguration,
        InputEventAdaptorListener inputEventAdaptorListener,
        InputEventAdaptorConfiguration inputEventAdaptorConfiguration, AxisConfiguration axisConfiguration) {

    String subscriptionId = UUID.randomUUID().toString();

    String operation = inputEventAdaptorMessageConfiguration.getInputMessageProperties()
            .get(SoapEventAdaptorConstants.ADAPTOR_MESSAGE_OPERATION_NAME);
    int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
    Map<String, ConcurrentHashMap<String, ConcurrentHashMap<String, SoapAdaptorListener>>> tenantSpecificListenerMap = inputEventAdaptorListenerMap
            .get(tenantId);
    if (tenantSpecificListenerMap == null) {
        tenantSpecificListenerMap = new ConcurrentHashMap<String, ConcurrentHashMap<String, ConcurrentHashMap<String, SoapAdaptorListener>>>();
        inputEventAdaptorListenerMap.put(tenantId, tenantSpecificListenerMap);

    }

    ConcurrentHashMap<String, ConcurrentHashMap<String, SoapAdaptorListener>> adaptorSpecificListeners = tenantSpecificListenerMap
            .get(inputEventAdaptorConfiguration.getName());

    if (adaptorSpecificListeners == null) {
        adaptorSpecificListeners = new ConcurrentHashMap<String, ConcurrentHashMap<String, SoapAdaptorListener>>();
        if (null != tenantSpecificListenerMap.put(inputEventAdaptorConfiguration.getName(),
                adaptorSpecificListeners)) {
            adaptorSpecificListeners = tenantSpecificListenerMap.get(inputEventAdaptorConfiguration.getName());
        }
    }

    AxisService axisService = null;

    ConcurrentHashMap<String, SoapAdaptorListener> operationSpecificListeners = adaptorSpecificListeners
            .get(operation);
    if (operationSpecificListeners == null || operationSpecificListeners.size() == 0) {
        try {
            axisService = Axis2Util.registerAxis2Service(inputEventAdaptorMessageConfiguration,
                    inputEventAdaptorConfiguration, axisConfiguration);
        } catch (AxisFault axisFault) {
            throw new InputEventAdaptorEventProcessingException(
                    "Cannot create the axis2 service to receive events", axisFault);
        }
        operationSpecificListeners = new ConcurrentHashMap<String, SoapAdaptorListener>();
        if (null != adaptorSpecificListeners.put(operation, operationSpecificListeners)) {
            operationSpecificListeners = adaptorSpecificListeners.get(operation);
        }
    }

    if (axisService == null) {
        String axisServiceName = inputEventAdaptorConfiguration.getName();
        try {
            axisService = axisConfiguration.getService(axisServiceName);
        } catch (AxisFault axisFault) {
            throw new InputEventAdaptorEventProcessingException(
                    "There is no service with the name ==> " + axisServiceName, axisFault);
        }

    }

    String operationNameWithoutSlash = inputEventAdaptorMessageConfiguration.getInputMessageProperties()
            .get(SoapEventAdaptorConstants.ADAPTOR_MESSAGE_OPERATION_NAME).replaceAll("/", "");
    AxisOperation axisOperation = axisService.getOperation(new QName("", operationNameWithoutSlash));
    SubscriptionMessageReceiver messageReceiver = (SubscriptionMessageReceiver) axisOperation
            .getMessageReceiver();
    messageReceiver.addEventAdaptorListener(subscriptionId, inputEventAdaptorListener);

    operationSpecificListeners.put(subscriptionId,
            new SoapAdaptorListener(subscriptionId, inputEventAdaptorListener));
    return subscriptionId;
}
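
The adaptor above uses the return value of put to detect a lost race: if put returns a non-null previous value, another thread registered its map first, so the code re-reads the winner with get. putIfAbsent expresses the same idea in one call; a small sketch (the key and listener are illustrative, not the adaptor's types):

import java.util.concurrent.ConcurrentHashMap;

public class PutIfAbsentSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<String, String> listeners = new ConcurrentHashMap<>();

        // putIfAbsent returns the existing value if another thread won the race,
        // or null if our candidate was stored.
        String candidate = "listener-A";
        String existing = listeners.putIfAbsent("receive-operation", candidate);
        String inUse = (existing != null) ? existing : candidate;

        System.out.println(inUse); // listener-A
    }
}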

From source file:org.openhab.io.caldav.internal.CalDavLoaderImpl.java

private synchronized void addEventToMap(EventContainer eventContainer, boolean createTimer) {
    CalendarRuntime calendarRuntime = EventStorage.getInstance().getEventCache()
            .get(eventContainer.getCalendarId());

    ConcurrentHashMap<String, EventContainer> eventContainerMap = calendarRuntime.getEventMap();

    if (eventContainerMap.containsKey(eventContainer.getEventId())) {
        EventContainer eventContainerOld = eventContainerMap.get(eventContainer.getEventId());
        // event is already in map
        if (eventContainer.getLastChanged().isAfter(eventContainerOld.getLastChanged())) {
            log.debug("event is already in event map and newer -> delete the old one, reschedule timer");
            // cancel old jobs
            for (String timerKey : eventContainerOld.getTimerMap()) {
                try {
                    this.scheduler.deleteJob(JobKey.jobKey(timerKey));
                } catch (SchedulerException e) {
                    log.error("cannot cancel event with job-id: " + timerKey, e);
                }
            }
            eventContainerOld.getTimerMap().clear();

            // override event
            eventContainerMap.put(eventContainer.getEventId(), eventContainer);

            for (EventNotifier notifier : eventListenerList) {
                for (CalDavEvent event : eventContainerOld.getEventList()) {
                    log.trace("notify listener... {}", notifier);
                    try {
                        notifier.eventRemoved(event);
                    } catch (Exception e) {
                        log.error("error while invoking listener", e);
                    }
                }
            }
            for (EventNotifier notifier : eventListenerList) {
                for (CalDavEvent event : eventContainer.getEventList()) {
                    log.trace("notify listener... {}", notifier);
                    try {
                        notifier.eventLoaded(event);
                    } catch (Exception e) {
                        log.error("error while invoking listener", e);
                    }
                }
            }

            if (createTimer) {
                int index = 0;
                for (CalDavEvent event : eventContainer.getEventList()) {
                    if (event.getEnd().isAfterNow()) {
                        try {
                            createJob(eventContainer, event, index);
                        } catch (SchedulerException e) {
                            log.error("cannot create jobs for event: " + event.getShortName(), e);
                        }
                    }
                    index++;
                }
            }
        } else {
            // event is already in map and not updated, ignoring
        }
    } else {
        // event is new
        eventContainerMap.put(eventContainer.getEventId(), eventContainer);
        log.trace("listeners for events: {}", eventListenerList.size());
        for (EventNotifier notifier : eventListenerList) {
            for (CalDavEvent event : eventContainer.getEventList()) {
                log.trace("notify listener... {}", notifier);
                try {
                    notifier.eventLoaded(event);
                } catch (Exception e) {
                    log.error("error while invoking listener", e);
                }
            }
        }
        if (createTimer) {
            int index = 0;
            for (CalDavEvent event : eventContainer.getEventList()) {
                if (event.getEnd().isAfterNow()) {
                    try {
                        createJob(eventContainer, event, index);
                    } catch (SchedulerException e) {
                        log.error("cannot create jobs for event: " + event.getShortName(), e);
                    }
                }
                index++;
            }
        }
    }
}

From source file:com.web.server.WebServer.java

/**
 * Parses a URL-encoded query string into key/value pairs.
 * @param urlEncoded the URL-encoded string, for example "a=1&b=2"
 * @return a ConcurrentHashMap of parameter names to values
 */
public static ConcurrentHashMap parseUrlEncoded(String urlEncoded) {
    ConcurrentHashMap ParamValue = new ConcurrentHashMap();
    // URLDecoder.decode is a static method; no URLDecoder instance is needed.
    StringTokenizer paramGroup = new StringTokenizer(URLDecoder.decode(urlEncoded), "&");

    while (paramGroup.hasMoreTokens()) {

        StringTokenizer token = new StringTokenizer(paramGroup.nextToken(), "=");
        String key = "";
        String value = "";
        if (token.hasMoreTokens())
            key = token.nextToken();
        if (token.hasMoreTokens())
            value = token.nextToken();
        ParamValue.put(key, value);

    }
    return ParamValue;
}
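
A quick usage sketch for the parser above, with a hypothetical query string (it assumes the WebServer class above is on the classpath). Because ConcurrentHashMap.put rejects null keys and values, the parser's empty-string defaults matter for tokens that have no '=' part:

import java.util.concurrent.ConcurrentHashMap;

import com.web.server.WebServer;

public class ParseUrlEncodedUsage {
    public static void main(String[] args) {
        ConcurrentHashMap params = WebServer.parseUrlEncoded("name=John%20Doe&city=Oslo&flag");

        System.out.println(params.get("name")); // John Doe
        // "flag" has no '=' part, so it maps to "" rather than null;
        // ConcurrentHashMap.put(key, null) would throw NullPointerException.
        System.out.println(params.get("flag")); // prints an empty line
    }
}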