Example usage for java.util.concurrent ConcurrentHashMap get

Introduction

This page lists usage examples for the java.util.concurrent ConcurrentHashMap get method, collected from the source files below.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
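
A minimal, self-contained sketch of the call; the class and key names here are illustrative:

import java.util.concurrent.ConcurrentHashMap;

public class GetExample {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<String, Integer>();
        map.put("alpha", 1);

        Integer hit = map.get("alpha");   // 1
        Integer miss = map.get("beta");   // null: no mapping for "beta"

        System.out.println(hit + ", " + miss);
    }
}

Because ConcurrentHashMap does not permit null values, a null return from get always means the key is absent.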

Usage

From source file:org.atricore.idbus.kernel.main.databinding.JAXBUtils.java

/**
 * Get a JAXBContext for the given context packages.
 *
 * Note: The contextPackages object is used by multiple threads.  It should be considered immutable
 * and not altered by this method.
 *
 * @param contextPackages  TreeSet<String> of package names
 * @param constructionType (output value that indicates how the context was constructed)
 * @param forceArrays (forces the returned JAXBContext to include the array types)
 * @param key String key for the outer cache, derived from the package set
 * @param cacheKey ClassLoader used as the inner cache key (may be null)
 * @param properties properties passed to JAXBContext construction
 * @return JAXBContext
 * @throws javax.xml.bind.JAXBException
 */
public static JAXBContext getJAXBContext(TreeSet<String> contextPackages,
        Holder<CONSTRUCTION_TYPE> constructionType, boolean forceArrays, String key, ClassLoader cacheKey,
        Map<String, ?> properties) throws JAXBException {
    // JAXBContexts for the same class can be reused and are supposed to be thread-safe
    if (log.isDebugEnabled()) {
        log.debug("Following packages are in this batch of getJAXBContext() :");
        for (String pkg : contextPackages) {
            log.debug(pkg);
        }
    }
    if (JAXBUtilsMonitor.isMonitoring()) {
        JAXBUtilsMonitor.addPackageKey(contextPackages.toString());
    }

    // Get or Create The InnerMap using the package key
    ConcurrentHashMap<ClassLoader, JAXBContextValue> innerMap = null;
    SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>> softRef = jaxbMap.get(key);

    if (softRef != null) {
        innerMap = softRef.get();
    }

    if (innerMap == null) {
        synchronized (jaxbMap) {
            softRef = jaxbMap.get(key);
            if (softRef != null) {
                innerMap = softRef.get();
            }
            if (innerMap == null) {
                innerMap = new ConcurrentHashMap<ClassLoader, JAXBContextValue>();
                softRef = new SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>>(innerMap);
                jaxbMap.put(key, softRef);
            }
        }
    }

    // Now get the contextValue using either the classloader key or
    // the current Classloader
    ClassLoader cl = getContextClassLoader();
    JAXBContextValue contextValue = null;
    if (cacheKey != null) {
        if (log.isDebugEnabled()) {
            log.debug("Using supplied classloader to retrieve JAXBContext: " + cacheKey);
        }
        contextValue = innerMap.get(cacheKey);
    } else {
        if (log.isDebugEnabled()) {
            log.debug("Using classloader from Thread to retrieve JAXBContext: " + cl);
        }
        contextValue = innerMap.get(cl);
    }

    // If the context value is found, but the caller requested that the JAXBContext
    // contain arrays, then rebuild the JAXBContext value
    if (forceArrays && contextValue != null
            && contextValue.constructionType != JAXBUtils.CONSTRUCTION_TYPE.BY_CLASS_ARRAY_PLUS_ARRAYS) {
        if (log.isDebugEnabled()) {
            log.debug("Found a JAXBContextValue with constructionType=" + contextValue.constructionType
                    + "  but the caller requested a JAXBContext "
                    + " that includes arrays.  A new JAXBContext will be built");
        }
        contextValue = null;
    }

    if (contextPackages == null) {
        contextPackages = new TreeSet<String>();
    }
    if (contextValue == null) {
        synchronized (innerMap) {
            // Try to get the contextValue once more since sync was temporarily exited.
            ClassLoader clKey = (cacheKey != null) ? cacheKey : cl;
            contextValue = innerMap.get(clKey);
            adjustPoolSize(innerMap);
            if (forceArrays && contextValue != null
                    && contextValue.constructionType != JAXBUtils.CONSTRUCTION_TYPE.BY_CLASS_ARRAY_PLUS_ARRAYS) {
                contextValue = null;
            }
            if (contextValue == null) {
                // Create a copy of the contextPackages.  This new TreeSet will
                // contain only the valid contextPackages.
                // Note: The original contextPackage set is accessed by multiple
                // threads and should not be altered.

                TreeSet<String> validContextPackages = new TreeSet<String>(contextPackages);

                List<String> classRefs = pruneDirectives(validContextPackages);

                int numPackages = validContextPackages.size();

                contextValue = createJAXBContextValue(validContextPackages, clKey, forceArrays, properties,
                        classRefs);

                synchronized (jaxbMap) {
                    // Add the context value with the original package set
                    ConcurrentHashMap<ClassLoader, JAXBContextValue> map1 = null;
                    SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>> softRef1 = jaxbMap.get(key);
                    if (softRef1 != null) {
                        map1 = softRef1.get();
                    }
                    if (map1 == null) {
                        map1 = new ConcurrentHashMap<ClassLoader, JAXBContextValue>();
                        softRef1 = new SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>>(map1);
                        jaxbMap.put(key, softRef1);
                    }
                    map1.put(clKey, contextValue);

                    String validPackagesKey = validContextPackages.toString();

                    // Add the context value with the new package set
                    ConcurrentHashMap<ClassLoader, JAXBContextValue> map2 = null;
                    SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>> softRef2 = jaxbMap
                            .get(validPackagesKey);
                    if (softRef2 != null) {
                        map2 = softRef2.get();
                    }
                    if (map2 == null) {
                        map2 = new ConcurrentHashMap<ClassLoader, JAXBContextValue>();
                        softRef2 = new SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>>(map2);
                        jaxbMap.put(validPackagesKey, softRef2);
                    }
                    map2.put(clKey, contextValue);

                    if (log.isDebugEnabled()) {
                        log.debug("JAXBContext [created] for " + key);
                        log.debug("JAXBContext also stored by the list of valid packages:" + validPackagesKey);
                    }
                }
            }
        }
    } else {
        if (log.isDebugEnabled()) {
            log.debug("JAXBContext [from pool] for " + key);
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("JAXBContext constructionType= " + contextValue.constructionType);
        log.debug("JAXBContextValue = " + JavaUtils.getObjectIdentity(contextValue));
        log.debug("JAXBContext = " + JavaUtils.getObjectIdentity(contextValue.jaxbContext));
    }
    constructionType.value = contextValue.constructionType;
    return contextValue.jaxbContext;
}
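
The method above wraps each inner ConcurrentHashMap in a SoftReference so the cache can be reclaimed under memory pressure, and it re-checks the outer map inside a synchronized block because the reference may be cleared between the optimistic get and the lock. A stripped-down sketch of just that lookup pattern, with hypothetical names:

import java.lang.ref.SoftReference;
import java.util.concurrent.ConcurrentHashMap;

public class SoftCache {
    private final ConcurrentHashMap<String, SoftReference<ConcurrentHashMap<String, Object>>> outer =
            new ConcurrentHashMap<String, SoftReference<ConcurrentHashMap<String, Object>>>();

    public ConcurrentHashMap<String, Object> innerMapFor(String key) {
        SoftReference<ConcurrentHashMap<String, Object>> ref = outer.get(key);
        ConcurrentHashMap<String, Object> inner = (ref == null) ? null : ref.get();
        if (inner == null) {
            synchronized (outer) {
                // re-check: another thread may have repopulated the entry,
                // or the GC may have cleared the soft reference
                ref = outer.get(key);
                inner = (ref == null) ? null : ref.get();
                if (inner == null) {
                    inner = new ConcurrentHashMap<String, Object>();
                    outer.put(key, new SoftReference<ConcurrentHashMap<String, Object>>(inner));
                }
            }
        }
        return inner;
    }
}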

From source file:be.solidx.hot.test.TestScriptExecutors.java

@SuppressWarnings("rawtypes")
private Collection<Long> multiThreadedTest(final Script script, final int max,
        final ScriptExecutor scriptExecutor) throws InterruptedException {
    final int iterations = 100;
    ExecutorService executor = Executors.newFixedThreadPool(8);
    final ConcurrentHashMap<String, Long> results = new ConcurrentHashMap<String, Long>();
    final ConcurrentHashMap<String, Long> avgs = new ConcurrentHashMap<String, Long>();
    long benchStart = System.currentTimeMillis();
    for (int i = 0; i < iterations; i++) {
        Runnable runnable = new Runnable() {
            @SuppressWarnings("unchecked")
            @Override
            public void run() {
                try {
                    long res = 0;
                    Map<String, Object> parameters = new HashMap<String, Object>();
                    parameters.put("i", new Integer(max));
                    parameters.put("n", new Integer(0));

                    //long starting = System.currentTimeMillis();
                    Object object = scriptExecutor.execute(script, parameters);
                    if (object instanceof Bindings) {
                        Bindings bindings = (Bindings) object;
                        res = (Integer) bindings.get("result");
                        bindings.clear();
                    } else if (object instanceof Double) {
                        res = Math.round((Double) object);
                    } else if (object instanceof Long) {
                        res = (long) object;
                    } else
                        res = new Long((Integer) object);
                    long end = System.currentTimeMillis() - avgs.get(this.toString());
                    results.put(UUID.randomUUID().getLeastSignificantBits() + "", res);
                    avgs.put(this.toString(), end);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };
        avgs.put(runnable.toString(), System.currentTimeMillis());
        executor.submit(runnable);
    }

    while (results.size() < iterations) {
        Thread.sleep(50);
    }
    //Thread.sleep(20000);
    double sum = 0;
    for (Long value : avgs.values()) {
        sum += value;
    }
    System.out.println((sum / (double) iterations) + "");
    System.out.println("==== Time needed for all requests: " + (System.currentTimeMillis() - benchStart));
    results.remove("avg");
    executor = null;
    return results.values();
}
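
The test above detects completion by polling results.size() in a sleep loop. A common alternative, sketched below with hypothetical names and an inlined stand-in for the script work, is to shut the executor down and await termination, leaving the ConcurrentHashMap purely as the thread-safe result collector:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ResultCollector {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newFixedThreadPool(8);
        final ConcurrentHashMap<Integer, Long> results = new ConcurrentHashMap<Integer, Long>();
        for (int i = 0; i < 100; i++) {
            final int taskId = i;
            executor.submit(new Runnable() {
                @Override
                public void run() {
                    results.put(taskId, (long) taskId * taskId); // stand-in for the script result
                }
            });
        }
        executor.shutdown();                            // stop accepting new tasks
        executor.awaitTermination(1, TimeUnit.MINUTES); // block instead of polling results.size()
        System.out.println("completed: " + results.size() + ", task 0 -> " + results.get(0));
    }
}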

From source file:org.wso2.carbon.event.output.adaptor.cassandraext.CassandraExtendedEventAdaptorType.java

/**
 * @param outputEventMessageConfiguration
 *                 - topic name to publish messages
 * @param message  - is an Object[]{Event, EventDefinition}
 * @param outputEventAdaptorConfiguration
 *
 * @param tenantId
 */
public void publish(OutputEventAdaptorMessageConfiguration outputEventMessageConfiguration, Object message,
        OutputEventAdaptorConfiguration outputEventAdaptorConfiguration, int tenantId) {
    ConcurrentHashMap<OutputEventAdaptorConfiguration, EventAdaptorInfo> cassandraClusterCache = null;
    if (message instanceof Map) {
        try {

            cassandraClusterCache = tenantedCassandraClusterCache.get(tenantId);
            if (null == cassandraClusterCache) {
                cassandraClusterCache = new ConcurrentHashMap<OutputEventAdaptorConfiguration, EventAdaptorInfo>();
                if (null != tenantedCassandraClusterCache.putIfAbsent(tenantId, cassandraClusterCache)) {
                    cassandraClusterCache = tenantedCassandraClusterCache.get(tenantId);
                }
            }

            String keySpaceName = outputEventMessageConfiguration.getOutputMessageProperties()
                    .get(CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_KEY_SPACE_NAME);
            String columnFamilyName = outputEventMessageConfiguration.getOutputMessageProperties()
                    .get(CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_COLUMN_FAMILY_NAME);

            EventAdaptorInfo eventAdaptorInfo = cassandraClusterCache.get(outputEventAdaptorConfiguration);
            if (null == eventAdaptorInfo) {
                Map<String, String> properties = outputEventAdaptorConfiguration.getOutputProperties();

                String username = properties
                        .get(CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_USER_NAME);
                String password = properties
                        .get(CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_PASSWORD);
                String cassandraHost = properties
                        .get(CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_HOSTNAME);
                String cassandraPort = properties
                        .get(CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_PORT);
                String clusterName = properties
                        .get(CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_CLUSTER_NAME);

                Cluster.Builder clusterBuilder = Cluster.builder().addContactPoint(cassandraHost);
                if (cassandraPort != null && cassandraPort.length() > 0) {
                    clusterBuilder.withPort(Integer.parseInt(cassandraPort));
                }
                clusterBuilder.withClusterName(clusterName);
                if (username != null && username.length() > 0) {
                    clusterBuilder.withCredentials(username, password);
                }

                Cluster cluster = clusterBuilder.build();

                String indexAllColumnsString = properties
                        .get(CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_INDEX_ALL_COLUMNS);
                boolean indexAllColumns = false;
                if (indexAllColumnsString != null && indexAllColumnsString.equals("true")) {
                    indexAllColumns = true;
                }
                eventAdaptorInfo = new EventAdaptorInfo(cluster, indexAllColumns);
                if (null != cassandraClusterCache.putIfAbsent(outputEventAdaptorConfiguration,
                        eventAdaptorInfo)) {
                    eventAdaptorInfo = cassandraClusterCache.get(outputEventAdaptorConfiguration);
                } else {
                    log.info("Initiated Cassandra Writer " + outputEventAdaptorConfiguration.getName());
                }
            }

            MessageInfo messageInfo = eventAdaptorInfo.getMessageInfoMap().get(outputEventMessageConfiguration);

            String executionMode = outputEventMessageConfiguration.getOutputMessageProperties()
                    .get(CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_EXECUTION_MODE);
            String updateColKeys = outputEventMessageConfiguration.getOutputMessageProperties()
                    .get(CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_INDEX_KEYS);

            if (null == messageInfo) {

                messageInfo = new MessageInfo();
                // this is eternal and thread-safe.
                Session session = eventAdaptorInfo.getCluster().connect(keySpaceName);

                messageInfo.setSession(session);
                messageInfo.setInsertOrUpdate(executionMode.equalsIgnoreCase(resourceBundle.getString(
                        CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_EXECUTION_MODE_UPDATE)));
                if (messageInfo.isInsertOrUpdate()) {
                    ArrayList<String> keyList = new ArrayList<String>();
                    String[] keys = updateColKeys == null ? new String[0] : updateColKeys.trim().split(",");
                    for (String key : keys) {
                        keyList.add(key.trim());
                    }
                    messageInfo.setKeyColumns(keyList);
                }
                messageInfo.setPrimaryKey(outputEventMessageConfiguration.getOutputMessageProperties()
                        .get(CassandraExtendedEventAdaptorConstants.ADAPTOR_CASSANDRA_PRIMARY_KEY));
                if (null != eventAdaptorInfo.getMessageInfoMap().putIfAbsent(outputEventMessageConfiguration,
                        messageInfo)) {
                    messageInfo = eventAdaptorInfo.getMessageInfoMap().get(outputEventMessageConfiguration);
                }
            }

            // customized code
            Session session = messageInfo.getSession();
            Map<String, Object> attributeMap = (Map<String, Object>) message;

            // optional
            String primaryKey = messageInfo.getPrimaryKey();
            if (primaryKey != null && primaryKey.trim().length() == 0) {
                // not configured properly.
                primaryKey = null;
            }
            // optional
            ArrayList<String> indexCols = messageInfo.getKeyColumns();

            // create table and indexes if not exist.
            if (!messageInfo.isCfInitialized()) {
                try {
                    session.execute("select * from " + columnFamilyName + " limit 1");
                    messageInfo.setCfInitialized(true);
                } catch (Exception ex) {
                    // assuming table doesn't exist.

                    StringBuilder creationQuery = new StringBuilder("create table " + columnFamilyName + " (");
                    if (primaryKey == null || primaryKey.length() == 0) {
                        creationQuery.append("uuid_key text primary key, ");
                    }
                    for (String col : attributeMap.keySet()) {
                        creationQuery.append(col).append(" ").append("text");
                        if (col.equals(primaryKey)) {
                            creationQuery.append(" primary key");
                        }
                        creationQuery.append(",");
                    }
                    String query = creationQuery.substring(0, creationQuery.length() - 1) + ")";
                    session.execute(query);

                    // creating indexes
                    if (indexCols != null) {
                        for (String index : indexCols) {
                            if (!index.equals(primaryKey)) {
                                String indexQuery = "create index ind_" + columnFamilyName + "_" + index
                                        + " on " + columnFamilyName + " (" + index + ")";
                                session.execute(indexQuery);
                            }
                        }
                    }
                    messageInfo.setCfInitialized(true);
                }
            }
            // end of table creation

            // inserting and updating.
            if (messageInfo.isInsertOrUpdate()) {
                // checking whether the key cols values exist
                StringBuilder queryBuilder = new StringBuilder("update ");
                queryBuilder.append(columnFamilyName);
                queryBuilder.append(" set ");

                boolean addComma = false;
                for (Map.Entry<String, Object> entry : attributeMap.entrySet()) {
                    if (!entry.getKey().equals(primaryKey)) {
                        if (addComma) {
                            queryBuilder.append(",");
                        }
                        queryBuilder.append(entry.getKey());
                        queryBuilder.append(" = '");
                        queryBuilder.append(entry.getValue());
                        queryBuilder.append("'");
                        addComma = true;
                    }
                }

                queryBuilder.append(" where ");
                queryBuilder.append(primaryKey);
                queryBuilder.append(" = '");
                queryBuilder.append(attributeMap.get(primaryKey));
                queryBuilder.append("'");

                session.execute(queryBuilder.toString());
            } else {
                // inserting with uuid to allow duplicates
                // if user enters a primary key here, it will be similar to the update clause.
                StringBuilder queryBuilder = new StringBuilder("insert into ");
                queryBuilder.append(columnFamilyName);
                queryBuilder.append("  (");
                boolean addComma = false;
                if (primaryKey == null) {
                    queryBuilder.append("uuid_key, ");
                }
                for (Map.Entry<String, Object> entry : attributeMap.entrySet()) {
                    if (addComma) {
                        queryBuilder.append(", ");
                    }
                    queryBuilder.append(entry.getKey());
                    addComma = true;
                }

                queryBuilder.append(") values (");
                if (primaryKey == null) {
                    queryBuilder.append("'").append(UUID.randomUUID()).append("'");
                    queryBuilder.append(",");
                }
                addComma = false;
                for (Map.Entry<String, Object> entry : attributeMap.entrySet()) {
                    if (addComma) {
                        queryBuilder.append(",");
                    }
                    queryBuilder.append("'").append(entry.getValue()).append("'");
                    addComma = true;
                }
                queryBuilder.append(")");
                session.execute(queryBuilder.toString());

            }
            // end of customized code

        } catch (Throwable t) {
            if (cassandraClusterCache != null) {
                cassandraClusterCache.remove(outputEventAdaptorConfiguration);
            }
            log.error("Cannot connect to Cassandra: " + t.getMessage(), t);
        }
    }
}
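
Both caches above use the same idiom: an optimistic get, creation on a miss, and a fallback to the value returned by putIfAbsent when another thread wins the race. A minimal sketch of that idiom, with hypothetical type names:

import java.util.concurrent.ConcurrentHashMap;

public class TenantCaches {
    private final ConcurrentHashMap<Integer, ConcurrentHashMap<String, Object>> perTenant =
            new ConcurrentHashMap<Integer, ConcurrentHashMap<String, Object>>();

    public ConcurrentHashMap<String, Object> cacheFor(int tenantId) {
        ConcurrentHashMap<String, Object> cache = perTenant.get(tenantId);
        if (cache == null) {
            cache = new ConcurrentHashMap<String, Object>();
            // putIfAbsent returns the existing value if another thread inserted first
            ConcurrentHashMap<String, Object> raced = perTenant.putIfAbsent(tenantId, cache);
            if (raced != null) {
                cache = raced; // keep the map every other thread sees
            }
        }
        return cache;
    }
}

On Java 8 and later the same pattern collapses to perTenant.computeIfAbsent(tenantId, k -> new ConcurrentHashMap<String, Object>()).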

From source file:hu.sztaki.lpds.pgportal.portlets.workflow.EasyWorkflowPortlet.java

private void doAbort(String userID, String workflowID, String runtimeID) {
    try {
        if (PortalCacheService.getInstance().getUser(userID) != null) {
            if (PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID) != null) {
                if (PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID)
                        .getRuntime(runtimeID) != null) {
                    int wfStatus = PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID)
                            .getRuntime(runtimeID).getStatus();
                    if ((5 == wfStatus) || (23 == wfStatus) || (2 == wfStatus)) {
                        // PortalCacheService.getInstance().getUser(userID).getWorkflow(request.getParameter("workflow")).getRuntime(request.getParameter("rtid")).setStatus("22", 0);
                        // set suspending workflow status ...
                        PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID)
                                .getRuntime(runtimeID).setStatus("28", 0);
                        ConcurrentHashMap tmp = PortalCacheService.getInstance().getUser(userID)
                                .getWorkflow(workflowID).getRuntime(runtimeID).getJobsStatus();
                        Enumeration enm0 = tmp.keys();
                        while (enm0.hasMoreElements()) {
                            Object key0 = enm0.nextElement();
                            Enumeration enm1 = ((ConcurrentHashMap) tmp.get(key0)).keys();
                            while (enm1.hasMoreElements()) {
                                Object key1 = enm1.nextElement();
                                // System.out.println("--"+key0+"/"+key1+"="+((JobStatusData)((Hashtable)tmp.get(key0)).get(key1)).getStatus());
                                int ts = ((JobStatusData) ((ConcurrentHashMap) tmp.get(key0)).get(key1))
                                        .getStatus();
                                // only mark jobs not already in states 6, 7, 21, or 1
                                if (!(ts == 6 || ts == 7 || ts == 21 || ts == 1)) {
                                    PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID)
                                            .getRuntime(runtimeID)
                                            .addJobbStatus((String) key0, (String) key1, "22", "", -1);
                                }
                            }
                        }
                        new WorkflowAbortThread(userID, workflowID, runtimeID);
                    }
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

}
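
The traversal above walks nested maps with keys() plus a get() per key, a legacy Hashtable-style pattern. Iterating entrySet() avoids the extra lookup per key; a short sketch with illustrative types and values:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class NestedTraversal {
    public static void main(String[] args) {
        ConcurrentHashMap<String, ConcurrentHashMap<String, Integer>> jobs =
                new ConcurrentHashMap<String, ConcurrentHashMap<String, Integer>>();
        ConcurrentHashMap<String, Integer> inner = new ConcurrentHashMap<String, Integer>();
        inner.put("pid-1", 6);
        jobs.put("job-A", inner);

        // entrySet() yields key and value together, so no per-key get() is needed
        for (Map.Entry<String, ConcurrentHashMap<String, Integer>> slot : jobs.entrySet()) {
            for (Map.Entry<String, Integer> status : slot.getValue().entrySet()) {
                System.out.println(slot.getKey() + "/" + status.getKey() + " = " + status.getValue());
            }
        }
    }
}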

From source file:org.wso2.andes.kernel.OnflightMessageTracker.java

/**
 * Track that this message is buffered. Return true if eligible to buffer
 *
 * @param slot                 slot message being read in
 * @param andesMessageMetadata metadata to buffer
 * @return eligibility to buffer
 */
public boolean addMessageToBufferingTracker(Slot slot, AndesMessageMetadata andesMessageMetadata) {
    long messageID = andesMessageMetadata.getMessageID();
    boolean isOKToBuffer;
    if (log.isDebugEnabled()) {
        log.debug("Buffering message id = " + messageID + " slot = " + slot.toString());
    }
    String slotID = slot.getId();
    ConcurrentHashMap<Long, MsgData> messagesOfSlot = messageBufferingTracker.get(slotID);
    if (messagesOfSlot == null) {
        messagesOfSlot = new ConcurrentHashMap<Long, MsgData>();
        messageBufferingTracker.put(slotID, messagesOfSlot);
        // track destination to slot
        // use this map to remove messageBufferingTracker when subscriber close before receive all messages in slot
        Set<Slot> subscriptionSlots = subscriptionSlotTracker.get(slot.getDestinationOfMessagesInSlot());
        if (subscriptionSlots == null) {
            Set<Slot> newTrackedSlots = new HashSet<Slot>();
            newTrackedSlots.add(slot);
            subscriptionSlotTracker.put(slot.getDestinationOfMessagesInSlot(), newTrackedSlots);
        } else {
            subscriptionSlots.add(slot);
            subscriptionSlotTracker.put(slot.getDestinationOfMessagesInSlot(), subscriptionSlots);
        }
    }
    MsgData trackingData = messagesOfSlot.get(messageID);
    if (trackingData == null) {
        trackingData = new MsgData(messageID, slot, slot.getDestinationOfMessagesInSlot(),
                System.currentTimeMillis(), andesMessageMetadata.getExpirationTime(), MessageStatus.BUFFERED,
                andesMessageMetadata.getArrivalTime());
        msgId2MsgData.put(messageID, trackingData);
        messagesOfSlot.put(messageID, trackingData);
        isOKToBuffer = true;
    } else {
        if (log.isDebugEnabled()) {
            log.debug("Buffering rejected message id = " + messageID);
        }
        isOKToBuffer = false;
    }
    return isOKToBuffer;
}
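
The get-then-put sequences above are check-then-act steps: they stay correct only if callers are externally synchronized, since two threads could otherwise both see null and install different inner maps. On Java 8 and later, computeIfAbsent performs the lookup and insertion atomically; a minimal sketch with hypothetical types, where String stands in for the real MsgData:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class BufferingTracker {
    // slotId -> (messageId -> tracking data)
    private final ConcurrentHashMap<String, Map<Long, String>> tracker =
            new ConcurrentHashMap<String, Map<Long, String>>();

    public boolean track(String slotId, long messageId, String data) {
        Map<Long, String> messagesOfSlot =
                tracker.computeIfAbsent(slotId, id -> new ConcurrentHashMap<Long, String>());
        // putIfAbsent returns null only for the thread that actually inserted the entry
        return messagesOfSlot.putIfAbsent(messageId, data) == null;
    }
}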

From source file:hu.sztaki.lpds.pgportal.portlets.workflow.EasyWorkflowPortlet.java

/**
 * Rescue a given workflow einstance
 */
public void doRescue(ActionRequest request, ActionResponse response) throws PortletException {
    if (UserQuotaUtils.getInstance().userQuotaIsFull(request.getRemoteUser())) {
        request.setAttribute("msg", "portal.RealWorkflowPortlet.quotaisoverfull");
    } else {
        if (WorkflowInfo(request, response)) {
            doList(request, response);
            try {
                Vector errorJobPidList = new Vector();
                String portalID = PropertyLoader.getInstance().getProperty("service.url");
                String userID = request.getRemoteUser();
                String workflowID = request.getParameter("workflow");
                System.out.println(
                        "doRescue portalID:" + portalID + " userID:" + userID + " workflowID:" + workflowID);
                WorkflowData wData = PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID);
                String runtimeID = "" + wData.getEinstanceID();
                System.out.println("doRescue portalID:" + portalID + " userID:" + userID + " workflowID:"
                        + workflowID + " runtimeID:" + runtimeID);
                //
                // 23 = running/error
                int wfStatus = PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID)
                        .getRuntime(runtimeID).getStatus();
                if (23 == wfStatus) {
                    // record the running workflow status...
                    PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID)
                            .getRuntime(runtimeID).setStatus("5", 0);
                } else {
                    // record the resuming workflow status...
                    PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID)
                            .getRuntime(runtimeID).setStatus("29", 0);
                }
                //
                ConcurrentHashMap tmp = PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID)
                        .getRuntime(runtimeID).getJobsStatus();
                Enumeration enm0 = tmp.keys();
                int ts;
                while (enm0.hasMoreElements()) {
                    Object key0 = enm0.nextElement();
                    Enumeration enm1 = ((ConcurrentHashMap) tmp.get(key0)).keys();
                    while (enm1.hasMoreElements()) {
                        Object key1 = enm1.nextElement();
                        ts = ((JobStatusData) ((ConcurrentHashMap) tmp.get(key0)).get(key1)).getStatus();
                        if (ts == 25 || ts == 21 || ts == 22 || ts == 7 || ts == 15 || ts == 13 || ts == 12) {
                            // writes the init status...
                            // PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID).getRuntime(runtimeID).addJobbStatus((String) key0, (String) key1, "1", "", 0);
                            // removes the job from the registry...
                            PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowID)
                                    .getRuntime(runtimeID).removeJobStatus((String) key0, (String) key1);
                            // the jobID/jobPIDs must be collected for storage cleanup
                            ComDataBean comDataBean = new ComDataBean();
                            comDataBean.setJobID((String) key0);
                            comDataBean.setJobPID((String) key1);
                            errorJobPidList.addElement(comDataBean);
                        }
                    }
                }
                new WorkflowRescueThread(portalID, userID, workflowID, runtimeID, "" + wfStatus,
                        errorJobPidList);
                request.setAttribute("msg", "portal.EasyWorkflowPortlet.doRescue");
            } catch (Exception e) {
                request.setAttribute("msg", "portal.EasyWorkflowPortlet.doRescue.error");
                e.printStackTrace();
            }
        }
    }
}

From source file:com.chinamobile.bcbsp.comm.MessageQueuesForDisk.java

/**
 * Load bucket from disk.
 * @param queuesBuckets
 * @param bucketIndex
 * @param queuePath
 * @throws IOException
 */
private void loadBucket(ArrayList<BucketMeta> queuesBuckets, int bucketIndex, String queuePath)
        throws IOException {
    LOG.info("[MessageQueuesForDisk] is loading the [" + queuePath + " Bucket-" + bucketIndex + "] <<< size = "
            + queuesBuckets.get(bucketIndex).count + ".");
    long start = System.currentTimeMillis();
    /** Clock */
    File messagesDataFileBucket;
    FileReader frMessagesData;
    BufferedReader brMessagesData;
    messagesDataFileBucket = new File(this.messagesDataFile + "/" + queuePath + "/" + "bucket-" + bucketIndex);
    if (!messagesDataFileBucket.exists()) {
        throw new IOException("Bucket file" + messagesDataFileBucket + " does not exit!");
    }
    // Open file readers.
    frMessagesData = new FileReader(messagesDataFileBucket);
    brMessagesData = new BufferedReader(frMessagesData);
    // Read the file header.
    @SuppressWarnings("unused")
    String bucketHeader = brMessagesData.readLine();
    ConcurrentHashMap<String, ConcurrentLinkedQueue<IMessage>> queueMap = queuesBuckets
            .get(bucketIndex).queueMap;
    if (queueMap == null) {
        queueMap = new ConcurrentHashMap<String, ConcurrentLinkedQueue<IMessage>>();
    }
    String buffer;
    while ((buffer = brMessagesData.readLine()) != null) {
        String[] queueBuffer = buffer.split(Constants.KV_SPLIT_FLAG);
        if (queueBuffer[0].isEmpty()) {
            LOG.warn("[MessageQueuesForDisk] readLine = " + buffer);
        }
        String key = queueBuffer[0];
        ConcurrentLinkedQueue<IMessage> queue = queueMap.get(key);
        if (queue == null) {
            queue = stringToQueue(queueBuffer[1]);
            this.sizeOfHashMapsInMem = this.sizeOfHashMapsInMem
                    + (sizeOfRef + sizeOfInteger + sizeOfEmptyMessageQueue);
        } else {
            queue.addAll(stringToQueue(queueBuffer[1]));
        }
        queueMap.put(key, queue);
    }
    queuesBuckets.get(bucketIndex).queueMap = queueMap;
    brMessagesData.close();
    frMessagesData.close();
    // Update the meta data of the bucket.
    BucketMeta meta = queuesBuckets.get(bucketIndex);
    // Update the size of messages data in memory.
    this.sizeOfMessagesDataInMem = this.sizeOfMessagesDataInMem + (meta.length - meta.lengthInMemory);
    this.countOfMessagesDataInMem = this.countOfMessagesDataInMem + (meta.count - meta.countInMemory);
    meta.onDiskFlag = false;
    meta.lengthInMemory = meta.length;
    meta.countInMemory = meta.count;
    queuesBuckets.set(bucketIndex, meta);
    if (!messagesDataFileBucket.delete()) {
        throw new IOException("Bucket file delete failed!");
    }
    this.readDiskTime = this.readDiskTime + (System.currentTimeMillis() - start);
    /** Clock */
}
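
The load loop above does a get per key and either creates a queue or merges into the existing one before putting it back; that is safe here because each bucket is loaded by a single thread. On Java 8 and later the same merge can be written more compactly; a sketch with a hypothetical message type:

import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;

public class QueueMerge {
    static void merge(ConcurrentHashMap<String, ConcurrentLinkedQueue<String>> queueMap,
                      String key, List<String> parsedMessages) {
        // computeIfAbsent installs a fresh queue on a miss and returns the mapped queue
        queueMap.computeIfAbsent(key, k -> new ConcurrentLinkedQueue<String>())
                .addAll(parsedMessages);
    }
}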

From source file:hu.sztaki.lpds.pgportal.portlets.workflow.EasyWorkflowPortlet.java

/**
 * Workflow submit
 */
public void doSubmit(ActionRequest request, ActionResponse response) throws PortletException {
    if (UserQuotaUtils.getInstance().userQuotaIsFull(request.getRemoteUser())) {
        request.setAttribute("msg", "portal.RealWorkflowPortlet.quotaisoverfull");
    } else {
        if (WorkflowInfo(request, response)) {
            doList(request, response);

            ConcurrentHashMap runTimesData = PortalCacheService.getInstance().getUser(request.getRemoteUser())
                    .getWorkflow(request.getParameter("workflow")).getAllRuntimeInstance();
            Enumeration keys = runTimesData.keys();
            int ts;
            while (keys.hasMoreElements()) {//abort all einstance
                String key = (String) keys.nextElement();//rtid
                if (((WorkflowRunTime) runTimesData.get(key)).getText().equals("einstance")) {
                    // ((WorkflowRunTime) runTimesData.get(key)).;
                    if ((((WorkflowRunTime) runTimesData.get(key)).getStatus() == 5)
                            || (((WorkflowRunTime) runTimesData.get(key)).getStatus() == 2)) {
                        ((WorkflowRunTime) runTimesData.get(key)).setStatus("22", 0);

                        ConcurrentHashMap tmp = PortalCacheService.getInstance()
                                .getUser(request.getRemoteUser()).getWorkflow(request.getParameter("workflow"))
                                .getRuntime(key).getJobsStatus();
                        Enumeration enm0 = tmp.keys();
                        while (enm0.hasMoreElements()) {
                            Object key0 = enm0.nextElement();
                            Enumeration enm1 = ((Hashtable) tmp.get(key0)).keys();
                            while (enm1.hasMoreElements()) {
                                Object key1 = enm1.nextElement();
                                ts = ((JobStatusData) ((Hashtable) tmp.get(key0)).get(key1)).getStatus();
                                // only mark jobs not already in states 6, 7, 21, or 1
                                if (!(ts == 6 || ts == 7 || ts == 21 || ts == 1)) {
                                    PortalCacheService.getInstance().getUser(request.getRemoteUser())
                                            .getWorkflow(request.getParameter("workflow")).getRuntime(key)
                                            .addJobbStatus((String) key0, (String) key1, "22", "", 0);
                                }
                            }
                        }
                        System.out.println("abort EINSTANCE " + key);
                        new WorkflowAbortThread(request.getRemoteUser(), request.getParameter("workflow"), key);
                    }
                    System.out.println("delete EINSTANCE " + key);
                    RealWorkflowUtils.getInstance().deleteWorkflowInstance(request.getRemoteUser(),
                            request.getParameter("workflow"), key);
                }
            }
            int max = Integer.parseInt(PropertyLoader.getInstance().getProperty("repeat.submit.workflow"));
            for (int i = 0; i < max; i++) {
                new WorkflowSubmitThread(
                        PortalCacheService.getInstance().getUser(request.getRemoteUser())
                                .getWorkflow(request.getParameter("workflow")),
                        request.getRemoteUser(), einstance, request.getParameter("wfchg_type"));
            }
            request.setAttribute("msg", "portal.EasyWorkflowPortlet.doSubmit");
        }
    }
}

From source file:com.pearson.eidetic.driver.threads.RefreshAwsAccountVolumes.java

private ConcurrentHashMap<Region, ArrayList<Volume>> refreshCopyVolumeSnapshots(Volume volume,
        JSONObject eideticParameters, ConcurrentHashMap<Region, ArrayList<Volume>> localCopyVolumeSnapshots,
        Region region) {
    if (volume == null || eideticParameters == null || region == null) {
        return localCopyVolumeSnapshots;
    }
    if (!eideticParameters.containsKey("CopySnapshot")) {
        if (volCopyHasTag_.containsKey(volume)) {
            // The volume previously had the tag; we leave volCopyHasTag_ at false.
            return localCopyVolumeSnapshots;
        } else {
            // It previously did not, and we do not worry;
            // continue along to the next volume.
            return localCopyVolumeSnapshots;
        }
    } else {
        // It does have CopySnapshot. Did it previously?
        if (volCopyHasTag_.containsKey(volume)) {
            //Yeah it does, set to true
            try {
                volCopyHasTag_.replace(volume, true);
            } catch (Exception e) {
                logger.info("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                        + "\",Event=\"Error\", Error=\"error adding vol to CopyVolumeSnapshots\", Volume_id=\""
                        + volume.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                        + StackTrace.getStringFromStackTrace(e) + "\"");
            }
        } else {
            //It did not, we add to localCopyVolumeSnapshots
            try {
                localCopyVolumeSnapshots.get(region).add(volume);
            } catch (Exception e) {
                logger.info("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                        + "\",Event=\"Error\", Error=\"error adding vol to CopyVolumeSnapshots\", Volume_id=\""
                        + volume.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                        + StackTrace.getStringFromStackTrace(e) + "\"");
            }

            try {
                volCopyHasTag_.put(volume, true);
            } catch (Exception e) {
                logger.info("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                        + "\",Event=\"Error\", Error=\"error adding vol to CopyVolumeSnapshots\", Volume_id=\""
                        + volume.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                        + StackTrace.getStringFromStackTrace(e) + "\"");
            }
        }
    }
    return localCopyVolumeSnapshots;
}