Example usage for java.util.concurrent ConcurrentHashMap get

List of usage examples for java.util.concurrent ConcurrentHashMap get

Introduction

On this page you can find usage examples for java.util.concurrent.ConcurrentHashMap#get, taken from the source files listed below.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
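
A minimal sketch of this contract before the real-world examples; the class and variable names here are invented for illustration:

import java.util.concurrent.ConcurrentHashMap;

public class GetContractExample {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> counts = new ConcurrentHashMap<>();
        counts.put("alpha", 1);

        System.out.println(counts.get("alpha"));             // 1
        System.out.println(counts.get("beta"));              // null: no mapping for "beta"
        System.out.println(counts.getOrDefault("beta", 0));  // 0: fallback instead of null
    }
}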

Usage

From source file:com.taobao.diamond.server.service.GroupService.java

// @PostConstruct
public void loadGroupInfo() {
    List<GroupInfo> allGroupInfo = persistService.findAllGroupInfo();
    ConcurrentHashMap<String, ConcurrentHashMap<String, GroupInfo>> tempMap = new ConcurrentHashMap<String, ConcurrentHashMap<String, GroupInfo>>();
    log.warn("...");
    if (allGroupInfo != null) {
        for (GroupInfo info : allGroupInfo) {
            String address = info.getAddress();
            String dataId = info.getDataId();
            if (tempMap.get(address) == null) {
                tempMap.put(address, new ConcurrentHashMap<String, GroupInfo>());
            }
            tempMap.get(address).put(dataId, info);
        }
    }
    this.addressGroupCache = tempMap;
    log.warn("..." + (allGroupInfo != null ? allGroupInfo.size() : 0));
}
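
The check-then-put sequence above is safe only because tempMap is a method-local map that no other thread can see yet. On a shared ConcurrentHashMap the same grouping is usually written with computeIfAbsent, which creates the inner map atomically. A minimal sketch under that assumption (a plain String payload stands in for GroupInfo):

import java.util.concurrent.ConcurrentHashMap;

public class GroupCacheSketch {
    // address -> (dataId -> group payload); a stand-in for the GroupInfo cache above
    private final ConcurrentHashMap<String, ConcurrentHashMap<String, String>> cache =
            new ConcurrentHashMap<>();

    public void put(String address, String dataId, String info) {
        // computeIfAbsent creates the inner map atomically, so concurrent callers
        // never overwrite each other's inner map
        cache.computeIfAbsent(address, k -> new ConcurrentHashMap<>()).put(dataId, info);
    }

    public String get(String address, String dataId) {
        ConcurrentHashMap<String, String> inner = cache.get(address);
        return inner == null ? null : inner.get(dataId); // get returns null when unmapped
    }
}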

From source file:com.yahoo.elide.core.EntityDictionary.java

/**
 * Get a type for a field on an entity.
 *
 * @param entityClass Entity class
 * @param identifier Field to lookup type
 * @return Type of entity
 */
public Class<?> getType(Class<?> entityClass, String identifier) {
    ConcurrentHashMap<String, Class<?>> fieldTypes = entityBinding(entityClass).fieldsToTypes;
    return fieldTypes == null ? null : fieldTypes.get(identifier);
}

From source file:com.pearson.eidetic.driver.threads.RefreshAwsAccountVolumes.java

private ConcurrentHashMap<Region, ArrayList<Volume>> processLocalVolumeTime(
        ConcurrentHashMap<Region, ArrayList<Volume>> localVolumeTime, Region region) {
    for (Map.Entry pair : volTimeHasTag_.entrySet()) {
        Boolean contin = (Boolean) pair.getValue();
        if (contin) {
            continue;
        }
        Volume vol = (Volume) pair.getKey();
        try {
            localVolumeTime.get(region).remove(vol);
        } catch (Exception e) {
            logger.info("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=\"Error\", Error=\"error removing vol from VolumeTime_\", Volume_id=\""
                    + vol.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                    + StackTrace.getStringFromStackTrace(e) + "\"");
        }
    }
    volTimeHasTag_.clear();
    return localVolumeTime;
}

From source file:com.pearson.eidetic.driver.threads.RefreshAwsAccountVolumes.java

private ConcurrentHashMap<Region, ArrayList<Volume>> processLocalVolumeNoTime(
        ConcurrentHashMap<Region, ArrayList<Volume>> localVolumeNoTime, Region region) {
    for (Map.Entry pair : volNoTimeHasTag_.entrySet()) {
        Boolean contin = (Boolean) pair.getValue();
        if (contin) {
            continue;
        }
        Volume vol = (Volume) pair.getKey();
        try {
            localVolumeNoTime.get(region).remove(vol);
        } catch (Exception e) {
            logger.info("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=\"Error\", Error=\"error removing vol from VolumeNoTime_\", Volume_id=\""
                    + vol.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                    + StackTrace.getStringFromStackTrace(e) + "\"");
        }
    }
    volNoTimeHasTag_.clear();
    return localVolumeNoTime;
}

From source file:com.pearson.eidetic.driver.threads.RefreshAwsAccountVolumes.java

private ConcurrentHashMap<Region, ArrayList<Volume>> processLocalCopyVolumeSnapshots(
        ConcurrentHashMap<Region, ArrayList<Volume>> localCopyVolumeSnapshots, Region region) {
    for (Map.Entry pair : volCopyHasTag_.entrySet()) {
        Boolean contin = (Boolean) pair.getValue();
        if (contin) {
            continue;
        }
        Volume vol = (Volume) pair.getKey();
        try {
            localCopyVolumeSnapshots.get(region).remove(vol);
        } catch (Exception e) {
            logger.info("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=\"Error\", Error=\"error removing vol from CopyVolumeSnapshots_\", Volume_id=\""
                    + vol.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                    + StackTrace.getStringFromStackTrace(e) + "\"");
        }
    }
    volCopyHasTag_.clear();
    return localCopyVolumeSnapshots;
}
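
All three methods above follow the same pattern: get(region) fetches the region's volume list, and the try/catch absorbs the NullPointerException thrown when the region has no entry. Checking the result of get for null makes that case explicit instead of exceptional. A sketch of the same idea with simplified types (String volume ids instead of Volume objects):

import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;

public class RegionVolumeSketch {
    // region name -> volume ids tracked for that region
    private final ConcurrentHashMap<String, List<String>> volumesByRegion = new ConcurrentHashMap<>();

    public void addVolume(String region, String volumeId) {
        // CopyOnWriteArrayList keeps the inner list safe if several threads mutate it;
        // a plain ArrayList value (as in the example above) relies on single-threaded access
        volumesByRegion.computeIfAbsent(region, k -> new CopyOnWriteArrayList<>()).add(volumeId);
    }

    public void removeVolume(String region, String volumeId) {
        List<String> volumes = volumesByRegion.get(region);
        if (volumes == null) {
            return; // nothing tracked for this region, no exception needed
        }
        volumes.remove(volumeId);
    }
}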

From source file:org.wso2.carbon.event.output.adaptor.hbase.HbaseEventAdaptorType.java

/**
 * @param outputEventMessageConfiguration
 *                - topic name to publish messages
 * @param message - an Object[]{Event, EventDefinition}
 */
public void publish(OutputEventAdaptorMessageConfiguration outputEventMessageConfiguration, Object message,
        OutputEventAdaptorConfiguration outputEventAdaptorConfiguration, int tenantId) {

    if (message instanceof Map) {

        Map<String, Object> messageObject = (Map<String, Object>) message;
        ConcurrentHashMap<String, Configuration> hbaseConfigurationCache = hbaseConfigurationMap.get(tenantId);
        if (null == hbaseConfigurationCache) {
            hbaseConfigurationCache = new ConcurrentHashMap<String, Configuration>();
            if (null != hbaseConfigurationMap.putIfAbsent(tenantId, hbaseConfigurationCache)) {
                hbaseConfigurationCache = hbaseConfigurationMap.get(tenantId);
            }
        }

        Configuration hBaseConfiguration = hbaseConfigurationCache
                .get(outputEventAdaptorConfiguration.getName());
        //            if (null == hBaseConfiguration) {
        //                hBaseConfiguration = HBaseConfiguration.create();
        //                //hBaseConfiguration.addResource(new Path(outputEventAdaptorConfiguration.getOutputProperties().get(HbaseEventAdaptorConstants.ADAPTOR_HBASE_CONF_PATH)));
        //                if (null != hbaseConfigurationCache.putIfAbsent(outputEventAdaptorConfiguration.getName(), hBaseConfiguration)) {
        //                    hBaseConfiguration = hbaseConfigurationCache.get(outputEventAdaptorConfiguration.getName());
        //                } else {
        //                    log.info("Initiated HBase Writer " + outputEventAdaptorConfiguration.getName());
        //                }
        //
        //            }

        String[] columnNames = messageObject.keySet().toArray(new String[0]);
        String tableName = outputEventMessageConfiguration.getOutputMessageProperties()
                .get(HbaseEventAdaptorConstants.ADAPTOR_HBASE_TABLE_NAME);
        try {
            creatTable(hBaseConfiguration, tableName, columnNames);
            for (Map.Entry<String, Object> stringObjectEntry : messageObject.entrySet()) {
                Map.Entry pairs = (Map.Entry) stringObjectEntry;
                addRecord(hBaseConfiguration, tableName, "test", pairs.getKey().toString(), " ",
                        pairs.getValue().toString());
            }

        } catch (Exception e) {
            log.error("Error occurred when trying to publish event. Error: " + e);
        }
    }
}
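
The get / putIfAbsent / get sequence at the top of publish is the usual idiom for lazily creating a per-tenant cache entry without losing a concurrent insertion: if putIfAbsent returns a non-null previous value, another thread won the race and its map is re-read and used. computeIfAbsent expresses the same thing in one call. A brief sketch, with a plain String standing in for the HBase Configuration so it stays self-contained:

import java.util.concurrent.ConcurrentHashMap;

public class TenantCacheSketch {
    private final ConcurrentHashMap<Integer, ConcurrentHashMap<String, String>> cacheByTenant =
            new ConcurrentHashMap<>();

    // Idiom used in the example above: get, then putIfAbsent, then fall back on a lost race.
    ConcurrentHashMap<String, String> lookupVerbose(int tenantId) {
        ConcurrentHashMap<String, String> cache = cacheByTenant.get(tenantId);
        if (cache == null) {
            cache = new ConcurrentHashMap<>();
            ConcurrentHashMap<String, String> existing = cacheByTenant.putIfAbsent(tenantId, cache);
            if (existing != null) {
                cache = existing; // another thread inserted first; use its map
            }
        }
        return cache;
    }

    // Equivalent one-liner.
    ConcurrentHashMap<String, String> lookupConcise(int tenantId) {
        return cacheByTenant.computeIfAbsent(tenantId, id -> new ConcurrentHashMap<>());
    }
}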

From source file:org.objectweb.proactive.extensions.dataspaces.vfs.VFSSpacesMountManagerImpl.java

/**
 * Makes sure that the provided file system is mounted for the given dataspace (identified by its root url)
 * @param mountingPoint dataspace uri
 * @param spaceRootFOUri file system root
 * @return true if the file system is mounted
 * @throws FileSystemException
 */
private boolean ensureFileSystemIsMounted(final DataSpacesURI mountingPoint, final String spaceRootFOUri)
        throws FileSystemException {
    ConcurrentHashMap<String, FileObject> fileSystems = null;
    DataSpacesURI spacePart = mountingPoint.getSpacePartOnly();
    try {
        readLock.lock();
        fileSystems = mountedSpaces.get(spacePart);
        // already mounted
        if (fileSystems.get(spaceRootFOUri) != null) {
            return true;
        }
    } finally {
        readLock.unlock();
    }
    logger.debug("[VFSMountManager] Request mounting VFS root = " + spaceRootFOUri);
    FileObject mountedRoot;
    try {
        mountedRoot = VFSMountManagerHelper.mount(spaceRootFOUri);

        // the fs is accessible
        try {
            writeLock.lock();
            fileSystems.put(spaceRootFOUri, mountedRoot);
            mountedSpaces.put(spacePart, fileSystems);
        } finally {
            writeLock.unlock();
        }
        if (logger.isDebugEnabled())
            logger.debug(String.format("[VFSMountManager] Mounted space: %s (access URL: %s)", spacePart,
                    spaceRootFOUri));
        return true;

    } catch (org.apache.commons.vfs.FileSystemException x) {
        String err = String.format("[VFSMountManager] Could not access URL %s to mount %s", spaceRootFOUri,
                spacePart);
        logger.info(err);
        removeSpaceRootUri(spacePart, spaceRootFOUri);
        throw new FileSystemException(err, x);

    }
}

From source file:org.wso2.carbon.event.output.adaptor.mqtt.MQTTEventAdaptorType.java

@Override
public void removeConnectionInfo(OutputEventAdaptorMessageConfiguration outputEventAdaptorMessageConfiguration,
        OutputEventAdaptorConfiguration outputEventAdaptorConfiguration, int tenantId) {

    ConcurrentHashMap<String, ConcurrentHashMap<String, MQTTAdaptorPublisher>> clientIdSpecificEventSenderMap = publisherMap
            .get(outputEventAdaptorConfiguration.getName());
    if (clientIdSpecificEventSenderMap != null) {
        String clientId = outputEventAdaptorMessageConfiguration.getOutputMessageProperties()
                .get(MQTTEventAdaptorConstants.ADAPTOR_MESSAGE_CLIENTID);
        ConcurrentHashMap<String, MQTTAdaptorPublisher> topicSpecificEventSenderMap = clientIdSpecificEventSenderMap
            .get(clientId);
        if (topicSpecificEventSenderMap != null) {
            String topicName = outputEventAdaptorMessageConfiguration.getOutputMessageProperties()
                    .get(MQTTEventAdaptorConstants.ADAPTOR_MESSAGE_TOPIC);
            MQTTAdaptorPublisher mqttAdaptorPublisher = topicSpecificEventSenderMap.get(topicName);
            if (mqttAdaptorPublisher != null) {
                try {
                    mqttAdaptorPublisher.close();
                } catch (OutputEventAdaptorEventProcessingException e) {
                    throw new OutputEventAdaptorEventProcessingException(e);
                }
            }
            topicSpecificEventSenderMap.remove(topicName);
        }
    }
}
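
removeConnectionInfo walks three levels of nested ConcurrentHashMaps, null-checking the result of each get before descending. When only a read is needed, the same walk can be written as one null-safe chain. A small sketch whose map layout and names merely mirror the example (String stands in for MQTTAdaptorPublisher):

import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

public class NestedLookupSketch {
    // adaptor name -> client id -> topic -> publisher
    private final ConcurrentHashMap<String, ConcurrentHashMap<String, ConcurrentHashMap<String, String>>> publishers =
            new ConcurrentHashMap<>();

    String find(String adaptorName, String clientId, String topic) {
        // each map(...) step only runs when the previous get returned a non-null map
        return Optional.ofNullable(publishers.get(adaptorName))
                .map(byClient -> byClient.get(clientId))
                .map(byTopic -> byTopic.get(topic))
                .orElse(null);
    }
}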

From source file:com.xpg.gokit.activity.GokitControlActivity.java

@Override
public boolean didReceiveData(XPGWifiDevice device,
        java.util.concurrent.ConcurrentHashMap<String, Object> dataMap, int result) {
    if (dataMap.get("data") != null) {
        Log.i("info", (String) dataMap.get("data"));
        Message msg = new Message();
        msg.obj = dataMap.get("data");
        msg.what = RESP;
        handler.sendMessage(msg);
    }

    if (dataMap.get("alters") != null) {
        Log.i("info", (String) dataMap.get("alters"));
        Message msg = new Message();
        msg.obj = dataMap.get("alters");
        msg.what = LOG;
        handler.sendMessage(msg);
    }

    if (dataMap.get("faults") != null) {
        Log.i("info", (String) dataMap.get("faults"));
        Message msg = new Message();
        msg.obj = dataMap.get("faults");
        msg.what = LOG;
        handler.sendMessage(msg);
    }

    if (dataMap.get("binary") != null) {
        Log.i("info", "Binary data:" + bytesToHex((byte[]) dataMap.get("binary")));
    }

    return true;
}

From source file:org.wso2.carbon.event.input.adaptor.http.HTTPEventAdaptorType.java

public String subscribe(InputEventAdaptorMessageConfiguration inputEventAdaptorMessageConfiguration,
        InputEventAdaptorListener inputEventAdaptorListener,
        InputEventAdaptorConfiguration inputEventAdaptorConfiguration, AxisConfiguration axisConfiguration) {
    String subscriptionId = UUID.randomUUID().toString();

    String topic = inputEventAdaptorMessageConfiguration.getInputMessageProperties()
            .get(HTTPEventAdaptorConstants.ADAPTOR_MESSAGE_TOPIC);
    int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
    ConcurrentHashMap<String, ConcurrentHashMap<String, ArrayList<HTTPAdaptorListener>>> tenantSpecificListenerMap = inputEventAdaptorListenerMap
            .get(tenantId);
    if (tenantSpecificListenerMap == null) {
        tenantSpecificListenerMap = new ConcurrentHashMap<String, ConcurrentHashMap<String, ArrayList<HTTPAdaptorListener>>>();
        inputEventAdaptorListenerMap.put(tenantId, tenantSpecificListenerMap);
    }

    ConcurrentHashMap<String, ArrayList<HTTPAdaptorListener>> adaptorSpecificListeners = tenantSpecificListenerMap
            .get(inputEventAdaptorConfiguration.getName());

    if (adaptorSpecificListeners == null) {
        adaptorSpecificListeners = new ConcurrentHashMap<String, ArrayList<HTTPAdaptorListener>>();
        if (null != tenantSpecificListenerMap.put(inputEventAdaptorConfiguration.getName(),
                adaptorSpecificListeners)) {
            adaptorSpecificListeners = tenantSpecificListenerMap.get(inputEventAdaptorConfiguration.getName());
        }
    }

    ArrayList<HTTPAdaptorListener> topicSpecificListeners = adaptorSpecificListeners.get(topic);
    ArrayList<HTTPAdaptorListener> newTopicSpecificListeners;
    if (topicSpecificListeners == null || topicSpecificListeners.size() == 0) {
        HTTPEventAdaptorServiceDS.registerDynamicEndpoint(inputEventAdaptorConfiguration.getName(), topic,
                tenantId);
        newTopicSpecificListeners = new ArrayList<HTTPAdaptorListener>();
    } else {
        newTopicSpecificListeners = new ArrayList<HTTPAdaptorListener>(topicSpecificListeners);
    }

    newTopicSpecificListeners.add(new HTTPAdaptorListener(subscriptionId, inputEventAdaptorListener, tenantId));
    adaptorSpecificListeners.put(topic, newTopicSpecificListeners);

    return subscriptionId;
}