Example usage for java.util.concurrent ConcurrentHashMap ConcurrentHashMap

List of usage examples for java.util.concurrent ConcurrentHashMap ConcurrentHashMap

Introduction

In this page you can find the example usage for java.util.concurrent ConcurrentHashMap ConcurrentHashMap.

Prototype

public ConcurrentHashMap() 

Source Link

Document

Creates a new, empty map with the default initial table size (16).

Usage

From source file:com.hortonworks.streamline.streams.notification.service.NotificationQueueHandler.java

/**
 * Creates a queue handler backed by a fixed-size worker pool.
 *
 * @param nThreads number of threads in the notification executor pool
 */
public NotificationQueueHandler(int nThreads) {
    // TODO: evaluate ThreadPoolExecutor with bounded queue size
    this.executorService = Executors.newFixedThreadPool(nThreads);
    this.taskMap = new ConcurrentHashMap<>();
}

From source file:com.espertech.esper.core.InternalEventRouterImpl.java

/**
 * Ctor. Initializes the preprocessor cache (concurrent, read from routing
 * threads) and the insertion-ordered descriptor registry.
 */
public InternalEventRouterImpl() {
    preprocessors = new ConcurrentHashMap<EventType, NullableObject<InternalEventRouterPreprocessor>>();
    descriptors = new LinkedHashMap<UpdateDesc, IRDescEntry>();
}

From source file:com.openteach.diamond.network.waverider.session.DefaultSessionManager.java

/**
 * Creates a session manager with empty session registries.
 *
 * @param config waverider configuration used by this manager
 */
public DefaultSessionManager(WaveriderConfig config) {
    this.config = config;
    this.idleSessionList = new CopyOnWriteArrayList<DefaultSession>();
    this.sessionMap = new ConcurrentHashMap<Long, DefaultSession>();
}

From source file:com.bluepixel.security.manager.Server.java

/**
 * Generates an RSA key pair, stores the web-safe Base64 encodings of the
 * public and private keys, and fills {@code secretKeys} with random words
 * mapped to their ciphertext under the private key.
 *
 * @throws IllegalStateException if key generation or encryption fails
 */
private void generateKey() {
    try {
        KeyPairGenerator keyGen = KeyPairGenerator.getInstance(DEFAULT_ALGORITHM);
        keyGen.initialize(DEFAULT_KEY_LENGTH);
        KeyPair keypair = keyGen.generateKeyPair();
        PublicKey pbKey = keypair.getPublic();
        PrivateKey piKey = keypair.getPrivate();

        publicKey = Base64.encodeWebSafe(pbKey.getEncoded(), false);
        privateKey = Base64.encodeWebSafe(piKey.getEncoded(), false);

        Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding");

        // NOTE(review): encrypting with the PRIVATE key — presumably so
        // holders of the public key can verify the words; confirm intent.
        cipher.init(Cipher.ENCRYPT_MODE, piKey);

        secretKeys = new ConcurrentHashMap<String, String>();
        String[] randomKeys = generateRandomWords(10);
        for (String key : randomKeys) {
            // NOTE(review): getBytes() uses the platform charset; fine for
            // ASCII words, but consider an explicit charset.
            String cipherText = Base64.encodeWebSafe(cipher.doFinal(key.getBytes()), false);
            secretKeys.put(key, cipherText);
        }
    } catch (NoSuchAlgorithmException | InvalidKeyException | NoSuchPaddingException
            | IllegalBlockSizeException | BadPaddingException e) {
        // The original swallowed these silently, leaving publicKey/privateKey/
        // secretKeys uninitialized and deferring failure to a later NPE.
        throw new IllegalStateException("Failed to generate RSA key material", e);
    }
}

From source file:com.twitter.distributedlog.feature.DynamicConfigurationFeatureProvider.java

/**
 * Creates a feature provider that serves features from a dynamically
 * reloaded configuration.
 *
 * @param rootScope   root scope for features
 * @param conf        distributedlog configuration
 * @param statsLogger stats logger for feature metrics
 */
public DynamicConfigurationFeatureProvider(String rootScope, DistributedLogConfiguration conf,
        StatsLogger statsLogger) {
    super(rootScope, conf, statsLogger);
    this.featuresConf = new ConcurrentBaseConfiguration();
    this.features = new ConcurrentHashMap<String, SettableFeature>();
    // Single-threaded scheduler with a recognizable thread name for reloads.
    this.executorService = Executors.newSingleThreadScheduledExecutor(
            new ThreadFactoryBuilder().setNameFormat("DynamicConfigurationFeatureProvider-%d").build());
}

From source file:com.evon.injectTemplate.InjectTemplateFilter.java

/**
 * Initializes the filter: creates the content-type and template caches and
 * loads the template configuration for this servlet context.
 *
 * @param filterConfig servlet-supplied filter configuration
 * @throws ServletException if the template configuration cannot be loaded
 */
@Override
public void init(FilterConfig filterConfig) throws ServletException {
    // Cache creation cannot throw TemplateException; keep it out of the try.
    contentTypes = new ConcurrentHashMap<>();
    templates = new ConcurrentHashMap<>();

    try {
        TemplateConfig.load(filterConfig.getServletContext());
        printConfig();
    } catch (TemplateException e) {
        // Rethrow with the cause preserved; the container logs it. The
        // original also called e.printStackTrace(), duplicating the report
        // on stderr.
        throw new ServletException(e.getMessage(), e);
    }
}

From source file:com.clustercontrol.infra.view.action.DeleteInfraFileAction.java

/**
 * Deletes the infra files currently selected in the manager view, grouped
 * by manager, after asking the user for confirmation.
 *
 * @param event the handler execution event
 * @return always {@code null}
 * @throws ExecutionException declared by the command framework; not thrown here
 */
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
    InfraFileManagerView view = getView(event);
    if (view == null) {
        m_log.info("execute: view is null");
        return null;
    }

    List<String> fileIdList = getSelectedInfraFileIdList(view);
    if (fileIdList.isEmpty()) {
        return null;
    }

    StructuredSelection selection = null;
    if (view.getComposite().getTableViewer().getSelection() instanceof StructuredSelection) {
        selection = (StructuredSelection) view.getComposite().getTableViewer().getSelection();
    }
    if (selection == null || selection.isEmpty()) {
        return null;
    }

    // Group the selected file ids by manager name and build the display
    // string in a single pass (the original iterated the selection twice).
    // NOTE(review): this map is thread-confined; ConcurrentHashMap is kept
    // only to avoid introducing a new import — a HashMap would suffice.
    Map<String, List<String>> map = new ConcurrentHashMap<String, List<String>>();
    StringBuilder strFileIds = new StringBuilder();
    for (Object object : selection.toList()) {
        String managerName = (String) ((ArrayList<?>) object).get(GetInfraFileManagerTableDefine.MANAGER_NAME);
        String fileId = (String) ((ArrayList<?>) object).get(GetInfraFileManagerTableDefine.FILE_ID);
        List<String> ids = map.get(managerName);
        if (ids == null) {
            ids = new ArrayList<String>();
            map.put(managerName, ids);
        }
        ids.add(fileId);
        if (strFileIds.length() > 0) {
            strFileIds.append(", ");
        }
        strFileIds.append(fileId);
    }

    if (MessageDialog.openConfirm(null, Messages.getString("confirmed"),
            Messages.getString("message.infra.confirm.action",
                    new Object[] { Messages.getString("file"), Messages.getString("delete"), strFileIds }))) {

        // Collect per-manager error messages; one manager failing must not
        // prevent deletion on the others.
        Map<String, String> errMsg = new ConcurrentHashMap<String, String>();
        for (Map.Entry<String, List<String>> entry : map.entrySet()) {
            String managerName = entry.getKey();
            InfraEndpointWrapper wrapper = InfraEndpointWrapper.getWrapper(managerName);
            try {
                wrapper.deleteInfraFileList(entry.getValue());
            } catch (Exception e) {
                m_log.error(e);
                errMsg.put(managerName, HinemosMessage.replace(e.getMessage()));
            }
        }

        if (errMsg.isEmpty()) {
            String action = Messages.getString("delete");
            InfraFileUtil.showSuccessDialog(action, strFileIds.toString());
        } else {
            UIManager.showMessageBox(errMsg, true);
        }
        // Refresh the table so the deleted files disappear from the view.
        view.update();
    }

    return null;
}

From source file:com.netflix.zeno.diff.TypeDiffOperation.java

/**
 * Computes the diff between two object states of this type, spreading the
 * to-state comparison work over {@code numThreads} daemon worker threads.
 *
 * @param framework  serialization framework used to diff individual objects
 * @param fromState  objects in the "from" state, keyed by the type's key
 * @param toState    objects in the "to" state, partitioned round-robin across workers
 * @param numThreads number of worker threads (and work partitions)
 * @return the merged diff across all workers, including from-only extras
 */
@SuppressWarnings("unchecked")
public TypeDiff<T> performDiff(DiffSerializationFramework framework, Iterable<T> fromState, Iterable<T> toState,
        int numThreads) {
    Map<Object, T> fromStateObjects = new HashMap<Object, T>();

    for (T obj : fromState) {
        fromStateObjects.put(instruction.getKey(obj), obj);
    }

    ArrayList<List<T>> perProcessorWorkList = new ArrayList<List<T>>(numThreads); // each entry is a job
    for (int i = 0; i < numThreads; ++i) {
        perProcessorWorkList.add(new ArrayList<T>());
    }

    // Shared across workers: every to-state key seen, used below to find
    // objects present only in the from state.
    Map<Object, Object> toStateKeys = new ConcurrentHashMap<Object, Object>();

    int toIncrCount = 0;
    for (T toObject : toState) {
        perProcessorWorkList.get(toIncrCount % numThreads).add(toObject);
        toIncrCount++;
    }

    ExecutorService executor = Executors.newFixedThreadPool(numThreads, new ThreadFactory() {
        @Override
        public Thread newThread(Runnable r) {
            final Thread thread = new Thread(r, "TypeDiff_" + instruction.getTypeIdentifier());
            thread.setDaemon(true);
            return thread;
        }
    });

    try {
        ArrayList<Future<TypeDiff<T>>> workResultList = new ArrayList<Future<TypeDiff<T>>>(
                perProcessorWorkList.size());
        for (final List<T> workList : perProcessorWorkList) {
            // Lists are always non-null (created above); skip empty partitions.
            if (!workList.isEmpty()) {
                workResultList.add(executor.submit(new TypeDiffCallable<T>(framework, instruction,
                        fromStateObjects, toStateKeys, workList)));
            }
        }

        TypeDiff<T> mergedDiff = new TypeDiff<T>(instruction.getTypeIdentifier());
        for (final Future<TypeDiff<T>> future : workResultList) {
            try {
                TypeDiff<T> typeDiff = future.get();
                mergeTypeDiff(mergedDiff, typeDiff);
            } catch (InterruptedException e) {
                // Restore the interrupt flag before failing; the original
                // swallowed it inside a broad catch (Exception).
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }

        for (Map.Entry<Object, T> entry : fromStateObjects.entrySet()) {
            mergedDiff.incrementFrom();
            if (!toStateKeys.containsKey(entry.getKey()))
                mergedDiff.addExtraInFrom(entry.getValue());
        }

        return mergedDiff;

    } finally {
        executor.shutdownNow();
    }
}

From source file:org.springframework.yarn.integration.convert.MindHolderToObjectConverter.java

/**
 * Instantiates a new mind holder to object converter.
 *
 * @param objectMapper the object mapper used for deserialization
 * @param basePackage  the array of base packages searched for target classes
 */
public MindHolderToObjectConverter(ObjectMapper objectMapper, String[] basePackage) {
    this.objectMapper = objectMapper;
    this.basePackage = basePackage;
    // Cache resolved classes; lookups are expected from multiple threads.
    this.classCache = new ConcurrentHashMap<String, Class<? extends BaseObject>>();
}

From source file:com.digitalpebble.storm.crawler.metrics.DebugMetricsConsumer.java

/**
 * Prepares the metrics consumer: initializes the concurrent metric maps and
 * starts the debug servlet on the configured (or default) port.
 */
@Override
public void prepare(Map stormConf, Object registrationArgument, TopologyContext context,
        IErrorReporter errorReporter) {
    this.errorReporter = errorReporter;
    this.metrics = new ConcurrentHashMap<String, Number>();
    this.metrics_metadata = new ConcurrentHashMap<String, Map<String, Object>>();

    try {
        // TODO Config file not tested
        final String portConfigKey = "topology.metrics.consumers.debug.servlet.port";
        Integer port = (Integer) stormConf.get(portConfigKey);
        if (port == null) {
            // Fall back to the default port when none is configured.
            log.warn("Metrics debug servlet's port not specified, defaulting to 7070. You can specify it via "
                    + portConfigKey + " in storm.yaml");
            port = 7070;
        }
        server = startServlet(port);
    } catch (Exception e) {
        log.error("Failed to start metrics server", e);
        throw new AssertionError(e);
    }
}