Example usage for java.util.concurrent ConcurrentHashMap ConcurrentHashMap

List of usage examples for java.util.concurrent ConcurrentHashMap ConcurrentHashMap

Introduction

On this page you can find an example of usage for the java.util.concurrent ConcurrentHashMap ConcurrentHashMap constructor.

Prototype

public ConcurrentHashMap() 

Source Link

Document

Creates a new, empty map with the default initial table size (16).

Usage

From source file:com.impetus.kundera.persistence.EntityManagerSession.java

/**
 * Creates a new session-level entity manager cache backed by a
 * thread-safe map, and registers the supplied cache as the L2 cache.
 *
 * @param cache
 *            the second-level cache to register via {@code setL2Cache}
 */
public EntityManagerSession(Cache cache) {
    sessionCache = new ConcurrentHashMap<>();
    setL2Cache(cache);
}

From source file:cc.osint.graphd.sim.ProcessGroup.java

/**
 * Builds a process group over the given graph. The graph is held only
 * weakly so this group does not keep it alive; the process and script
 * engine registries are concurrent maps, created empty.
 *
 * @param graph           graph this group operates on (weakly referenced)
 * @param name            display name, echoed in the startup log line
 * @param executorService executor used by this group
 * @param fiberFactory    fiber factory used by this group
 */
public ProcessGroup(Graph graph, String name, ExecutorService executorService, PoolFiberFactory fiberFactory) {
    this.name = name;
    this.executorService = executorService;
    this.fiberFactory = fiberFactory;
    this.graphRef = new WeakReference<>(graph);
    this.processMap = new ConcurrentHashMap<>();
    this.scriptEngineMap = new ConcurrentHashMap<>();
    log.info("process group instantiated: " + name);
}

From source file:com.clustercontrol.notify.mail.action.GetMailTemplate.java

/**
 * ???<BR>/*  ww  w  .j  a  va2s . c  o m*/
 * ??SessionBean????
 *
 * @return 
 *
 */
public Map<String, List<MailTemplateInfo>> getMailTemplateList() {

    Map<String, List<MailTemplateInfo>> dispDataMap = new ConcurrentHashMap<>();
    List<MailTemplateInfo> records = null;
    for (String managerName : EndpointManager.getActiveManagerSet()) {
        try {
            MailTemplateEndpointWrapper wrapper = MailTemplateEndpointWrapper.getWrapper(managerName);
            records = wrapper.getMailTemplateList();
            dispDataMap.put(managerName, records);
        } catch (InvalidRole_Exception e) {
            MessageDialog.openInformation(null, Messages.getString("message"),
                    Messages.getString("message.accesscontrol.16"));
        } catch (Exception e) {
            m_log.warn("getNotifyListByOwnerRole(), " + e.getMessage(), e);
            MessageDialog.openError(null, Messages.getString("failed"),
                    Messages.getString("message.hinemos.failure.unexpected") + ", "
                            + HinemosMessage.replace(e.getMessage()));
        }
    }
    return dispDataMap;
}

From source file:com.redhat.rhn.frontend.integration.IntegrationService.java

/**
 * Private constructor: initializes the two token stores (cobbler auth
 * tokens and random tokens) as empty concurrent maps.
 */
private IntegrationService() {
    this.cobblerAuthTokenStore = new ConcurrentHashMap<>();
    this.randomTokenStore = new ConcurrentHashMap<>();
}

From source file:com.vmware.identity.idm.server.AuthSessionFactoryCache.java

/**
 * Package-private constructor: starts with an empty factory lookup table
 * and resolves the identity services configuration directory.
 */
AuthSessionFactoryCache() {
    this._factoryLookup = new ConcurrentHashMap<>();
    this._configDir = IdmUtils.getIdentityServicesConfigDir();
}

From source file:com.zack6849.superlogger.Main.java

/**
 * Plugin startup: wires the logger and logger registry, loads
 * configuration and settings, registers the event listener, starts
 * metrics (best-effort), configures the updater according to the
 * auto-update setting, and emits debug output when enabled.
 */
@Override
public void onEnable() {
    logger = getLogger();
    loggers = new ConcurrentHashMap<>();
    saveDefaultConfig();
    getConfig().setDefaults(new MemoryConfiguration());
    loadSettings();
    getServer().getPluginManager().registerEvents(new EventListener(this), this);
    // Metrics are best-effort: a failure is reported but never blocks startup.
    try {
        new Metrics(this).start();
        logger.log(Level.INFO, "Metrics Running <3");
    } catch (IOException e) {
        logger.warning("There was an issue starting plugin metrics </3");
        logger.warning(e.getMessage());
        e.printStackTrace();
    }
    // Same updater id/file either way; only the update type differs.
    Updater.UpdateType updateType = settings.isAutoUpdate() ? Updater.UpdateType.DEFAULT
            : Updater.UpdateType.NO_DOWNLOAD;
    updater = new Updater(this, 45448, this.getFile(), updateType, true);
    if (settings.isDebug()) {
        for (String debugLine : getDebug()) {
            debug(debugLine);
        }
    }
}

From source file:me.schiz.jmeter.protocol.SessionStorage.java

/**
 * Creates empty concurrent registries for socket clients and their
 * protocol types.
 */
public SessionStorage() {
    this.map = new ConcurrentHashMap<>();
    this.protoTypeMap = new ConcurrentHashMap<>();
}

From source file:com.netflix.hollow.jsonadapter.discover.HollowJsonAdapterSchemaDiscoverer.java

/**
 * Creates a schema discoverer for the given type, operating in "scan"
 * mode, with empty map-type and discovered-schema registries and a fresh
 * schema namer.
 *
 * @param typeName name of the type whose schema is being discovered
 */
public HollowJsonAdapterSchemaDiscoverer(String typeName) {
    super(typeName, "scan");
    this.schemaNamer = new HollowSchemaNamer();
    this.mapTypes = new HashSet<>();
    this.discoveredSchemas = new ConcurrentHashMap<>();
}

From source file:com.willkara.zeteo.explorers.Explorer.java

/**
 * The public facing search method. Accepts an {@link ExplorerOptions}
 * instance that contains the rules for the search, lists the files of its
 * directory, and runs the recursive searcher over them.
 *
 * <p>NOTE(review): the searcher is seeded with the field {@code options},
 * not the {@code exOptions} argument — confirm this is intentional;
 * otherwise the caller-supplied options are used only for the directory
 * listing and silently ignored for the search rules.
 *
 * @param exOptions ExplorerOptions for the search method
 * @return An {@link ExplorerResult} object that contains the information
 * for the directory.
 */
public ExplorerResult search(ExplorerOptions exOptions) {
    return new ExplorerResult(
            searcher(exOptions.getDirectoryObject().listFiles(), new ConcurrentHashMap<>(), options));
}

From source file:com.farhad.ngram.lang.detector.profile.LanguageProfile.java

/**
 * Builds and persists an n-gram profile for each language class in the
 * corpus file at {@code path}.
 * <p>
 * Phase 1 counts, per language, each n-gram's total occurrences and the
 * number of documents containing it. Phase 2 drops n-grams whose total
 * count is at or below {@code MIN_TERM_FREQUENCY} or whose document count
 * reaches the language's document count. Phase 3 computes a term frequency
 * (count / remaining vocabulary size) per n-gram and writes it to
 * {@code <lang>.profile} via {@code saveProfile}.
 * <p>
 * NOTE(review): {@code Num_Docs} is a field reassigned per language in
 * every phase — presumably only meaningful within each loop iteration;
 * confirm nothing else reads it concurrently.
 *
 * @param path path of the corpus file; keys are language classes, values
 *             are the documents of that class (per {@code filetools.readFile})
 */
public void construct(String path) {
    MultiValueMap corpus = filetools.readFile(path);

    // language -> (n-gram -> [total count, document count])
    Map<String, Map<String, MyPair>> profile = new ConcurrentHashMap<>();

    // ---- Phase 1: count n-grams for each language class ----
    for (Object keyObj : corpus.keySet()) {
        String theKey = (String) keyObj;
        List<String> texts = (List<String>) corpus.get(theKey);
        Num_Docs = texts.size();

        // per-document n-gram counts (doc index -> counted grams)
        Map<Integer, Map<String, Integer>> ngrams_lang = new HashMap<>();
        Map<String, MyPair> ngrams_profile = new ConcurrentHashMap<>();

        for (int i = 0; i < texts.size(); i++) {
            String text = texts.get(i);
            Map<String, Integer> grams = new HashMap<>();
            // collect grams of every length 1..ngrams for this document
            for (int n = 1; n <= ngrams; n++) {
                grams.putAll(NgramExtractor.gramLength(n).extractCountedGrams(text));
            }
            ngrams_lang.put(i, grams);
        }

        // merge per-document counts into the language profile:
        // first = total occurrences, second = number of documents seen in
        for (Map<String, Integer> docGrams : ngrams_lang.values()) {
            for (Map.Entry<String, Integer> gram : docGrams.entrySet()) {
                MyPair pair = ngrams_profile.get(gram.getKey());
                if (pair == null) {
                    ngrams_profile.put(gram.getKey(), new MyPair(gram.getValue(), 1));
                } else {
                    pair.setFirst(pair.getFirst() + gram.getValue());
                    pair.setSecond(pair.getSecond() + 1);
                    ngrams_profile.put(gram.getKey(), pair);
                }
            }
        }

        profile.put(theKey, ngrams_profile);
    }

    // ---- Phase 2: filter by term frequency and document frequency ----
    for (String lang : profile.keySet()) {
        List<String> texts = (List<String>) corpus.get(lang);
        Num_Docs = texts.size();

        Map<String, MyPair> ngram = profile.get(lang);
        Iterator<Map.Entry<String, MyPair>> filterIt = ngram.entrySet().iterator();
        while (filterIt.hasNext()) {
            MyPair freq = filterIt.next().getValue();
            // '||' replaces the original non-short-circuit '|' — the
            // operands are side-effect free, so the result is identical.
            if (freq.getFirst() <= MIN_TERM_FREQUENCY || freq.getSecond() >= Num_Docs) {
                // safe: backing map is a ConcurrentHashMap
                filterIt.remove();
            }
        }
    }

    // ---- Phase 3: compute term frequency per n-gram and persist ----
    for (String lang : profile.keySet()) {
        List<String> texts = (List<String>) corpus.get(lang);
        Num_Docs = texts.size();

        Map<String, MyPair> ngram = profile.get(lang);
        // vocabulary size after filtering
        int N = ngram.size();

        Map<String, Double> ngram_tfidf = new HashMap<>();
        for (Map.Entry<String, MyPair> entry : ngram.entrySet()) {
            double tf = (double) entry.getValue().getFirst() / N;
            ngram_tfidf.put(entry.getKey(), tf);
        }

        // write the language profile
        saveProfile(lang + ".profile", ngram_tfidf);
    }
}