Example usage for java.util.concurrent ConcurrentHashMap ConcurrentHashMap

Introduction

This page collects example usages of the no-argument constructor of java.util.concurrent.ConcurrentHashMap, ConcurrentHashMap().

Prototype

public ConcurrentHashMap() 

Document

Creates a new, empty map with the default initial table size (16).
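
Before the individual examples, here is a minimal standalone sketch of the no-argument constructor in use (class and variable names are illustrative, not taken from the sources below):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class ConcurrentHashMapSketch {
    public static void main(String[] args) {
        // Creates a new, empty map with the default initial table size (16).
        ConcurrentMap<String, Integer> hits = new ConcurrentHashMap<>();

        hits.put("home", 1);
        hits.put("login", 2);

        // Note: ConcurrentHashMap permits neither null keys nor null values.
        System.out.println(hits.get("home")); // 1
        System.out.println(hits.size());      // 2
    }
}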

Usage

From source file:com.enonic.vertical.userservices.UserServicesAccessManagerImpl.java

private void initSiteRules(SiteKey site) {
    ConcurrentMap<String, AccessPermission> siteRules = new ConcurrentHashMap<String, AccessPermission>();

    String allowRules = sitePropertiesService.getProperty(HTTP_SERVICES_ALLOW_PROPERTY, site);
    String denyRules = sitePropertiesService.getProperty(HTTP_SERVICES_DENY_PROPERTY, site);
    parseAndAddRules(allowRules, AccessPermission.ALLOW, siteRules, site);
    parseAndAddRules(denyRules, AccessPermission.DENY, siteRules, site);

    siteRules.putIfAbsent(ACCESS_RULE_ALL, DEFAULT_ACCESS_RULE);

    sitesAccessRules.putIfAbsent(site, siteRules);
}
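
The constructor call above builds a per-site rule map, and putIfAbsent then installs the default rule only when no explicit "all" rule was parsed. A minimal sketch of that idiom (key and value literals are illustrative):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class PutIfAbsentSketch {
    public static void main(String[] args) {
        ConcurrentMap<String, String> rules = new ConcurrentHashMap<>();
        rules.put("admin", "ALLOW");

        // putIfAbsent inserts only when the key is unmapped and returns the previous value (or null).
        rules.putIfAbsent("admin", "DENY"); // no effect; "admin" stays ALLOW
        rules.putIfAbsent("*", "DENY");     // inserted; "*" now maps to DENY

        System.out.println(rules); // {admin=ALLOW, *=DENY} (iteration order not guaranteed)
    }
}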

From source file:org.apache.sling.testing.clients.SlingClientConfig.java

protected SlingClientConfig(URI url, String user, String password, CookieStore cookieStore,
        CredentialsProvider credentialsProvider, AuthCache authCache) {
    this.url = url;
    this.user = user;
    this.password = password;

    this.cookieStore = cookieStore;
    this.credsProvider = credentialsProvider;
    this.authCache = authCache;

    this.values = new ConcurrentHashMap<String, String>();
}

From source file:com.graphaware.importer.stats.LoggingStatisticsCollector.java

private AtomicInteger getCounter(String category, String name) {
    if (name == null) {
        name = "null";
    }

    ConcurrentHashMap<String, AtomicInteger> counter = counters.get(category);

    if (counter == null) {
        counters.putIfAbsent(category, new ConcurrentHashMap<String, AtomicInteger>());
        counter = counters.get(category);
    }

    AtomicInteger count = counter.get(name);

    if (count == null) {
        counter.putIfAbsent(name, new AtomicInteger(0));
        count = counter.get(name);
    }

    return count;
}
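
This double-checked get/putIfAbsent/get sequence was the common idiom before Java 8. On Java 8 and later, ConcurrentHashMap.computeIfAbsent expresses the same lazy initialization atomically; a sketch under that assumption (field and parameter names mirror the example above):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

public class CounterSketch {

    private final ConcurrentHashMap<String, ConcurrentHashMap<String, AtomicInteger>> counters =
            new ConcurrentHashMap<>();

    private AtomicInteger getCounter(String category, String name) {
        String key = (name == null) ? "null" : name;
        // Creates the nested map and the counter only on first access, without the extra lookups.
        return counters
                .computeIfAbsent(category, c -> new ConcurrentHashMap<>())
                .computeIfAbsent(key, n -> new AtomicInteger(0));
    }
}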

From source file:com.sm.store.server.ClusterStoreServer.java

protected void init() {
    clusterNo = serverConfig.getClusterNo();
    logger.info("start Cluster Server " + serverConfig.findClusterNodes(clusterNo).toString() + " for "
            + serverConfig.getStoreConfigList().size() + " store");
    state = State.Starting;
    storeMaps = new ConcurrentHashMap<String, RemoteStore>();
    for (StoreConfig each : serverConfig.getStoreConfigList()) {
        logger.info("start store " + each.toString());
        ClusterStore store = new ClusterStore(each.getFileName(), buildSerializer(each.getSerializer()),
                serverConfig.getDataPath() + "/" + each.getDataPath(), each.isDelay(), each.getBlockSize(),
                each.getMode(), each.isSorted(), serverConfig.findClusterNodes(clusterNo));
        if (each.isDelay()) {
            logger.info("writeThread " + each.getDelayThread());
            store.startWriteThread(each.getDelayThread());
        }
        logger.info("start replica client in cluster path " + each.getLogPath());
        //pass localUrl to start the replication thread to all other nodes in the same cluster
        store.startReplica(each.getLogPath(), serverConfig.getHost() + ":" + serverConfig.getPort());
        //start replica url that was defined in each store
        if (each.getReplicaUrl() != null && each.getReplicaUrl().size() > 0) {
            logger.info("start replica client defined in stores.xml size " + each.getReplicaUrl().size());
            store.startWriteLogThread(each.getLogPath(), each.getReplicaUrl());
        }
        //add support replica client timeout
        List<ReplicaClient> clientList = store.getReplicaClientList();
        for (ReplicaClient client : clientList) {
            client.setTimeout(each.getReplicaTimeout());
        }
        //add support of persist client and persist replica URL
        if (each.getPstReplicaUrl() != null && each.getPstReplicaUrl().size() > 0) {
            //useLRU property to represent blockValue flag; but only for cluster store
            if (each.isUseLRU()) {
                logger.info("useLRU is true, will send blockValue to true and bypass Delta");
                store.setBlockValue(each.isUseLRU());
            } else {
                logger.info("check for delta interface");
                if (store.getDelta() == null) {
                    logger.info("inject UnisonDelta..");
                    //store.setDelta(new UnisonDelta(null));
                    store.setDelta((Delta) createInstance(UNISON_DELTA));
                }
            }
            logger.info("start persist replica client for " + each.getStore());
            store.startPstReplica("log", each.getPstReplicaUrl());
            //add support replica client timeout
            List<ReplicaClient> clist = store.getReplicaClientList();
            for (ReplicaClient client : clist) {
                client.setTimeout(each.getReplicaTimeout());
            }
        }
        //call remote store to start customized replication outside of cluster
        //store.startReplica(each.getLogPath(), each.getReplicaUrl());
        storeMaps.put(each.getStore(), store);
        //setup trigger from storeConfig
        store.setupTrigger2Cache(each);
    }
    startAdminStore();
    callBack = new ClusterStoreCallBack(storeMaps, serializer, serverConfig.findClusterNodes(clusterNo));
    handler = new ClusterServerHandler(callBack, serverConfig.getMaxThread(), serverConfig.getMaxQueue(),
            serializer);
    // setup the server handler login frequency
    ((ClusterServerHandler) handler).setFreq(serverConfig.getFreq());
}

From source file:edu.usu.sdl.openstorefront.service.job.PluginScanJob.java

@Override
protected void executeInternaljob(JobExecutionContext context) {
    File pluginDir = FileSystemManager.getDir(FileSystemManager.PLUGIN_DIR);

    ObjectMapper objectMapper = StringProcessor.defaultObjectMapper();

    String fileMapJson = service.getSystemService().getPropertyValue(ApplicationProperty.PLUGIN_LAST_LOAD_MAP);
    Map<String, Long> fileMap = null;
    if (StringUtils.isNotBlank(fileMapJson)) {
        try {
            fileMap = objectMapper.readValue(fileMapJson, new TypeReference<Map<String, Long>>() {
            });
        } catch (IOException e) {
            log.log(Level.WARNING,
                    "Unable to restore plugin file map.  Starting over; should be able to continue.");
        }
    } else {
        fileMapJson = "";
    }

    if (fileMap == null) {
        fileMap = new ConcurrentHashMap<>();
    }

    Set<String> newFiles = new HashSet<>();
    for (File plugin : pluginDir.listFiles()) {
        if (plugin.isFile()) {
            newFiles.add(plugin.getPath());

            Long lastModTime = fileMap.get(plugin.getPath());
            boolean loadPlugin = false;
            if (lastModTime != null) {
                //check for update
                if (plugin.lastModified() > lastModTime) {
                    log.log(Level.INFO, MessageFormat.format("Plugin: {0} was updated...", plugin.getName()));
                    loadPlugin = true;
                }
            } else {
                //new Plugin
                loadPlugin = true;
            }

            if (loadPlugin) {
                log.log(Level.INFO,
                        MessageFormat.format("Found plugin: {0} attempting to load.", plugin.getName()));
                try (InputStream in = new FileInputStream(plugin)) {
                    service.getPluginServicePrivate().installPlugin(plugin.getName(), in, true);
                    log.log(Level.INFO,
                            MessageFormat.format("Loaded plugin: {0} successfully.", plugin.getName()));
                } catch (Exception ioe) {
                    log.log(Level.SEVERE, MessageFormat.format("Plugin: {0} failed to load.", plugin.getName()),
                            ioe);
                } finally {
                    fileMap.put(plugin.getPath(), plugin.lastModified());
                }
            }
        }
    }

    //look for removed files
    List<String> keysToRemove = new ArrayList<>();
    for (String path : fileMap.keySet()) {
        if (newFiles.contains(path) == false) {
            //file was removed
            log.log(Level.INFO, MessageFormat.format("Plugin: {0} was removed...", path));

            String filename = Paths.get(path).getFileName().toString();
            Plugin pluginExample = new Plugin();
            pluginExample.setActualFilename(filename);
            pluginExample = pluginExample.find();
            if (pluginExample != null) {
                log.log(Level.INFO, MessageFormat.format("Uninstalling plugin: {0} ", path));
                try {
                    service.getPluginService().uninstallPlugin(pluginExample.getPluginId());
                    log.log(Level.INFO, MessageFormat.format("Plugin: {0} uninstalled. ", path));
                } catch (Exception e) {
                    log.log(Level.INFO, MessageFormat.format(
                            "Failed to uninstalled:  {0} See log.  Try to use admin tools to uninstall.", path),
                            e);
                }
            } else {
                log.log(Level.INFO, MessageFormat.format("Plugin: {0} wasn't installed", path));
            }
            keysToRemove.add(path);
        }
    }

    for (String key : keysToRemove) {
        fileMap.remove(key);
    }

    try {
        String updatedMap = objectMapper.writeValueAsString(fileMap);
        if (fileMapJson.equals(updatedMap) == false) {
            service.getSystemService().saveProperty(ApplicationProperty.PLUGIN_LAST_LOAD_MAP, updatedMap);
        }
    } catch (JsonProcessingException ex) {
        log.log(Level.SEVERE, "Unable to save FileMap.  This can cause spinning.  Pausing job.", ex);
        JobManager.pauseSystemJob(PluginScanJob.class.getSimpleName());
    }

}

From source file:com.bittorrent.mpetazzoni.tracker.TrackedTorrent.java

/**
 * Create a new tracked torrent from meta-info binary data.
 *
 * @param torrent The meta-info byte data.
 * @throws IOException When the info dictionary can't be
 * encoded and hashed back to create the torrent's SHA-1 hash.
 */
public TrackedTorrent(byte[] torrent) throws IOException {
    super(torrent, false);

    this.peers = new ConcurrentHashMap<String, TrackedPeer>();
    this.answerPeers = TrackedTorrent.DEFAULT_ANSWER_NUM_PEERS;
    this.announceInterval = TrackedTorrent.DEFAULT_ANNOUNCE_INTERVAL_SECONDS;
}

From source file:epgtools.dumpepgfromts.subtool.ChannelListMaker.java

public void maker(File Dir) throws FileNotFoundException, IOException {

    FileSeeker seeker = new FileSeeker(Dir, Suffix.TS_SUFFIX);

    seeker.setRecursive(false);

    List<File> tsFiles = seeker.seek();

    Map<MultiKey<Integer>, Channel> chMap = new HashMap<>();

    Map<KeyFields, Integer> idMap = new ConcurrentHashMap<>();

    for (File tsFile : tsFiles) {

        FileLoader fl = new FileLoader(tsFile, 1000000L);

        fl.load();

        for (Channel ch : fl.getChannels()) {
            chMap.put(ch.getMuiltiKey(), ch);
        }

        Set<Programme> ps = fl.getProgrammes();

        for (Programme p : ps) {
            if (p.isThis_or_other() == true) {
                KeyFields kf = p.getKeyfFields();
                idMap.put(kf, Integer.valueOf(this.getNameWithoutExtension(tsFile)));
            }
        }

    }
    Set<PhysicalChannelNumberRecord> numSet = new HashSet<>();

    Set<KeyFields> kfs = idMap.keySet();
    for (KeyFields kf : kfs) {
        Integer pt = idMap.get(kf);
        String name;
        Channel ch = chMap.get(kf.getMuiltiKey());
        if (ch != null) {
            name = ch.getDisplay_name();
        } else {
            name = "";
        }
        numSet.add(new PhysicalChannelNumberRecord(pt, kf.getTransport_stream_id(), kf.getOriginal_network_id(),
                kf.getService_id(), name));
    }

    //print each collected channel number record
    for (PhysicalChannelNumberRecord r : numSet) {
        System.out.println(r);
    }
    //save the records to a CSV file
    List<PhysicalChannelNumberRecord> nl = new ArrayList<>();
    nl.addAll(numSet);
    CsvManager csvManager = CsvManagerFactory.newCsvManager();
    File dest = new File("channellist/channels.csv");
    System.out.println(dest.getAbsolutePath());
    csvManager.save(nl, PhysicalChannelNumberRecord.class).to(dest, "UTF-8");

}

From source file:org.biopax.validator.impl.ValidatorImpl.java

public ValidatorImpl() {
    results = Collections.newSetFromMap(new ConcurrentHashMap<Validation, Boolean>());
}
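
The JDK has no ConcurrentHashSet class, so Collections.newSetFromMap over a ConcurrentHashMap, as in the constructor above, is the usual way to obtain a thread-safe Set. A minimal sketch (element type chosen for illustration); on Java 8+, ConcurrentHashMap.newKeySet() is an equivalent shortcut:

import java.util.Collections;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

public class ConcurrentSetSketch {
    public static void main(String[] args) {
        // A thread-safe Set view backed by a ConcurrentHashMap.
        Set<String> results = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());

        results.add("validation-1");
        System.out.println(results.contains("validation-1")); // true
    }
}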

From source file:com.newrelic.agent.deps.org.apache.http.impl.client.BasicAuthCache.java

/**
 * Default constructor.
 *
 * @since 4.3
 */
public BasicAuthCache(final SchemePortResolver schemePortResolver) {
    super();
    this.map = new ConcurrentHashMap<HttpHost, byte[]>();
    this.schemePortResolver = schemePortResolver != null ? schemePortResolver
            : DefaultSchemePortResolver.INSTANCE;
}

From source file:io.pravega.controller.eventProcessor.impl.EventProcessorGroupImpl.java

EventProcessorGroupImpl(final EventProcessorSystemImpl actorSystem,
        final EventProcessorConfig<T> eventProcessorConfig, final CheckpointStore checkpointStore) {
    this.objectId = String.format("EventProcessorGroup[%s]",
            eventProcessorConfig.getConfig().getReaderGroupName());
    this.actorSystem = actorSystem;
    this.eventProcessorConfig = eventProcessorConfig;
    this.eventProcessorMap = new ConcurrentHashMap<>();
    this.writer = actorSystem.clientFactory.createEventWriter(eventProcessorConfig.getConfig().getStreamName(),
            eventProcessorConfig.getSerializer(), EventWriterConfig.builder().build());
    this.checkpointStore = checkpointStore;
}