List of usage examples for java.util.concurrent.ConcurrentMap.get
V get(Object key);
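Before the collected examples, a minimal self-contained sketch of the call itself (the map contents and key names below are illustrative only): ConcurrentMap.get returns the value currently mapped to the key, or null when the key is absent, and may be called concurrently with writers without external locking.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class ConcurrentMapGetDemo {
    public static void main(String[] args) {
        ConcurrentMap<String, Integer> counts = new ConcurrentHashMap<>();
        counts.put("requests", 1);

        // get returns the current mapping, or null when the key is absent
        Integer present = counts.get("requests");   // 1
        Integer missing = counts.get("errors");     // null

        System.out.println(present + ", " + missing);
    }
}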
From source file: org.jtalks.jcommune.plugin.auth.poulpe.service.PoulpeAuthService.java

private void addHeaderAttribute(ClientResource clientResource, String attrName, String attrValue) {
    ConcurrentMap<String, Object> attrs = clientResource.getRequest().getAttributes();
    Series<Header> headers = (Series<Header>) attrs.get(HeaderConstants.ATTRIBUTE_HEADERS);
    if (headers == null) {
        headers = new Series<>(Header.class);
        Series<Header> prev = (Series<Header>) attrs.putIfAbsent(HeaderConstants.ATTRIBUTE_HEADERS, headers);
        if (prev != null) {
            headers = prev;
        }
    }
    headers.add(attrName, attrValue);
}
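The snippet above handles the first-writer race by calling get, then putIfAbsent, and falling back to the previously installed Series if another thread won. On Java 8 and later the same lazy initialization is often written with computeIfAbsent; a minimal sketch of that alternative, using a generic ConcurrentMap rather than the Restlet types above (the key and list type are placeholders, not part of the original code):

import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;

public class LazyInitDemo {
    private final ConcurrentMap<String, List<String>> attrs = new ConcurrentHashMap<>();

    void addHeader(String name, String value) {
        // computeIfAbsent installs the list atomically; concurrent callers all see the same instance
        attrs.computeIfAbsent("headers", k -> new CopyOnWriteArrayList<>())
             .add(name + ": " + value);
    }
}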
From source file: org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation.java

/**
 * Add a rate sample for a rate metric.
 * @param name of the rate metric
 * @param elapsed time
 */
public void add(String name, long elapsed) {
    ConcurrentMap<String, ThreadSafeSampleStat> localStats = threadLocalMetricsMap.get();
    if (localStats == null) {
        localStats = new ConcurrentHashMap<>();
        threadLocalMetricsMap.set(localStats);
        weakReferenceQueue.add(new WeakReference<>(localStats));
    }
    ThreadSafeSampleStat stat = localStats.get(name);
    if (stat == null) {
        stat = new ThreadSafeSampleStat();
        localStats.put(name, stat);
    }
    stat.add(elapsed);
}
From source file: org.slc.sli.ingestion.aspect.StageTrackingAspect.java

private void trackCallStatistics(String statsKey, long elapsed) {
    String jobId = TenantContext.getJobId();
    if (jobId != null) {
        ConcurrentMap<String, Pair<AtomicLong, AtomicLong>> statsForJob = stats.get(jobId);
        if (statsForJob == null) {
            stats.putIfAbsent(jobId, new ConcurrentHashMap<String, Pair<AtomicLong, AtomicLong>>());
            statsForJob = stats.get(jobId);
        }
        Pair<AtomicLong, AtomicLong> pair = statsForJob.get(statsKey);
        if (pair == null) {
            statsForJob.putIfAbsent(statsKey, Pair.of(new AtomicLong(0L), new AtomicLong(0L)));
            pair = statsForJob.get(statsKey);
        }
        // increment the call count and accumulate the elapsed time
        pair.getLeft().incrementAndGet();
        pair.getRight().addAndGet(elapsed);
    }
}
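The method above applies the get / putIfAbsent / get idiom twice, once per nesting level. For comparison, a sketch of the same double lookup collapsed into computeIfAbsent, assuming Java 8 and hypothetical names (an AtomicLong[] pair stands in for the commons-lang Pair used above):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;

public class NestedStatsDemo {
    // jobId -> (statsKey -> [callCount, totalElapsed])
    private final ConcurrentMap<String, ConcurrentMap<String, AtomicLong[]>> stats = new ConcurrentHashMap<>();

    void track(String jobId, String statsKey, long elapsed) {
        AtomicLong[] pair = stats
                .computeIfAbsent(jobId, j -> new ConcurrentHashMap<>())
                .computeIfAbsent(statsKey, k -> new AtomicLong[] { new AtomicLong(), new AtomicLong() });
        pair[0].incrementAndGet();   // call count
        pair[1].addAndGet(elapsed);  // accumulated elapsed time
    }
}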
From source file: cz.cuni.mff.ufal.AllBitstreamZipArchiveReader.java

/** Creates a unique filename based on a map of counters of already used filenames. */
protected String createUniqueFilename(String filename, ConcurrentMap<String, AtomicInteger> usedFilenames) {
    String uniqueFilename = filename;
    usedFilenames.putIfAbsent(filename, new AtomicInteger(0));
    int occurrence = usedFilenames.get(filename).incrementAndGet();
    if (occurrence > 1) {
        uniqueFilename = addSuffixToFilename(filename, String.valueOf(occurrence));
    }
    return uniqueFilename;
}
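If the counters were stored as plain Integer values instead of AtomicInteger, merge would perform the same atomic increment in a single call; a sketch under that assumption (the suffix format is illustrative and not the addSuffixToFilename behaviour used above):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class UniqueFilenameDemo {
    private final ConcurrentMap<String, Integer> usedFilenames = new ConcurrentHashMap<>();

    String uniqueName(String filename) {
        // merge atomically increments the per-filename counter and returns the new value
        int occurrence = usedFilenames.merge(filename, 1, Integer::sum);
        return occurrence > 1 ? filename + "." + occurrence : filename;
    }
}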
From source file: edu.illinois.cs.cogcomp.pipeline.server.ServerClientAnnotator.java

/**
 * The method is synchronized since the caching appears to have issues under multi-threaded access.
 * @param overwrite if true, overwrite the cached values
 */
public synchronized TextAnnotation annotate(String str, boolean overwrite) throws Exception {
    String viewsConnected = Arrays.toString(viewsToAdd);
    String views = viewsConnected.substring(1, viewsConnected.length() - 1).replace(" ", "");
    ConcurrentMap<String, byte[]> concurrentMap = (db != null)
            ? db.hashMap(viewName, Serializer.STRING, Serializer.BYTE_ARRAY).createOrOpen()
            : null;
    String key = DigestUtils.sha1Hex(str + views);
    if (!overwrite && concurrentMap != null && concurrentMap.containsKey(key)) {
        byte[] taByte = concurrentMap.get(key);
        return SerializationHelper.deserializeTextAnnotationFromBytes(taByte);
    } else {
        URL obj = new URL(url + ":" + port + "/annotate");
        HttpURLConnection con = (HttpURLConnection) obj.openConnection();
        con.setRequestMethod("POST");
        con.setRequestProperty("charset", "utf-8");
        con.setRequestProperty("Content-Type", "text/plain; charset=utf-8");
        con.setDoOutput(true);
        con.setUseCaches(false);
        OutputStreamWriter wr = new OutputStreamWriter(con.getOutputStream());
        wr.write("text=" + URLEncoder.encode(str, "UTF-8") + "&views=" + views);
        wr.flush();
        InputStreamReader reader = new InputStreamReader(con.getInputStream());
        BufferedReader in = new BufferedReader(reader);
        String inputLine;
        StringBuilder response = new StringBuilder();
        while ((inputLine = in.readLine()) != null) {
            response.append(inputLine);
        }
        in.close();
        reader.close();
        wr.close();
        con.disconnect();
        TextAnnotation ta = SerializationHelper.deserializeFromJson(response.toString());
        if (concurrentMap != null) {
            concurrentMap.put(key, SerializationHelper.serializeTextAnnotationToBytes(ta));
            this.db.commit();
        }
        return ta;
    }
}
From source file: com.networknt.light.rule.AbstractRule.java

public Map<String, Object> getAccessByRuleClass(String ruleClass) throws Exception {
    Map<String, Object> access = null;
    Map<String, Object> accessMap = ServiceLocator.getInstance().getMemoryImage("accessMap");
    ConcurrentMap<Object, Object> cache = (ConcurrentMap<Object, Object>) accessMap.get("cache");
    if (cache == null) {
        cache = new ConcurrentLinkedHashMap.Builder<Object, Object>().maximumWeightedCapacity(1000).build();
        accessMap.put("cache", cache);
    } else {
        access = (Map<String, Object>) cache.get(ruleClass);
    }
    if (access == null) {
        OrientGraph graph = ServiceLocator.getInstance().getGraph();
        try {
            OrientVertex accessVertex = (OrientVertex) graph.getVertexByKey("Access.ruleClass", ruleClass);
            if (accessVertex != null) {
                String json = accessVertex.getRecord().toJSON();
                access = mapper.readValue(json, new TypeReference<HashMap<String, Object>>() {
                });
                cache.put(ruleClass, access);
            }
        } catch (Exception e) {
            logger.error("Exception:", e);
            throw e;
        } finally {
            graph.shutdown();
        }
    }
    return access;
}
From source file: com.networknt.light.rule.menu.AbstractMenuRule.java

protected String getJsonByRid(String rid) {
    String json = null;
    Map<String, Object> menuMap = (Map<String, Object>) ServiceLocator.getInstance().getMemoryImage("menuMap");
    ConcurrentMap<Object, Object> cache = (ConcurrentMap<Object, Object>) menuMap.get("cache");
    if (cache != null) {
        json = (String) cache.get(rid);
    }
    if (json == null) {
        json = DbService.getJsonByRid(rid);
        // put it into the menu cache.
        if (json != null) {
            if (cache == null) {
                cache = new ConcurrentLinkedHashMap.Builder<Object, Object>().maximumWeightedCapacity(1000)
                        .build();
                menuMap.put("cache", cache);
            }
            cache.put(rid, json);
        }
    }
    return json;
}
From source file: com.networknt.light.rule.menu.AbstractMenuRule.java

protected String getMenu(OrientGraph graph, String host) {
    String json = null;
    Map<String, Object> menuMap = (Map<String, Object>) ServiceLocator.getInstance().getMemoryImage("menuMap");
    ConcurrentMap<Object, Object> cache = (ConcurrentMap<Object, Object>) menuMap.get("cache");
    if (cache != null) {
        json = (String) cache.get(host);
    }
    if (json == null) {
        Vertex menu = graph.getVertexByKey("Menu.host", host);
        if (menu != null) {
            json = ((OrientVertex) menu).getRecord().toJSON("rid,fetchPlan:out_Own.in_Create:-2 out_Own:2");
        }
        if (json != null) {
            if (cache == null) {
                cache = new ConcurrentLinkedHashMap.Builder<Object, Object>().maximumWeightedCapacity(1000)
                        .build();
                menuMap.put("cache", cache);
            }
            cache.put(host, json);
        }
    }
    return json;
}
From source file: org.apache.hadoop.hbase.client.MetaCache.java

public void clearCache(final HRegionLocation location) {
    if (location == null) {
        return;
    }
    TableName tableName = location.getRegionInfo().getTable();
    ConcurrentMap<byte[], RegionLocations> tableLocations = getTableLocations(tableName);
    RegionLocations regionLocations = tableLocations.get(location.getRegionInfo().getStartKey());
    if (regionLocations != null) {
        RegionLocations updatedLocations = regionLocations.remove(location);
        boolean removed = false;
        if (updatedLocations != regionLocations) {
            if (updatedLocations.isEmpty()) {
                removed = tableLocations.remove(location.getRegionInfo().getStartKey(), regionLocations);
            } else {
                removed = tableLocations.replace(location.getRegionInfo().getStartKey(), regionLocations,
                        updatedLocations);
            }
            if (removed && LOG.isTraceEnabled()) {
                LOG.trace("Removed " + location + " from cache");
            }
        }
    }
}
From source file: org.apache.hadoop.hbase.client.MetaCache.java

/**
 * Deletes the cached location of the region if necessary, based on some error from source.
 * @param hri The region in question.
 */
public void clearCache(HRegionInfo hri) {
    ConcurrentMap<byte[], RegionLocations> tableLocations = getTableLocations(hri.getTable());
    RegionLocations regionLocations = tableLocations.get(hri.getStartKey());
    if (regionLocations != null) {
        HRegionLocation oldLocation = regionLocations.getRegionLocation(hri.getReplicaId());
        if (oldLocation == null) {
            return;
        }
        RegionLocations updatedLocations = regionLocations.remove(oldLocation);
        boolean removed = false;
        if (updatedLocations != regionLocations) {
            if (updatedLocations.isEmpty()) {
                removed = tableLocations.remove(hri.getStartKey(), regionLocations);
            } else {
                removed = tableLocations.replace(hri.getStartKey(), regionLocations, updatedLocations);
            }
            if (removed && LOG.isTraceEnabled()) {
                LOG.trace("Removed " + oldLocation + " from cache");
            }
        }
    }
}
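Both clearCache variants pair get with the conditional two-argument remove(key, expected) and three-argument replace(key, expected, updated), so the map is only modified if it still holds the snapshot that was read. A stripped-down sketch of that compare-and-swap style update on a plain ConcurrentMap (all names are illustrative, not the HBase API):

import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Collectors;

public class ConditionalUpdateDemo {
    private final ConcurrentMap<String, List<String>> locations = new ConcurrentHashMap<>();

    /** Removes one entry from the list stored under key, only if our snapshot is still current. */
    boolean removeLocation(String key, String stale) {
        List<String> current = locations.get(key);   // snapshot
        if (current == null || !current.contains(stale)) {
            return false;
        }
        List<String> updated = current.stream().filter(l -> !l.equals(stale)).collect(Collectors.toList());
        return updated.isEmpty()
                ? locations.remove(key, current)             // delete only if unchanged since the snapshot
                : locations.replace(key, current, updated);  // swap only if unchanged since the snapshot
    }
}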