List of usage examples for java.util.concurrent ConcurrentHashMap put
public V put(K key, V value)
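The project examples below rely on types from their own codebases that are not shown here. As a quick, self-contained illustration of the put contract itself (class and variable names are illustrative only, not taken from the examples): put associates the value with the key, returns the previous value or null if the key was absent, and, unlike HashMap, rejects null keys and values.

import java.util.concurrent.ConcurrentHashMap;

public class ConcurrentHashMapPutExample {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> scores = new ConcurrentHashMap<>();

        // First put on an absent key returns null.
        Integer previous = scores.put("answer", 42);
        System.out.println(previous);            // null

        // A second put on the same key overwrites and returns the old value.
        previous = scores.put("answer", 43);
        System.out.println(previous);            // 42

        // Unlike HashMap, null keys and null values are rejected.
        try {
            scores.put("missing", null);
        } catch (NullPointerException expected) {
            System.out.println("null values are not allowed");
        }
    }
}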
From source file:org.wso2.carbon.mediator.cache.CacheMediatorTest.java
/**
 * Test case for isValidCacheEntry() with no-cache and ETag headers.
 *
 * @throws AxisFault when exception happens on message context creation.
 */
public void testIsValidResponseWithNoCache() throws AxisFault {
    CachableResponse cachedResponse = new CachableResponse();
    MessageContext synCtx = createMessageContext();
    ConcurrentHashMap<String, Object> headers = new ConcurrentHashMap<>();
    headers.put(HttpHeaders.CACHE_CONTROL, CACHE_CONTROL_HEADER);
    headers.put(HttpHeaders.ETAG, "2046-64-77-50-35-75-11038-459-486126-71-58");
    cachedResponse.setHeaderProperties(headers);
    assertEquals("no-cache or ETag header does not exist.",
            HttpCachingFilter.isValidCacheEntry(cachedResponse, synCtx), true);
}
From source file:org.apache.hadoop.gateway.GatewayPortMappingDisableFeatureTest.java
/**
 * Creates a deployment of a gateway instance that all test methods will share. This method also creates a
 * registry of sorts for all of the services that will be used by the test methods.
 * The createTopology method is used to create the topology file that would normally be read from disk.
 * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
 * <p/>
 * This would normally be done once for this suite, but the failure tests start affecting each other depending
 * on the state of the last 'active' url.
 *
 * @throws Exception Thrown if any failure occurs.
 */
@Before
public void setup() throws Exception {
    LOG_ENTER();
    eeriePort = getAvailablePort(1240, 49151);

    ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<String, Integer>();
    topologyPortMapping.put("eerie", eeriePort);

    masterServer = new MockServer("master", true);

    GatewayTestConfig config = new GatewayTestConfig();
    config.setGatewayPath("gateway");
    config.setTopologyPortMapping(topologyPortMapping);
    // disable the feature
    config.setGatewayPortMappingEnabled(false);

    driver.setResourceBase(WebHdfsHaFuncTest.class);
    driver.setupLdap(0);
    driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/eerie/webhdfs", USE_MOCK_SERVICES);
    driver.setupGateway(config, "eerie", createTopology("WEBHDFS"), USE_GATEWAY);

    LOG_EXIT();
}
From source file:org.opendaylight.controller.routing.dijkstrav2_implementation.internal.DijkstraImplementation.java
private static boolean updateTopo(Edge edge, Short bw, UpdateType type,
        ConcurrentMap<Short, Graph<Node, Edge>> topologyBWAware,
        ConcurrentHashMap<Short, DijkstraShortestPath<Node, Edge>> sptBWAware) {
    Short baseBW = Short.valueOf((short) 0);
    Graph<Node, Edge> topo = topologyBWAware.get(baseBW);
    DijkstraShortestPath<Node, Edge> spt = sptBWAware.get(baseBW);
    boolean edgePresentInGraph = false;

    if (topo == null) {
        // Create topology for this BW
        Graph<Node, Edge> g = new SparseMultigraph();
        topologyBWAware.put(bw, g);
        topo = topologyBWAware.get(bw);
        sptBWAware.put(bw, new DijkstraShortestPath(g));
        spt = sptBWAware.get(bw);
    }

    if (topo != null) {
        NodeConnector src = edge.getTailNodeConnector();
        NodeConnector dst = edge.getHeadNodeConnector();
        if (spt == null) {
            spt = new DijkstraShortestPath(topo);
            sptBWAware.put(bw, spt);
        }

        switch (type) {
        case ADDED:
            // Make sure the vertices are there before adding the edge
            topo.addVertex(src.getNode());
            topo.addVertex(dst.getNode());
            // Add the link between them
            edgePresentInGraph = topo.containsEdge(edge);
            if (edgePresentInGraph == false) {
                try {
                    topo.addEdge(new Edge(src, dst), src.getNode(), dst.getNode(), EdgeType.DIRECTED);
                } catch (final ConstructionException e) {
                    log.error("", e);
                    return edgePresentInGraph;
                }
            }
        case CHANGED:
            // Mainly raised only on properties update, so not really useful
            // in this case
            break;
        case REMOVED:
            // Remove the edge
            try {
                topo.removeEdge(new Edge(src, dst));
            } catch (final ConstructionException e) {
                log.error("", e);
                return edgePresentInGraph;
            }

            // If the src and dst vertex don't have incoming or
            // outgoing links we can get rid of them
            if (topo.containsVertex(src.getNode()) && (topo.inDegree(src.getNode()) == 0)
                    && (topo.outDegree(src.getNode()) == 0)) {
                log.debug("Removing vertex {}", src);
                topo.removeVertex(src.getNode());
            }

            if (topo.containsVertex(dst.getNode()) && (topo.inDegree(dst.getNode()) == 0)
                    && (topo.outDegree(dst.getNode()) == 0)) {
                log.debug("Removing vertex {}", dst);
                topo.removeVertex(dst.getNode());
            }
            break;
        }

        spt.reset();
        if (bw.equals(baseBW)) {
            // TODO: for now this doesn't work
            // clearMaxThroughput();
        }
    } else {
        log.error("Cannot find topology for BW {} this is unexpected!", bw);
    }
    return edgePresentInGraph;
}
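Note that the get-then-put sequence used above to create the per-bandwidth graph is not atomic: another thread could insert a mapping between the get() and the put(). On Java 8 and later, computeIfAbsent() performs the check and the insertion as a single atomic step. A minimal sketch of that pattern follows; it uses StringBuilder as a stand-in for the JUNG Graph type so the snippet compiles on its own, and is not part of the project code above.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class ComputeIfAbsentSketch {
    public static void main(String[] args) {
        ConcurrentMap<Short, StringBuilder> topologies = new ConcurrentHashMap<>();
        Short bw = (short) 10;

        // get() followed by put() is two separate operations; computeIfAbsent()
        // checks for the key and inserts the computed value atomically, and
        // returns the mapped value either way.
        StringBuilder topo = topologies.computeIfAbsent(bw, k -> new StringBuilder("graph-" + k));
        System.out.println(topo);   // graph-10
    }
}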
From source file:org.wso2.carbon.mediator.cache.CacheMediatorTest.java
/**
 * Test case for isValidCacheEntry() with an expired cache.
 *
 * @throws AxisFault when exception happens on message context creation.
 */
public void testIsValidResponseWithExpiredCache() throws AxisFault {
    CachableResponse cachedResponse = new CachableResponse();
    MessageContext synCtx = createMessageContext();
    ConcurrentHashMap<String, Object> httpHeaders = new ConcurrentHashMap<>();
    httpHeaders.put(HttpHeaders.CACHE_CONTROL, CACHE_CONTROL_HEADER);
    cachedResponse.setHeaderProperties(httpHeaders);
    // Set the response fetched time with an old date time.
    cachedResponse.setResponseFetchedTime(System.currentTimeMillis() - 100000);
    assertEquals("Cached response does not expired.",
            HttpCachingFilter.isValidCacheEntry(cachedResponse, synCtx), true);
}
From source file:org.wso2.carbon.mediator.cache.CacheMediatorTest.java
/**
 * Test case for isValidCacheEntry() with a valid cache response.
 *
 * @throws AxisFault when exception happens on message context creation.
 */
public void testIsValidResponseWithValidCache() throws AxisFault, ParseException {
    CachableResponse cachedResponse = new CachableResponse();
    MessageContext synCtx = createMessageContext();
    Map<String, String> headers = new HashMap<>();
    ConcurrentHashMap<String, Object> httpHeaders = new ConcurrentHashMap<>();
    httpHeaders.put(HttpHeaders.CACHE_CONTROL, CACHE_CONTROL_HEADER);
    cachedResponse.setHeaderProperties(httpHeaders);
    HttpCachingFilter.setResponseCachedTime(headers, cachedResponse);
    assertEquals("Cached response is expired.",
            HttpCachingFilter.isValidCacheEntry(cachedResponse, synCtx), false);
}
From source file:org.apache.marmotta.ucuenca.wk.commons.function.TranslateForSemanticDistance.java
private String traductorYandex(String palabras) throws UnsupportedEncodingException, SQLException, IOException {
    String url = "https://translate.yandex.net/api/v1.5/tr.json/translate";
    //String url = "https://translate.yandex.net/api/v1.5/tr.json/translate?key=trnsl.1.1.20160321T160516Z.43cfb95e23a69315.6c0a2ae19f56388c134615f4740fbb1d400f15d3&lang=en&text=" + URLEncoder.encode(palabras, "UTF-8");
    ConcurrentHashMap<String, String> mp = new ConcurrentHashMap<>();
    mp.put("key", "trnsl.1.1.20160321T160516Z.43cfb95e23a69315.6c0a2ae19f56388c134615f4740fbb1d400f15d3");
    mp.put("lang", "en");
    mp.put("text", palabras);
    mp.put("options", "1");
    boolean c = true;
    int i = 0;
    int maxqueries = 10;
    do {
        try {
            i++;
            if (i == maxqueries) {
                c = false;
            }
            String http = http2(url, mp);
            String res = http;
            JsonParser parser = new JsonParser();
            JsonObject parse = parser.parse(res).getAsJsonObject();
            JsonArray asArray = parse.get("text").getAsJsonArray();
            res = asArray.get(0).getAsString();
            palabras = res;
            c = false;
        } catch (Exception e) {
            e.printStackTrace(new PrintStream(System.out));
            try {
                Thread.sleep(1000);
            } catch (InterruptedException ex) {
                Logger.getLogger(SemanticDistance.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        //System.out.println("In2");
    } while (c);
    return palabras;
}
From source file:org.bibsonomy.recommender.tags.multiplexer.RecommendedTagResultManager.java
/**
 * cache result for given query - if this query is still active
 * ONLY NON-EMPTY RESULTS ARE STORED
 *
 * @param qid
 */
public void addResult(Long qid, Long sid, SortedSet<RecommendedTag> result) {
    if (isActive(qid)) {
        ConcurrentHashMap<Long, SortedSet<RecommendedTag>> queryStore = resultStore.get(qid);
        if ((queryStore != null) && (result != null && result.size() > 0))
            queryStore.put(sid, result);
    }
}
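With plain put(), concurrent writers for the same sid simply overwrite each other (last writer wins). If the first stored result should be kept instead, putIfAbsent() only inserts when no mapping exists and returns the existing value otherwise. A small sketch of the difference, independent of the recommender code above:

import java.util.concurrent.ConcurrentHashMap;

public class PutIfAbsentSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<Long, String> results = new ConcurrentHashMap<>();

        results.put(1L, "first");
        results.put(1L, "second");                 // put() overwrites: last writer wins
        System.out.println(results.get(1L));       // second

        results.putIfAbsent(1L, "third");          // putIfAbsent() keeps the existing value
        System.out.println(results.get(1L));       // second
    }
}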
From source file:org.apache.hadoop.gateway.GatewayPortMappingFuncTest.java
/**
 * Creates a deployment of a gateway instance that all test methods will share. This method also creates a
 * registry of sorts for all of the services that will be used by the test methods.
 * The createTopology method is used to create the topology file that would normally be read from disk.
 * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
 * <p/>
 * This would normally be done once for this suite, but the failure tests start affecting each other depending
 * on the state of the last 'active' url.
 *
 * @throws Exception Thrown if any failure occurs.
 */
@Before
public void setup() throws Exception {
    LOG_ENTER();
    eeriePort = getAvailablePort(1240, 49151);

    ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<String, Integer>();
    topologyPortMapping.put("eerie", eeriePort);

    masterServer = new MockServer("master", true);

    GatewayTestConfig config = new GatewayTestConfig();
    config.setGatewayPath("gateway");
    config.setTopologyPortMapping(topologyPortMapping);

    driver.setResourceBase(WebHdfsHaFuncTest.class);
    driver.setupLdap(0);
    driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/eerie/webhdfs", USE_MOCK_SERVICES);
    driver.setupGateway(config, "eerie", createTopology("WEBHDFS"), USE_GATEWAY);

    LOG_EXIT();
}
From source file:org.getobjects.ofs.fs.OFSFileManager.java
public void cacheObject(String _section, IOFSFileInfo _info, Object _object) {
    if (_section == null || _info == null)
        return;

    ConcurrentHashMap<IOFSFileInfo, Object> sectionCache = this.cacheForSection(_section);
    CacheNode entry = new CacheNode(this.etagFromFileInfo(_info), _object);
    sectionCache.put(_info, entry);
}
From source file:org.jasig.cas.services.DefaultServicesManagerImpl.java
private void load() {
    final ConcurrentHashMap<Long, RegisteredService> localServices =
            new ConcurrentHashMap<Long, RegisteredService>();
    for (final RegisteredService r : this.serviceRegistryDao.load()) {
        log.debug("Adding registered service " + r.getServiceId());
        localServices.put(r.getId(), r);
    }
    this.services = localServices;
    log.info(String.format("Loaded %s services.", this.services.size()));
}