Usage examples for java.util.concurrent.ConcurrentHashMap.keySet(), which returns a KeySetView of the map's keys. Each example below is taken from the source file named above it; follow the source link to view the full original file.
From source file:co.paralleluniverse.photon.Photon.java
/**
 * Entry point for the Photon HTTP load generator.
 *
 * Parses command-line options, then issues {@code rate} HTTP GET requests per
 * second against the single URL argument for {@code duration} seconds, running
 * each request on a lightweight fiber. Latency and error statistics are
 * accumulated while the test runs and printed from a JVM shutdown hook.
 *
 * @param args command-line arguments; exactly one positional argument (the
 *             target URL) is expected, plus the options registered below
 * @throws InterruptedException if the main thread is interrupted while waiting
 *                              for outstanding requests to finish
 * @throws IOException          if the HTTP client cannot be closed cleanly
 */
public static void main(final String[] args) throws InterruptedException, IOException {
    // Register all supported CLI options; defaults come from class-level
    // constants (declared outside this view).
    final Options options = new Options();
    options.addOption("rate", true, "Requests per second (default " + rateDefault + ")");
    options.addOption("duration", true,
            "Minimum test duration in seconds: will wait for <duration> * <rate> requests to terminate or, if progress check enabled, no progress after <duration> (default "
                    + durationDefault + ")");
    options.addOption("maxconnections", true,
            "Maximum number of open connections (default " + maxConnectionsDefault + ")");
    options.addOption("timeout", true, "Connection and read timeout in millis (default " + timeoutDefault + ")");
    options.addOption("print", true,
            "Print cycle in millis, 0 to disable intermediate statistics (default " + printCycleDefault + ")");
    options.addOption("check", true,
            "Progress check cycle in millis, 0 to disable progress check (default " + checkCycleDefault + ")");
    options.addOption("stats", false, "Print full statistics when finish (default false)");
    options.addOption("minmax", false, "Print min/mean/stddev/max stats when finish (default false)");
    options.addOption("name", true, "Test name to print in the statistics (default '" + testNameDefault + "')");
    options.addOption("help", false, "Print help");
    try {
        final CommandLine cmd = new BasicParser().parse(options, args);
        final String[] ar = cmd.getArgs();
        // Exactly one positional argument (the target URL) is required.
        if (cmd.hasOption("help") || ar.length != 1)
            printUsageAndExit(options);
        final String url = ar[0];
        final int timeout = Integer.parseInt(cmd.getOptionValue("timeout", timeoutDefault));
        final int maxConnections = Integer.parseInt(cmd.getOptionValue("maxconnections", maxConnectionsDefault));
        final int duration = Integer.parseInt(cmd.getOptionValue("duration", durationDefault));
        final int printCycle = Integer.parseInt(cmd.getOptionValue("print", printCycleDefault));
        final int checkCycle = Integer.parseInt(cmd.getOptionValue("check", checkCycleDefault));
        final String testName = cmd.getOptionValue("name", testNameDefault);
        final int rate = Integer.parseInt(cmd.getOptionValue("rate", rateDefault));

        // Metrics: request/response/error throughput meters plus striped
        // time-series and histogram for latency recording under contention.
        final MetricRegistry metrics = new MetricRegistry();
        final Meter requestMeter = metrics.meter("request");
        final Meter responseMeter = metrics.meter("response");
        final Meter errorsMeter = metrics.meter("errors");
        final Logger log = LoggerFactory.getLogger(Photon.class);
        // Error-class name -> occurrence count; shared by all request fibers.
        final ConcurrentHashMap<String, AtomicInteger> errors = new ConcurrentHashMap<>();
        final HttpGet request = new HttpGet(url);
        final StripedTimeSeries<Long> sts = new StripedTimeSeries<>(30000, false);
        final StripedHistogram sh = new StripedHistogram(60000, 5);

        log.info("name: " + testName + " url:" + url + " rate:" + rate + " duration:" + duration
                + " maxconnections:" + maxConnections + ", " + "timeout:" + timeout);

        final DefaultConnectingIOReactor ioreactor = new DefaultConnectingIOReactor(IOReactorConfig.custom()
                .setConnectTimeout(timeout).setIoThreadCount(10).setSoTimeout(timeout).build());

        // Shutdown hook prints reactor audit errors, the per-error-class
        // counts, and the latency summary — so results appear even on Ctrl-C.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            final List<ExceptionEvent> events = ioreactor.getAuditLog();
            if (events != null)
                events.stream().filter(event -> event != null).forEach(event -> {
                    System.err.println(
                            "Apache Async HTTP Client I/O Reactor Error Time: " + event.getTimestamp());
                    //noinspection ThrowableResultOfMethodCallIgnored
                    if (event.getCause() != null)
                        //noinspection ThrowableResultOfMethodCallIgnored
                        event.getCause().printStackTrace();
                });
            if (cmd.hasOption("stats"))
                printFinishStatistics(errorsMeter, sts, sh, testName);
            if (!errors.keySet().isEmpty())
                errors.entrySet().stream()
                        .forEach(p -> log.info(testName + " " + p.getKey() + " " + p.getValue() + "ms"));
            System.out.println(
                    testName + " responseTime(90%): " + sh.getHistogramData().getValueAtPercentile(90) + "ms");
            if (cmd.hasOption("minmax")) {
                final HistogramData hd = sh.getHistogramData();
                System.out.format("%s %8s%8s%8s%8s\n", testName, "min", "mean", "sd", "max");
                System.out.format("%s %8d%8.2f%8.2f%8d\n", testName, hd.getMinValue(), hd.getMean(),
                        hd.getStdDeviation(), hd.getMaxValue());
            }
        }));

        // Connection pool is capped at maxConnections both per-route and in
        // total (single target URL, so the two limits coincide).
        final PoolingNHttpClientConnectionManager mngr = new PoolingNHttpClientConnectionManager(ioreactor);
        mngr.setDefaultMaxPerRoute(maxConnections);
        mngr.setMaxTotal(maxConnections);
        final CloseableHttpAsyncClient ahc = HttpAsyncClientBuilder.create().setConnectionManager(mngr)
                .setDefaultRequestConfig(RequestConfig.custom().setLocalAddress(null).build()).build();
        try (final CloseableHttpClient client = new FiberHttpClient(ahc)) {
            final int num = duration * rate;
            // cdl reaches zero when every issued request has completed
            // (successfully or not); sem throttles in-flight connections.
            final CountDownLatch cdl = new CountDownLatch(num);
            final Semaphore sem = new Semaphore(maxConnections);
            final RateLimiter rl = RateLimiter.create(rate);

            spawnStatisticsThread(printCycle, cdl, log, requestMeter, responseMeter, errorsMeter, testName);

            for (int i = 0; i < num; i++) {
                rl.acquire(); // pace issuance to the requested rate
                if (sem.availablePermits() == 0)
                    log.debug("Maximum connections count reached, waiting...");
                sem.acquireUninterruptibly();

                // One fiber per request; the finally block guarantees the
                // permit is released and the latch counted down on any path.
                new Fiber<Void>(() -> {
                    requestMeter.mark();
                    final long start = System.nanoTime();
                    try {
                        try (final CloseableHttpResponse ignored = client.execute(request)) {
                            responseMeter.mark();
                        } catch (final Throwable t) {
                            markError(errorsMeter, errors, t);
                        }
                    } catch (final Throwable t) {
                        markError(errorsMeter, errors, t);
                    } finally {
                        final long now = System.nanoTime();
                        final long millis = TimeUnit.NANOSECONDS.toMillis(now - start);
                        sts.record(start, millis);
                        sh.recordValue(millis);
                        sem.release();
                        cdl.countDown();
                    }
                }).start();
            }
            spawnProgressCheckThread(log, duration, checkCycle, cdl);
            cdl.await();
        }
    } catch (final ParseException ex) {
        System.err.println("Parsing failed. Reason: " + ex.getMessage());
    }
}
From source file:Main.java
public static <T1, T2> void removeHashMapElementByHash(ConcurrentHashMap<T1, T2> target, int hashcode) { Iterator<T1> iter = target.keySet().iterator(); Object key = null;//ww w. ja v a2s.c o m while (iter.hasNext()) { key = iter.next(); if (key.hashCode() == hashcode) { target.remove(key); } } }
From source file:Main.java
/**
 * Returns the value of the first entry whose key reports the given hash code,
 * or {@code null} if no key matches.
 *
 * <p>"First" follows the iteration order of {@link ConcurrentHashMap#keySet()},
 * which is unspecified — with multiple colliding keys the choice is arbitrary.
 *
 * @param target   the map to search; must not be null
 * @param hashcode the hash code identifying the key to look up
 * @return the matching value, or {@code null} when no key has that hash code
 */
public static <T1, T2> T2 getHashMapElementByHash(ConcurrentHashMap<T1, T2> target, int hashcode) {
    // Typed enhanced-for instead of a raw Object + explicit Iterator:
    // preserves the key type T1 and reads more clearly.
    for (T1 key : target.keySet()) {
        if (key.hashCode() == hashcode) {
            return target.get(key);
        }
    }
    return null;
}
From source file:com.netflix.dyno.connectionpool.impl.MonitorConsoleResource.java
@SuppressWarnings("unchecked") @Path("/topology/{cpName}") @GET//from w w w . j a v a2s . co m @Consumes(MediaType.TEXT_PLAIN) @Produces(MediaType.APPLICATION_JSON) public String getConnectionPoolToplogy(@PathParam("cpName") String cpName) { TokenPoolTopology topology = MonitorConsole.getInstance().getTopology(cpName); if (topology == null) { return "Not Found: " + cpName; } ConcurrentHashMap<String, List<TokenStatus>> map = topology.getAllTokens(); JSONObject json = new JSONObject(); for (String rack : map.keySet()) { List<TokenStatus> tokens = map.get(rack); json.put(rack, getTokenStatusMap(tokens)); } return json.toJSONString(); }
From source file:org.apache.vxquery.indexing.MetaFileUtilTest.java
/**
 * Validate the updated metadata: every key in the modified map must be present
 * in the metadata read back from the file, and each entry's serialized XML
 * form must match.
 */
@Test
public void step6_testVerifyMetadataChange() {
    // Metadata re-read from disk after the update performed by an earlier step.
    ConcurrentHashMap<String, XmlMetadata> fromFile = metaFileUtil.getMetadata();
    Set<String> from = fromFile.keySet();
    Set<String> modified = modifiedMap.keySet();
    // The persisted key set must cover everything we modified (it may contain
    // additional, unmodified entries).
    Assert.assertTrue(from.containsAll(modified));
    for (String key : modified) {
        // Compare via the canonical XML string form of each metadata object.
        Assert.assertEquals(TestConstants.getXMLMetadataString(modifiedMap.get(key)),
                TestConstants.getXMLMetadataString(fromFile.get(key)));
    }
}
From source file:org.apache.vxquery.indexing.MetaFileUtilTest.java
/**
 * Validate the content of the metadata file: every key in the initial map must
 * be present in the metadata read back from the file, and each entry's
 * serialized XML form must match.
 */
@Test
public void step3_testValidateMetadataFile() {
    // Metadata re-read from disk after the initial write performed earlier.
    ConcurrentHashMap<String, XmlMetadata> fromFile = metaFileUtil.getMetadata();
    Set<String> from = fromFile.keySet();
    Set<String> initial = initialMap.keySet();
    // The persisted key set must cover everything initially written (it may
    // contain additional entries).
    Assert.assertTrue(from.containsAll(initial));
    for (String key : initial) {
        // Compare via the canonical XML string form of each metadata object.
        Assert.assertEquals(TestConstants.getXMLMetadataString(initialMap.get(key)),
                TestConstants.getXMLMetadataString(fromFile.get(key)));
    }
}
From source file:org.apache.niolex.config.event.ConfigEventDispatcher.java
/** * Fire the specified event to all the listeners registered to this dispatcher. * @param groupName/*from w w w.j a v a 2s . c om*/ * @param data */ public void fireClientEvent(String groupName, PacketData data) { ConcurrentHashMap<IPacketWriter, String> queue = clients.get(groupName); if (queue != null) { for (IPacketWriter wt : queue.keySet()) { wt.handleWrite(data); } } }
From source file:com.redhat.red.offliner.ftest.SinglePlaintextDownloadNoChecksumFTest.java
/**
 * In general, we should only have one test method per functional test. This allows for the best parallelism when we
 * execute the tests, especially if the setup takes some time.
 *
 * <p>Scenario: a single plaintext entry WITHOUT a checksum is downloaded along
 * with its .sha1/.md5 companions, and the run must finish error-free with the
 * expected content on disk.
 *
 * @throws Exception In case anything (anything at all) goes wrong!
 */
@Test
public void run() throws Exception {
    // We only need one repo server.
    TestRepositoryServer server = newRepositoryServer();

    // Generate some test content
    String path = contentGenerator.newArtifactPath("jar");
    byte[] content = contentGenerator.newBinaryContent(1024);

    // Register the generated content by writing it to the path within the repo server's dir structure.
    // This way when the path is requested it can be downloaded instead of returning a 404.
    server.registerContent(path, content);
    // Also register the checksum companions so they can be fetched alongside
    // the artifact (3 downloads total).
    server.registerContent(path + Main.SHA_SUFFIX, sha1Hex(content));
    server.registerContent(path + Main.MD5_SUFFIX, md5Hex(content));

    // Write the plaintext file we'll use as input.
    File plaintextList = temporaryFolder.newFile("artifact-list." + getClass().getSimpleName() + ".txt");
    // NOTE: despite the variable name, this entry intentionally carries NO
    // checksum — that is the scenario under test.
    String pathWithChecksum = contentGenerator.newPlaintextEntryWithoutChecksum(path);
    FileUtils.write(plaintextList, pathWithChecksum);

    Options opts = new Options();
    opts.setBaseUrls(Collections.singletonList(server.getBaseUri()));

    // Capture the downloads here so we can verify the content.
    File downloads = temporaryFolder.newFolder();
    opts.setDownloads(downloads);
    opts.setLocations(Collections.singletonList(plaintextList.getAbsolutePath()));
    opts.setConnections(1);

    // run `new Main(opts).run()` and return the Main instance so we can query it for errors, etc.
    Main finishedMain = run(opts);

    // Dump any recorded errors (with stack traces) before asserting, so a
    // failure is diagnosable from the test log.
    ConcurrentHashMap<String, Throwable> errors = finishedMain.getErrors();
    System.out.printf("ERRORS:\n\n%s\n\n\n",
            StringUtils.join(errors.keySet().stream()
                    .map(k -> "ERROR: " + k + ": " + errors.get(k).getMessage() + "\n  "
                            + StringUtils.join(errors.get(k).getStackTrace(), "\n  "))
                    .collect(Collectors.toList()), "\n\n"));

    assertThat("Wrong number of downloads logged. Should have been 3 including checksums.",
            finishedMain.getDownloaded(), equalTo(3));
    assertThat("Errors should be empty!", finishedMain.getErrors().isEmpty(), equalTo(true));

    File downloaded = new File(downloads, path);
    assertThat("File: " + path + " doesn't seem to have been downloaded!", downloaded.exists(), equalTo(true));
    assertThat("Downloaded file: " + path + " contains the wrong content!",
            FileUtils.readFileToByteArray(downloaded), equalTo(content));
}
From source file:org.bibsonomy.recommender.tags.multiplexer.RecommendedTagResultManager.java
/** * Returns ids of those recommenders which delivered tag for given query - if the query * is cached, otherwise null./*from w ww . java2 s . c o m*/ * * @param qid * @return */ public Set<Long> getActiveRecommender(Long qid) { if (isCached(qid)) { ConcurrentHashMap<Long, SortedSet<RecommendedTag>> queryStore = resultStore.get(qid); if (queryStore != null) return queryStore.keySet(); } return null; }
From source file:com.alibaba.wasp.meta.TableSchemaCacheReader.java
/**
 * Finds the first composite-index entry for the given table whose name starts
 * with the supplied composite-name prefix.
 *
 * @param tableName     the table whose composite indexes are searched
 * @param compositeName the prefix to left-match against index names
 * @return the matching index list, or {@code null} when the table has no
 *         composite indexes or no index name matches the prefix
 */
public List<Index> leftMatchIndexsByComposite(String tableName, String compositeName) {
    ConcurrentHashMap<String, List<Index>> tableIndexes = compositeIndex.get(tableName);
    // Guard against tables with no registered composite indexes: previously
    // this dereferenced null and threw a NullPointerException.
    if (tableIndexes == null) {
        return null;
    }
    // "First" match follows the map's unspecified iteration order — with
    // multiple matching prefixes the choice is arbitrary.
    for (String indexName : tableIndexes.keySet()) {
        if (indexName.startsWith(compositeName)) {
            return tableIndexes.get(indexName);
        }
    }
    return null;
}