List of usage examples for java.util.concurrent ConcurrentHashMap entrySet
EntrySetView entrySet()
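Every example below iterates or streams this entry-set view. As a minimal sketch of the call itself (the class and variable names here are illustrative and not taken from any of the projects below), entrySet() returns a view backed by the map whose iterator is weakly consistent, so it can be read, and even modified through the view, while other threads update the map:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class EntrySetSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> counts = new ConcurrentHashMap<>();
        counts.put("a", 1);
        counts.put("b", 2);

        // Weakly consistent iteration: never throws ConcurrentModificationException,
        // even if other threads insert or remove entries while we loop.
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }

        // The view is backed by the map: removing through it removes from the map.
        counts.entrySet().removeIf(e -> e.getValue() > 1);
        System.out.println(counts); // {a=1}
    }
}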
From source file: co.paralleluniverse.photon.Photon.java

public static void main(final String[] args) throws InterruptedException, IOException {
    final Options options = new Options();
    options.addOption("rate", true, "Requests per second (default " + rateDefault + ")");
    options.addOption("duration", true,
            "Minimum test duration in seconds: will wait for <duration> * <rate> requests to terminate or, if progress check enabled, no progress after <duration> (default "
                    + durationDefault + ")");
    options.addOption("maxconnections", true, "Maximum number of open connections (default " + maxConnectionsDefault + ")");
    options.addOption("timeout", true, "Connection and read timeout in millis (default " + timeoutDefault + ")");
    options.addOption("print", true, "Print cycle in millis, 0 to disable intermediate statistics (default " + printCycleDefault + ")");
    options.addOption("check", true, "Progress check cycle in millis, 0 to disable progress check (default " + checkCycleDefault + ")");
    options.addOption("stats", false, "Print full statistics when finish (default false)");
    options.addOption("minmax", false, "Print min/mean/stddev/max stats when finish (default false)");
    options.addOption("name", true, "Test name to print in the statistics (default '" + testNameDefault + "')");
    options.addOption("help", false, "Print help");

    try {
        final CommandLine cmd = new BasicParser().parse(options, args);
        final String[] ar = cmd.getArgs();
        if (cmd.hasOption("help") || ar.length != 1)
            printUsageAndExit(options);

        final String url = ar[0];
        final int timeout = Integer.parseInt(cmd.getOptionValue("timeout", timeoutDefault));
        final int maxConnections = Integer.parseInt(cmd.getOptionValue("maxconnections", maxConnectionsDefault));
        final int duration = Integer.parseInt(cmd.getOptionValue("duration", durationDefault));
        final int printCycle = Integer.parseInt(cmd.getOptionValue("print", printCycleDefault));
        final int checkCycle = Integer.parseInt(cmd.getOptionValue("check", checkCycleDefault));
        final String testName = cmd.getOptionValue("name", testNameDefault);
        final int rate = Integer.parseInt(cmd.getOptionValue("rate", rateDefault));

        final MetricRegistry metrics = new MetricRegistry();
        final Meter requestMeter = metrics.meter("request");
        final Meter responseMeter = metrics.meter("response");
        final Meter errorsMeter = metrics.meter("errors");
        final Logger log = LoggerFactory.getLogger(Photon.class);
        final ConcurrentHashMap<String, AtomicInteger> errors = new ConcurrentHashMap<>();
        final HttpGet request = new HttpGet(url);
        final StripedTimeSeries<Long> sts = new StripedTimeSeries<>(30000, false);
        final StripedHistogram sh = new StripedHistogram(60000, 5);

        log.info("name: " + testName + " url:" + url + " rate:" + rate + " duration:" + duration
                + " maxconnections:" + maxConnections + ", " + "timeout:" + timeout);

        final DefaultConnectingIOReactor ioreactor = new DefaultConnectingIOReactor(IOReactorConfig.custom()
                .setConnectTimeout(timeout).setIoThreadCount(10).setSoTimeout(timeout).build());

        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            final List<ExceptionEvent> events = ioreactor.getAuditLog();
            if (events != null)
                events.stream().filter(event -> event != null).forEach(event -> {
                    System.err.println("Apache Async HTTP Client I/O Reactor Error Time: " + event.getTimestamp());
                    //noinspection ThrowableResultOfMethodCallIgnored
                    if (event.getCause() != null)
                        //noinspection ThrowableResultOfMethodCallIgnored
                        event.getCause().printStackTrace();
                });
            if (cmd.hasOption("stats"))
                printFinishStatistics(errorsMeter, sts, sh, testName);
            if (!errors.keySet().isEmpty())
                errors.entrySet().stream()
                        .forEach(p -> log.info(testName + " " + p.getKey() + " " + p.getValue() + "ms"));
            System.out.println(testName + " responseTime(90%): " + sh.getHistogramData().getValueAtPercentile(90) + "ms");
            if (cmd.hasOption("minmax")) {
                final HistogramData hd = sh.getHistogramData();
                System.out.format("%s %8s%8s%8s%8s\n", testName, "min", "mean", "sd", "max");
                System.out.format("%s %8d%8.2f%8.2f%8d\n", testName, hd.getMinValue(), hd.getMean(),
                        hd.getStdDeviation(), hd.getMaxValue());
            }
        }));

        final PoolingNHttpClientConnectionManager mngr = new PoolingNHttpClientConnectionManager(ioreactor);
        mngr.setDefaultMaxPerRoute(maxConnections);
        mngr.setMaxTotal(maxConnections);
        final CloseableHttpAsyncClient ahc = HttpAsyncClientBuilder.create().setConnectionManager(mngr)
                .setDefaultRequestConfig(RequestConfig.custom().setLocalAddress(null).build()).build();

        try (final CloseableHttpClient client = new FiberHttpClient(ahc)) {
            final int num = duration * rate;
            final CountDownLatch cdl = new CountDownLatch(num);
            final Semaphore sem = new Semaphore(maxConnections);
            final RateLimiter rl = RateLimiter.create(rate);

            spawnStatisticsThread(printCycle, cdl, log, requestMeter, responseMeter, errorsMeter, testName);

            for (int i = 0; i < num; i++) {
                rl.acquire();
                if (sem.availablePermits() == 0)
                    log.debug("Maximum connections count reached, waiting...");
                sem.acquireUninterruptibly();

                new Fiber<Void>(() -> {
                    requestMeter.mark();
                    final long start = System.nanoTime();
                    try {
                        try (final CloseableHttpResponse ignored = client.execute(request)) {
                            responseMeter.mark();
                        } catch (final Throwable t) {
                            markError(errorsMeter, errors, t);
                        }
                    } catch (final Throwable t) {
                        markError(errorsMeter, errors, t);
                    } finally {
                        final long now = System.nanoTime();
                        final long millis = TimeUnit.NANOSECONDS.toMillis(now - start);
                        sts.record(start, millis);
                        sh.recordValue(millis);
                        sem.release();
                        cdl.countDown();
                    }
                }).start();
            }
            spawnProgressCheckThread(log, duration, checkCycle, cdl);
            cdl.await();
        }
    } catch (final ParseException ex) {
        System.err.println("Parsing failed. Reason: " + ex.getMessage());
    }
}
From source file: com.taobao.tddl.interact.monitor.TotalStatMonitor.java

/**
 * Flush the db/table counter map to the log.
 *
 * @param map
 */
private static void flushDbTabLogMapToFile(ConcurrentHashMap<String, AtomicLong> map) {
    SimpleDateFormat df = new SimpleDateFormat("yyy-MM-dd HH:mm:ss:SSS");
    String time = df.format(new Date());
    for (Entry<String, AtomicLong> entry : map.entrySet()) {
        String key = entry.getKey();
        StringBuilder sb = new StringBuilder().append(appName).append(logFieldSep).append(key)
                .append(logFieldSep).append(entry.getValue()).append(logFieldSep).append(time).append(linesep);
        TotalLogInit.DB_TAB_LOG.info(sb.toString());
    }
}
From source file: com.taobao.tddl.interact.monitor.TotalStatMonitor.java

/**
 * Flush the vslot counter map to the log.
 *
 * @param map
 */
private static void flushVSlotLogMapToFile(ConcurrentHashMap<String, AtomicLong> map) {
    SimpleDateFormat df = new SimpleDateFormat("yyy-MM-dd HH:mm:ss:SSS");
    String time = df.format(new Date());
    for (Entry<String, AtomicLong> entry : map.entrySet()) {
        String key = entry.getKey();
        StringBuilder sb = new StringBuilder().append(appName).append(logFieldSep).append(key)
                .append(logFieldSep).append(entry.getValue()).append(logFieldSep).append(time).append(linesep);
        TotalLogInit.VSLOT_LOG.info(sb.toString());
    }
}
From source file: com.taobao.tddl.common.StatMonitor.java

@SuppressWarnings("unused")
private static void writeLogMapToFile_Nagios(ConcurrentHashMap<String, Values> map) {
    for (Entry<String, Values> entry : map.entrySet()) {
        Values values = entry.getValue();
        if (values != null) {
            String key = new StringBuilder(entry.getKey()).append("||").toString();
            String value = new StringBuilder().append(values.value1).append("|").append(values.value2)
                    .append("|").append(values.value2)
                    .append((double) values.value1.get() / values.value2.get()).toString();
            NagiosUtils.addNagiosLog(key, value);
        }
    }
}
From source file: com.taobao.tddl.common.StatMonitor.java

/**
 * SELECT xxx #@#my065037_cm4_feel_25#@#EXECUTE_A_SQL_SUCCESS#@#1#@#1#@#1#@#1#@#10-12-27 13:58:35:224
 * SELECT sss #@#my065026_cm4_feel_03#@#EXECUTE_A_SQL_SUCCESS#@#1#@#1#@#1#@#1#@#10-12-27 13:58:35:224
 */
private static void writeLogMapToFile(ConcurrentHashMap<String, Values> map) {
    String time = BufferedStatLogWriter.df.format(new Date());
    for (Entry<String, Values> entry : map.entrySet()) {
        Values values = entry.getValue();
        if (values != null) {
            String key = entry.getKey();
            LoggerInit.TDDL_Snapshot_LOG
                    .warn(new StringBuilder().append(values.value1).append(BufferedStatLogWriter.logFieldSep)
                            .append(values.value2).append(BufferedStatLogWriter.logFieldSep).append(key)
                            .append(BufferedStatLogWriter.logFieldSep).append(time)
                            .append(BufferedStatLogWriter.linesep));
        }
    }
}
From source file: ubicrypt.core.dto.VClock.java

@Override
public Object clone() throws CloneNotSupportedException {
    return new VClock(map.entrySet().stream().collect(ConcurrentHashMap<Integer, AtomicLong>::new,
            (ConcurrentHashMap<Integer, AtomicLong> map, Map.Entry<Integer, AtomicLong> entry) -> map
                    .put(entry.getKey(), new AtomicLong(entry.getValue().longValue())),
            (ConcurrentHashMap<Integer, AtomicLong> map1, ConcurrentHashMap<Integer, AtomicLong> map2) -> map1
                    .putAll(map2)));
}
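The three-argument collect above passes the ConcurrentHashMap constructor as the supplier, a put-based accumulator, and a putAll combiner so the clone gets fresh AtomicLong instances. A shorter variant, sketched here as a hypothetical helper rather than ubicrypt's actual code, uses Collectors.toConcurrentMap when a ConcurrentMap result type is acceptable:

import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;

final class VClockCopy { // hypothetical helper, not part of ubicrypt's VClock
    static ConcurrentMap<Integer, AtomicLong> copy(Map<Integer, AtomicLong> source) {
        // Wrap each value in a new AtomicLong so the copy shares no mutable state.
        return source.entrySet().stream()
                .collect(Collectors.toConcurrentMap(
                        Map.Entry::getKey,
                        e -> new AtomicLong(e.getValue().get())));
    }
}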
From source file: net.yacy.kelondro.util.FileUtils.java

public static ConcurrentHashMap<String, byte[]> loadMapB(final File f) {
    ConcurrentHashMap<String, String> m = loadMap(f);
    if (m == null) return null;
    ConcurrentHashMap<String, byte[]> mb = new ConcurrentHashMap<String, byte[]>();
    for (Map.Entry<String, String> e : m.entrySet())
        mb.put(e.getKey(), UTF8.getBytes(e.getValue()));
    return mb;
}
From source file: com.joyfulmongo.monitor.MonitorManager.java

private String getString(ConcurrentHashMap<String, Record> map) {
    String result = "";
    List<String> keys = new ArrayList<String>();
    Set<Entry<String, Record>> entries = map.entrySet();
    Iterator<Entry<String, Record>> iter = entries.iterator();
    while (iter.hasNext()) {
        keys.add(iter.next().getKey());
    }
    Collections.sort(keys);
    for (String key : keys) {
        Record record = map.get(key);
        result += key + "," + record + "\n";
    }
    if (result.length() == 0) {
        result = "No result";
    }
    return result;
}
From source file: org.apache.synapse.transport.amqp.ha.AMQPTransportReconnectHandler.java

public void run() {
    try {
        AMQPTransportHAEntry entry = blockedTasks.take();
        if (entry != null) {
            Map<String, String> params = connectionFactoryManager
                    .getConnectionFactory(entry.getConnectionFactoryName()).getParameters();

            int count = 1;
            long retryDuration = initialReconnectDuration;
            while (true) {
                try {
                    Thread.sleep(initialReconnectDuration);
                    new AMQPTransportConnectionFactory(params, es);
                    log.info("The reconnection attempt '" + count + "' was successful");
                    break;
                } catch (AMQPTransportException e) {
                    retryDuration = (long) (retryDuration * reconnectionProgressionFactor);
                    if (retryDuration > maxReconnectionDuration) {
                        retryDuration = initialReconnectDuration;
                        log.info("The retry duration exceeded the maximum reconnection duration."
                                + " The retry duration is set to initial reconnection duration "
                                + "value(" + initialReconnectDuration + "s)");
                    }
                    log.info("The reconnection attempt number '" + count++ + "' failed. Next "
                            + "re-try will be after '" + (retryDuration / 1000) + "' seconds");
                    try {
                        Thread.sleep(retryDuration);
                    } catch (InterruptedException ignore) {
                        // we need to block
                    }
                }
            }

            ConcurrentHashMap<String, AMQPTransportConnectionFactory> allFac = connectionFactoryManager
                    .getAllFactories();
            for (Map.Entry me : allFac.entrySet()) {
                String name = (String) me.getKey();
                Map<String, String> param = ((AMQPTransportConnectionFactory) me.getValue()).getParameters();
                connectionFactoryManager.removeConnectionFactory(name);
                connectionFactoryManager.addConnectionFactory(name,
                        new AMQPTransportConnectionFactory(param, es));
                log.info("A new connection factory was created for -> '" + name + "'");
            }

            String conFacName = entry.getConnectionFactoryName();
            AMQPTransportConnectionFactory cf = connectionFactoryManager.getConnectionFactory(conFacName);
            connectionMap.put(entry.getKey(), new AMQPTransportHABrokerEntry(cf.getChannel(), cf.getConnection()));
            entry.getLock().release();

            while (!blockedTasks.isEmpty()) {
                entry = blockedTasks.take();
                conFacName = entry.getConnectionFactoryName();
                cf = connectionFactoryManager.getConnectionFactory(conFacName);
                connectionMap.put(entry.getKey(),
                        new AMQPTransportHABrokerEntry(cf.getChannel(), cf.getConnection()));
                if (log.isDebugEnabled()) {
                    log.info("The worker task with key '" + entry.getKey() + "' was combined with "
                            + "a new connection factory");
                }
                entry.getLock().release();
            }
        }
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    } catch (AMQPTransportException e) {
        log.error("High Availability handler just died!. It's time to reboot the system.", e);
    }
}
From source file: org.apache.marmotta.ucuenca.wk.commons.function.SemanticDistance.java

private double mapEntry(ConcurrentHashMap<String, Double> result) {
    double r = 0;
    for (Map.Entry<String, Double> cc : result.entrySet()) {
        r = cc.getValue();
    }
    return r;
}