List of usage examples for com.google.common.util.concurrent RateLimiter create
public static RateLimiter create(double permitsPerSecond)
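
Before the real-world example below, here is a minimal, self-contained sketch of how create(permitsPerSecond) is typically paired with acquire() and tryAcquire(); the 2.0 permits-per-second rate and the loop are illustrative values only, not taken from the source file:

import com.google.common.util.concurrent.RateLimiter;
import java.util.concurrent.TimeUnit;

public class RateLimiterCreateExample {
  public static void main(String[] args) {
    // Issue at most 2 permits per second, smoothed over time.
    RateLimiter limiter = RateLimiter.create(2.0);

    for (int i = 0; i < 5; i++) {
      // acquire() blocks until a permit is available and returns the time spent waiting, in seconds.
      double waited = limiter.acquire();
      System.out.println("task " + i + " waited " + waited + "s");
    }

    // tryAcquire() does not block; it reports whether a permit could be obtained within the timeout.
    boolean ok = limiter.tryAcquire(1, 0, TimeUnit.MILLISECONDS);
    System.out.println("immediate permit available: " + ok);
  }
}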
From source file: io.warp10.continuum.ThrottlingManager.java
public static void init() {
  if (initialized.get()) {
    return;
  }

  final Properties properties = WarpConfig.getProperties();

  String rate = properties.getProperty(Configuration.THROTTLING_MANAGER_RATE_DEFAULT);

  if (null != rate) {
    DEFAULT_RATE_PRODUCER = Double.parseDouble(rate);
  }

  String mads = properties.getProperty(Configuration.THROTTLING_MANAGER_MADS_DEFAULT);

  if (null != mads) {
    DEFAULT_MADS_PRODUCER = Long.parseLong(mads);
  }

  String maxwait = properties.getProperty(Configuration.THROTTLING_MANAGER_MAXWAIT_DEFAULT);

  if (null != maxwait) {
    MAXWAIT_PER_DATAPOINT = Long.parseLong(maxwait);
  } else {
    MAXWAIT_PER_DATAPOINT = MAXWAIT_PER_DATAPOINT_DEFAULT;
  }

  //
  // Start the thread which will read the throttling configuration periodically
  //

  final String dir = properties.getProperty(Configuration.THROTTLING_MANAGER_DIR);

  final long now = System.currentTimeMillis();
  final long rampup = Long.parseLong(properties.getProperty(Configuration.THROTTLING_MANAGER_RAMPUP, "0"));

  Thread t = new Thread() {

    // Set of files already read
    private Set<String> read = new HashSet<String>();

    long delay = Long.parseLong(properties.getProperty(Configuration.THROTTLING_MANAGER_PERIOD, "60000"));

    @Override
    public void run() {
      while (true) {
        //
        // If manager was not enabled, sleep then continue the loop
        //

        if (!enabled) {
          try {
            Thread.sleep(100);
          } catch (InterruptedException ie) {
          }
          continue;
        }

        //
        // Open the directory
        //

        final File root = new File(dir);

        String[] files = root.list(new FilenameFilter() {
          @Override
          public boolean accept(File d, String name) {
            if (!d.equals(root)) {
              return false;
            }
            if (!name.endsWith(THROTTLING_MANAGER_SUFFIX)) {
              return false;
            }
            return true;
          }
        });

        // Sort files in lexicographic order
        if (null == files) {
          files = new String[0];
        }
        Arrays.sort(files);

        Set<String> newreads = new HashSet<String>();

        for (String file : files) {
          if (read.contains(file)) {
            newreads.add(file);
            continue;
          }

          //
          // Read each line
          //

          try {
            BufferedReader br = new BufferedReader(new FileReader(new File(dir, file)));

            while (true) {
              String line = br.readLine();

              if (null == line) {
                break;
              }

              line = line.trim();

              if (line.startsWith("#")) {
                continue;
              }

              String[] tokens = line.split(":");

              if (5 != tokens.length) {
                continue;
              }

              // Lines end with ':#'
              if (!"#".equals(tokens[4])) {
                continue;
              }

              String entity = tokens[0];
              String mads = tokens[1];
              String rate = tokens[2];
              String estimator = tokens[3];

              boolean isProducer = entity.charAt(0) != '+';

              if (isProducer) {
                // Attempt to read UUID
                UUID uuid = UUID.fromString(entity);
                entity = uuid.toString().toLowerCase();
              } else {
                // Remove leading '+' and decode application name which may be URL encoded
                entity = URLDecoder.decode(entity.substring(1), "UTF-8");
              }

              if ("-".equals(estimator)) {
                //
                // Clear estimator, we also push an event with a GTS_DISTINCT set to 0 for the producer/app
                // We also remove the metric
                //

                if (isProducer) {
                  synchronized (producerHLLPEstimators) {
                    producerHLLPEstimators.remove(entity);
                    Map<String, String> labels = new HashMap<String, String>();
                    labels.put(SensisionConstants.SENSISION_LABEL_PRODUCER, entity);
                    Sensision.clear(SensisionConstants.SENSISION_CLASS_CONTINUUM_GTS_DISTINCT, labels);
                    Sensision.event(SensisionConstants.SENSISION_CLASS_CONTINUUM_GTS_DISTINCT, labels, 0);
                  }
                } else {
                  synchronized (applicationHLLPEstimators) {
                    applicationHLLPEstimators.remove(entity);
                    Map<String, String> labels = new HashMap<String, String>();
                    labels.put(SensisionConstants.SENSISION_LABEL_APPLICATION, entity);
                    Sensision.clear(SensisionConstants.SENSISION_CLASS_CONTINUUM_GTS_DISTINCT, labels);
                    Sensision.event(SensisionConstants.SENSISION_CLASS_CONTINUUM_GTS_DISTINCT, labels, 0);
                  }
                }
              } else if (!"".equals(estimator)) {
                byte[] ser = OrderPreservingBase64.decode(estimator.getBytes(Charsets.US_ASCII));
                HyperLogLogPlus hllp = HyperLogLogPlus.fromBytes(ser);

                // Force mode to 'NORMAL', SPARSE is too slow as it calls merge repeatedly
                hllp.toNormal();

                //
                // Ignore estimator if it has expired
                //

                if (hllp.hasExpired()) {
                  hllp = new HyperLogLogPlus(hllp.getP(), hllp.getPPrime());
                  hllp.toNormal();
                  hllp.setInitTime(0);
                }

                // Retrieve current estimator
                if (isProducer) {
                  isProducer = true;
                  HyperLogLogPlus old = producerHLLPEstimators.get(entity);

                  // Merge estimators and replace with the result, keeping the most recent estimator as the base
                  if (null == old || hllp.getInitTime() > old.getInitTime()) {
                    if (null != old) {
                      hllp.fuse(old);
                    }
                    synchronized (producerHLLPEstimators) {
                      producerHLLPEstimators.put(entity, hllp);
                    }
                  } else {
                    old.fuse(hllp);
                  }
                } else {
                  HyperLogLogPlus old = applicationHLLPEstimators.get(entity);

                  // Merge estimators and replace with the result, keeping the most recent estimator as the base
                  if (null == old || hllp.getInitTime() > old.getInitTime()) {
                    if (null != old) {
                      hllp.fuse(old);
                    }
                    synchronized (applicationHLLPEstimators) {
                      applicationHLLPEstimators.put(entity, hllp);
                    }
                  } else {
                    old.fuse(hllp);
                  }
                }
              }

              if (!"".equals(mads)) {
                long limit = Long.parseLong(mads);
                // Adjust limit so we account for the error of the estimator
                limit = (long) Math.ceil(limit * toleranceRatio);
                Map<String, String> labels = new HashMap<String, String>();
                if (isProducer) {
                  producerMADSLimits.put(entity, limit);
                  labels.put(SensisionConstants.SENSISION_LABEL_PRODUCER, entity);
                  Sensision.event(SensisionConstants.SENSISION_CLASS_CONTINUUM_THROTTLING_GTS_LIMIT, labels, limit);
                } else {
                  applicationMADSLimits.put(entity, limit);
                  labels.put(SensisionConstants.SENSISION_LABEL_APPLICATION, entity);
                  Sensision.event(SensisionConstants.SENSISION_CLASS_CONTINUUM_THROTTLING_GTS_LIMIT_PER_APP, labels, limit);
                }
              }

              if (!"".equals(rate)) {
                Map<String, String> labels = new HashMap<String, String>();
                double rlimit = Double.parseDouble(rate);
                if (isProducer) {
                  producerRateLimiters.put(entity, RateLimiter.create(Math.max(MINIMUM_RATE_LIMIT, rlimit)));
                  labels.put(SensisionConstants.SENSISION_LABEL_PRODUCER, entity);
                  Sensision.event(SensisionConstants.SENSISION_CLASS_CONTINUUM_THROTTLING_RATE_LIMIT, labels, rlimit);
                } else {
                  applicationRateLimiters.put(entity, RateLimiter.create(Math.max(MINIMUM_RATE_LIMIT, rlimit)));
                  labels.put(SensisionConstants.SENSISION_LABEL_APPLICATION, entity);
                  Sensision.event(SensisionConstants.SENSISION_CLASS_CONTINUUM_THROTTLING_RATE_LIMIT_PER_APP, labels, rlimit);
                }
              } else {
                if (isProducer) {
                  producerRateLimiters.remove(entity);
                } else {
                  applicationRateLimiters.remove(entity);
                }
              }
            }

            br.close();

            newreads.add(file);
          } catch (Exception e) {
            e.printStackTrace();
          }
        }

        loaded = true;

        //
        // Replace the list of read files
        //

        read = newreads;

        //
        // Store events with the current versions of all estimators.
        //

        TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());

        if (System.currentTimeMillis() - now > rampup) {
          List<String> keys = new ArrayList<String>();
          keys.addAll(producerHLLPEstimators.keySet());

          for (String key : keys) {
            HyperLogLogPlus hllp = producerHLLPEstimators.get(key);

            if (null == hllp) {
              continue;
            }

            long initTime = hllp.getInitTime();

            try {
              byte[] bytes = hllp.toBytes();
              String encoded = new String(OrderPreservingBase64.encode(bytes), Charsets.US_ASCII);
              Map<String, String> labels = new HashMap<String, String>();
              labels.put(SensisionConstants.SENSISION_LABEL_PRODUCER, key);
              Sensision.set(SensisionConstants.SENSISION_CLASS_CONTINUUM_GTS_DISTINCT, labels, hllp.cardinality());
              Sensision.event(0L, null, null, null, SensisionConstants.SENSISION_CLASS_CONTINUUM_GTS_ESTIMATOR, labels, encoded);
            } catch (IOException ioe) {
              // Ignore exception
            }
          }

          keys.clear();
          keys.addAll(applicationHLLPEstimators.keySet());

          for (String key : keys) {
            HyperLogLogPlus hllp = applicationHLLPEstimators.get(key);

            if (null == hllp) {
              continue;
            }

            long initTime = hllp.getInitTime();

            try {
              byte[] bytes = hllp.toBytes();
              String encoded = new String(OrderPreservingBase64.encode(bytes), Charsets.US_ASCII);
              Map<String, String> labels = new HashMap<String, String>();
              labels.put(SensisionConstants.SENSISION_LABEL_APPLICATION, key);
              Sensision.set(SensisionConstants.SENSISION_CLASS_CONTINUUM_GTS_DISTINCT_PER_APP, labels, hllp.cardinality());
              Sensision.event(0L, null, null, null, SensisionConstants.SENSISION_CLASS_CONTINUUM_GTS_ESTIMATOR_PER_APP, labels, encoded);
            } catch (IOException ioe) {
              // Ignore exception
            }
          }
        }

        try {
          Thread.sleep(delay);
        } catch (InterruptedException ie) {
        }
      }
    }
  };

  t.setName("[ThrottlingManager]");
  t.setDaemon(true);

  if (null != dir) {
    t.start();
  }

  initialized.set(true);
}
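
In the example above, the calls of interest are the two RateLimiter.create(Math.max(MINIMUM_RATE_LIMIT, rlimit)) lines: for each producer or application read from a throttle file, a limiter is built from the configured rate (clamped to a floor) and stored in a map keyed by entity, presumably so other code can later acquire a permit from the matching limiter per data point. A condensed, hypothetical sketch of that pattern follows; the class name, the single map, and the MINIMUM_RATE_LIMIT value are assumptions made for illustration, not the full ThrottlingManager:

import com.google.common.util.concurrent.RateLimiter;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class PerEntityThrottle {
  // Assumed floor so a very small configured rate still yields a valid limiter; illustrative value only.
  private static final double MINIMUM_RATE_LIMIT = 1.0 / 3600.0;

  private final Map<String, RateLimiter> rateLimiters = new ConcurrentHashMap<String, RateLimiter>();

  // Called when a throttle file defines or updates the rate for an entity (producer or application).
  public void setRate(String entity, double rate) {
    rateLimiters.put(entity, RateLimiter.create(Math.max(MINIMUM_RATE_LIMIT, rate)));
  }

  // Called on the write path; blocks until the entity's limiter grants a permit, if a limiter exists.
  public void throttle(String entity) {
    RateLimiter limiter = rateLimiters.get(entity);
    if (null != limiter) {
      limiter.acquire();
    }
  }
}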