List of usage examples for java.util.concurrent TimeUnit.MICROSECONDS

TimeUnit.MICROSECONDS represents one microsecond, i.e. one thousandth of a millisecond. In the examples below it appears in two roles: as the JMH @OutputTimeUnit for benchmark methods that complete in well under a millisecond, and as the unit argument to time-based concurrency APIs such as ThreadPoolExecutor.
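Before the project examples, here is a minimal self-contained sketch (not taken from any of the source files below) of what TimeUnit.MICROSECONDS itself provides: conversions between time units and a timed sleep expressed in microseconds.

import java.util.concurrent.TimeUnit;

public class MicrosExample {
    public static void main(String[] args) throws InterruptedException {
        // 1 millisecond is 1,000 microseconds.
        System.out.println(TimeUnit.MILLISECONDS.toMicros(1));                  // 1000
        // Conversions truncate toward zero: 2,500,000 us -> 2 s, not 2.5 s.
        System.out.println(TimeUnit.MICROSECONDS.toSeconds(2_500_000));         // 2
        // Generic conversion: how many microseconds are in 3 seconds?
        System.out.println(TimeUnit.MICROSECONDS.convert(3, TimeUnit.SECONDS)); // 3000000
        // Sleep for 500 microseconds (subject to the platform's timer resolution).
        TimeUnit.MICROSECONDS.sleep(500);
    }
}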
From source file: io.druid.benchmark.FilteredAggregatorBenchmark.java
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndex(Blackhole blackhole) throws Exception {
    final QueryRunner<Result<TimeseriesResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory,
            "qIndex", new QueryableIndexSegment("qIndex", qIndex));

    List<Result<TimeseriesResultValue>> results = FilteredAggregatorBenchmark.runQuery(factory, runner, query);
    for (Result<TimeseriesResultValue> result : results) {
        blackhole.consume(result);
    }
}
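All of the JMH benchmarks on this page share one pattern: @BenchmarkMode(Mode.AverageTime) measures average time per call, and @OutputTimeUnit(TimeUnit.MICROSECONDS) makes JMH report that average in microseconds per operation, which stays readable for methods far faster than a millisecond. A stripped-down sketch of the pattern follows; the class, field, and method names are illustrative, not from the Druid sources.

import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;

@State(Scope.Benchmark)
public class MicrosBenchmark {
    private long[] data = new long[1024];

    @Benchmark
    @BenchmarkMode(Mode.AverageTime)
    @OutputTimeUnit(TimeUnit.MICROSECONDS) // scores reported as us/op
    public long sumArray() {
        long sum = 0;
        for (long v : data) {
            sum += v;
        }
        // Returning the result (or consuming it via a Blackhole, as the real
        // benchmarks do) keeps JMH from dead-code-eliminating the loop.
        return sum;
    }
}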
From source file: io.druid.benchmark.FilterPartitionBenchmark.java
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void timeFilterAll(Blackhole blackhole) throws Exception {
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = makeCursors(sa, timeFilterAll);

    Sequence<List<Long>> longListSeq = readCursorsLong(cursors, blackhole);
    List<Long> longs = Sequences.toList(Sequences.limit(longListSeq, 1), Lists.<List<Long>>newArrayList())
            .get(0);
    for (Long st : longs) {
        blackhole.consume(st);
    }
}
From source file: org.apache.druid.benchmark.query.SelectBenchmark.java
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryIncrementalIndex(Blackhole blackhole) {
    SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold));

    String segmentId = "incIndex";
    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(factory, segmentId,
            new IncrementalIndexSegment(incIndexes.get(0), segmentId));

    boolean done = false;
    while (!done) {
        List<Result<SelectResultValue>> results = SelectBenchmark.runQuery(factory, runner, queryCopy);
        SelectResultValue result = results.get(0).getValue();
        if (result.getEvents().size() == 0) {
            done = true;
        } else {
            for (EventHolder eh : result.getEvents()) {
                blackhole.consume(eh);
            }
            queryCopy = incrementQueryPagination(queryCopy, result);
        }
    }
}
From source file: org.apache.druid.benchmark.FilterPartitionBenchmark.java
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readWithPostFilter(Blackhole blackhole) {
    Filter filter = new NoBitmapSelectorFilter("dimSequential", "199");
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = makeCursors(sa, filter);
    readCursors(cursors, blackhole);
}
From source file: com.linkedin.pinot.perf.BenchmarkOfflineIndexReader.java
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public int floatDictionary() {
    int length = _floatDictionary.length();
    int ret = 0;
    for (int i = 0; i < NUM_ROUNDS; i++) {
        float value = _floatDictionary.getFloatValue(RANDOM.nextInt(length));
        ret += _floatDictionary.indexOf(value);
    }
    return ret;
}
From source file: eu.edisonproject.training.wsd.Wikidata.java
private Map<CharSequence, List<CharSequence>> getbroaderIDS(Set<Term> terms)
        throws MalformedURLException, InterruptedException, ExecutionException {
    Map<CharSequence, List<CharSequence>> map = new HashMap<>();
    if (terms.size() > 0) {
        int maxT = 2;
        BlockingQueue<Runnable> workQueue = new ArrayBlockingQueue<>(maxT);
        // Keep-alive of 500 microseconds: idle threads above the core size are reclaimed almost immediately.
        ExecutorService pool = new ThreadPoolExecutor(maxT, maxT, 500L, TimeUnit.MICROSECONDS, workQueue);
        // ExecutorService pool = new ThreadPoolExecutor(maxT, maxT, 5000L, TimeUnit.MILLISECONDS,
        //         new ArrayBlockingQueue<>(maxT, true), new ThreadPoolExecutor.CallerRunsPolicy());
        Set<Future<Map<CharSequence, List<CharSequence>>>> set1 = new HashSet<>();
        String prop = "P31";
        for (Term t : terms) {
            URL url = new URL(PAGE + "?action=wbgetclaims&format=json&props=&property=" + prop
                    + "&entity=" + t.getUid());
            Logger.getLogger(Wikidata.class.getName()).log(Level.FINE, url.toString());
            WikiRequestor req = new WikiRequestor(url, t.getUid().toString(), 1);
            Future<Map<CharSequence, List<CharSequence>>> future = pool.submit(req);
            set1.add(future);
        }
        pool.shutdown();

        for (Future<Map<CharSequence, List<CharSequence>>> future : set1) {
            // Poll until the task completes; Thread.sleep is static, so it is
            // called directly rather than via Thread.currentThread().
            while (!future.isDone()) {
                Thread.sleep(10);
            }
            Map<CharSequence, List<CharSequence>> c = future.get();
            if (c != null) {
                map.putAll(c);
            }
        }
    }
    return map;
}
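Unlike the benchmarks, this example uses TimeUnit.MICROSECONDS as the keep-alive unit of a ThreadPoolExecutor, so an idle non-core thread would be reclaimed after 500 microseconds. One caveat: when corePoolSize equals maximumPoolSize, as here, the keep-alive has no effect unless core-thread time-out is enabled. A minimal sketch of that knob, with illustrative values:

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class KeepAliveExample {
    public static void main(String[] args) {
        ThreadPoolExecutor pool = new ThreadPoolExecutor(
                2, 2,                          // core == max: a fixed-size pool
                500L, TimeUnit.MICROSECONDS,   // how long an idle thread may linger
                new ArrayBlockingQueue<>(2));
        // Without this call, core threads ignore the keep-alive and stay alive while idle.
        pool.allowCoreThreadTimeOut(true);

        pool.execute(() -> System.out.println("ran on " + Thread.currentThread().getName()));
        pool.shutdown();
    }
}

The commented-out variant in the original, which passes 5000L with TimeUnit.MILLISECONDS, hints that the 500-microsecond keep-alive may be a leftover from experimenting with units rather than a deliberate choice.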
From source file: org.apache.druid.benchmark.FilterPartitionBenchmark.java
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readWithExFnPreFilter(Blackhole blackhole) {
    Filter filter = new SelectorDimFilter("dimSequential", "super-199", JS_EXTRACTION_FN).toFilter();
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = makeCursors(sa, filter);
    readCursors(cursors, blackhole);
}
From source file: com.linkedin.pinot.perf.BenchmarkOfflineIndexReader.java
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public int doubleDictionary() {
    int length = _doubleDictionary.length();
    int ret = 0;
    for (int i = 0; i < NUM_ROUNDS; i++) {
        double value = _doubleDictionary.getDoubleValue(RANDOM.nextInt(length));
        ret += _doubleDictionary.indexOf(value);
    }
    return ret;
}
From source file: io.druid.benchmark.FilterPartitionBenchmark.java
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readWithPreFilter(Blackhole blackhole) throws Exception {
    Filter filter = new SelectorFilter("dimSequential", "199");
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = makeCursors(sa, filter);

    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
    List<String> strings = Sequences.toList(Sequences.limit(stringListSeq, 1),
            Lists.<List<String>>newArrayList()).get(0);
    for (String st : strings) {
        blackhole.consume(st);
    }
}
From source file: io.druid.benchmark.query.SelectBenchmark.java
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryQueryableIndex(Blackhole blackhole) throws Exception {
    SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold));

    String segmentId = "qIndex";
    QueryRunner<Result<SelectResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, segmentId,
            new QueryableIndexSegment(segmentId, qIndexes.get(0)));

    boolean done = false;
    while (!done) {
        List<Result<SelectResultValue>> results = SelectBenchmark.runQuery(factory, runner, queryCopy);
        SelectResultValue result = results.get(0).getValue();
        if (result.getEvents().size() == 0) {
            done = true;
        } else {
            for (EventHolder eh : result.getEvents()) {
                blackhole.consume(eh);
            }
            queryCopy = incrementQueryPagination(queryCopy, result);
        }
    }
}