List of usage examples for java.util.concurrent TimeUnit MICROSECONDS
TimeUnit MICROSECONDS
To view the source code for java.util.concurrent TimeUnit MICROSECONDS, click the Source Link below.
From source file:org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest.java
private void validate_g_V_repeat_both_modern_profile(TraversalMetrics traversalMetrics, boolean withRepeatUnrollStrategy) { traversalMetrics.toString(); // ensure no exceptions are thrown Metrics metrics = traversalMetrics.getMetrics(0); assertEquals(6, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue()); assertEquals(6, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue()); metrics = traversalMetrics.getMetrics(1); assertEquals(withRepeatUnrollStrategy ? 12 : 72, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue()); assertNotEquals(0, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue()); if (!withRepeatUnrollStrategy) assertTrue("Count should be greater than traversers.", metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID) > metrics .getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue()); assertTrue("Percent duration should be positive.", (Double) metrics.getAnnotation(TraversalMetrics.PERCENT_DURATION_KEY) >= 0); assertTrue("Times should be positive.", metrics.getDuration(TimeUnit.MICROSECONDS) >= 0); // Test the nested global metrics of the repeat step if (!withRepeatUnrollStrategy) { final Metrics vertexStepNestedInRepeat = (Metrics) metrics.getNested().toArray()[0]; assertEquals(114, vertexStepNestedInRepeat.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue()); assertNotEquals(0, vertexStepNestedInRepeat.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue()); assertTrue("Count should be greater than traversers.", vertexStepNestedInRepeat.getCount(TraversalMetrics.ELEMENT_COUNT_ID) > vertexStepNestedInRepeat .getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue()); assertTrue("Times should be positive.", vertexStepNestedInRepeat.getDuration(TimeUnit.MICROSECONDS) >= 0); }//from w w w . j a va 2s. 
c om double totalPercentDuration = 0; for (Metrics m : traversalMetrics.getMetrics()) { totalPercentDuration += (Double) m.getAnnotation(TraversalMetrics.PERCENT_DURATION_KEY); } assertEquals(100, totalPercentDuration, 0.000001); }
From source file:io.druid.benchmark.query.SearchBenchmark.java
@Benchmark @BenchmarkMode(Mode.AverageTime)//from ww w . j av a 2 s . c o m @OutputTimeUnit(TimeUnit.MICROSECONDS) public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception { QueryRunner<SearchHit> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "incIndex", new IncrementalIndexSegment(incIndexes.get(0), "incIndex")); List<Result<SearchResultValue>> results = SearchBenchmark.runQuery(factory, runner, query); List<SearchHit> hits = results.get(0).getValue().getValue(); for (SearchHit hit : hits) { blackhole.consume(hit); } }
From source file:org.apache.druid.benchmark.FilterPartitionBenchmark.java
@Benchmark @BenchmarkMode(Mode.AverageTime)//from w w w .j av a 2 s . c o m @OutputTimeUnit(TimeUnit.MICROSECONDS) public void readComplexOrFilterCNF(Blackhole blackhole) { DimFilter dimFilter1 = new OrDimFilter(Arrays.asList(new SelectorDimFilter("dimSequential", "199", null), new AndDimFilter( Arrays.asList(new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Corundum", null), new SelectorDimFilter("dimMultivalEnumerated", "Bar", null))))); DimFilter dimFilter2 = new OrDimFilter(Arrays.asList(new SelectorDimFilter("dimSequential", "299", null), new SelectorDimFilter("dimSequential", "399", null), new AndDimFilter( Arrays.asList(new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Xylophone", null), new SelectorDimFilter("dimMultivalEnumerated", "Foo", null))))); DimFilter dimFilter3 = new OrDimFilter(Arrays.asList(dimFilter1, dimFilter2, new AndDimFilter( Arrays.asList(new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Orange", null), new SelectorDimFilter("dimMultivalEnumerated", "World", null))))); StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex); Sequence<Cursor> cursors = makeCursors(sa, Filters.convertToCNF(dimFilter3.toFilter())); readCursors(cursors, blackhole); }
From source file:io.druid.benchmark.query.GroupByBenchmark.java
@Benchmark @BenchmarkMode(Mode.AverageTime)//from w w w . ja v a 2 s . c o m @OutputTimeUnit(TimeUnit.MICROSECONDS) public void querySingleQueryableIndex(Blackhole blackhole) throws Exception { QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "qIndex", new QueryableIndexSegment("qIndex", queryableIndexes.get(0))); List<Row> results = GroupByBenchmark.runQuery(factory, runner, query); for (Row result : results) { blackhole.consume(result); } }
From source file:org.apache.druid.benchmark.query.timecompare.TimeCompareBenchmark.java
@Benchmark @BenchmarkMode(Mode.AverageTime)// ww w. j a v a 2 s . c o m @OutputTimeUnit(TimeUnit.MICROSECONDS) public void queryMultiQueryableIndexTopN(Blackhole blackhole) { Sequence<Result<TopNResultValue>> queryResult = topNRunner.run(QueryPlus.wrap(topNQuery), new HashMap<>()); List<Result<TopNResultValue>> results = queryResult.toList(); for (Result<TopNResultValue> result : results) { blackhole.consume(result); } }
From source file:org.apache.druid.benchmark.query.SearchBenchmark.java
@Benchmark @BenchmarkMode(Mode.AverageTime)//from w w w. j a v a 2 s . co m @OutputTimeUnit(TimeUnit.MICROSECONDS) public void querySingleIncrementalIndex(Blackhole blackhole) { QueryRunner<SearchHit> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "incIndex", new IncrementalIndexSegment(incIndexes.get(0), "incIndex")); List<Result<SearchResultValue>> results = SearchBenchmark.runQuery(factory, runner, query); List<SearchHit> hits = results.get(0).getValue().getValue(); for (SearchHit hit : hits) { blackhole.consume(hit); } }
From source file:org.apache.druid.benchmark.query.timecompare.TimeCompareBenchmark.java
@Benchmark @BenchmarkMode(Mode.AverageTime)//ww w .j a v a2s . c om @OutputTimeUnit(TimeUnit.MICROSECONDS) public void queryMultiQueryableIndexTimeseries(Blackhole blackhole) { Sequence<Result<TimeseriesResultValue>> queryResult = timeseriesRunner.run(QueryPlus.wrap(timeseriesQuery), new HashMap<>()); List<Result<TimeseriesResultValue>> results = queryResult.toList(); for (Result<TimeseriesResultValue> result : results) { blackhole.consume(result); } }
From source file:io.druid.benchmark.query.SearchBenchmark.java
@Benchmark @BenchmarkMode(Mode.AverageTime)//from w w w . j a v a 2s. c o m @OutputTimeUnit(TimeUnit.MICROSECONDS) public void querySingleQueryableIndex(Blackhole blackhole) throws Exception { final QueryRunner<Result<SearchResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "qIndex", new QueryableIndexSegment("qIndex", qIndexes.get(0))); List<Result<SearchResultValue>> results = SearchBenchmark.runQuery(factory, runner, query); List<SearchHit> hits = results.get(0).getValue().getValue(); for (SearchHit hit : hits) { blackhole.consume(hit); } }
From source file:io.druid.benchmark.query.GroupByBenchmark.java
@Benchmark @BenchmarkMode(Mode.AverageTime)//from ww w.j av a 2 s .c o m @OutputTimeUnit(TimeUnit.MICROSECONDS) public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception { List<QueryRunner<Row>> singleSegmentRunners = Lists.newArrayList(); QueryToolChest toolChest = factory.getToolchest(); for (int i = 0; i < numSegments; i++) { String segmentName = "qIndex" + i; QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(factory, segmentName, new QueryableIndexSegment(segmentName, queryableIndexes.get(i))); singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner)); } QueryRunner theRunner = toolChest.postMergeQueryDecoration(new FinalizeResultsQueryRunner<>( toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)), toolChest)); Sequence<Row> queryResult = theRunner.run(query, Maps.<String, Object>newHashMap()); List<Row> results = Sequences.toList(queryResult, Lists.<Row>newArrayList()); for (Row result : results) { blackhole.consume(result); } }
From source file:io.druid.benchmark.FilterPartitionBenchmark.java
/**
 * JMH benchmark: builds a nested OR-of-AND dimension filter (without CNF
 * conversion), reads cursors through it, and consumes the first batch of
 * string values from the resulting sequence.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readComplexOrFilter(Blackhole blackhole) throws Exception {
    final DimFilter firstBranch = new OrDimFilter(Arrays.<DimFilter>asList(
            new SelectorDimFilter("dimSequential", "199", null),
            new AndDimFilter(Arrays.<DimFilter>asList(
                    new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Corundum", null),
                    new SelectorDimFilter("dimMultivalEnumerated", "Bar", null)))));
    final DimFilter secondBranch = new OrDimFilter(Arrays.<DimFilter>asList(
            new SelectorDimFilter("dimSequential", "299", null),
            new SelectorDimFilter("dimSequential", "399", null),
            new AndDimFilter(Arrays.<DimFilter>asList(
                    new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Xylophone", null),
                    new SelectorDimFilter("dimMultivalEnumerated", "Foo", null)))));
    final DimFilter fullFilter = new OrDimFilter(Arrays.<DimFilter>asList(firstBranch, secondBranch,
            new AndDimFilter(Arrays.<DimFilter>asList(
                    new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Orange", null),
                    new SelectorDimFilter("dimMultivalEnumerated", "World", null)))));

    final StorageAdapter adapter = new QueryableIndexStorageAdapter(qIndex);
    final Sequence<Cursor> cursors = makeCursors(adapter, fullFilter.toFilter());
    final Sequence<List<String>> stringLists = readCursors(cursors, blackhole);

    // Materialize only the first list of strings and consume each value.
    final List<String> firstList = Sequences
            .toList(Sequences.limit(stringLists, 1), Lists.<List<String>>newArrayList()).get(0);
    for (String value : firstList) {
        blackhole.consume(value);
    }
}