Example usage for java.util.concurrent TimeUnit MICROSECONDS

List of usage examples for java.util.concurrent TimeUnit MICROSECONDS

Introduction

On this page you can find example usage of java.util.concurrent TimeUnit MICROSECONDS.

Prototype

TimeUnit MICROSECONDS

Document

Time unit representing one thousandth of a millisecond.
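
The conversions below are a minimal sketch (not taken from the examples on this page) showing how MICROSECONDS relates to neighboring units:

import java.util.concurrent.TimeUnit;

public class MicrosecondsDemo {
    public static void main(String[] args) {
        // One millisecond is 1,000 microseconds
        System.out.println(TimeUnit.MILLISECONDS.toMicros(1)); // 1000
        // Conversions truncate toward zero: 2,500 us -> 2 ms
        System.out.println(TimeUnit.MICROSECONDS.toMillis(2500)); // 2
        // General conversion between arbitrary units
        System.out.println(TimeUnit.MICROSECONDS.convert(1, TimeUnit.SECONDS)); // 1000000
    }
}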

Usage

From source file: org.apache.druid.benchmark.query.TopNBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleIncrementalIndex(Blackhole blackhole) {
    QueryRunner<Result<TopNResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "incIndex",
            new IncrementalIndexSegment(incIndexes.get(0), "incIndex"));

    List<Result<TopNResultValue>> results = TopNBenchmark.runQuery(factory, runner, query);
    for (Result<TopNResultValue> result : results) {
        blackhole.consume(result);
    }
}

From source file: io.druid.benchmark.query.TopNBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndex(Blackhole blackhole) throws Exception {
    final QueryRunner<Result<TopNResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "qIndex",
            new QueryableIndexSegment("qIndex", qIndexes.get(0)));

    List<Result<TopNResultValue>> results = TopNBenchmark.runQuery(factory, runner, query);
    for (Result<TopNResultValue> result : results) {
        blackhole.consume(result);
    }
}

From source file: org.openhie.openempi.configuration.Configuration.java

private TimeUnit getTimeUnit(org.openhie.openempi.configuration.xml.ScheduledTask.TimeUnit.Enum timeUnit) {
    if (timeUnit == org.openhie.openempi.configuration.xml.ScheduledTask.TimeUnit.DAYS) {
        return TimeUnit.DAYS;
    } else if (timeUnit == org.openhie.openempi.configuration.xml.ScheduledTask.TimeUnit.HOURS) {
        return TimeUnit.HOURS;
    } else if (timeUnit == org.openhie.openempi.configuration.xml.ScheduledTask.TimeUnit.MICROSECONDS) {
        return TimeUnit.MICROSECONDS;
    } else if (timeUnit == org.openhie.openempi.configuration.xml.ScheduledTask.TimeUnit.MILLISECONDS) {
        return TimeUnit.MILLISECONDS;
    } else if (timeUnit == org.openhie.openempi.configuration.xml.ScheduledTask.TimeUnit.MINUTES) {
        return TimeUnit.MINUTES;
    } else if (timeUnit == org.openhie.openempi.configuration.xml.ScheduledTask.TimeUnit.NANOSECONDS) {
        return TimeUnit.NANOSECONDS;
    } else if (timeUnit == org.openhie.openempi.configuration.xml.ScheduledTask.TimeUnit.SECONDS) {
        return TimeUnit.SECONDS;
    }
    log.warn("An unknown time-unit was specified for a scheduled task of " + timeUnit
            + " so will resort to seconds as the default time unit.");
    return TimeUnit.SECONDS;
}
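
If the XML enum's string form matches the TimeUnit constant names (an assumption about the generated ScheduledTask.TimeUnit.Enum, not verified here), the mapping above could collapse into a single valueOf call; a hedged sketch:

private TimeUnit getTimeUnit(org.openhie.openempi.configuration.xml.ScheduledTask.TimeUnit.Enum timeUnit) {
    try {
        // Assumes timeUnit.toString() yields names such as "SECONDS" that
        // match the java.util.concurrent.TimeUnit constants exactly
        return TimeUnit.valueOf(timeUnit.toString());
    } catch (IllegalArgumentException | NullPointerException e) {
        log.warn("An unknown time unit of " + timeUnit + " was specified; defaulting to seconds.");
        return TimeUnit.SECONDS;
    }
}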

From source file: io.druid.benchmark.query.SelectBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception {
    SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold));

    String segmentName;
    List<QueryRunner<Result<SelectResultValue>>> singleSegmentRunners = Lists.newArrayList();
    QueryToolChest toolChest = factory.getToolchest();
    for (int i = 0; i < numSegments; i++) {
        segmentName = "qIndex" + i;
        QueryRunner<Result<SelectResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, segmentName,
                new QueryableIndexSegment(segmentName, qIndexes.get(i)));
        singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }

    QueryRunner theRunner = toolChest.postMergeQueryDecoration(new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)), toolChest));

    // Page through the results until an empty page signals the end
    boolean done = false;
    while (!done) {
        Sequence<Result<SelectResultValue>> queryResult = theRunner.run(QueryPlus.wrap(queryCopy),
                Maps.newHashMap());
        List<Result<SelectResultValue>> results = Sequences.toList(queryResult,
                Lists.<Result<SelectResultValue>>newArrayList());

        SelectResultValue result = results.get(0).getValue();

        if (result.getEvents().size() == 0) {
            done = true;
        } else {
            for (EventHolder eh : result.getEvents()) {
                blackhole.consume(eh);
            }
            queryCopy = incrementQueryPagination(queryCopy, result);
        }
    }
}

From source file: io.druid.benchmark.FilterPartitionBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readWithExFnPreFilter(Blackhole blackhole) throws Exception {
    Filter filter = new SelectorDimFilter("dimSequential", "super-199", JS_EXTRACTION_FN).toFilter();

    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = makeCursors(sa, filter);

    Sequence<List<String>> stringListSeq = readCursors(cursors, blackhole);
    List<String> strings = Sequences
            .toList(Sequences.limit(stringListSeq, 1), Lists.<List<String>>newArrayList()).get(0);
    for (String st : strings) {
        blackhole.consume(st);
    }
}

From source file: org.apache.druid.benchmark.FilterPartitionBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readOrFilter(Blackhole blackhole) {
    Filter filter = new NoBitmapSelectorFilter("dimSequential", "199");
    Filter filter2 = new AndFilter(Arrays.asList(new SelectorFilter("dimMultivalEnumerated2", "Corundum"),
            new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar")));
    Filter orFilter = new OrFilter(Arrays.asList(filter, filter2));

    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = makeCursors(sa, orFilter);
    readCursors(cursors, blackhole);
}

From source file: org.apache.druid.benchmark.query.TopNBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndex(Blackhole blackhole) {
    final QueryRunner<Result<TopNResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "qIndex",
            new QueryableIndexSegment("qIndex", qIndexes.get(0)));

    List<Result<TopNResultValue>> results = TopNBenchmark.runQuery(factory, runner, query);
    for (Result<TopNResultValue> result : results) {
        blackhole.consume(result);
    }
}

From source file: org.apache.druid.benchmark.query.SelectBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryMultiQueryableIndex(Blackhole blackhole) {
    SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold));

    String segmentName;
    List<QueryRunner<Result<SelectResultValue>>> singleSegmentRunners = Lists.newArrayList();
    QueryToolChest toolChest = factory.getToolchest();
    for (int i = 0; i < numSegments; i++) {
        segmentName = "qIndex" + i;
        QueryRunner<Result<SelectResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, segmentName,
                new QueryableIndexSegment(segmentName, qIndexes.get(i)));
        singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }

    QueryRunner theRunner = toolChest.postMergeQueryDecoration(new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)), toolChest));

    // Page through the results until an empty page signals the end
    boolean done = false;
    while (!done) {
        Sequence<Result<SelectResultValue>> queryResult = theRunner.run(QueryPlus.wrap(queryCopy),
                Maps.newHashMap());
        List<Result<SelectResultValue>> results = queryResult.toList();

        SelectResultValue result = results.get(0).getValue();

        if (result.getEvents().size() == 0) {
            done = true;
        } else {
            for (EventHolder eh : result.getEvents()) {
                blackhole.consume(eh);
            }
            queryCopy = incrementQueryPagination(queryCopy, result);
        }
    }
}

From source file: io.druid.benchmark.query.TimeseriesBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception {
    QueryRunner<Result<TimeseriesResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "incIndex",
            new IncrementalIndexSegment(incIndexes.get(0), "incIndex"));

    List<Result<TimeseriesResultValue>> results = TimeseriesBenchmark.runQuery(factory, runner, query);
    for (Result<TimeseriesResultValue> result : results) {
        blackhole.consume(result);
    }
}

From source file: io.druid.benchmark.query.TopNBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception {
    List<QueryRunner<Result<TopNResultValue>>> singleSegmentRunners = Lists.newArrayList();
    QueryToolChest toolChest = factory.getToolchest();
    for (int i = 0; i < numSegments; i++) {
        String segmentName = "qIndex" + i;
        QueryRunner<Result<TopNResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, segmentName,
                new QueryableIndexSegment(segmentName, qIndexes.get(i)));
        singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }

    QueryRunner theRunner = toolChest.postMergeQueryDecoration(new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)), toolChest));

    Sequence<Result<TopNResultValue>> queryResult = theRunner.run(QueryPlus.wrap(query),
            Maps.<String, Object>newHashMap());
    List<Result<TopNResultValue>> results = Sequences.toList(queryResult,
            Lists.<Result<TopNResultValue>>newArrayList());

    for (Result<TopNResultValue> result : results) {
        blackhole.consume(result);
    }
}
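
All of the Druid benchmarks above share one JMH idiom: Mode.AverageTime combined with @OutputTimeUnit(TimeUnit.MICROSECONDS), so JMH reports the average time per invocation in microseconds. A minimal self-contained sketch of that idiom (class, field, and method names here are illustrative, not from Druid):

import java.util.concurrent.TimeUnit;

import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;

@State(Scope.Benchmark)
public class MicrosBenchmark {
    private final long[] data = new long[1024];

    @Benchmark
    @BenchmarkMode(Mode.AverageTime)
    @OutputTimeUnit(TimeUnit.MICROSECONDS) // report average time per call in microseconds
    public void sumArray(Blackhole blackhole) {
        long sum = 0;
        for (long v : data) {
            sum += v;
        }
        // As in the examples above, Blackhole keeps the JIT from eliminating the work
        blackhole.consume(sum);
    }
}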