Example usage for java.util.concurrent TimeUnit MICROSECONDS

Introduction

This page collects example usages of java.util.concurrent TimeUnit.MICROSECONDS, drawn from open-source projects.

Prototype

TimeUnit MICROSECONDS

Documentation

Time unit representing one thousandth of a millisecond.
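
Before the project snippets, here is a minimal, self-contained sketch of the conversions this constant supports. All calls are standard TimeUnit API; the class name is made up for the example:

import java.util.concurrent.TimeUnit;

public class MicrosecondsDemo {
    public static void main(String[] args) throws InterruptedException {
        long micros = 1_500_000L; // 1.5 seconds expressed in microseconds

        System.out.println(TimeUnit.MICROSECONDS.toMillis(micros));  // 1500
        System.out.println(TimeUnit.MICROSECONDS.toNanos(micros));   // 1500000000
        System.out.println(TimeUnit.MICROSECONDS.toSeconds(micros)); // 1 (conversion truncates)

        // Converting into microseconds from another unit: 2 ms -> 2000 us
        System.out.println(TimeUnit.MICROSECONDS.convert(2, TimeUnit.MILLISECONDS));

        // Sleeping at microsecond granularity (actual resolution is platform-dependent)
        TimeUnit.MICROSECONDS.sleep(500);
    }
}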

Usage

From source file: org.apache.druid.benchmark.GroupByTypeInterfaceBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndexStringOnly(Blackhole blackhole) {
    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "qIndex",
            new QueryableIndexSegment("qIndex", queryableIndexes.get(0)));

    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, stringQuery);

    for (Row result : results) {
        blackhole.consume(result);
    }
}
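
This and the other JMH benchmarks below use TimeUnit.MICROSECONDS only in the @OutputTimeUnit annotation, which tells JMH to report scores in microseconds per operation rather than its default unit. A stripped-down sketch of the same pattern, assuming the JMH annotations are on the classpath (the class, method, and workload are hypothetical):

import java.util.concurrent.TimeUnit;

import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.infra.Blackhole;

public class MicrosOutputBenchmark {
    @Benchmark
    @BenchmarkMode(Mode.AverageTime)       // measure mean time per call
    @OutputTimeUnit(TimeUnit.MICROSECONDS) // report scores as us/op
    public void sumSmallRange(Blackhole blackhole) {
        long sum = 0;
        for (int i = 0; i < 1_000; i++) {
            sum += i;
        }
        blackhole.consume(sum); // keep the loop from being optimized away
    }
}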

From source file: org.apache.druid.benchmark.query.SearchBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndex(Blackhole blackhole) {
    final QueryRunner<Result<SearchResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "qIndex",
            new QueryableIndexSegment("qIndex", qIndexes.get(0)));

    List<Result<SearchResultValue>> results = SearchBenchmark.runQuery(factory, runner, query);
    List<SearchHit> hits = results.get(0).getValue().getValue();
    for (SearchHit hit : hits) {
        blackhole.consume(hit);
    }
}

From source file: com.vmware.photon.controller.model.adapters.azure.stats.AzureStatsService.java

private void getMetrics(AzureStatsDataHolder statsData)
        throws InvalidKeyException, URISyntaxException, StorageException {
    String storageAccountName = statsData.bootDisk.customProperties
            .get(AzureConstants.AZURE_STORAGE_ACCOUNT_NAME);
    String storageKey = statsData.bootDiskAuth.customProperties.get(AzureConstants.AZURE_STORAGE_ACCOUNT_KEY1);
    String storageConnectionString = String.format(STORAGE_CONNECTION_STRING, storageAccountName, storageKey);
    for (String metricName : METRIC_NAMES) {
        AzureMetricRequest request = new AzureMetricRequest();
        request.setStorageConnectionString(storageConnectionString);
        request.setTableName(statsData.tableName);
        request.setPartitionValue(statsData.partitionValue);
        long endTimeMicros = Utils.getNowMicrosUtc();
        Date timeStamp = new Date(TimeUnit.MICROSECONDS.toMillis(endTimeMicros)
                - TimeUnit.MINUTES.toMillis(AzureConstants.METRIC_COLLECTION_PERIOD));
        request.setTimestamp(timeStamp);
        request.setMetricName(metricName);
        AzureMetricsHandler handler = new AzureMetricsHandler(this, statsData);
        getMetricStatisticsAsync(request, handler);
    }
}
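
The MICROSECONDS usage here is the toMillis call, which converts the microsecond-resolution clock value from Utils.getNowMicrosUtc into the millisecond epoch value that java.util.Date expects. A reduced sketch of that conversion (the helper name is hypothetical):

import java.util.Date;
import java.util.concurrent.TimeUnit;

public class MicrosToDate {
    // java.util.Date only has millisecond resolution, so sub-millisecond
    // precision is deliberately dropped here.
    static Date fromEpochMicros(long epochMicros) {
        return new Date(TimeUnit.MICROSECONDS.toMillis(epochMicros));
    }

    public static void main(String[] args) {
        long nowMicros = TimeUnit.MILLISECONDS.toMicros(System.currentTimeMillis());
        System.out.println(fromEpochMicros(nowMicros));
    }
}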

From source file: io.druid.benchmark.GroupByTypeInterfaceBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndexStringOnly(Blackhole blackhole) throws Exception {
    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "qIndex",
            new QueryableIndexSegment("qIndex", queryableIndexes.get(0)));

    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, stringQuery);

    for (Row result : results) {
        blackhole.consume(result);
    }
}

From source file: io.druid.benchmark.query.SearchBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception {
    List<QueryRunner<Result<SearchResultValue>>> singleSegmentRunners = Lists.newArrayList();
    QueryToolChest toolChest = factory.getToolchest();
    for (int i = 0; i < numSegments; i++) {
        String segmentName = "qIndex" + i;
        final QueryRunner<Result<SearchResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory,
                segmentName, new QueryableIndexSegment(segmentName, qIndexes.get(i)));
        singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }

    QueryRunner theRunner = toolChest.postMergeQueryDecoration(new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)), toolChest));

    Sequence<Result<SearchResultValue>> queryResult = theRunner.run(QueryPlus.wrap(query),
            Maps.<String, Object>newHashMap());
    List<Result<SearchResultValue>> results = Sequences.toList(queryResult,
            Lists.<Result<SearchResultValue>>newArrayList());

    for (Result<SearchResultValue> result : results) {
        List<SearchHit> hits = result.getValue().getValue();
        for (SearchHit hit : hits) {
            blackhole.consume(hit);
        }
    }
}

From source file: org.apache.druid.benchmark.GroupByTypeInterfaceBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndexLongOnly(Blackhole blackhole) {
    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "qIndex",
            new QueryableIndexSegment("qIndex", queryableIndexes.get(0)));

    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, longQuery);

    for (Row result : results) {
        blackhole.consume(result);
    }
}

From source file: io.druid.benchmark.GroupByTypeInterfaceBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndexLongOnly(Blackhole blackhole) throws Exception {
    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "qIndex",
            new QueryableIndexSegment("qIndex", queryableIndexes.get(0)));

    List<Row> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, longQuery);

    for (Row result : results) {
        blackhole.consume(result);
    }
}

From source file: org.apache.druid.benchmark.query.SearchBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryMultiQueryableIndex(Blackhole blackhole) {
    List<QueryRunner<Result<SearchResultValue>>> singleSegmentRunners = Lists.newArrayList();
    QueryToolChest toolChest = factory.getToolchest();
    for (int i = 0; i < numSegments; i++) {
        String segmentName = "qIndex" + i;
        final QueryRunner<Result<SearchResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory,
                segmentName, new QueryableIndexSegment(segmentName, qIndexes.get(i)));
        singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }

    QueryRunner theRunner = toolChest.postMergeQueryDecoration(new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)), toolChest));

    Sequence<Result<SearchResultValue>> queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
    List<Result<SearchResultValue>> results = queryResult.toList();

    for (Result<SearchResultValue> result : results) {
        List<SearchHit> hits = result.getValue().getValue();
        for (SearchHit hit : hits) {
            blackhole.consume(hit);
        }
    }
}

From source file: org.apache.drill.exec.physical.impl.common.HashPartition.java

public void spillThisPartition() {
    if (tmpBatchesList.size() == 0) {
        return;
    } // in case empty - nothing to spill
    logger.debug("HashJoin: Spilling partition {}, current cycle {}, part size {} batches", partitionNum,
            cycleNum, tmpBatchesList.size());

    // If this is the first spill for this partition, create an output stream
    if (writer == null) {
        final String side = processingOuter ? "outer" : "inner";
        final String suffix = cycleNum > 0 ? side + "_" + Integer.toString(cycleNum) : side;
        spillFile = spillSet.getNextSpillFile(suffix);

        try {
            writer = spillSet.writer(spillFile);
        } catch (IOException ioe) {
            throw UserException.resourceError(ioe).message("Hash Join failed to open spill file: " + spillFile)
                    .build(logger);
        }

        isSpilled = true;
    }

    partitionInMemorySize = 0L;
    numInMemoryRecords = 0L;
    inMemoryBatchStats.clear();

    while (tmpBatchesList.size() > 0) {
        VectorContainer vc = tmpBatchesList.remove(0);

        int numRecords = vc.getRecordCount();

        // set the value count for outgoing batch value vectors
        for (VectorWrapper<?> v : vc) {
            v.getValueVector().getMutator().setValueCount(numRecords);
        }

        WritableBatch wBatch = WritableBatch.getBatchNoHVWrap(numRecords, vc, false);
        try {
            writer.write(wBatch, null);
        } catch (IOException ioe) {
            throw UserException.dataWriteError(ioe)
                    .message("Hash Join failed to write to output file: " + spillFile).build(logger);
        } finally {
            wBatch.clear();
        }
        vc.zeroVectors();
        logger.trace("HASH JOIN: Took {} us to spill {} records", writer.time(TimeUnit.MICROSECONDS),
                numRecords);
    }
}
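
Here MICROSECONDS is the unit handed to the spill writer's timer so the trace log reports elapsed time in microseconds. The same reporting idea with a plain System.nanoTime() stopwatch (a sketch; doWork is a hypothetical stand-in for the spill write):

import java.util.concurrent.TimeUnit;

public class ElapsedMicros {
    public static void main(String[] args) {
        long start = System.nanoTime();
        doWork();
        long elapsedUs = TimeUnit.NANOSECONDS.toMicros(System.nanoTime() - start);
        System.out.println("Took " + elapsedUs + " us");
    }

    static void doWork() {
        // stand-in workload
        long sum = 0;
        for (int i = 0; i < 1_000_000; i++) {
            sum += i;
        }
        if (sum == 42) { // never true here; keeps the loop observable
            System.out.println(sum);
        }
    }
}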

From source file: kieker.tools.traceAnalysis.TraceAnalysisTool.java

private static void addDecorators(final String[] decoratorNames,
        final AbstractDependencyGraphFilter<?> plugin) {
    if (decoratorNames == null) {
        return;
    }
    final List<String> decoratorList = Arrays.asList(decoratorNames);
    final Iterator<String> decoratorIterator = decoratorList.iterator();

    while (decoratorIterator.hasNext()) {
        final String currentDecoratorStr = decoratorIterator.next();
        if (Constants.RESPONSE_TIME_DECORATOR_FLAG_NS.equals(currentDecoratorStr)) {
            plugin.addDecorator(new ResponseTimeNodeDecorator(TimeUnit.NANOSECONDS));
            continue;
        } else if (Constants.RESPONSE_TIME_DECORATOR_FLAG_US.equals(currentDecoratorStr)) {
            plugin.addDecorator(new ResponseTimeNodeDecorator(TimeUnit.MICROSECONDS));
            continue;
        } else if (Constants.RESPONSE_TIME_DECORATOR_FLAG_MS.equals(currentDecoratorStr)) {
            plugin.addDecorator(new ResponseTimeNodeDecorator(TimeUnit.MILLISECONDS));
            continue;
        } else if (Constants.RESPONSE_TIME_DECORATOR_FLAG_S.equals(currentDecoratorStr)) {
            plugin.addDecorator(new ResponseTimeNodeDecorator(TimeUnit.SECONDS));
            continue;
        } else if (Constants.RESPONSE_TIME_COLORING_DECORATOR_FLAG.equals(currentDecoratorStr)) {
            // if decorator is responseColoring, next value should be the threshold
            final String thresholdStringStr = decoratorIterator.next();

            try {
                final int threshold = Integer.parseInt(thresholdStringStr);

                plugin.addDecorator(new ResponseTimeColorNodeDecorator(threshold));
            } catch (final NumberFormatException exc) {
                System.err.println("\nFailed to parse int value of property threshold(ms): "
                        + thresholdStringStr); // NOPMD (System.out)
            }
        } else {
            LOG.warn("Unknown decoration name '" + currentDecoratorStr + "'.");
            return;
        }
    }
}
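
The decorator chain above is essentially a mapping from command-line flags to TimeUnit constants, with MICROSECONDS selected by the _US flag. A compact sketch of that mapping (the flag literals are hypothetical stand-ins for the Constants.RESPONSE_TIME_DECORATOR_FLAG_* values):

import java.util.concurrent.TimeUnit;

public class DecoratorUnits {
    static TimeUnit unitForFlag(String flag) {
        switch (flag) {
        case "responseTimes-ns": return TimeUnit.NANOSECONDS;
        case "responseTimes-us": return TimeUnit.MICROSECONDS;
        case "responseTimes-ms": return TimeUnit.MILLISECONDS;
        case "responseTimes-s":  return TimeUnit.SECONDS;
        default:
            throw new IllegalArgumentException("Unknown decorator flag: " + flag);
        }
    }

    public static void main(String[] args) {
        System.out.println(unitForFlag("responseTimes-us")); // MICROSECONDS
    }
}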