Example usage for java.util.concurrent TimeUnit MICROSECONDS

Introduction

This page collects example usages of java.util.concurrent TimeUnit MICROSECONDS from the source files listed below.

Prototype

TimeUnit MICROSECONDS

Document

Time unit representing one thousandth of a millisecond.
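
In other words, one microsecond equals 1,000 nanoseconds. A minimal sketch, assuming only the JDK, of the conversion and sleep methods the examples below rely on:

import java.util.concurrent.TimeUnit;

public class MicrosecondsDemo {
    public static void main(String[] args) throws InterruptedException {
        // One microsecond is 1,000 nanoseconds and 1/1,000 of a millisecond.
        System.out.println(TimeUnit.MICROSECONDS.toNanos(1));   // 1000
        System.out.println(TimeUnit.MILLISECONDS.toMicros(1));  // 1000

        // Convert 2 seconds into microseconds.
        System.out.println(TimeUnit.MICROSECONDS.convert(2, TimeUnit.SECONDS)); // 2000000

        // Conversions to a coarser unit truncate: 1500 microseconds -> 1 millisecond.
        System.out.println(TimeUnit.MICROSECONDS.toMillis(1500)); // 1

        // Sleep for 500 microseconds (actual resolution depends on the platform timer).
        TimeUnit.MICROSECONDS.sleep(500);
    }
}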

Usage

From source file:org.apache.druid.benchmark.FilterPartitionBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void timeFilterNone(Blackhole blackhole) {
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = makeCursors(sa, timeFilterNone);

    readCursorsLong(cursors, blackhole);
}
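
In this and the other JMH benchmarks below, TimeUnit.MICROSECONDS appears only inside JMH's @OutputTimeUnit annotation, which tells the harness to report scores in microseconds per operation. A self-contained sketch of the same pattern, with a deliberately trivial (hypothetical) benchmark body:

import java.util.concurrent.TimeUnit;

import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;

@State(Scope.Benchmark)
public class MicrosOutputBenchmark {

    @Benchmark
    @BenchmarkMode(Mode.AverageTime)
    @OutputTimeUnit(TimeUnit.MICROSECONDS) // report average time per call in microseconds
    public void sumSmallLoop(Blackhole blackhole) {
        long sum = 0;
        for (int i = 0; i < 1_000; i++) {
            sum += i;
        }
        blackhole.consume(sum); // keep the loop from being optimized away
    }
}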

From source file:io.druid.benchmark.FilterPartitionBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void timeFilterNone(Blackhole blackhole) throws Exception {
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = makeCursors(sa, timeFilterNone);

    Sequence<List<Long>> longListSeq = readCursorsLong(cursors, blackhole);
    List<Long> strings = Sequences.toList(Sequences.limit(longListSeq, 1), Lists.<List<Long>>newArrayList())
            .get(0);
    for (Long st : strings) {
        blackhole.consume(st);
    }
}

From source file:com.linkedin.pinot.perf.BenchmarkOfflineIndexReader.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public int fixedBitMultiValueReaderRandom() {
    FixedBitMultiValueReader.Context context = _fixedBitMultiValueReader.createContext();
    int ret = 0;
    for (int i = 0; i < _numDocs; i++) {
        ret += _fixedBitMultiValueReader.getIntArray(RANDOM.nextInt(_numDocs), _buffer, context);
    }
    return ret;
}

From source file:com.doctor.esper.reference_5_2_0.Chapter21Performance.java

/**
 * 21.2.35. Query Planning Index Hints
 * 
 * @see http://www.espertech.com/esper/release-5.2.0/esper-reference/html_single/index.html#perf-tips-25b
 * 
 *      Example: @Hint('index(httpLogWinLength100ForIndex, bust)'); multiple indexes can be listed separated by comma (,).
 * 
 */
@Test
public void test_Query_Planning() {
    HttpLog httpLog = new HttpLog(1, UUID.randomUUID().toString(), "www.baidu.com/tieba", "www.baidu.com",
            "userAgent", LocalDateTime.now());
    esperTemplateBean.sendEvent(httpLog);
    httpLog = new HttpLog(2, UUID.randomUUID().toString(), "www.baidu.com/tieba", "www.baidu.com", "userAgent",
            LocalDateTime.now());
    esperTemplateBean.sendEvent(httpLog);
    httpLog = new HttpLog(3, UUID.randomUUID().toString(), "www.baidu.com/tieba_son", "www.baidu.com",
            "userAgent", LocalDateTime.now());
    esperTemplateBean.sendEvent(httpLog);
    httpLog = new HttpLog(4, UUID.randomUUID().toString(), "www.baidu.com/tieba", "www.baidu.com", "userAgent",
            LocalDateTime.now());
    esperTemplateBean.sendEvent(httpLog);
    httpLog = new HttpLog(6, UUID.randomUUID().toString(), "www.baidu.com/tieba", "www.baidu.com", "userAgent",
            LocalDateTime.now());
    esperTemplateBean.sendEvent(httpLog);
    httpLog = new HttpLog(8, UUID.randomUUID().toString(), "www.baidu.com/tieba_son", "www.baidu.com",
            "userAgent", LocalDateTime.now());
    esperTemplateBean.sendEvent(httpLog);

    Stopwatch stopwatch = Stopwatch.createStarted();
    List<HttpLog> list = httpLogWinLength100Query
            .prepareQueryWithParameters(Chapter21Performance::httpLogMapRow, "www.baidu.com/tieba_son", 2, 10);
    System.out.println(stopwatch.elapsed(TimeUnit.MICROSECONDS));
    stopwatch.stop();
    assertThat(list.size(), equalTo(2));
    System.out.println(list);
}
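
Here TimeUnit.MICROSECONDS is passed to Guava's Stopwatch.elapsed(TimeUnit) to print how long the on-demand query took. A reduced sketch of that timing pattern, with the measured work replaced by a hypothetical doSomeWork() method:

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class StopwatchMicrosDemo {
    public static void main(String[] args) {
        Stopwatch stopwatch = Stopwatch.createStarted();

        doSomeWork(); // hypothetical stand-in for the Esper query

        stopwatch.stop();
        // elapsed(TimeUnit) returns whole units, truncating any remainder.
        System.out.println("query took " + stopwatch.elapsed(TimeUnit.MICROSECONDS) + " microseconds");
    }

    private static void doSomeWork() {
        double acc = 0;
        for (int i = 0; i < 1_000_000; i++) {
            acc += Math.sqrt(i);
        }
        if (acc < 0) {
            throw new AssertionError(); // never happens; prevents dead-code elimination
        }
    }
}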

From source file:org.apache.druid.benchmark.FilteredAggregatorBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void ingest(Blackhole blackhole) throws Exception {
    incIndexFilteredAgg = makeIncIndex(filteredMetrics);
    for (InputRow row : inputRows) {
        int rv = incIndexFilteredAgg.add(row).getRowCount();
        blackhole.consume(rv);
    }
}

From source file:io.druid.benchmark.FilteredAggregatorBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void ingest(Blackhole blackhole) throws Exception {
    incIndexFilteredAgg = makeIncIndex(filteredMetrics);
    for (InputRow row : inputRows) {
        int rv = incIndexFilteredAgg.add(row);
        blackhole.consume(rv);
    }
}

From source file:org.apache.druid.benchmark.FilterPartitionBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void timeFilterHalf(Blackhole blackhole) {
    StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
    Sequence<Cursor> cursors = makeCursors(sa, timeFilterHalf);

    readCursorsLong(cursors, blackhole);
}

From source file:com.discover.cls.processors.cls.GeoEnrichEnriched.java

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final DatabaseReader dbReader = databaseReaderRef.get();
    final String ipAttributeName = context.getProperty(IP_ADDRESS_ATTRIBUTE)
            .evaluateAttributeExpressions(flowFile).getValue();
    final String ipAttributeValue = flowFile.getAttribute(ipAttributeName);
    if (StringUtils.isEmpty(ipAttributeValue)) { //TODO need to add additional validation - should look like an IPv4 or IPv6 addr for instance
        session.transfer(flowFile, REL_NOT_FOUND);
        getLogger().warn("Unable to find ip address for {}", new Object[] { flowFile });
        return;
    }
    InetAddress inetAddress = null;
    CityResponse response = null;

    try {
        inetAddress = InetAddress.getByName(ipAttributeValue);
    } catch (final IOException ioe) {
        session.transfer(flowFile, REL_NOT_FOUND);
        getLogger().warn("Could not resolve {} to ip address for {}",
                new Object[] { ipAttributeValue, flowFile }, ioe);
        return;
    }
    final StopWatch stopWatch = new StopWatch(true);
    try {
        response = dbReader.city(inetAddress);
        stopWatch.stop();
    } catch (final IOException | GeoIp2Exception ex) {
        session.transfer(flowFile, REL_NOT_FOUND);
        getLogger().warn("Failure while trying to find enrichment data for {} due to {}",
                new Object[] { flowFile, ex }, ex);
        return;
    }

    if (response == null) {
        session.transfer(flowFile, REL_NOT_FOUND);
        return;
    }

    final Map<String, String> attrs = new HashMap<>();
    attrs.put(new StringBuilder(ipAttributeName).append(".geo.lookup.micros").toString(),
            String.valueOf(stopWatch.getDuration(TimeUnit.MICROSECONDS)));
    attrs.put(new StringBuilder(ipAttributeName).append(".geo.city").toString(), response.getCity().getName());

    final Double latitude = response.getLocation().getLatitude();
    if (latitude != null) {
        attrs.put(new StringBuilder(ipAttributeName).append(".geo.latitude").toString(), latitude.toString());
    }

    final Double longitude = response.getLocation().getLongitude();
    if (longitude != null) {
        attrs.put(new StringBuilder(ipAttributeName).append(".geo.longitude").toString(), longitude.toString());
    }

    int i = 0;
    for (final Subdivision subd : response.getSubdivisions()) {
        attrs.put(new StringBuilder(ipAttributeName).append(".geo.subdivision.").append(i).toString(),
                subd.getName());
        attrs.put(new StringBuilder(ipAttributeName).append(".geo.subdivision.isocode.").append(i).toString(),
                subd.getIsoCode());
        i++;
    }
    attrs.put(new StringBuilder(ipAttributeName).append(".geo.country").toString(),
            response.getCountry().getName());
    attrs.put(new StringBuilder(ipAttributeName).append(".geo.country.isocode").toString(),
            response.getCountry().getIsoCode());
    attrs.put(new StringBuilder(ipAttributeName).append(".geo.postalcode").toString(),
            response.getPostal().getCode());
    flowFile = session.putAllAttributes(flowFile, attrs);

    session.transfer(flowFile, REL_FOUND);
}
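
The processor stamps the flow file with <attribute>.geo.lookup.micros, taken from NiFi's StopWatch via getDuration(TimeUnit.MICROSECONDS). Without the NiFi utilities on the classpath, the same measurement can be sketched with the JDK alone; lookup() below is a hypothetical stand-in for dbReader.city(inetAddress):

import java.util.concurrent.TimeUnit;

public class LookupTimingDemo {
    public static void main(String[] args) {
        long startNanos = System.nanoTime();

        lookup(); // hypothetical stand-in for the GeoIP lookup

        long elapsedNanos = System.nanoTime() - startNanos;
        // Convert the nanosecond reading to whole microseconds for the attribute value.
        long elapsedMicros = TimeUnit.NANOSECONDS.toMicros(elapsedNanos);
        System.out.println("geo.lookup.micros=" + elapsedMicros);
    }

    private static void lookup() {
        try {
            // Placeholder work standing in for the database read.
            TimeUnit.MICROSECONDS.sleep(250);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}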

From source file:org.apache.druid.benchmark.FilteredAggregatorBenchmark.java

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleIncrementalIndex(Blackhole blackhole) {
    QueryRunner<Result<TimeseriesResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, "incIndex",
            new IncrementalIndexSegment(incIndex, "incIndex"));

    List<Result<TimeseriesResultValue>> results = FilteredAggregatorBenchmark.runQuery(factory, runner, query);
    for (Result<TimeseriesResultValue> result : results) {
        blackhole.consume(result);
    }
}

From source file:com.jbombardier.reports.ReportGenerator.java

private static String formatTime(double valueInNanoseconds, TimeUnit reportTimeUnits) {
    double scaled = valueInNanoseconds;

    if (Double.isNaN(scaled)) {
        return "-";
    } else {
        if (reportTimeUnits == TimeUnit.MICROSECONDS) {
            scaled *= 1e-3;
        } else if (reportTimeUnits == TimeUnit.MILLISECONDS) {
            scaled *= 1e-6;
        } else if (reportTimeUnits == TimeUnit.SECONDS) {
            scaled *= 1e-9;
        }

        return numberFormat.format(scaled);
    }
}
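
formatTime scales the nanosecond value by hand (1e-3 for microseconds, 1e-6 for milliseconds, 1e-9 for seconds) instead of using TimeUnit, which keeps fractional precision; TimeUnit conversions truncate to whole units. A small sketch contrasting the two, assuming only the JDK:

import java.util.concurrent.TimeUnit;

public class NanosToMicrosDemo {
    public static void main(String[] args) {
        long nanos = 1500;

        // TimeUnit truncates: 1500 nanoseconds become 1 microsecond.
        long wholeMicros = TimeUnit.MICROSECONDS.convert(nanos, TimeUnit.NANOSECONDS);
        System.out.println(wholeMicros);      // 1

        // Manual scaling, as in formatTime above, keeps the fraction.
        double fractionalMicros = nanos * 1e-3;
        System.out.println(fractionalMicros); // 1.5
    }
}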