Example usage for java.util.concurrent TimeUnit MINUTES

List of usage examples for java.util.concurrent TimeUnit MINUTES

Introduction

This page collects example usages of java.util.concurrent TimeUnit MINUTES from open-source projects.

Prototype

TimeUnit MINUTES

Document

Time unit representing sixty seconds.
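
Before the project examples, here is a minimal, self-contained sketch (not drawn from the projects below) of what TimeUnit.MINUTES provides: conversions to and from other units, and a unit argument for timed blocking calls.

import java.util.concurrent.TimeUnit;

public class MinutesDemo {
    public static void main(String[] args) throws InterruptedException {
        // Convert minutes to other units.
        System.out.println(TimeUnit.MINUTES.toSeconds(5)); // 300
        System.out.println(TimeUnit.MINUTES.toMillis(5));  // 300000

        // Convert the other way; results are truncated, not rounded.
        System.out.println(TimeUnit.MINUTES.convert(90, TimeUnit.SECONDS)); // 1

        // MINUTES also serves as the unit argument of timed blocking calls,
        // e.g. latch.await(1, TimeUnit.MINUTES) or executor.awaitTermination(1, TimeUnit.MINUTES).
        TimeUnit.MINUTES.sleep(0); // sleeps for the given number of minutes
    }
}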

Usage

From source file:io.pcp.parfait.benchmark.StandardMetricThroughPutBenchmark.java

private List<CounterIncrementer> executeBenchmark(int numThreads, int iterations,
        List<MonitoredCounter> counters) throws InterruptedException {
    List<CounterIncrementer> counterIncrementers = newArrayList();
    for (int i = 0; i < numThreads; i++) {
        CounterIncrementer counterIncrementer = new CounterIncrementer(counters, iterations);
        counterIncrementers.add(counterIncrementer);
        executorService.execute(counterIncrementer);
    }

    executorService.shutdown();
    executorService.awaitTermination(1, TimeUnit.MINUTES);
    return counterIncrementers;
}

From source file:com.examples.cloud.speech.StreamingRecognizeClient.java

/** Send streaming recognize requests to server. */
public void recognize() throws InterruptedException, IOException {
    final CountDownLatch finishLatch = new CountDownLatch(1);
    StreamObserver<StreamingRecognizeResponse> responseObserver = new StreamObserver<StreamingRecognizeResponse>() {
        @Override
        public void onNext(StreamingRecognizeResponse response) {
            logger.info("Received response: " + TextFormat.printToString(response));
        }

        @Override
        public void onError(Throwable error) {
            logger.log(Level.WARNING, "recognize failed: {0}", error);
            finishLatch.countDown();
        }

        @Override
        public void onCompleted() {
            logger.info("recognize completed.");
            finishLatch.countDown();
        }
    };

    StreamObserver<StreamingRecognizeRequest> requestObserver = speechClient
            .streamingRecognize(responseObserver);
    try {
        // Build and send a StreamingRecognizeRequest containing the parameters for
        // processing the audio.
        RecognitionConfig config = RecognitionConfig.newBuilder().setEncoding(AudioEncoding.LINEAR16)
                .setSampleRate(samplingRate).build();
        StreamingRecognitionConfig streamingConfig = StreamingRecognitionConfig.newBuilder().setConfig(config)
                .setInterimResults(true).setSingleUtterance(true).build();

        StreamingRecognizeRequest initial = StreamingRecognizeRequest.newBuilder()
                .setStreamingConfig(streamingConfig).build();
        requestObserver.onNext(initial);

        // Open audio file. Read and send sequential buffers of audio as additional RecognizeRequests.
        FileInputStream in = new FileInputStream(new File(file));
        // For LINEAR16 at 16000 Hz sample rate, 3200 bytes corresponds to 100 milliseconds of audio.
        byte[] buffer = new byte[BYTES_PER_BUFFER];
        int bytesRead;
        int totalBytes = 0;
        int samplesPerBuffer = BYTES_PER_BUFFER / BYTES_PER_SAMPLE;
        int samplesPerMillis = samplingRate / 1000;

        while ((bytesRead = in.read(buffer)) != -1) {
            totalBytes += bytesRead;
            StreamingRecognizeRequest request = StreamingRecognizeRequest.newBuilder()
                    .setAudioContent(ByteString.copyFrom(buffer, 0, bytesRead)).build();
            requestObserver.onNext(request);
            // To simulate real-time audio, sleep after sending each audio buffer.
            Thread.sleep(samplesPerBuffer / samplesPerMillis);
        }
        logger.info("Sent " + totalBytes + " bytes from audio file: " + file);
    } catch (RuntimeException e) {
        // Cancel RPC.
        requestObserver.onError(e);
        throw e;
    }
    // Mark the end of requests.
    requestObserver.onCompleted();

    // Receiving happens asynchronously.
    finishLatch.await(1, TimeUnit.MINUTES);
}

From source file:rmblworx.tools.timey.AlarmTest.java

/**
 * Test method for {@link Alarm#Alarm(IAlarmService, int, TimeUnit)}.
 */
@Test(expected = NullArgumentException.class)
public final void testShouldFailBecauseServiceIsNull() {
    this.effectiveDelegate = new Alarm(null, 1, TimeUnit.MINUTES);
}

From source file:com.ovalsearch.services.impl.StartupServiceImpl.java

private void loadNewExecutorServiceAndScheduleTasks() {
    ScheduledExecutorService executorService = Executors.newScheduledThreadPool(CacheManager.getInstance()
            .getCache(PropertyMapCache.class).getPropertyInteger(Property.SCHEDULER_THREAD_POOL_SIZE));
    taskScheduler.setExecutorService(executorService);
    LOG.info("Scheduling Task in newly created Executor Service");
    ScheduledFuture<?> futureObject = taskScheduler.getExecutorService().scheduleAtFixedRate(
            new DownloadDataThread(
                    CacheManager.getInstance().getCache(PropertyMapCache.class)
                            .getPropertyString(Property.FILENAME),
                    CacheManager.getInstance().getCache(PropertyMapCache.class)
                            .getPropertyString(Property.REMOTE_REPO),
                    entityDao, applicationsDas),
            getInitialDelayInMinutes(), CacheManager.getInstance().getCache(PropertyMapCache.class)
                    .getPropertyInteger(Property.FILE_DOWNLOAD_PERIOD) * 60,
            TimeUnit.MINUTES);
    taskScheduler.setScheduledTask(futureObject);
}

From source file:com.iselect.kernal.geo.service.GeographicServiceImpl.java

@Override
public List<Future> importGeos(List<CountryDto> countries) {
    ExecutorService pools = Executors.newFixedThreadPool(2);
    List<Future> results = new ArrayList<>(countries.size());
    for (CountryDto country : countries) {
        Future result = pools.submit(new GeographicCallable(country));
        results.add(result);
    }
    try {
        // shutdown() must be requested before awaitTermination(), otherwise the pool keeps running and the await simply times out.
        pools.shutdown();
        pools.awaitTermination(1, TimeUnit.MINUTES);
    } catch (InterruptedException ex) {
        Logger.getLogger(GeographicServiceImpl.class.getName()).log(Level.SEVERE, null, ex);
    }
    return results;
}

From source file:com.doctor.esper.reference_5_2_0.Chapter21Performance.java

/**
 * We recommend using multiple threads to send events into Esper. We provide a test class below. Our test class does
 * not use a blocking queue and thread pool, so as to avoid a point of contention.
 *
 * We recommend using Java threads as in test_testing_performance_with_multiple_threads(), or a blocking queue and
 * thread pool with sendEvent(), or alternatively configuring inbound threading if your application does not already
 * employ threading. Esper provides the configuration option to use engine-level queues and threadpools for inbound,
 * outbound and internal executions. See Section 15.7.1, "Advanced Threading", for more information.
 *
 * http://www.espertech.com/esper/release-5.2.0/esper-reference/html_single/index.html#config-engine-threading
 *
 * @throws InterruptedException
 */
@Test
public void test_testing_performance_with_multiple_threads() throws InterruptedException {
    int numEvents = 1000000;
    int numThreads = 3;
    Thread[] threads = new Thread[numThreads];
    CountDownLatch countDownLatch = new CountDownLatch(numThreads);

    int eventPerThreads = numEvents / numThreads;
    for (int i = 0; i < numThreads; i++) {
        threads[i] = new Thread(new MyRunnable(countDownLatch, eventPerThreads, esperTemplateBean));
    }

    Stopwatch stopwatch = Stopwatch.createStarted();
    for (int i = 0; i < numThreads; i++) {
        threads[i].start();
    }
    countDownLatch.await(10, TimeUnit.MINUTES);
    if (countDownLatch.getCount() > 0) {
        throw new RuntimeException("Failed to complete in 10 minutes");
    }

    System.out.println("Took " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " milliseconds");
}
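
The Javadoc above also mentions driving sendEvent() from a blocking queue and thread pool instead of raw threads; the test deliberately avoids that to keep contention out of the measurement. For reference, here is a minimal sketch of that variant. It is not part of the original source file, and it assumes esperTemplateBean exposes a sendEvent(Object) method; a fixed thread pool is used because its work queue is itself a blocking queue.

private void sendWithThreadPool(List<Object> events) throws InterruptedException {
    // Executors.newFixedThreadPool is backed by an unbounded LinkedBlockingQueue; each task calls sendEvent().
    ExecutorService pool = Executors.newFixedThreadPool(3);
    for (Object event : events) {
        pool.execute(() -> esperTemplateBean.sendEvent(event)); // assumed sendEvent(Object) API
    }
    pool.shutdown(); // stop accepting new tasks and let the queued ones drain
    if (!pool.awaitTermination(10, TimeUnit.MINUTES)) {
        throw new RuntimeException("Failed to send all events within 10 minutes");
    }
}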

From source file:com.ikon.util.ExecutionUtils.java

/**
 * Execute command line.
 */
public static ExecutionResult runCmd(String cmd) throws SecurityException, InterruptedException, IOException {
    log.debug("runCmd({})", cmd);
    return runCmdImpl(cmd.split(" "), TimeUnit.MINUTES.toMillis(Config.SYSTEM_EXECUTION_TIMEOUT));
}

From source file:io.openmessaging.rocketmq.consumer.LocalMessageCache.java

@Override
public void startup() {
    this.cleanExpireMsgExecutors.scheduleAtFixedRate(new Runnable() {
        @Override
        public void run() {
            cleanExpireMsg();
        }
    }, clientConfig.getRmqMessageConsumeTimeout(), clientConfig.getRmqMessageConsumeTimeout(),
            TimeUnit.MINUTES);
}

From source file:com.astexample.Recognize.java

/** Send streaming recognize requests to server. */
public void recognize() throws InterruptedException, IOException {
    final CountDownLatch finishLatch = new CountDownLatch(1);
    StreamObserver<RecognizeResponse> responseObserver = new StreamObserver<RecognizeResponse>() {
        @Override
        public void onNext(RecognizeResponse response) {
            logger.info("Received response: " + TextFormat.printToString(response));
        }

        @Override
        public void onError(Throwable error) {
            Status status = Status.fromThrowable(error);
            logger.log(Level.WARNING, "recognize failed: {0}", status);
            finishLatch.countDown();
        }

        @Override
        public void onCompleted() {
            logger.info("recognize completed.");
            finishLatch.countDown();
        }
    };

    StreamObserver<RecognizeRequest> requestObserver = stub.recognize(responseObserver);
    try {
        // Build and send a RecognizeRequest containing the parameters for processing the audio.
        InitialRecognizeRequest initial = InitialRecognizeRequest.newBuilder()
                .setEncoding(AudioEncoding.LINEAR16).setSampleRate(samplingRate).setInterimResults(true)
                .build();
        RecognizeRequest firstRequest = RecognizeRequest.newBuilder().setInitialRequest(initial).build();
        requestObserver.onNext(firstRequest);

        // Open audio file. Read and send sequential buffers of audio as additional RecognizeRequests.
        FileInputStream in = new FileInputStream(new File(file));
        // For LINEAR16 at 16000 Hz sample rate, 3200 bytes corresponds to 100 milliseconds of audio.
        byte[] buffer = new byte[3200];
        int bytesRead;
        int totalBytes = 0;
        while ((bytesRead = in.read(buffer)) != -1) {
            totalBytes += bytesRead;
            AudioRequest audio = AudioRequest.newBuilder().setContent(ByteString.copyFrom(buffer, 0, bytesRead))
                    .build();
            RecognizeRequest request = RecognizeRequest.newBuilder().setAudioRequest(audio).build();
            requestObserver.onNext(request);
            // To simulate real-time audio, sleep after sending each audio buffer.
            // For 16000 Hz sample rate, sleep 100 milliseconds.
            Thread.sleep(samplingRate / 160);
        }
        logger.info("Sent " + totalBytes + " bytes from audio file: " + file);
    } catch (RuntimeException e) {
        // Cancel RPC.
        requestObserver.onError(e);
        throw e;
    }
    // Mark the end of requests.
    requestObserver.onCompleted();

    // Receiving happens asynchronously.
    finishLatch.await(1, TimeUnit.MINUTES);
}

From source file:com.hpcloud.util.Duration.java

public long toMins() {
    return TimeUnit.MINUTES.convert(length, timeUnit);
}