Example usage for java.util.concurrent Callable

Introduction

This page collects usage examples for java.util.concurrent Callable, drawn from open-source projects; a minimal orientation sketch follows the prototype below.

Prototype

Callable
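
Before the real-world examples, here is a minimal, self-contained sketch of the pattern they all share: an anonymous Callable implementation handed to an ExecutorService, with the result read back through a Future. The class and variable names below are illustrative only.

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class CallableSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        try {
            // submit() accepts a Callable; call() runs on a pool thread
            Future<String> future = executor.submit(new Callable<String>() {
                @Override
                public String call() throws Exception {
                    return "computed on " + Thread.currentThread().getName();
                }
            });
            // get() blocks until call() returns, or rethrows its exception
            System.out.println(future.get());
        } finally {
            executor.shutdown();
        }
    }
}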

Usage

From source file:com.googlecode.android_scripting.facade.BluetoothFacade.java

public BluetoothFacade(FacadeManager manager) {
    super(manager);
    mAndroidFacade = manager.getReceiver(AndroidFacade.class);
    mBluetoothAdapter = MainThread.run(manager.getService(), new Callable<BluetoothAdapter>() {
        @Override
        public BluetoothAdapter call() throws Exception {
            return BluetoothAdapter.getDefaultAdapter();
        }
    });
}

From source file:de.langmi.spring.batch.examples.readers.file.flatfileitemreader.FlatFileItemReaderDoInStepscopeTest.java

/**
 * Test should read successfully.
 *
 * @throws Exception 
 */
@Test
public void testSuccessfulReading() throws Exception {
    // setup
    Map<String, JobParameter> jobParametersMap = new HashMap<String, JobParameter>();
    jobParametersMap.put("time", new JobParameter(System.currentTimeMillis()));
    jobParametersMap.put("input.file", new JobParameter("file:src/test/resources/input/file/simple/input.txt"));
    StepExecution execution = MetaDataInstanceFactory.createStepExecution(new JobParameters(jobParametersMap));

    int count = StepScopeTestUtils.doInStepScope(execution, new Callable<Integer>() {

        @Override
        public Integer call() throws Exception {

            int count = 0;

            itemReaderStream.open(new ExecutionContext());

            String line;
            try {
                while ((line = itemReaderStream.read()) != null) {
                    assertEquals(String.valueOf(count), line);
                    count++;
                }
            } finally {
                itemReaderStream.close();
            }
            return count;

        }
    });
    assertEquals(EXPECTED_COUNT, count);
}

From source file:com.eucalyptus.util.metrics.ThruputMetrics.java

/**
 * Adds start time for monitored action that will be finished later.
 * If the same action was already recorded as ended due to asynchronous communication,
 * adds its execution time as a new data point.
 */
public static void startOperation(final MonitoredAction action, final String id, final long startTime) {
    Threads.enqueue(Eucalyptus.class, ThruputMetrics.class, new Callable<Boolean>() {
        @Override
        public Boolean call() throws Exception {
            String startKey = operationKey(action, id, true);
            String endKey = operationKey(action, id, false);
            Long endTime = (Long) paritalOperations.get(endKey);
            if (endTime != null) {
                paritalOperations.remove(endKey);
                if (endTime - startTime > 0)
                    addDataPointNoThread(action, endTime - startTime);
            } else {
                paritalOperations.put(startKey, startTime);
            }
            return true;
        }
    });
}

From source file:com.link_intersystems.lang.reflect.SerializableConstructorTest.java

@Test
public void noSuchMethodOnSerialization() throws Throwable {
    Constructor<ConstructorSerializationTestClass> constructor = ConstructorSerializationTestClass.class
            .getDeclaredConstructor(String.class);
    final SerializableConstructor serializableConstructor = new NoSuchMethodSerializableConstructor(
            constructor);
    Assertion.assertCause(NoSuchMethodException.class, new Callable<Object>() {

        public Object call() throws Exception {
            return SerializationUtils.clone(serializableConstructor);
        }
    });
}

From source file:io.druid.storage.cassandra.CassandraDataSegmentPuller.java

public com.metamx.common.FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir)
        throws SegmentLoadingException {
    log.info("Pulling index from C* at path[%s] to outDir[%s]", key, outDir);
    if (!outDir.exists()) {
        outDir.mkdirs();
    }

    if (!outDir.isDirectory()) {
        throw new ISE("outDir[%s] must be a directory.", outDir);
    }

    long startTime = System.currentTimeMillis();
    final File tmpFile = new File(outDir, "index.zip");
    log.info("Pulling to temporary local cache [%s]", tmpFile.getAbsolutePath());

    final com.metamx.common.FileUtils.FileCopyResult localResult;
    try {
        localResult = RetryUtils.retry(new Callable<com.metamx.common.FileUtils.FileCopyResult>() {
            @Override
            public com.metamx.common.FileUtils.FileCopyResult call() throws Exception {
                try (OutputStream os = new FileOutputStream(tmpFile)) {
                    final ObjectMetadata meta = ChunkedStorage.newReader(indexStorage, key, os)
                            .withBatchSize(BATCH_SIZE).withConcurrencyLevel(CONCURRENCY).call();
                }
                return new com.metamx.common.FileUtils.FileCopyResult(tmpFile);
            }
        }, Predicates.<Throwable>alwaysTrue(), 10);
    } catch (Exception e) {
        throw new SegmentLoadingException(e, "Unable to copy key [%s] to file [%s]", key,
                tmpFile.getAbsolutePath());
    }
    try {
        final com.metamx.common.FileUtils.FileCopyResult result = CompressionUtils.unzip(tmpFile, outDir);
        log.info("Pull of file[%s] completed in %,d millis (%s bytes)", key,
                System.currentTimeMillis() - startTime, result.size());
        return result;
    } catch (Exception e) {
        try {
            FileUtils.deleteDirectory(outDir);
        } catch (IOException e1) {
            log.error(e1, "Error clearing segment directory [%s]", outDir.getAbsolutePath());
            e.addSuppressed(e1);
        }
        throw new SegmentLoadingException(e, e.getMessage());
    } finally {
        if (!tmpFile.delete()) {
            log.warn("Could not delete cache file at [%s]", tmpFile.getAbsolutePath());
        }
    }
}
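
The RetryUtils.retry call above comes from the project's own utility library. As a rough sketch of the same idea using only the JDK (the helper class, method name, and signature are assumptions, not part of the original source), the work wrapped in a Callable can be retried in a plain loop:

import java.util.concurrent.Callable;

// Hypothetical helper: run a Callable up to maxTries times (maxTries >= 1),
// returning the first successful result and rethrowing the last failure otherwise.
final class SimpleRetry {
    static <T> T retry(Callable<T> task, int maxTries) throws Exception {
        Exception last = null;
        for (int attempt = 0; attempt < maxTries; attempt++) {
            try {
                return task.call();
            } catch (Exception e) {
                last = e; // remember the failure and try the next attempt
            }
        }
        throw last;
    }
}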

From source file:com.netflix.curator.framework.recipes.locks.TestInterProcessSemaphore.java

@Test
public void testThreadedLeaseIncrease() throws Exception {
    final Timing timing = new Timing();
    CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), timing.session(),
            timing.connection(), new RetryOneTime(1));
    try {
        client.start();

        final SharedCount count = new SharedCount(client, "/foo/count", 1);
        count.start();

        final InterProcessSemaphoreV2 semaphore = new InterProcessSemaphoreV2(client, "/test", count);

        ExecutorService service = Executors.newCachedThreadPool();

        final CountDownLatch latch = new CountDownLatch(1);
        Future<Object> future1 = service.submit(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                Lease lease = semaphore.acquire(timing.seconds(), TimeUnit.SECONDS);
                Assert.assertNotNull(lease);
                latch.countDown();
                lease = semaphore.acquire(timing.seconds(), TimeUnit.SECONDS);
                Assert.assertNotNull(lease);
                return null;
            }
        });
        Future<Object> future2 = service.submit(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                Assert.assertTrue(latch.await(timing.seconds(), TimeUnit.SECONDS));
                timing.sleepABit(); // make sure second acquire is waiting
                Assert.assertTrue(count.trySetCount(2));
                return null;
            }
        });

        future1.get();
        future2.get();
    } finally {
        IOUtils.closeQuietly(client);
    }
}

From source file:com.developmentsprint.spring.breaker.hystrix.HystrixAnnotationDrivenConcreteClassTest.java

@Test
public void testMaxConcurrency() throws Exception {
    ExecutorService executor = Executors.newFixedThreadPool(20);
    List<Future<String>> futures = new ArrayList<Future<String>>();
    for (int i = 0; i < 20; i++) {
        futures.add(executor.submit(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return methods.getText();
            }
        }));
    }

    Thread.sleep(500L);

    int successes = 0;
    int rejections = 0;
    for (Future<String> future : futures) {
        try {
            future.get();
            successes++;
        } catch (Exception e) {
            rejections++;
        }
    }

    assertThat(successes).isEqualTo(10);
    assertThat(rejections).isEqualTo(10);
}

From source file:com.jdom.ajatt.viewer.util.HtmlUtil.java

public static String getRequest(Activity activity, final String url) {
    SharedPreferences prefs = activity.getSharedPreferences(CLASS_NAME, Context.MODE_PRIVATE);
    String cachedUrlContents = prefs.getString(url, null);
    String urlRetrievalTimeKey = url + ".time";
    long cachedUrlRetrievalTime = prefs.getLong(urlRetrievalTimeKey, 0L);
    long ageOfCachedData = System.currentTimeMillis() - cachedUrlRetrievalTime;
    if (cachedUrlRetrievalTime == 0) {
        Log.d(CLASS_NAME, "Did not find cached data for URL [" + url + "].");
    } else {
        Log.d(CLASS_NAME, "URL [" + url + "] has been cached for [" + ageOfCachedData + "] ms.");
    }

    Future<String> result = null;

    boolean expired = ageOfCachedData > CACHE_URL_MILLISECONDS;

    if (expired) {
        Log.d(CLASS_NAME, "URL [" + url + "] data is stale.");
    } else {
        long timeRemainingValidCache = CACHE_URL_MILLISECONDS - ageOfCachedData;
        Log.d(CLASS_NAME,
                "URL [" + url + "] data has [" + timeRemainingValidCache + "] ms of validity remaining.");
    }

    if (cachedUrlContents == null || expired) {
        Callable<String> callable = new Callable<String>() {
            public String call() throws Exception {
                long start = System.currentTimeMillis();
                Log.d(CLASS_NAME, "Retrieving URL [" + url + "].");
                HttpClient client = new DefaultHttpClient();
                HttpGet request = new HttpGet(url);
                try {
                    HttpResponse response = client.execute(request);
                    return HttpHelper.request(response);
                } catch (Exception ex) {
                    Log.e(CLASS_NAME, "Failure to retrieve the url!", ex);
                    return null;
                } finally {
                    Log.d(CLASS_NAME, "Retrieving URL [" + url + "] took ["
                            + (System.currentTimeMillis() - start) + "] ms to retrieve.");
                }
            }
        };

        ExecutorService executor = Executors.newSingleThreadExecutor();
        result = executor.submit(callable);
    }

    if (cachedUrlContents == null) {
        try {
            cachedUrlContents = result.get();

            Editor editor = prefs.edit();
            editor.putLong(urlRetrievalTimeKey, System.currentTimeMillis());
            editor.putString(url, cachedUrlContents);
            editor.commit();
        } catch (Exception e) {
            Log.e(CLASS_NAME, "Failure to retrieve the url!", e);
        }
    }

    return cachedUrlContents;
}
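
One detail worth noting in the snippet above: a new single-thread executor is created on every cache miss and never shut down, so its worker thread lingers after the Callable completes. In a long-lived process the usual pattern is to shut the executor down right after submitting, which still lets the already-submitted task finish, roughly:

        ExecutorService executor = Executors.newSingleThreadExecutor();
        result = executor.submit(callable);
        executor.shutdown(); // no new tasks accepted; the submitted Callable still runs to completion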

From source file:com.rogiel.httpchannel.http.Request.java

public Future<InputStream> asStreamAsync() throws IOException {
    return ctx.threadPool.submit(new Callable<InputStream>() {
        @Override
        public InputStream call() throws Exception {
            return asStream();
        }
    });
}

From source file:com.metamx.druid.loading.S3DataSegmentPuller.java

@Override
public void getSegmentFiles(final DataSegment segment, final File outDir) throws SegmentLoadingException {
    final S3Coords s3Coords = new S3Coords(segment);

    log.info("Pulling index at path[%s] to outDir[%s]", s3Coords, outDir);

    if (!isObjectInBucket(s3Coords)) {
        throw new SegmentLoadingException("IndexFile[%s] does not exist.", s3Coords);
    }

    if (!outDir.exists()) {
        outDir.mkdirs();
    }

    if (!outDir.isDirectory()) {
        throw new ISE("outDir[%s] must be a directory.", outDir);
    }

    try {
        S3Utils.retryS3Operation(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                long startTime = System.currentTimeMillis();
                S3Object s3Obj = null;

                try {
                    s3Obj = s3Client.getObject(s3Coords.bucket, s3Coords.path);

                    InputStream in = null;
                    try {
                        in = s3Obj.getDataInputStream();
                        final String key = s3Obj.getKey();
                        if (key.endsWith(".zip")) {
                            CompressionUtils.unzip(in, outDir);
                        } else if (key.endsWith(".gz")) {
                            final File outFile = new File(outDir, toFilename(key, ".gz"));
                            ByteStreams.copy(new GZIPInputStream(in), Files.newOutputStreamSupplier(outFile));
                        } else {
                            ByteStreams.copy(in,
                                    Files.newOutputStreamSupplier(new File(outDir, toFilename(key, ""))));
                        }
                        log.info("Pull of file[%s] completed in %,d millis", s3Obj,
                                System.currentTimeMillis() - startTime);
                        return null;
                    } catch (IOException e) {
                        FileUtils.deleteDirectory(outDir);
                        throw new IOException(String.format("Problem decompressing object[%s]", s3Obj), e);
                    } finally {
                        Closeables.closeQuietly(in);
                    }
                } finally {
                    S3Utils.closeStreamsQuietly(s3Obj);
                }
            }
        });
    } catch (Exception e) {
        throw new SegmentLoadingException(e, e.getMessage());
    }
}