Example usage for java.util.concurrent CompletableFuture get

List of usage examples for java.util.concurrent CompletableFuture get

Introduction

On this page you can find example usages of java.util.concurrent.CompletableFuture.get.

Prototype

@SuppressWarnings("unchecked")
public T get() throws InterruptedException, ExecutionException 

Source Link

Document

Waits if necessary for this future to complete, and then returns its result.

Usage

From source file:io.pravega.client.stream.impl.ControllerImplTest.java

@Test
public void testSealStream() throws Exception {
    // Sealing an existing, healthy stream should complete successfully.
    CompletableFuture<Boolean> updateStreamStatus;
    updateStreamStatus = controllerClient.sealStream("scope1", "stream1");
    assertTrue(updateStreamStatus.get());

    // Each of the remaining streams is configured to fail on the mock
    // controller; the returned future must complete exceptionally.
    // (Lambda parameter renamed from `Throwable` — shadowing the type name
    // is confusing — and message casing unified with the sibling calls.)
    updateStreamStatus = controllerClient.sealStream("scope1", "stream2");
    AssertExtensions.assertThrows("Should throw Exception", updateStreamStatus, throwable -> true);

    updateStreamStatus = controllerClient.sealStream("scope1", "stream3");
    AssertExtensions.assertThrows("Should throw Exception", updateStreamStatus, throwable -> true);

    updateStreamStatus = controllerClient.sealStream("scope1", "stream4");
    AssertExtensions.assertThrows("Should throw Exception", updateStreamStatus, throwable -> true);

    updateStreamStatus = controllerClient.sealStream("scope1", "stream5");
    AssertExtensions.assertThrows("Should throw Exception", updateStreamStatus, throwable -> true);
}

From source file:io.pravega.client.stream.impl.ControllerImplTest.java

@Test
public void testGetCurrentSegments() throws Exception {
    // A successful call returns the two current segments with their key ranges.
    CompletableFuture<StreamSegments> streamSegments;
    streamSegments = controllerClient.getCurrentSegments("scope1", "stream1");
    // assertEquals instead of assertTrue(size() == 2): on failure it reports
    // the actual size instead of a bare boolean.
    assertEquals(2, streamSegments.get().getSegments().size());
    assertEquals(new Segment("scope1", "stream1", 6), streamSegments.get().getSegmentForKey(0.2));
    assertEquals(new Segment("scope1", "stream1", 7), streamSegments.get().getSegmentForKey(0.6));

    // "stream2" is configured to fail; the future must complete exceptionally.
    streamSegments = controllerClient.getCurrentSegments("scope1", "stream2");
    AssertExtensions.assertThrows("Should throw Exception", streamSegments, throwable -> true);
}

From source file:com.ikanow.aleph2.storage_service_hdfs.services.TestMockHdfsStorageSystem.java

@Test
public void test_handleBucketDeletionRequest() throws InterruptedException, ExecutionException, IOException {
    // 0) Setup: point every storage-service directory at the local temp dir.
    final String temp_dir = System.getProperty("java.io.tmpdir") + File.separator;

    final GlobalPropertiesBean globals = BeanTemplateUtils.build(GlobalPropertiesBean.class)
            .with(GlobalPropertiesBean::local_yarn_config_dir, temp_dir)
            .with(GlobalPropertiesBean::distributed_root_dir, temp_dir)
            .with(GlobalPropertiesBean::local_root_dir, temp_dir)
            .with(GlobalPropertiesBean::local_cached_jar_dir, temp_dir).done().get();

    final MockHdfsStorageService storage_service = new MockHdfsStorageService(globals);

    // 1) Set up bucket (code taken from management_db_service)
    final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
            .with(DataBucketBean::full_name, "/test/delete/bucket").done().get();
    final String bucket_path = System.getProperty("java.io.tmpdir") + "/data/" + bucket.full_name();
    try {
        FileUtils.deleteDirectory(new File(bucket_path));
    } catch (Exception e) {
        // best-effort cleanup of leftovers from a previous run; ignore failures
    }
    setup_bucket(storage_service, bucket, Arrays.asList( //(create some secondary buffers also)
            "$sec_test1", "$sec_test2", "$sec_test3"));

    // Check nothing goes wrong when bucket doesn't exist
    final DataBucketBean bucket2 = BeanTemplateUtils.build(DataBucketBean.class)
            .with(DataBucketBean::full_name, "/test/delete/bucket_not_exist").done().get();
    final String bucket_path2 = System.getProperty("java.io.tmpdir") + "/data/" + bucket2.full_name();
    FileUtils.deleteDirectory(new File(bucket_path2));
    assertFalse("The file path for bucket2 does not exist", new File(bucket_path2).exists());

    // Deleting a non-existent bucket is reported as success...
    final CompletableFuture<BasicMessageBean> res2 = storage_service.getDataService().get()
            .handleBucketDeletionRequest(bucket2, Optional.empty(), false);
    assertEquals(true, res2.get().success());

    //(...and check it didn't create anything as a side effect)
    assertFalse("No bucket2 paths were created", new File(bucket_path2).exists());

    // Check main bucket deletion: all buffers exist before, only the
    // primary ("current") is removed by a non-buffer-specific delete.
    check_handleBucketDeletion_preChecks(storage_service, bucket, "current", true);
    check_handleBucketDeletion_preChecks(storage_service, bucket, "sec_test1", true);
    check_handleBucketDeletion_preChecks(storage_service, bucket, "sec_test2", true);
    check_handleBucketDeletion_preChecks(storage_service, bucket, "sec_test3", true);
    {
        final CompletableFuture<BasicMessageBean> res1 = storage_service.getDataService().get()
                .handleBucketDeletionRequest(bucket, Optional.empty(), false);
        assertEquals("Handle deletion bucket request should have worked:" + res1.get().message(), true,
                res1.get().success());
        System.out.println("handleDeletion output: " + res1.get().message());
    }
    check_handleBucketDeletion_postChecks(storage_service, bucket, "current", false);
    check_handleBucketDeletion_preChecks(storage_service, bucket, "sec_test1", false);
    check_handleBucketDeletion_preChecks(storage_service, bucket, "sec_test2", false);
    check_handleBucketDeletion_preChecks(storage_service, bucket, "sec_test3", false);

    // OK now delete a secondary buffer (no prejudice)
    {
        final CompletableFuture<BasicMessageBean> res1 = storage_service.getDataService().get()
                .handleBucketDeletionRequest(bucket, Optional.of("sec_test2"), false);
        assertEquals("Handle deletion bucket request should have worked:" + res1.get().message(), true,
                res1.get().success());
        System.out.println("handleDeletion output: " + res1.get().message());
    }
    check_handleBucketDeletion_preChecks(storage_service, bucket, "sec_test1", false);
    check_handleBucketDeletion_postChecks(storage_service, bucket, "sec_test2", false);
    check_handleBucketDeletion_preChecks(storage_service, bucket, "sec_test3", false);

    // OK now delete a secondary buffer with prejudice
    {
        final CompletableFuture<BasicMessageBean> res1 = storage_service.getDataService().get()
                .handleBucketDeletionRequest(bucket, Optional.of("sec_test1"), true);
        assertEquals("Handle deletion bucket request should have worked:" + res1.get().message(), true,
                res1.get().success());
        System.out.println("handleDeletion output: " + res1.get().message());
    }
    check_handleBucketDeletion_postChecks(storage_service, bucket, "sec_test1", true);

    // Finally: check that deleting a bucket (with prejudice) deletes everything:
    {
        final CompletableFuture<BasicMessageBean> res1 = storage_service.getDataService().get()
                .handleBucketDeletionRequest(bucket, Optional.empty(), true);
        assertEquals("Handle deletion bucket request should have worked:" + res1.get().message(), true,
                res1.get().success());
        System.out.println("handleDeletion output: " + res1.get().message());
    }
    check_handleBucketDeletion_postChecks(storage_service, bucket, "sec_test3", true);

}

From source file:org.apache.flink.client.program.rest.RestClusterClientTest.java

@Test
public void testDisposeSavepoint() throws Exception {
    // Exercises RestClusterClient#disposeSavepoint against three stubbed
    // outcomes: success, completion with a known exception, and an outright
    // failure surfaced as a RestClientException.
    final String savepointPath = "foobar";
    final String exceptionMessage = "Test exception.";
    final FlinkException testException = new FlinkException(exceptionMessage);

    final TestSavepointDisposalHandlers testSavepointDisposalHandlers = new TestSavepointDisposalHandlers(
            savepointPath);
    final TestSavepointDisposalHandlers.TestSavepointDisposalTriggerHandler testSavepointDisposalTriggerHandler = testSavepointDisposalHandlers.new TestSavepointDisposalTriggerHandler();
    // Status handler is primed with three results — presumably served one per
    // disposal, in order, matching the three scenarios below (confirm against
    // the handler implementation).
    final TestSavepointDisposalHandlers.TestSavepointDisposalStatusHandler testSavepointDisposalStatusHandler = testSavepointDisposalHandlers.new TestSavepointDisposalStatusHandler(
            OptionalFailure.of(AsynchronousOperationInfo.complete()),
            OptionalFailure
                    .of(AsynchronousOperationInfo.completeExceptional(new SerializedThrowable(testException))),
            OptionalFailure.ofFailure(testException));

    try (TestRestServerEndpoint ignored = createRestServerEndpoint(testSavepointDisposalStatusHandler,
            testSavepointDisposalTriggerHandler)) {
        {
            // 1) Successful disposal completes with an Acknowledge.
            final CompletableFuture<Acknowledge> disposeSavepointFuture = restClusterClient
                    .disposeSavepoint(savepointPath);
            assertThat(disposeSavepointFuture.get(), is(Acknowledge.get()));
        }

        {
            // 2) Exceptional completion: the original exception message must be
            // findable in the ExecutionException's cause chain.
            final CompletableFuture<Acknowledge> disposeSavepointFuture = restClusterClient
                    .disposeSavepoint(savepointPath);

            try {
                disposeSavepointFuture.get();
                fail("Expected an exception");
            } catch (ExecutionException ee) {
                assertThat(ExceptionUtils.findThrowableWithMessage(ee, exceptionMessage).isPresent(), is(true));
            }
        }

        {
            // 3) A failed operation is reported as a RestClientException.
            try {
                restClusterClient.disposeSavepoint(savepointPath).get();
                fail("Expected an exception.");
            } catch (ExecutionException ee) {
                assertThat(ExceptionUtils.findThrowable(ee, RestClientException.class).isPresent(), is(true));
            }
        }
    }
}

From source file:org.pentaho.di.ui.repo.controller.RepositoryConnectController.java

/**
 * Opens a directory-chooser dialog and returns the selected path.
 * <p>
 * When a Spoon shell exists, the dialog is opened asynchronously on the SWT
 * UI thread and this method blocks on the result; otherwise the dialog is
 * opened directly on the calling thread.
 *
 * @return the chosen directory path, {@code null} if the user cancelled the
 *         dialog, or {@code "/"} if waiting for the result failed
 */
public String browse() {
    Spoon spoon = spoonSupplier.get();
    CompletableFuture<String> name = new CompletableFuture<>();
    Runnable execute = () -> {
        DirectoryDialog directoryDialog = new DirectoryDialog(spoonSupplier.get().getShell());
        // DirectoryDialog#open returns null on cancel; that completes the future too.
        name.complete(directoryDialog.open());
    };
    if (spoon.getShell() != null) {
        spoon.getShell().getDisplay().asyncExec(execute);
    } else {
        execute.run();
    }
    try {
        return name.get();
    } catch (InterruptedException e) {
        // Preserve the thread's interrupt status instead of swallowing it,
        // then fall back to the root path as before.
        Thread.currentThread().interrupt();
        return "/";
    } catch (Exception e) {
        // Any other failure falls back to the root path (original behavior).
        return "/";
    }
}

From source file:io.ventu.rpc.amqp.AmqpInvokerimplTest.java

@Test
public void responseReceiver_handleDelivery_onEncodingException_MapWithNoError_exception()
        throws EncodingException, IOException, InterruptedException, ExecutionException, TimeoutException {
    // When a delivered payload cannot be decoded into the expected response
    // type (and carries no "error" field), the pending future must complete
    // exceptionally with an EncodingException as its cause.
    ResponseReceiverImpl receiver = new ResponseReceiverImpl(serializer, new Validator() {
    }, 1, TimeUnit.MINUTES);

    String correlationId = "987654321";
    // Register a pending request; its future starts out incomplete.
    CompletableFuture<Res> answer = receiver.put(correlationId, Res.class);
    assertFalse(answer.isDone());
    assertFalse(answer.isCompletedExceptionally());

    // "value" is a string, which is expected to break decoding into Res
    // (presumably Res declares an int field — confirm against Res).
    Map<String, Object> res = Maps.newHashMap();
    res.put("value", "notAnInt");

    receiver.handleDelivery(correlationId, serializer.encode(res));
    assertTrue(answer.isDone());
    assertTrue(answer.isCompletedExceptionally());

    // Unwrap to verify the cause, then rethrow so the ExpectedException
    // rule still sees the ExecutionException.
    exception.expect(ExecutionException.class);
    try {
        answer.get();
    } catch (ExecutionException ex) {
        assertTrue(ex.getCause() instanceof EncodingException);
        assertEquals("failed to decode JSON", ex.getCause().getMessage());
        throw ex;
    }
}

From source file:com.ikanow.aleph2.storage_service_hdfs.services.TestMockHdfsStorageSystem.java

@Test
public void test_ageOut() throws IOException, InterruptedException, ExecutionException {
    // 0) Setup: point storage directories at the local temp dir.
    final String temp_dir = System.getProperty("java.io.tmpdir") + File.separator;

    // NOTE(review): distributed_root_dir is set twice and local_cached_jar_dir
    // never — the sibling test (test_handleBucketDeletionRequest) sets
    // local_cached_jar_dir here; likely a copy/paste slip, confirm intent.
    final GlobalPropertiesBean globals = BeanTemplateUtils.build(GlobalPropertiesBean.class)
            .with(GlobalPropertiesBean::local_yarn_config_dir, temp_dir)
            .with(GlobalPropertiesBean::distributed_root_dir, temp_dir)
            .with(GlobalPropertiesBean::local_root_dir, temp_dir)
            .with(GlobalPropertiesBean::distributed_root_dir, temp_dir).done().get();

    final MockHdfsStorageService storage_service = new MockHdfsStorageService(globals);

    // 1) Set up bucket (code taken from management_db_service) with three
    //    storage sub-schemas carrying different max ages:
    //    raw = 9 days, json = 6 days, processed = 1 week.
    final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
            .with(DataBucketBean::full_name, "/test/age/out/bucket")
            .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                    .with(DataSchemaBean::storage_schema, BeanTemplateUtils.build(StorageSchemaBean.class)
                            .with(StorageSchemaBean::raw,
                                    BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                            .with(StorageSchemaBean.StorageSubSchemaBean::exist_age_max,
                                                    "9 days")
                                            .done().get())
                            .with(StorageSchemaBean::json,
                                    BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                            .with(StorageSchemaBean.StorageSubSchemaBean::exist_age_max,
                                                    "6 days")
                                            .done().get())
                            .with(StorageSchemaBean::processed,
                                    BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                            .with(StorageSchemaBean.StorageSubSchemaBean::exist_age_max,
                                                    "1 week")
                                            .done().get())
                            .done().get())
                    .done().get())
            .done().get();

    // Start from a clean directory tree for this bucket.
    FileUtils.deleteDirectory(new File(System.getProperty("java.io.tmpdir") + File.separator + "/data/"
            + File.separator + bucket.full_name()));
    setup_bucket(storage_service, bucket, Arrays.asList("$sec_test"));
    final String bucket_path = System.getProperty("java.io.tmpdir") + File.separator + "/data/" + File.separator
            + bucket.full_name();
    assertTrue("The file path has been created", new File(bucket_path + "/managed_bucket").exists());

    // Create one day-partitioned directory per age from 4 to 9 days old,
    // under raw/json/processed and one secondary buffer.
    final long now = new Date().getTime();
    IntStream.range(4, 10).boxed().map(i -> now - (i * 1000L * 3600L * 24L))
            .forEach(Lambdas.wrap_consumer_u(n -> {
                final String pattern = TimeUtils.getTimeBasedSuffix(TimeUtils.getTimePeriod("1 day").success(),
                        Optional.empty());
                final String dir = DateUtils.formatDate(new Date(n), pattern);

                FileUtils.forceMkdir(
                        new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_RAW + "/" + dir));
                FileUtils.forceMkdir(
                        new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_JSON + "/" + dir));
                FileUtils.forceMkdir(
                        new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_PROCESSED + "/" + dir));
                FileUtils.forceMkdir(new File(bucket_path + "/"
                        + IStorageService.STORED_DATA_SUFFIX_PROCESSED_SECONDARY + "/sec_test/" + dir)); // (mini test for secondary)
            }));

    // (7 cos includes root)
    assertEquals(7,
            FileUtils.listFilesAndDirs(new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_RAW),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(7,
            FileUtils.listFilesAndDirs(new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_JSON),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(7,
            FileUtils.listFilesAndDirs(
                    new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_PROCESSED),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(7,
            FileUtils.listFilesAndDirs(new File(
                    bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_PROCESSED_SECONDARY + "/sec_test/"),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());

    // 1) Normal run: age-out deletes dirs older than each sub-schema's max age.

    CompletableFuture<BasicMessageBean> cf = storage_service.getDataService().get().handleAgeOutRequest(bucket);

    BasicMessageBean res = cf.get();

    assertEquals(true, res.success());
    assertTrue("sensible message: " + res.message(), res.message().contains("raw: deleted 1 "));
    assertTrue("sensible message: " + res.message(), res.message().contains("json: deleted 4 "));
    assertTrue("sensible message: " + res.message(), res.message().contains("processed: deleted 3 "));

    // A run that deleted something is flagged loggable in the details map.
    assertTrue("Message marked as loggable: " + res.details(),
            Optional.ofNullable(res.details()).filter(m -> m.containsKey("loggable")).isPresent());

    System.out.println("Return from to delete: " + res.message());

    //(+1 including root)
    assertEquals(6,
            FileUtils.listFilesAndDirs(new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_RAW),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(3,
            FileUtils.listFilesAndDirs(new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_JSON),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(4,
            FileUtils.listFilesAndDirs(
                    new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_PROCESSED),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(4,
            FileUtils.listFilesAndDirs(new File(
                    bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_PROCESSED_SECONDARY + "/sec_test/"),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());

    // 2) Run it again, returns success but not loggable (nothing deleted):

    CompletableFuture<BasicMessageBean> cf2 = storage_service.getDataService().get()
            .handleAgeOutRequest(bucket);

    BasicMessageBean res2 = cf2.get();

    assertEquals(true, res2.success());
    assertTrue("sensible message: " + res2.message(), res2.message().contains("raw: deleted 0 "));
    assertTrue("sensible message: " + res2.message(), res2.message().contains("json: deleted 0 "));
    assertTrue("sensible message: " + res2.message(), res2.message().contains("processed: deleted 0 "));
    assertTrue("Message _not_ marked as loggable: " + res2.details(),
            !Optional.ofNullable(res2.details()).map(m -> m.get("loggable")).isPresent());

    // 3) No temporal settings

    final DataBucketBean bucket3 = BeanTemplateUtils.build(DataBucketBean.class)
            .with("full_name", "/test/handle/age/out/delete/not/temporal")
            .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class).done().get())
            .done().get();

    CompletableFuture<BasicMessageBean> cf3 = storage_service.getDataService().get()
            .handleAgeOutRequest(bucket3);
    BasicMessageBean res3 = cf3.get();
    // no temporal settings => returns success
    assertEquals(true, res3.success());

    // 4) Unparseable temporal settings (in theory won't validate but we can test here)

    final DataBucketBean bucket4 = BeanTemplateUtils.build(DataBucketBean.class)
            .with("full_name", "/test/handle/age/out/delete/temporal/malformed")
            .with(DataBucketBean::data_schema,
                    BeanTemplateUtils.build(DataSchemaBean.class).with(DataSchemaBean::storage_schema,
                            BeanTemplateUtils.build(StorageSchemaBean.class).with(StorageSchemaBean::json,
                                    BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                            .with(StorageSchemaBean.StorageSubSchemaBean::exist_age_max,
                                                    "bananas")
                                            .done().get())
                                    .done().get())
                            .done().get())
            .done().get();

    CompletableFuture<BasicMessageBean> cf4 = storage_service.getDataService().get()
            .handleAgeOutRequest(bucket4);
    BasicMessageBean res4 = cf4.get();
    // malformed temporal settings => returns failure
    assertEquals(false, res4.success());

}

From source file:io.pravega.client.stream.impl.ControllerImplTest.java

@Test
public void testGetSegmentsAtTime() throws Exception {
    // Successful lookup: two segments with their expected starting offsets.
    final CompletableFuture<Map<Segment, Long>> goodPositions = controllerClient
            .getSegmentsAtTime(new StreamImpl("scope1", "stream1"), 0);
    final Map<Segment, Long> segmentOffsets = goodPositions.get();
    assertEquals(2, segmentOffsets.size());
    assertEquals(10, segmentOffsets.get(new Segment("scope1", "stream1", 0)).longValue());
    assertEquals(20, segmentOffsets.get(new Segment("scope1", "stream1", 1)).longValue());

    // "stream2" is configured to fail: the returned future completes exceptionally.
    final CompletableFuture<Map<Segment, Long>> badPositions = controllerClient
            .getSegmentsAtTime(new StreamImpl("scope1", "stream2"), 0);
    AssertExtensions.assertThrows("Should throw Exception", badPositions, throwable -> true);
}

From source file:io.pravega.client.stream.impl.ControllerImplTest.java

@Test
public void testPingTransaction() throws Exception {
    // Pinging a transaction on a healthy stream completes with a null (Void) result.
    final CompletableFuture<Void> okPing = controllerClient
            .pingTransaction(new StreamImpl("scope1", "stream1"), UUID.randomUUID(), 0);
    assertTrue(okPing.get() == null);

    // "stream2" is configured to fail: the ping future completes exceptionally.
    final CompletableFuture<Void> failedPing = controllerClient
            .pingTransaction(new StreamImpl("scope1", "stream2"), UUID.randomUUID(), 0);
    AssertExtensions.assertThrows("Should throw Exception", failedPing, throwable -> true);
}

From source file:io.ventu.rpc.amqp.AmqpInvokerimplTest.java

@Test
public void responseReceiver_handleDelivery_onEncodingException_withErrorField_APIException()
        throws EncodingException, IOException, InterruptedException, ExecutionException, TimeoutException {
    // When a delivered payload carries an "error" field, the pending future
    // must complete exceptionally with an ApiException whose message is the
    // stringified error code.
    ResponseReceiverImpl receiver = new ResponseReceiverImpl(serializer, new Validator() {
    }, 1, TimeUnit.MINUTES);

    String correlationId = "987654321";
    // Register a pending request; its future starts out incomplete.
    CompletableFuture<Res> answer = receiver.put(correlationId, Res.class);
    assertFalse(answer.isDone());
    assertFalse(answer.isCompletedExceptionally());

    // Payload reporting error code 371.
    Map<String, Object> res = Maps.newHashMap();
    res.put("error", Integer.valueOf(371));

    receiver.handleDelivery(correlationId, serializer.encode(res));
    assertTrue(answer.isDone());
    assertTrue(answer.isCompletedExceptionally());

    // Unwrap to verify the cause, then rethrow so the ExpectedException
    // rule still sees the ExecutionException.
    exception.expect(ExecutionException.class);
    try {
        answer.get();
    } catch (ExecutionException ex) {
        assertTrue(ex.getCause() instanceof ApiException);
        assertEquals("371", ex.getCause().getMessage());
        throw ex;
    }
}