Example usage for java.util.concurrent CompletableFuture completedFuture

List of usage examples for java.util.concurrent CompletableFuture completedFuture

Introduction

On this page you can find example usages for java.util.concurrent CompletableFuture completedFuture.

Prototype

public static <U> CompletableFuture<U> completedFuture(U value) 

Source Link

Document

Returns a new CompletableFuture that is already completed with the given value.

Usage

From source file:io.pravega.client.stream.mock.MockController.java

/**
 * Creates a stream in this mock controller.
 *
 * @param streamConfig configuration (scope + stream name) of the stream to create.
 * @return a future completing with {@code false} if the stream already exists and {@code true}
 *         once the stream and its initial segments have been created; fails with
 *         {@link IllegalArgumentException} if the enclosing scope does not exist.
 */
@Override
@Synchronized
public CompletableFuture<Boolean> createStream(StreamConfiguration streamConfig) {
    Stream stream = new StreamImpl(streamConfig.getScope(), streamConfig.getStreamName());
    // Idempotence guard: an already-created stream is reported as "not created".
    if (createdStreams.get(stream) != null) {
        return CompletableFuture.completedFuture(false);
    }

    // The enclosing scope must exist before a stream can be created in it.
    if (createdScopes.get(streamConfig.getScope()) == null) {
        // BUG FIX: error message previously read "Scope does not exit."
        return FutureHelpers.failedFuture(new IllegalArgumentException("Scope does not exist."));
    }

    createdStreams.put(stream, streamConfig);
    createdScopes.get(streamConfig.getScope()).add(stream);
    // Eagerly create every initial segment of the new stream on this endpoint.
    for (Segment segment : getSegmentsForStream(stream)) {
        createSegment(segment.getScopedName(), new PravegaNodeUri(endpoint, port));
    }
    return CompletableFuture.completedFuture(true);
}

From source file:io.pravega.controller.store.stream.PersistentStreamBase.java

/***
 * Creates a new stream record in the stream store.
 * Create a new task of type Create.
 * If a create task already exists, use that and bring it to completion.
 * If no task exists, fall through all create steps. They are all idempotent.
 * <p>
 * Create Steps:
 * 1. Create new store configuration
 * 2. Create new segment table.
 * 3. Create new history table.
 * 4. Create new index
 *
 * @param configuration   stream configuration.
 * @param createTimestamp requested creation timestamp for the stream.
 * @return : future of whether it was done or not
 */
@Override
public CompletableFuture<CreateStreamResponse> create(final StreamConfiguration configuration,
        long createTimestamp) {

    // Preconditions: scope must exist; checkStreamExists resolves which configuration and
    // timestamp to use if a partially-created stream is found from a previous attempt.
    return checkScopeExists().thenCompose((Void v) -> checkStreamExists(configuration, createTimestamp))
            .thenCompose(createStreamResponse -> storeCreationTimeIfAbsent(createStreamResponse.getTimestamp())
                    // Step 1: persist the stream configuration (idempotent — "IfAbsent").
                    .thenCompose(
                            (Void v) -> createConfigurationIfAbsent(createStreamResponse.getConfiguration()))
                    // Record state CREATING unless some state is already present.
                    .thenCompose((Void v) -> createStateIfAbsent(State.CREATING))
                    // Step 2: create the segment table from the resolved configuration.
                    .thenCompose((Void v) -> createNewSegmentTable(createStreamResponse.getConfiguration(),
                            createStreamResponse.getTimestamp()))
                    // Seed epoch 0 only while still in CREATING; a previous run that advanced
                    // past CREATING has already done this.
                    .thenCompose((Void v) -> getState()).thenCompose(state -> {
                        if (state.equals(State.CREATING)) {
                            return createNewEpoch(0);
                        } else {
                            return CompletableFuture.completedFuture(null);
                        }
                    }).thenCompose((Void v) -> {
                        // Step 3: build the history table covering the initial segment ids
                        // [0, minNumSegments) and persist it (idempotent).
                        final int numSegments = createStreamResponse.getConfiguration().getScalingPolicy()
                                .getMinNumSegments();
                        final byte[] historyTable = TableHelper.createHistoryTable(
                                createStreamResponse.getTimestamp(),
                                IntStream.range(0, numSegments).boxed().collect(Collectors.toList()));

                        return createHistoryTableIfAbsent(new Data<>(historyTable, null));
                    })
                    // Step 4: create the index table (idempotent), then surface the response.
                    .thenCompose((Void v) -> createIndexTableIfAbsent(new Data<>(
                            TableHelper.createIndexTable(createStreamResponse.getTimestamp(), 0), null)))
                    .thenApply((Void v) -> createStreamResponse));
}

From source file:io.pravega.controller.server.eventProcessor.ScaleRequestHandler.java

/**
 * Handles a scale-up event for a single segment by splitting its key range into
 * equal contiguous slices and delegating the actual scale to {@code executeScaleTask}.
 */
private CompletableFuture<Void> processScaleUp(final ScaleEvent request, final ScalingPolicy policy,
        final OperationContext context) {
    log.debug("scale up request received for stream {} segment {}", request.getStream(),
            request.getSegmentNumber());
    // Streams with a fixed segment count never scale; nothing to do.
    if (policy.getType().equals(ScalingPolicy.Type.FIXED_NUM_SEGMENTS)) {
        return CompletableFuture.completedFuture(null);
    }
    return streamMetadataStore
            .getSegment(request.getScope(), request.getStream(), request.getSegmentNumber(), context, executor)
            .thenComposeAsync(segment -> {
                // Cap the split count by the policy's scale factor, but allow at least 2 splits.
                final int splitCount = Math.min(request.getNumOfSplits(), Math.max(2, policy.getScaleFactor()));
                final double sliceWidth = (segment.getKeyEnd() - segment.getKeyStart()) / splitCount;

                // Partition the segment's key range into splitCount equal slices.
                final ArrayList<AbstractMap.SimpleEntry<Double, Double>> newRanges = new ArrayList<>();
                for (int slice = 0; slice < splitCount; slice++) {
                    newRanges.add(new AbstractMap.SimpleEntry<>(segment.getKeyStart() + sliceWidth * slice,
                            segment.getKeyStart() + (sliceWidth * (slice + 1))));
                }
                return executeScaleTask(request, Lists.newArrayList(request.getSegmentNumber()), newRanges,
                        context);
            }, executor);
}

From source file:com.ikanow.aleph2.aleph2_rest_utils.DataStoreCrudService.java

/**
 * Stores each of the given objects via {@link #storeObject}.
 * <p>
 * BUG FIX: the per-object futures used to be fired and forgotten, so any store failure
 * was silently dropped (this was flagged by a TODO). They are now collected with
 * {@code CompletableFuture.allOf}, so the returned future fails if any store fails and
 * completes (with {@code null}, as before) only after every store has completed.
 *
 * @param new_objects the file descriptors to store.
 * @return a future completing with {@code null} on success, or exceptionally if any
 *         individual store failed.
 */
@Override
public CompletableFuture<Tuple2<Supplier<List<Object>>, Supplier<Long>>> storeObjects(
        List<FileDescriptor> new_objects) {
    final CompletableFuture<?>[] stores = new_objects.stream()
            .map(this::storeObject)
            .toArray(CompletableFuture[]::new);

    final CompletableFuture<Tuple2<Supplier<List<Object>>, Supplier<Long>>> result =
            CompletableFuture.allOf(stores).thenApply(done -> null);
    return result;
}

From source file:com.ikanow.aleph2.data_model.interfaces.shared_services.MockManagementCrudService.java

/**
 * Mock store: appends the objects to the in-memory backing list and returns a
 * management future carrying (ids supplier, count supplier).
 */
@Override
public ManagementFuture<Tuple2<Supplier<List<Object>>, Supplier<Long>>> storeObjects(List<T> new_objects,
        boolean continue_on_error) {

    // Record every incoming object in the mock's backing store.
    _mutable_values.addAll(new_objects);

    // First supplier: string-ified hash codes standing in for stored ids;
    // second supplier: how many objects were stored.
    final Supplier<List<Object>> ids = () -> new_objects.stream().map(obj -> "" + obj.hashCode())
            .collect(Collectors.toList());
    final Supplier<Long> count = () -> (long) new_objects.size();

    return FutureUtils.createManagementFuture(CompletableFuture.completedFuture(Tuples._2T(ids, count)));
}

From source file:com.microsoft.azure.servicebus.samples.scheduledmessages.ScheduledMessages.java

/**
 * Registers a message-handler callback on the queue client that prints messages
 * labelled "Scientist" with a JSON content type.
 *
 * @param receiveClient   the queue client to register the handler on.
 * @param executorService executor used by the message pump.
 * @throws Exception if handler registration fails.
 */
void initializeReceiver(QueueClient receiveClient, ExecutorService executorService) throws Exception {
    // register the RegisterMessageHandler callback
    receiveClient.registerMessageHandler(new IMessageHandler() {
        // callback invoked when the message handler loop has obtained a message
        @Override
        public CompletableFuture<Void> onMessageAsync(IMessage message) {
            // Only messages labelled "Scientist" with a JSON content type are printed.
            if (message.getLabel() != null && message.getContentType() != null
                    && message.getLabel().contentEquals("Scientist")
                    && message.getContentType().contentEquals("application/json")) {

                byte[] body = message.getBody();
                Map scientist = GSON.fromJson(new String(body, UTF_8), Map.class);

                System.out.printf(
                        "\n\t\t\t\tMessage received: \n\t\t\t\t\t\tMessageId = %s, \n\t\t\t\t\t\tSequenceNumber = %s, \n\t\t\t\t\t\tEnqueuedTimeUtc = %s,"
                                + "\n\t\t\t\t\t\tExpiresAtUtc = %s, \n\t\t\t\t\t\tContentType = \"%s\",  \n\t\t\t\t\t\tContent: [ firstName = %s, name = %s ]\n",
                        message.getMessageId(), message.getSequenceNumber(), message.getEnqueuedTimeUtc(),
                        message.getExpiresAtUtc(), message.getContentType(),
                        scientist != null ? scientist.get("firstName") : "",
                        scientist != null ? scientist.get("name") : "");
            }
            // Handling is synchronous here; report immediate completion.
            return CompletableFuture.completedFuture(null);
        }

        // callback invoked when the message handler has an exception to report
        @Override
        public void notifyException(Throwable throwable, ExceptionPhase exceptionPhase) {
            // BUG FIX: the exception text was previously passed as the printf FORMAT string,
            // so any '%' in the message threw IllegalFormatException. Use "%s" placeholders.
            System.out.printf("%s-%s", exceptionPhase, throwable.getMessage());
        }
    },
            // 1 concurrent call, messages are auto-completed, auto-renew duration
            new MessageHandlerOptions(1, true, Duration.ofMinutes(1)), executorService);

}

From source file:io.pravega.segmentstore.server.host.ZKSegmentContainerManagerTest.java

@Test
public void testContainerStart() throws Exception {
    // Connect to ZooKeeper and seed the host -> container-set mapping.
    @Cleanup
    CuratorFramework client = startClient();
    initializeHostContainerMapping(client);

    // Stub a registry whose startContainer(1, ...) immediately yields a handle for container 1.
    ContainerHandle handle = mock(ContainerHandle.class);
    when(handle.getContainerId()).thenReturn(1);
    SegmentContainerRegistry registry = mock(SegmentContainerRegistry.class);
    when(registry.startContainer(eq(1), any()))
            .thenReturn(CompletableFuture.completedFuture(handle));

    // Initializing the manager must trigger at least one start of container 1.
    @Cleanup
    ZKSegmentContainerManager manager = createContainerManager(registry, client);
    manager.initialize();
    verify(registry, timeout(30000).atLeastOnce()).startContainer(eq(1), any());
}

From source file:com.microsoft.azure.servicebus.samples.queuesgettingstarted.QueuesGettingStarted.java

/**
 * Registers a message-handler callback on the queue client that prints messages
 * labelled "Scientist" with a JSON content type.
 *
 * @param queueClient     the queue client to register the handler on.
 * @param executorService executor used by the message pump.
 * @throws Exception if handler registration fails.
 */
void registerReceiver(QueueClient queueClient, ExecutorService executorService) throws Exception {

    // register the RegisterMessageHandler callback with executor service
    queueClient.registerMessageHandler(new IMessageHandler() {
        // callback invoked when the message handler loop has obtained a message
        @Override
        public CompletableFuture<Void> onMessageAsync(IMessage message) {
            // Only messages labelled "Scientist" with a JSON content type are printed.
            if (message.getLabel() != null && message.getContentType() != null
                    && message.getLabel().contentEquals("Scientist")
                    && message.getContentType().contentEquals("application/json")) {

                byte[] body = message.getBody();
                Map scientist = GSON.fromJson(new String(body, UTF_8), Map.class);

                System.out.printf(
                        "\n\t\t\t\tMessage received: \n\t\t\t\t\t\tMessageId = %s, \n\t\t\t\t\t\tSequenceNumber = %s, \n\t\t\t\t\t\tEnqueuedTimeUtc = %s,"
                                + "\n\t\t\t\t\t\tExpiresAtUtc = %s, \n\t\t\t\t\t\tContentType = \"%s\",  \n\t\t\t\t\t\tContent: [ firstName = %s, name = %s ]\n",
                        message.getMessageId(), message.getSequenceNumber(), message.getEnqueuedTimeUtc(),
                        message.getExpiresAtUtc(), message.getContentType(),
                        scientist != null ? scientist.get("firstName") : "",
                        scientist != null ? scientist.get("name") : "");
            }
            // Handling is synchronous here; report immediate completion.
            return CompletableFuture.completedFuture(null);
        }

        // callback invoked when the message handler has an exception to report
        @Override
        public void notifyException(Throwable throwable, ExceptionPhase exceptionPhase) {
            // BUG FIX: the exception text was previously passed as the printf FORMAT string,
            // so any '%' in the message threw IllegalFormatException. Use "%s" placeholders.
            System.out.printf("%s-%s", exceptionPhase, throwable.getMessage());
        }
    },
            // 1 concurrent call, messages are auto-completed, auto-renew duration
            new MessageHandlerOptions(1, true, Duration.ofMinutes(1)), executorService);

}

From source file:io.pravega.controller.store.stream.ZKStream.java

/**
 * Resolves how a create attempt should proceed given what (if anything) a previous,
 * possibly partial, creation run left behind in the store.
 */
@Override
public CompletableFuture<CreateStreamResponse> checkStreamExists(final StreamConfiguration configuration,
        final long creationTime) {
    // A stream may exist in a partially-created state: reuse whatever creation time and
    // configuration survive from the earlier attempt rather than starting over blindly.
    return store.checkExists(creationPath).thenCompose(creationNodeExists -> {
        if (!creationNodeExists) {
            // Nothing recorded yet: this is a brand-new stream.
            return CompletableFuture.completedFuture(new CreateStreamResponse(
                    CreateStreamResponse.CreateStatus.NEW, configuration, creationTime));
        }

        // A creation marker exists: fetch the recorded creation time, then check whether
        // the configuration node was also written before the earlier attempt stopped.
        return getCreationTime().thenCompose(recordedTime -> store.checkExists(configurationPath)
                .thenCompose(configNodeExists -> {
                    if (!configNodeExists) {
                        // The config never made it: treat as NEW but keep the recorded time.
                        return CompletableFuture.completedFuture(new CreateStreamResponse(
                                CreateStreamResponse.CreateStatus.NEW, configuration, recordedTime));
                    }
                    // Config exists: decide based on whether this is the same creation attempt.
                    return handleConfigExists(recordedTime, recordedTime == creationTime);
                }));
    });
}

From source file:com.ikanow.aleph2.harvest.logstash.services.LogstashHarvestService.java

@Override
public CompletableFuture<BasicMessageBean> onNewSource(DataBucketBean new_bucket, IHarvestContext context,
        boolean enabled) {

    if (enabled) {
        final LogstashBucketConfigBean config = Optionals.ofNullable(new_bucket.harvest_configs()).stream()
                .findFirst()//from w  w  w  .  j a  v a  2s .c  o  m
                .map(cfg -> BeanTemplateUtils.from(cfg.config(), LogstashBucketConfigBean.class).get())
                .orElse(BeanTemplateUtils.build(LogstashBucketConfigBean.class).done().get());

        return CompletableFuture
                .completedFuture(startOrUpdateLogstash(new_bucket, config, _globals.get(), context));
    } else {
        return CompletableFuture.completedFuture(ErrorUtils.buildSuccessMessage(this.getClass().getSimpleName(),
                "onNewSource", "Bucket {0} created but suspended", new_bucket.full_name()));
    }
}