Example usage for java.util.concurrent CompletableFuture completedFuture

List of usage examples for java.util.concurrent CompletableFuture completedFuture

Introduction

On this page you can find example usage of java.util.concurrent CompletableFuture completedFuture, drawn from open-source projects.

Prototype

public static <U> CompletableFuture<U> completedFuture(U value) 

Document

Returns a new CompletableFuture that is already completed with the given value.
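
A minimal, self-contained sketch of that behaviour (plain JDK; the class name CompletedFutureSketch is just illustrative): the returned future is complete from the start, so isDone() is true, join() returns without blocking, and dependent stages run immediately.

import java.util.concurrent.CompletableFuture;

public class CompletedFutureSketch {
    public static void main(String[] args) {
        // Already completed with the given value; no executor or background work is involved.
        CompletableFuture<String> future = CompletableFuture.completedFuture("hello");

        System.out.println(future.isDone()); // true
        System.out.println(future.join());   // "hello" -- never blocks

        // Stages attached to an already-completed future execute right away,
        // in the calling thread (unless an *Async variant is used).
        future.thenApply(String::toUpperCase)
              .thenAccept(System.out::println); // HELLO
    }
}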

Usage

From source file:com.srotya.tau.api.dao.TestRulesManager.java

@Before
public void before() {
    em = emf.createEntityManager();
    when(am.getEM()).thenReturn(em);
    when(am.getSourcer()).thenReturn(kafkaCommandSourcer);
    kafkaCommandSourcer.setProducer(producer);
    kafkaCommandSourcer.setRuleTopicName("ruleTopic");
    kafkaCommandSourcer.setTemplateTopicName("templateTopic");
    when(producer.send(any())).thenReturn(
            CompletableFuture.completedFuture(new RecordMetadata(new TopicPartition("ruleTopic", 2), 1, 1)));
}
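
In tests like the one above, completedFuture is a convenient way to stub an asynchronous dependency (here the Kafka producer's send(...)), so the code under test receives an immediately available RecordMetadata without a running broker.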

From source file:io.pravega.service.server.host.ZKSegmentContainerMonitorTest.java

@Test
public void testStartAndStopContainer() throws Exception {
    @Cleanup
    CuratorFramework zkClient = startClient();
    initializeHostContainerMapping(zkClient);

    SegmentContainerRegistry containerRegistry = createMockContainerRegistry();
    @Cleanup
    ZKSegmentContainerMonitor segMonitor = createContainerMonitor(containerRegistry, zkClient);
    segMonitor.initialize(Duration.ofSeconds(1));

    // Simulate a container that starts successfully.
    CompletableFuture<ContainerHandle> startupFuture = new CompletableFuture<>();
    ContainerHandle containerHandle = mock(ContainerHandle.class);
    when(containerHandle.getContainerId()).thenReturn(2);
    when(containerRegistry.startContainer(eq(2), any())).thenReturn(startupFuture);

    // Now modify the ZK entry.
    HashMap<Host, Set<Integer>> currentData = deserialize(zkClient, PATH);
    currentData.put(PRAVEGA_SERVICE_ENDPOINT, Collections.singleton(2));
    zkClient.setData().forPath(PATH, SerializationUtils.serialize(currentData));

    // Container finished starting.
    startupFuture.complete(containerHandle);
    verify(containerRegistry, timeout(10000).atLeastOnce()).startContainer(eq(2), any());

    Thread.sleep(2000);
    assertEquals(1, segMonitor.getRegisteredContainers().size());
    assertTrue(segMonitor.getRegisteredContainers().contains(2));

    // Now modify the ZK entry. Remove container 2 and add 1.
    HashMap<Host, Set<Integer>> newMapping = new HashMap<>();
    newMapping.put(PRAVEGA_SERVICE_ENDPOINT, Collections.singleton(1));
    zkClient.setData().forPath(PATH, SerializationUtils.serialize(newMapping));

    // Verify that stop is called and only the newly added container is in running state.
    when(containerRegistry.stopContainer(any(), any())).thenReturn(CompletableFuture.completedFuture(null));
    verify(containerRegistry, timeout(10000).atLeastOnce()).stopContainer(any(), any());

    // Using wait here to ensure the private data structure is updated.
    // TODO: Removing dependency on sleep here and other places in this class
    // - https://github.com/pravega/pravega/issues/1079
    Thread.sleep(2000);
    assertEquals(1, segMonitor.getRegisteredContainers().size());
    assertTrue(segMonitor.getRegisteredContainers().contains(1));
}
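
Note that stopContainer is stubbed with CompletableFuture.completedFuture(null), so the monitor's shutdown path resolves immediately and the verification only needs to check that stop was actually invoked.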

From source file:io.pravega.segmentstore.server.host.ZKSegmentContainerMonitorTest.java

@Test
public void testStartAndStopContainer() throws Exception {
    @Cleanup
    CuratorFramework zkClient = startClient();
    initializeHostContainerMapping(zkClient);

    SegmentContainerRegistry containerRegistry = createMockContainerRegistry();
    @Cleanup
    ZKSegmentContainerMonitor segMonitor = createContainerMonitor(containerRegistry, zkClient);
    segMonitor.initialize(Duration.ofSeconds(1));

    // Simulate a container that starts successfully.
    CompletableFuture<ContainerHandle> startupFuture = new CompletableFuture<>();
    ContainerHandle containerHandle = mock(ContainerHandle.class);
    when(containerHandle.getContainerId()).thenReturn(2);
    when(containerRegistry.startContainer(eq(2), any())).thenReturn(startupFuture);

    // Now modify the ZK entry.
    HashMap<Host, Set<Integer>> currentData = deserialize(zkClient, PATH);
    currentData.put(PRAVEGA_SERVICE_ENDPOINT, Collections.singleton(2));
    zkClient.setData().forPath(PATH, SerializationUtils.serialize(currentData));

    // Container finished starting.
    startupFuture.complete(containerHandle);
    verify(containerRegistry, timeout(1000).atLeastOnce()).startContainer(eq(2), any());

    Thread.sleep(2000);
    assertEquals(1, segMonitor.getRegisteredContainers().size());
    assertTrue(segMonitor.getRegisteredContainers().contains(2));

    // Now modify the ZK entry. Remove container 2 and add 1.
    HashMap<Host, Set<Integer>> newMapping = new HashMap<>();
    newMapping.put(PRAVEGA_SERVICE_ENDPOINT, Collections.singleton(1));
    zkClient.setData().forPath(PATH, SerializationUtils.serialize(newMapping));

    // Verify that stop is called and only the newly added container is in running state.
    when(containerRegistry.stopContainer(any(), any())).thenReturn(CompletableFuture.completedFuture(null));
    verify(containerRegistry, timeout(1000).atLeastOnce()).stopContainer(any(), any());

    // Using wait here to ensure the private data structure is updated.
    // TODO: Removing dependency on sleep here and other places in this class
    // - https://github.com/pravega/pravega/issues/1079
    Thread.sleep(2000);
    assertEquals(1, segMonitor.getRegisteredContainers().size());
    assertTrue(segMonitor.getRegisteredContainers().contains(1));
}

From source file:com.github.thesmartenergy.cnr.ChargingPlanSubscriber.java

@Override
public void run() {
    LOG.info("Starting worker.");
    while (!Thread.currentThread().isInterrupted()) {
        try {
            ReceiveQueueMessageResult result = service.receiveQueueMessage(queue, opts);
            BrokeredMessage message = result.getValue();
            if (message == null) {
                continue;
            }

            System.out.println(message.getContentType());
            System.out.println(message.getCorrelationId());
            System.out.println(message.getDate());
            System.out.println(message.getDeliveryCount());
            System.out.println(message.getLabel());
            System.out.println(message.getLockLocation());
            System.out.println(message.getLockToken());
            System.out.println(message.getLockedUntilUtc());
            System.out.println(message.getMessageId());
            System.out.println(message.getMessageLocation());
            System.out.println(message.getPartitionKey());
            System.out.println(message.getProperties());
            System.out.println(message.getReplyTo());
            System.out.println(message.getSessionId());

            // SCP response
            String id = message.getMessageId();
            if (id == null) {
                LOG.log(Level.WARNING, "received message with null id. Try to delete:");
                try {
                    service.deleteMessage(message);
                } catch (Exception ex) {
                    LOG.warning("error while trying to delete message: " + ex.getMessage());
                    ex.printStackTrace();
                    LOG.warning("this was the content:");
                    System.out.println(IOUtils.toString(message.getBody(), "UTF-8"));
                }
                continue;
            } else {
                LOG.info("received response " + id);
            }

            byte[] body = IOUtils.toByteArray(message.getBody());
            String bodyAsString = IOUtils.toString(body, "UTF-8");
            LOG.info(bodyAsString);

            // strangely, string starts with "@strin3http://schemas.microsoft.com/2003/10/Serialization/?f<s:Envelope ..." on this side, 
            // although it starts with  "<?xml version="1.0" encoding="UTF-8"?> <s:Envelope ..." on the other side
            // so ... ugly hack
            bodyAsString = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
                    + bodyAsString.substring(64, bodyAsString.length() - 1);

            ResourceDescription presentation = new ResourceDescription(DEV_BASE + "outputGraph");
            Model output;
            try {
                PresentationUtils presentationUtils = new PresentationUtils();
                SPARQLGenerateHandler lifter = new SPARQLGenerateHandler(BASE, presentationUtils);
                output = lifter.lift(MediaType.APPLICATION_XML_TYPE, presentation,
                        IOUtils.toInputStream(bodyAsString, "UTF-8"));
            } catch (RDFPException ex) {
                throw new PEPException("error while lifting output", ex);
            }

            ProcessExecution processExecution = smartChargingProvider.find(id);
            processExecution = new ProcessExecutionImpl(BASE, processExecution.getContainerPath(),
                    processExecution.getId(), processExecution.getInput(),
                    CompletableFuture.completedFuture(output));
            smartChargingProvider.update(processExecution);

            // Delete message from queue
            service.deleteMessage(message);

        } catch (ServiceException | IOException | PEPException ex) {
            LOG.log(Level.WARNING, "error while processing input ", ex);
        }
        try {
            Thread.sleep(3000);
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
        }
    }
    LOG.info("Stopping worker.");
}
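
Here the lifted output Model is already available, so it is wrapped in completedFuture before being passed to ProcessExecutionImpl, whose constructor apparently expects a future-valued result.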

From source file:com.spotify.styx.api.MiddlewaresTest.java

@Test
public void testValidClientEmptyBlacklist() {
    Supplier<Optional<List<String>>> supplier = () -> Optional.of(ImmutableList.of());
    RequestContext requestContext = mockRequestContext(true);
    CompletionStage completionStage = CompletableFuture
            .completedFuture(Response.forStatus(Status.OK.withReasonPhrase("")));
    assertThat(Middlewares.clientValidator(supplier).apply(mockInnerHandler(requestContext, completionStage))
            .invoke(requestContext), equalTo(completionStage));
}
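
completedFuture is used here to build a ready-made CompletionStage around a response, so the test can exercise the clientValidator middleware and assert its pass-through behaviour without any real asynchronous work.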

From source file:io.pravega.controller.store.stream.InMemoryStream.java

@Override
CompletableFuture<Void> createConfigurationIfAbsent(StreamConfiguration config) {
    Preconditions.checkNotNull(config);

    synchronized (lock) {
        if (configuration == null) {
            configuration = config;
        }
    }
    return CompletableFuture.completedFuture(null);
}
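
Returning CompletableFuture.completedFuture(null) is the usual way to satisfy a CompletableFuture<Void> contract when, as in this in-memory implementation, the work has already completed synchronously.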

From source file:io.pravega.controller.eventProcessor.impl.SerializedRequestHandlerTest.java

@Test(timeout = 10000)
public void testProcessEvent() throws InterruptedException, ExecutionException {
    final ConcurrentHashMap<String, List<Integer>> orderOfProcessing = new ConcurrentHashMap<>();

    SerializedRequestHandler<TestEvent> requestHandler = new SerializedRequestHandler<TestEvent>(
            executorService()) {
        @Override
        public CompletableFuture<Void> processEvent(TestEvent event) {
            orderOfProcessing.compute(event.getKey(), (x, y) -> {
                if (y == null) {
                    y = new ArrayList<>();
                }
                y.add(event.getNumber());
                return y;
            });
            return event.getFuture();
        }
    };

    List<Pair<TestEvent, CompletableFuture<Void>>> stream1Queue = requestHandler
            .getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertNull(stream1Queue);
    // post 3 work for stream1
    TestEvent s1e1 = new TestEvent("scope", "stream1", 1);
    CompletableFuture<Void> s1p1 = requestHandler.process(s1e1);
    TestEvent s1e2 = new TestEvent("scope", "stream1", 2);
    CompletableFuture<Void> s1p2 = requestHandler.process(s1e2);
    TestEvent s1e3 = new TestEvent("scope", "stream1", 3);
    CompletableFuture<Void> s1p3 = requestHandler.process(s1e3);

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertTrue(stream1Queue.size() >= 2);
    assertTrue(stream1Queue.stream().noneMatch(x -> x.getRight().isDone()));
    List<Integer> collect = stream1Queue.stream().map(x -> x.getLeft().getNumber())
            .collect(Collectors.toList());
    assertTrue(collect.indexOf(2) < collect.indexOf(3));

    s1e3.complete();

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));

    // verify that no processing is complete
    assertTrue(stream1Queue.size() >= 2);
    assertTrue(stream1Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream1Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.indexOf(2) < collect.indexOf(3));

    // post 3 work for stream2
    TestEvent s2e1 = new TestEvent("scope", "stream2", 1);
    CompletableFuture<Void> s2p1 = requestHandler.process(s2e1);
    TestEvent s2e2 = new TestEvent("scope", "stream2", 2);
    CompletableFuture<Void> s2p2 = requestHandler.process(s2e2);
    TestEvent s2e3 = new TestEvent("scope", "stream2", 3);
    CompletableFuture<Void> s2p3 = requestHandler.process(s2e3);

    List<Pair<TestEvent, CompletableFuture<Void>>> stream2Queue = requestHandler
            .getEventQueueForKey(getKeyForStream("scope", "stream2"));
    assertTrue(stream2Queue.size() >= 2);
    assertTrue(stream2Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream2Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.indexOf(2) < collect.indexOf(3));

    s1e1.complete();
    Futures.await(s1p1);

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertTrue(stream1Queue.size() >= 1);
    assertTrue(stream1Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream1Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.contains(3));

    // now make sure that we have concurrently run for two streams
    s2e1.complete();
    Futures.await(s2p1);

    stream2Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream2"));
    assertTrue(stream2Queue.size() >= 1);
    assertTrue(stream2Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream2Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.contains(3));

    // now complete all processing
    s2e2.complete();
    Futures.await(s2p2);

    s2e3.complete();

    s1e2.complete();
    Futures.await(s1p2);

    Futures.await(s1p3);
    Futures.await(s2p3);

    assertTrue(
            orderOfProcessing.get(s1e1.getKey()).get(0) == 1 && orderOfProcessing.get(s1e1.getKey()).get(1) == 2
                    && orderOfProcessing.get(s1e1.getKey()).get(2) == 3);
    assertTrue(
            orderOfProcessing.get(s2e1.getKey()).get(0) == 1 && orderOfProcessing.get(s2e1.getKey()).get(1) == 2
                    && orderOfProcessing.get(s2e1.getKey()).get(2) == 3);

    Futures.loop(() -> requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1")) == null,
            () -> CompletableFuture.completedFuture(null), executorService());
    Futures.loop(() -> requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream2")) == null,
            () -> CompletableFuture.completedFuture(null), executorService());

    // now that we have drained all the work from the processor.
    // let's post new work for stream 1
    TestEvent s1e4 = new TestEvent("scope", "stream1", 4);
    CompletableFuture<Void> s1p4 = requestHandler.process(s1e4);

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertNotNull(stream1Queue);

    s1e4.complete();
    Futures.await(s1p4);

    assertTrue(orderOfProcessing.get(s1e1.getKey()).get(3) == 4);
}

From source file:io.pravega.controller.server.eventProcessor.ScaleRequestHandler.java

private CompletableFuture<Void> processScaleDown(final ScaleEvent request, final ScalingPolicy policy,
        final OperationContext context) {
    log.debug("scale down request received for stream {} segment {}", request.getStream(),
            request.getSegmentNumber());
    if (policy.getType().equals(ScalingPolicy.Type.FIXED_NUM_SEGMENTS)) {
        return CompletableFuture.completedFuture(null);
    }

    return streamMetadataStore
            .markCold(request.getScope(), request.getStream(), request.getSegmentNumber(),
                    request.isSilent() ? Long.MAX_VALUE : request.getTimestamp() + REQUEST_VALIDITY_PERIOD,
                    context, executor)
            .thenCompose(x -> streamMetadataStore.getActiveSegments(request.getScope(), request.getStream(),
                    context, executor))
            .thenApply(activeSegments -> {
                assert activeSegments != null;
                final Optional<Segment> currentOpt = activeSegments.stream()
                        .filter(y -> y.getNumber() == request.getSegmentNumber()).findAny();
                if (!currentOpt.isPresent() || activeSegments.size() == policy.getMinNumSegments()) {
                    // if we are already at min-number of segments, we cant scale down, we have put the marker,
                    // we should simply return and do nothing.
                    return null;
                } else {
                    final List<Segment> candidates = activeSegments.stream()
                            .filter(z -> z.getKeyEnd() == currentOpt.get().getKeyStart()
                                    || z.getKeyStart() == currentOpt.get().getKeyEnd()
                                    || z.getNumber() == request.getSegmentNumber())
                            .sorted(Comparator.comparingDouble(Segment::getKeyStart))
                            .collect(Collectors.toList());
                    return new ImmutablePair<>(candidates, activeSegments.size() - policy.getMinNumSegments());
                }
            }).thenCompose(input -> {
                if (input != null && input.getLeft().size() > 1) {
                    final List<Segment> candidates = input.getLeft();
                    final int maxScaleDownFactor = input.getRight();

                    // fetch their cold status for all candidates
                    return FutureHelpers
                            .filter(candidates,
                                    candidate -> streamMetadataStore.isCold(request.getScope(),
                                            request.getStream(), candidate.getNumber(), context, executor))
                            .thenApply(segments -> {
                                if (maxScaleDownFactor == 1 && segments.size() == 3) {
                                    // Note: sorted by keystart so just pick first two.
                                    return Lists.newArrayList(segments.get(0), segments.get(1));
                                } else {
                                    return segments;
                                }
                            });
                } else {
                    return CompletableFuture.completedFuture(null);
                }
            }).thenCompose(toMerge -> {
                if (toMerge != null && toMerge.size() > 1) {
                    toMerge.forEach(x -> {
                        log.debug("merging stream {}: segment {} ", request.getStream(), x.getNumber());
                    });

                    final ArrayList<AbstractMap.SimpleEntry<Double, Double>> simpleEntries = new ArrayList<>();
                    double min = toMerge.stream().mapToDouble(Segment::getKeyStart).min().getAsDouble();
                    double max = toMerge.stream().mapToDouble(Segment::getKeyEnd).max().getAsDouble();
                    simpleEntries.add(new AbstractMap.SimpleEntry<>(min, max));
                    final ArrayList<Integer> segments = new ArrayList<>();
                    toMerge.forEach(segment -> segments.add(segment.getNumber()));
                    return executeScaleTask(request, segments, simpleEntries, context);
                } else {
                    return CompletableFuture.completedFuture(null);
                }
            });
}
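
completedFuture(null) marks each early exit of this scale-down pipeline (fixed-segment policy, nothing to merge), keeping the CompletableFuture<Void> return type uniform while skipping work that is not needed.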

From source file:io.github.microcks.service.TestRunnerService.java

/**
 *
 * @param testResult TestResults to aggregate results within
 * @param service Service to test
 * @param runnerType Type of runner for launching the tests
 * @return A Future wrapping test results
 */
@Async
public CompletableFuture<TestResult> launchTestsInternal(TestResult testResult, Service service,
        TestRunnerType runnerType) {
    // Found next build number for this test.
    List<TestResult> older = testResultRepository.findByServiceId(service.getId(),
            new PageRequest(0, 2, Sort.Direction.DESC, "testNumber"));
    if (older != null && !older.isEmpty() && older.get(0).getTestNumber() != null) {
        testResult.setTestNumber(older.get(0).getTestNumber() + 1L);
    } else {
        testResult.setTestNumber(1L);
    }

    for (Operation operation : service.getOperations()) {
        // Prepare result container for operation tests.
        TestCaseResult testCaseResult = new TestCaseResult();
        testCaseResult.setOperationName(operation.getName());
        String testCaseId = IdBuilder.buildTestCaseId(testResult, operation);
        testResult.getTestCaseResults().add(testCaseResult);
        testResultRepository.save(testResult);

        // Prepare collection of requests to launch.
        List<Request> requests = requestRepository
                .findByOperationId(IdBuilder.buildOperationId(service, operation));
        requests = cloneRequestsForTestCase(requests, testCaseId);

        List<TestReturn> results = new ArrayList<TestReturn>();
        AbstractTestRunner<HttpMethod> testRunner = retrieveRunner(runnerType, service.getId());
        try {
            HttpMethod method = testRunner.buildMethod(operation.getMethod());
            results = testRunner.runTest(service, operation, testResult, requests,
                    testResult.getTestedEndpoint(), method);
        } catch (URISyntaxException use) {
            log.error("URISyntaxException on endpoint {}, aborting current tests",
                    testResult.getTestedEndpoint(), use);
            // Set flags and add to results before exiting loop.
            testCaseResult.setSuccess(false);
            testCaseResult.setElapsedTime(0);
            testResultRepository.save(testResult);
            break;
        } catch (Throwable t) {
            log.error("Throwable while testing operation {}", operation.getName(), t);
        }

        // Update result if we got returns. If no returns, it means that there's no
        // sample request for that operation.
        if (results != null && !results.isEmpty()) {
            updateTestCaseResultWithReturns(testCaseResult, results, testCaseId);
            testResultRepository.save(testResult);
        } else {
            //testCaseResult.setSuccess(false);
            //testCaseResult.setElapsedTime(0);
            //testResultRepository.save(testResult);
        }
    }

    // Update success, progress indicators and total time before saving and returning.
    updateTestResult(testResult);

    return CompletableFuture.completedFuture(testResult);
}
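
Because the method is annotated with @Async, Spring runs it on a separate thread; wrapping the finished TestResult in completedFuture is a standard way to satisfy the CompletableFuture<TestResult> return type once the synchronous test loop is done.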

From source file:com.ibasco.agql.protocols.valve.source.query.client.SourceRconClient.java

/**
 * <p>Send a re-authentication request to the Server. This will only work if the client has been previously
 * authenticated.</p>
 *
 * @param address
 *         The {@link InetSocketAddress} of the source server
 *
 * @return A {@link CompletableFuture} which returns a {@link SourceRconAuthStatus} that holds the status of the
 * authentication request.
 *
 * @see #authenticate(InetSocketAddress, String)
 */
public CompletableFuture<SourceRconAuthStatus> authenticate(InetSocketAddress address) {
    if (isAuthenticated(address)) {
        return this.authenticate(address, this.credentialsMap.get(address));
    }
    return CompletableFuture.completedFuture(
            new SourceRconAuthStatus(false, String.format("Not yet authenticated from server %s.", address)));
}