Example usage for java.util.concurrent CompletableFuture allOf

Introduction

This page collects usage examples for java.util.concurrent CompletableFuture.allOf, gathered from real-world source files.

Prototype

public static CompletableFuture<Void> allOf(CompletableFuture<?>... cfs) 

Document

Returns a new CompletableFuture that is completed when all of the given CompletableFutures complete.
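The returned future carries no value of its own; if any input completes exceptionally, the combined future does so as well. The pattern that recurs throughout the examples below is to wait on allOf and then collect the individual results with join(). A minimal, self-contained sketch of that pattern (not taken from any of the projects listed below):

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class AllOfExample {
    public static void main(String[] args) {
        CompletableFuture<String> first = CompletableFuture.supplyAsync(() -> "hello");
        CompletableFuture<String> second = CompletableFuture.supplyAsync(() -> "world");

        // allOf completes when both inputs complete, but exposes no results itself;
        // the results are gathered afterwards with join(), which no longer blocks at that point.
        CompletableFuture<List<String>> combined = CompletableFuture.allOf(first, second)
                .thenApply(ignored -> Stream.of(first, second)
                        .map(CompletableFuture::join)
                        .collect(Collectors.toList()));

        System.out.println(combined.join()); // prints [hello, world]
    }
}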

Usage

From source file:com.opopov.cloud.image.service.ImageStitchingServiceImpl.java

@Override
public DeferredResult<ResponseEntity<?>> getStitchedImage(@RequestBody ImageStitchingConfiguration config) {

    validator.validateConfig(config);

    List<ListenableFuture<ResponseEntity<byte[]>>> futures = config.getUrlList().stream()
            .map(url -> remoteResource.getForEntity(url, byte[].class)).collect(Collectors.toList());

    //wrap the listenable futures into the completable futures
    //writing the loop in pre-Java 8 style, since it is more concise than the Stream API in this case
    CompletableFuture[] imageFutures = new CompletableFuture[futures.size()];
    int taskIndex = 0;
    IndexMap indexMap = new IndexMap(config.getRowCount() * config.getColumnCount());
    for (ListenableFuture<ResponseEntity<byte[]>> f : futures) {
        imageFutures[taskIndex] = imageDataFromResponse(taskIndex, indexMap, utils.fromListenableFuture(f));
        taskIndex++;
    }

    CompletableFuture<Void> allDownloadedAndDecompressed = CompletableFuture.allOf(imageFutures);

    //Synchronous part - start - writing decompressed bytes to the large image
    final int DOWNLOAD_AND_DECOMPRESS_TIMEOUT = 30; // 30-second overall timeout for all download-and-decompress tasks combined
    DeferredResult<ResponseEntity<?>> response = new DeferredResult<>();
    boolean allSuccessful = false;
    byte[] imageBytes = null;
    try {
        Void finishResult = allDownloadedAndDecompressed.get(DOWNLOAD_AND_DECOMPRESS_TIMEOUT, TimeUnit.SECONDS);

        imageBytes = combineImagesIntoStitchedImage(config, indexMap);

        HttpHeaders headers = new HttpHeaders();
        headers.setCacheControl(CacheControl.noCache().getHeaderValue());
        headers.setContentType(MediaType.IMAGE_JPEG);
        allSuccessful = true;
    } catch (InterruptedException | ExecutionException e) {
        // basically either download or decompression of the source image failed
        // just skip it then, we have no image to show
        response.setErrorResult(
                new SourceImageLoadException("Unable to load and decode one or more source images", e));
    } catch (TimeoutException e) {
        //send timeout response, via ImageLoadTimeoutException
        response.setErrorResult(new ImageLoadTimeoutException(
                String.format("Some of the images were not loaded and decoded before timeout of %d seconds",
                        DOWNLOAD_AND_DECOMPRESS_TIMEOUT),
                e

        ));
    } catch (IOException e) {
        response.setErrorResult(new ImageWriteException("Error writing image into output buffer", e));
    }

    //Synchronous part - end

    if (!allSuccessful) {
        //should not get here, some unknown error
        response.setErrorResult(
                new ImageLoadTimeoutException("Unknown error", new RuntimeException("Something went wrong")

                ));

        return response;
    }

    ResponseEntity<?> successResult = ResponseEntity.ok(imageBytes);
    response.setResult(successResult);

    return response;

}

From source file:ws.salient.session.Sessions.java

public CompletableFuture execute(List<Command> commands) {
    Instant now = Instant.now();
    CompletableFuture result;
    try {
        commands.stream().filter(command -> command instanceof ModifyProfile).forEach((command) -> {
            log.info(toJson(command));
            profiles.modified(command.getAccountId());
        });

        commands.stream().filter(command -> command.getKnowledgeBaseId() != null).forEach((command) -> {
            String knowledgeBaseId = command.getKnowledgeBaseId();
            Map<String, String> aliases = profiles.getAliases(command.getAccountId(), command.getProfiles());
            if (aliases.containsKey(knowledgeBaseId)) {
                knowledgeBaseId = aliases.get(knowledgeBaseId);
            }
            command.setKnowledgeBaseId(knowledgeBaseId);
        });

        commands.forEach((command) -> {
            command.setTimestamp(now);
        });

        // Load knowledge bases in parallel
        List<CompletableFuture<KnowledgeBase>> knowledgeBases = commands.stream()
                .filter(command -> command.getKnowledgeBaseId() != null)
                .collect(Collectors.groupingBy((command) -> {
                    // Group commands by knowledgeBaseId
                    return command.getKnowledgeBaseId();
                })).values().stream().map((kbaseCommands) -> {
                    return CompletableFuture.supplyAsync(() -> {
                        // Load each knowledge base
                        return repository.getKnowledgeBase(kbaseCommands.get(0).getKnowledgeBaseId());
                    });
                }).collect(Collectors.toList());
        CompletableFuture.allOf(knowledgeBases.toArray(new CompletableFuture[knowledgeBases.size()])).get();

        // Load sessions in parallel
        List<CompletableFuture<Session>> sessions = commands.stream()
                .filter(command -> command.getSessionId() != null).collect(Collectors.groupingBy((command) -> {
                    // Group commands by sessionId
                    return command.getSessionId();
                })).values().stream().map((sessionCommands) -> {
                    return CompletableFuture.supplyAsync(() -> {
                        // Load each session
                        return getSession(sessionCommands.get(0));
                    });
                }).collect(Collectors.toList());
        CompletableFuture.allOf(sessions.toArray(new CompletableFuture[sessions.size()])).get();

        result = CompletableFuture.runAsync(() -> {
            int requestIndex = 0;
            for (Command command : commands) {
                if (command.getSessionId() != null) {
                    command.setTimestamp(now);
                    Session session = getSession(command);
                    session.accept(command);
                    store.put(session, command, requestIndex);
                    requestIndex++;
                }
            }
        }, commandExecutor).thenRun(() -> {
            this.sessions.forEach((id, session) -> {
                if (session.expired(now)) {
                    if (session.getProcessCount() == 0) {
                        int oldcount = this.sessions.size();
                        this.sessions.remove(id);
                        session.dispose();
                        log.info("Session count was " + oldcount + " now " + this.sessions.size());
                    }
                }
            });
        });
    } catch (InterruptedException | ExecutionException ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    }
    return result;
}

From source file:ch.bender.evacuate.Runner.java

/**
 * run
 * <p>
 * @throws Exception 
 */
public void run() throws Exception {
    checkDirectories();
    initExcludeMatchers();

    myEvacuateCandidates = new TreeMap<>();
    myFailedChainPreparations = Collections.synchronizedMap(new HashMap<>());
    myFutures = new HashSet<>();
    myExclusionDirCount = 0;
    myEvacuationDirCount = 0;
    myExclusionFileCount = 0;
    myEvacuationFileCount = 0;

    Files.walkFileTree(myBackupDir, new SimpleFileVisitor<Path>() {
        /**
         * @see java.nio.file.SimpleFileVisitor#visitFileFailed(java.lang.Object, java.io.IOException)
         */
        @Override
        public FileVisitResult visitFileFailed(Path aFile, IOException aExc) throws IOException {
            if ("System Volume Information".equals((aFile.getFileName().toString()))) {
                return FileVisitResult.SKIP_SUBTREE;
            }

            throw aExc;
        }

        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            return Runner.this.visitFile(file, attrs);
        }

        @Override
        public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
            if ("System Volume Information".equals((dir.getFileName()))) {
                return FileVisitResult.SKIP_SUBTREE;
            }

            return Runner.this.preVisitDirectory(dir, attrs);
        }

    });

    if (myEvacuateCandidates.size() == 0) {
        myLog.info("No candidates for evacuation found");
    } else {
        StringBuilder sb = new StringBuilder("\nFound candidates for evacuation:");
        myEvacuateCandidates.keySet().forEach(p -> sb.append("\n    " + p.toString()));
        myLog.info(sb.toString());
    }

    if (myDryRun) {
        myLog.debug("DryRun flag is set. Doing nothing");
        return;
    }

    if (myFutures.size() > 0) {
        myLog.debug("Waiting for all async tasks to complete");
        CompletableFuture.allOf(myFutures.toArray(new CompletableFuture[myFutures.size()])).get();
    }

    if (myFailedChainPreparations.size() > 0) {
        for (Path path : myFailedChainPreparations.keySet()) {
            myLog.error("exception occured", myFailedChainPreparations.get(path));
        }

        throw new Exception("chain preparation failed. See above error messages");
    }

    for (Path src : myEvacuateCandidates.keySet()) {
        Path dst = myEvacuateCandidates.get(src);
        Path dstParent = dst.getParent();

        if (Files.notExists(dstParent)) {
            Files.createDirectories(dstParent); // FUTURE: overtake file attributes from src
        }

        if (myMove) {
            try {
                myLog.debug(
                        "Moving file system object \"" + src.toString() + "\" to \"" + dst.toString() + "\"");
                Files.move(src, dst, StandardCopyOption.ATOMIC_MOVE);
            } catch (AtomicMoveNotSupportedException e) {
                myLog.warn("Atomic move not supported. Try copy and then delete");

                if (Files.isDirectory(src)) {
                    myLog.debug("Copying folder \"" + src.toString() + "\" to \"" + dst.toString() + "\"");
                    FileUtils.copyDirectory(src.toFile(), dst.toFile());
                    myLog.debug("Delete folder \"" + src.toString() + "\"");
                    FileUtils.deleteDirectory(src.toFile());
                } else {
                    myLog.debug("Copy file \"" + src.toString() + "\" to \"" + dst.toString() + "\"");
                    FileUtils.copyFile(src.toFile(), dst.toFile());
                    myLog.debug("Delete file \"" + src.toString() + "\"");
                    Files.delete(src);
                }
            }

        } else {
            if (Files.isDirectory(src)) {
                myLog.debug("Copying folder \"" + src.toString() + "\" to \"" + dst.toString() + "\"");
                FileUtils.copyDirectory(src.toFile(), dst.toFile());
            } else {
                myLog.debug("Copy file \"" + src.toString() + "\" to \"" + dst.toString() + "\"");
                FileUtils.copyFile(src.toFile(), dst.toFile());
            }
        }
    }

    myLog.info("\nSuccessfully terminated." + "\n             Evacuated  Skipped" + "\n    Files  : "
            + StringUtils.leftPad("" + myEvacuationDirCount, 9)
            + StringUtils.leftPad("" + myExclusionDirCount, 9) + "\n    Folders: "
            + StringUtils.leftPad("" + myEvacuationFileCount, 9)
            + StringUtils.leftPad("" + myExclusionFileCount, 9));
}

From source file:com.devicehive.service.DeviceNotificationService.java

public CompletableFuture<DeviceNotification> insert(final DeviceNotification notification,
        final DeviceVO device) {
    List<CompletableFuture<Response>> futures = processDeviceNotification(notification, device).stream()
            .map(n -> {
                CompletableFuture<Response> future = new CompletableFuture<>();
                rpcClient.call(Request.newBuilder().withBody(new NotificationInsertRequest(n))
                        .withPartitionKey(device.getGuid()).build(), new ResponseConsumer(future));
                return future;
            }).collect(Collectors.toList());

    return CompletableFuture.allOf(futures.toArray(new CompletableFuture[futures.size()]))
            .thenApply(x -> futures.stream().map(CompletableFuture::join)
                    .map(r -> r.getBody().cast(NotificationInsertResponse.class).getDeviceNotification())
                    .filter(n -> !SpecialNotifications.DEVICE_UPDATE.equals(n.getNotification())) // we are not going to return DEVICE_UPDATE notification
                    .collect(Collectors.toList()).get(0)); // after filter we should get only one notification
}

From source file:grakn.core.deduplicator.AttributeDeduplicatorE2E.java

private static void insertNameShuffled(GraknClient.Session session, int nameCount, int duplicatePerNameCount,
        ExecutorService executorService) throws ExecutionException, InterruptedException {

    List<String> duplicatedNames = new ArrayList<>();
    for (int i = 0; i < nameCount; ++i) {
        for (int j = 0; j < duplicatePerNameCount; ++j) {
            String name = "lorem ipsum dolor sit amet " + i;
            duplicatedNames.add(name);
        }
    }

    Collections.shuffle(duplicatedNames, new Random(1));

    List<CompletableFuture<Void>> asyncInsertions = new ArrayList<>();
    for (String name : duplicatedNames) {
        CompletableFuture<Void> asyncInsert = CompletableFuture.supplyAsync(() -> {
            try (GraknClient.Transaction tx = session.transaction().write()) {
                List<ConceptMap> answer = tx.execute(Graql.insert(var().isa("name").val(name)));
                tx.commit();
            }
            return null;
        }, executorService);
        asyncInsertions.add(asyncInsert);
    }

    CompletableFuture.allOf(asyncInsertions.toArray(new CompletableFuture[] {})).get();
}

From source file:com.devicehive.service.DeviceCommandService.java

public Pair<String, CompletableFuture<List<DeviceCommand>>> sendSubscribeRequest(final Set<String> devices,
        final Set<String> names, final Date timestamp, final BiConsumer<DeviceCommand, String> callback)
        throws InterruptedException {

    final String subscriptionId = UUID.randomUUID().toString();
    Collection<CompletableFuture<Collection<DeviceCommand>>> futures = devices.stream()
            .map(device -> new CommandSubscribeRequest(subscriptionId, device, names, timestamp))
            .map(subscribeRequest -> {
                CompletableFuture<Collection<DeviceCommand>> future = new CompletableFuture<>();
                Consumer<Response> responseConsumer = response -> {
                    String resAction = response.getBody().getAction();
                    if (resAction.equals(Action.COMMAND_SUBSCRIBE_RESPONSE.name())) {
                        future.complete(response.getBody().cast(CommandSubscribeResponse.class).getCommands());
                    } else if (resAction.equals(Action.COMMAND_EVENT.name())) {
                        callback.accept(response.getBody().cast(CommandEvent.class).getCommand(),
                                subscriptionId);
                    } else {
                        logger.warn("Unknown action received from backend {}", resAction);
                    }
                };
                Request request = Request.newBuilder().withBody(subscribeRequest)
                        .withPartitionKey(subscribeRequest.getDevice()).withSingleReply(false).build();
                rpcClient.call(request, responseConsumer);
                return future;
            }).collect(Collectors.toList());

    CompletableFuture<List<DeviceCommand>> future = CompletableFuture
            .allOf(futures.toArray(new CompletableFuture[futures.size()])).thenApply(v -> futures.stream()
                    .map(CompletableFuture::join).flatMap(Collection::stream).collect(Collectors.toList()));
    return Pair.of(subscriptionId, future);
}

From source file:com.devicehive.service.DeviceNotificationService.java

public Pair<String, CompletableFuture<List<DeviceNotification>>> subscribe(final Set<String> devices,
        final Set<String> names, final Date timestamp, final BiConsumer<DeviceNotification, String> callback) {

    final String subscriptionId = UUID.randomUUID().toString();
    Set<NotificationSubscribeRequest> subscribeRequests = devices.stream()
            .map(device -> new NotificationSubscribeRequest(subscriptionId, device, names, timestamp))
            .collect(Collectors.toSet());
    Collection<CompletableFuture<Collection<DeviceNotification>>> futures = new ArrayList<>();
    for (NotificationSubscribeRequest sr : subscribeRequests) {
        CompletableFuture<Collection<DeviceNotification>> future = new CompletableFuture<>();
        Consumer<Response> responseConsumer = response -> {
            String resAction = response.getBody().getAction();
            if (resAction.equals(Action.NOTIFICATION_SUBSCRIBE_RESPONSE.name())) {
                NotificationSubscribeResponse r = response.getBody().cast(NotificationSubscribeResponse.class);
                future.complete(r.getNotifications());
            } else if (resAction.equals(Action.NOTIFICATION_EVENT.name())) {
                NotificationEvent event = response.getBody().cast(NotificationEvent.class);
                callback.accept(event.getNotification(), subscriptionId);
            } else {
                logger.warn("Unknown action received from backend {}", resAction);
            }
        };
        futures.add(future);
        Request request = Request.newBuilder().withBody(sr).withPartitionKey(sr.getDevice())
                .withSingleReply(false).build();
        rpcClient.call(request, responseConsumer);
    }

    CompletableFuture<List<DeviceNotification>> future = CompletableFuture
            .allOf(futures.toArray(new CompletableFuture[futures.size()])).thenApply(v -> futures.stream()
                    .map(CompletableFuture::join).flatMap(Collection::stream).collect(Collectors.toList()));
    return Pair.of(subscriptionId, future);
}

From source file:com.teradata.benchto.driver.listeners.BenchmarkServiceExecutionListener.java

private CompletableFuture<List<Measurement>> getMeasurements(Measurable measurable) {
    List<CompletableFuture<?>> providerFutures = new ArrayList<>();
    List<Measurement> measurementsList = Collections.synchronizedList(new ArrayList<>());
    for (PostExecutionMeasurementProvider measurementProvider : measurementProviders) {
        CompletableFuture<?> future = measurementProvider.loadMeasurements(measurable)
                .thenAccept(measurementsList::addAll);
        providerFutures.add(future);
    }

    return CompletableFuture.allOf(providerFutures.stream().toArray(CompletableFuture[]::new))
            .thenApply(aVoid -> ImmutableList.copyOf(measurementsList));
}

From source file:com.ikanow.aleph2.core.shared.services.ReadOnlyMultiCrudService.java

/**
 * @param unique_spec
 * @param field_list
 * @param include
 * @return
 * @see com.ikanow.aleph2.data_model.interfaces.shared_services.ICrudService#getObjectBySpec(com.ikanow.aleph2.data_model.utils.CrudUtils.QueryComponent, java.util.List, boolean)
 */
public CompletableFuture<Optional<T>> getObjectBySpec(QueryComponent<T> unique_spec, List<String> field_list,
        boolean include) {
    final Stream<CompletableFuture<Optional<T>>> intermed_res1 = _services.stream()
            .map(s -> s.getObjectBySpec(unique_spec, field_list, include));

    @SuppressWarnings("unchecked")
    CompletableFuture<Optional<T>>[] intermed_res2 = (CompletableFuture<Optional<T>>[]) intermed_res1
            .toArray(CompletableFuture[]::new);

    return CompletableFuture.allOf(intermed_res2).thenApply(__ -> {
        return Arrays.stream(intermed_res2).map(res -> res.join()).filter(maybe -> maybe.isPresent())
                .map(maybe -> maybe.get()).findFirst();
    });
}

From source file:fi.hsl.parkandride.itest.RequestLogITest.java

private void concurrentlyGenerateLogs(int numberOfRequests, int numberOfUpdates) {
    withDate(DateTime.now().withTime(12, 2, 0, 0), () -> {
        final Stream<CompletableFuture<Integer>> statusCodes = range(0, numberOfRequests).parallel()
                .mapToObj(i -> {
                    final Response response = given().header(SOURCE_HEADER, WEB_UI_SOURCE).when()
                            .get(UrlSchema.CAPACITY_TYPES).thenReturn();
                    return CompletableFuture.completedFuture(response.statusCode());
                });

        final Stream<CompletableFuture<Integer>> updates = range(0, numberOfUpdates).parallel().mapToObj(i -> {
            batchingRequestLogService.updateRequestLogs();
            return CompletableFuture.completedFuture(0);
        });

        try {
            CompletableFuture.allOf(Stream.concat(statusCodes, updates).toArray(i -> new CompletableFuture[i]))
                    .get();
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
            throw new AssertionFailedError(e.getMessage());
        }
    });
}