List of usage examples for java.util.concurrent.CompletableFuture.allOf
public static CompletableFuture<Void> allOf(CompletableFuture<?>... cfs)
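allOf returns a new CompletableFuture<Void> that completes when all of the given futures complete; its result value is always null, so the usual idiom (used by every example below) is to follow it with thenApply and join() the inputs, which are guaranteed to be complete at that point. A minimal self-contained sketch of that idiom (the names here are illustrative, not taken from the examples below):

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class AllOfIdiom {
    public static void main(String[] args) {
        final CompletableFuture<Integer> a = CompletableFuture.supplyAsync(() -> 1);
        final CompletableFuture<Integer> b = CompletableFuture.supplyAsync(() -> 2);

        final CompletableFuture<List<Integer>> all = CompletableFuture.allOf(a, b)
                // a and b are guaranteed to be complete here, so join() does not block
                .thenApply(v -> Stream.of(a, b)
                        .map(CompletableFuture::join)
                        .collect(Collectors.toList()));

        System.out.println(all.join()); // prints [1, 2]
    }
}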
From source file:com.ikanow.aleph2.core.shared.services.ReadOnlyMultiCrudService.java
/**
 * @param id
 * @param field_list
 * @param include
 * @return
 * @see com.ikanow.aleph2.data_model.interfaces.shared_services.ICrudService#getObjectById(java.lang.Object, java.util.List, boolean)
 */
public CompletableFuture<Optional<T>> getObjectById(Object id, List<String> field_list, boolean include) {
    final Stream<CompletableFuture<Optional<T>>> intermed_res1 = _services.stream()
            .map(s -> s.getObjectById(id, field_list, include));
    @SuppressWarnings("unchecked")
    CompletableFuture<Optional<T>>[] intermed_res2 = (CompletableFuture<Optional<T>>[]) intermed_res1
            .toArray(CompletableFuture[]::new);
    // wait for every underlying service, then hand back the first non-empty result
    return CompletableFuture.allOf(intermed_res2).thenApply(__ -> {
        return Arrays.stream(intermed_res2).map(res -> res.join()).filter(maybe -> maybe.isPresent())
                .map(maybe -> maybe.get()).findFirst();
    });
}
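Note that allOf does not short-circuit: the returned future completes only after every input future has completed, and if any of them completed exceptionally it completes exceptionally as well (with a CompletionException wrapping the underlying failure). In the method above, a single failing service therefore fails the whole lookup rather than being skipped in favour of the next service.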
From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_PurgeBuckets.java
/** Top level logic for source synchronization
 * @param bucket_mgmt
 * @param source_db
 */
protected CompletableFuture<Void> synchronizePurgeSources(
        final IManagementCrudService<DataBucketBean> bucket_mgmt,
        final IManagementCrudService<DataBucketStatusBean> underlying_bucket_status_mgmt,
        final ICrudService<PurgeQueueBean> source_purge_db) {
    final List<CompletableFuture<?>> purge_results = new ArrayList<CompletableFuture<?>>();

    // check for entries in the purge queue db
    final CompletableFuture<List<PurgeQueueBean>> future_purge_sources = getAllPurgeSources(source_purge_db);

    return future_purge_sources.thenCompose(purge_sources -> {
        purge_sources.forEach(Lambdas.wrap_consumer_u(purge_source -> {
            _logger.debug("Looking at purge source: " + purge_source._id());

            final DataBucketBean to_purge = Lambdas
                    .wrap_u(() -> IkanowV1SyncService_Buckets.getBucketFromV1Source(purge_source.source()))
                    .get();
            // always try to purge the source we pulled
            purge_results.add(handlePurgeSource(to_purge, purge_source));
        }));

        // combine the responses of all the purge operations
        List<CompletableFuture<?>> retval = Arrays.asList(purge_results).stream()
                .flatMap(l -> l.stream()).collect(Collectors.toList());

        return CompletableFuture.allOf(retval.toArray(new CompletableFuture[0]));
    });
}
From source file:com.ikanow.aleph2.management_db.controllers.actors.BucketDeletionActor.java
/** Deletes the data in all data services
 * TODO (ALEPH-26): assume default ones for now
 * @param bucket - the bucket to cleanse
 */
public static CompletableFuture<Collection<BasicMessageBean>> deleteAllDataStoresForBucket(
        final DataBucketBean bucket, final IServiceContext service_context, boolean delete_bucket) {
    // Currently the only supported data service is the search index
    try {
        final LinkedList<CompletableFuture<BasicMessageBean>> vals = new LinkedList<>();
        service_context.listServiceProviders().stream().map(t3 -> t3._1().get())
                .filter(s -> IDataServiceProvider.class.isAssignableFrom(s.getClass()))
                .map(s -> (IDataServiceProvider) s).distinct().forEach(service -> {
                    if (!(delete_bucket && IStorageService.class.isAssignableFrom(service.getClass()))) {
                        // if deleting the bucket then don't need to remove the storage path
                        service.getDataService().ifPresent(ds -> vals
                                .add(ds.handleBucketDeletionRequest(bucket, Optional.empty(), delete_bucket)));
                    }
                });
        // wait for every deletion request, then collect the replies
        return CompletableFuture.allOf(vals.toArray(new CompletableFuture[0])).thenApply(__ -> {
            return vals.stream().map(x -> x.join()).collect(Collectors.toList());
        });
    } catch (Throwable t) {
        return CompletableFuture.completedFuture(
                Arrays.asList(ErrorUtils.buildErrorMessage(BucketDeletionActor.class.getSimpleName(),
                        "deleteAllDataStoresForBucket", ErrorUtils.getLongForm("{0}", t))));
    }
}
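The try/catch above only guards the synchronous setup; a failure inside one of the collected futures would instead surface through allOf and fail the joined result. A minimal sketch (with illustrative names, not from this project) of recovering such failures in one place with exceptionally(...):

import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class AllOfFailure {
    public static void main(String[] args) {
        final CompletableFuture<String> ok = CompletableFuture.completedFuture("ok");
        final CompletableFuture<String> bad = new CompletableFuture<>();
        bad.completeExceptionally(new IllegalStateException("boom"));

        final List<String> results = CompletableFuture.allOf(ok, bad)
                .thenApply(v -> Stream.of(ok, bad).map(CompletableFuture::join).collect(Collectors.toList()))
                // allOf completed exceptionally (a CompletionException wrapping the cause),
                // so thenApply is skipped and we recover here instead
                .exceptionally(t -> Collections.singletonList("failed: " + t.getCause().getMessage()))
                .join();

        System.out.println(results); // prints [failed: boom]
    }
}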
From source file:com.ikanow.aleph2.core.shared.services.ReadOnlyMultiCrudService.java
/**
 * @param spec
 * @param field_list
 * @param include
 * @return
 * @see com.ikanow.aleph2.data_model.interfaces.shared_services.ICrudService#getObjectsBySpec(com.ikanow.aleph2.data_model.utils.CrudUtils.QueryComponent, java.util.List, boolean)
 */
public CompletableFuture<ICrudService.Cursor<T>> getObjectsBySpec(QueryComponent<T> spec,
        List<String> field_list, boolean include) {
    final Stream<CompletableFuture<ICrudService.Cursor<T>>> intermed_res1 = _services.stream()
            .map(s -> s.getObjectsBySpec(spec, field_list, include));
    @SuppressWarnings("unchecked")
    CompletableFuture<ICrudService.Cursor<T>>[] intermed_res2 = (CompletableFuture<ICrudService.Cursor<T>>[]) intermed_res1
            .toArray(CompletableFuture[]::new);
    return CompletableFuture.allOf(intermed_res2).thenApply(__ -> {
        return new MultiCursor<T>(
                Arrays.stream(intermed_res2).map(res -> res.join()).collect(Collectors.toList()));
    });
}
From source file:com.ikanow.aleph2.core.shared.services.MultiDataService.java
/** Returns a completable future for when all batches are flushed
 * @return
 */
public CompletableFuture<?> flushBatchOutput() {
    return CompletableFuture.allOf(
            getBatchWriters().stream().map(batch -> batch.flushOutput()).toArray(CompletableFuture[]::new));
}
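This is the most compact form of the idiom: because the per-batch results are not needed, the CompletableFuture<Void> returned by allOf is handed back directly with no thenApply stage, and the stream is collected straight into an array via toArray(CompletableFuture[]::new).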
From source file:ai.grakn.engine.controller.TasksController.java
private static <T> CompletableFuture<List<T>> all(List<CompletableFuture<T>> cf) {
    return CompletableFuture.allOf(cf.toArray(new CompletableFuture[cf.size()]))
            .thenApply(v -> cf.stream().map(CompletableFuture::join).collect(toList()));
}
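This helper is the general-purpose version of the pattern: it converts a List<CompletableFuture<T>> into a CompletableFuture<List<T>> that preserves input order. A hypothetical, self-contained call site (the demo class and values are illustrative):

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import static java.util.stream.Collectors.toList;

public class AllHelperDemo {
    // same helper as above, reproduced so the demo compiles standalone
    private static <T> CompletableFuture<List<T>> all(List<CompletableFuture<T>> cf) {
        return CompletableFuture.allOf(cf.toArray(new CompletableFuture[cf.size()]))
                .thenApply(v -> cf.stream().map(CompletableFuture::join).collect(toList()));
    }

    public static void main(String[] args) {
        final List<CompletableFuture<Integer>> futures = Arrays.asList(
                CompletableFuture.supplyAsync(() -> 1),
                CompletableFuture.supplyAsync(() -> 2));

        System.out.println(all(futures).join()); // prints [1, 2]
    }
}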
From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_Buckets.java
/** Top level logic for source synchronization
 * @param bucket_mgmt
 * @param source_db
 */
protected CompletableFuture<Void> synchronizeSources(final IManagementCrudService<DataBucketBean> bucket_mgmt,
        final IManagementCrudService<DataBucketStatusBean> underlying_bucket_status_mgmt,
        final ICrudService<JsonNode> source_db) {
    return compareSourcesToBuckets_get(bucket_mgmt, source_db).thenApply(v1_v2 -> {
        return compareSourcesToBuckets_categorize(v1_v2);
    }).thenCompose(create_update_delete -> {
        if (create_update_delete._1().isEmpty() && create_update_delete._2().isEmpty()
                && create_update_delete._3().isEmpty()) {
            // (nothing to do)
            return CompletableFuture.completedFuture(null);
        }
        _logger.info(ErrorUtils.get("Found [create={0}, delete={1}, update={2}] sources",
                create_update_delete._1().size(), create_update_delete._2().size(),
                create_update_delete._3().size()));

        final List<CompletableFuture<Boolean>> l1 = create_update_delete._1().stream().parallel()
                .<Tuple2<String, ManagementFuture<?>>>map(key -> Tuples._2T(key,
                        createNewBucket(key, bucket_mgmt, underlying_bucket_status_mgmt, source_db)))
                .<CompletableFuture<Boolean>>map(
                        key_fres -> updateV1SourceStatus_top(key_fres._1(), key_fres._2(), true, source_db))
                .collect(Collectors.toList());

        final List<CompletableFuture<Boolean>> l2 = create_update_delete._2().stream().parallel()
                .<Tuple2<String, ManagementFuture<?>>>map(
                        key -> Tuples._2T(key, deleteBucket(key, bucket_mgmt)))
                .<CompletableFuture<Boolean>>map(key_fres -> CompletableFuture.completedFuture(true))
                // (don't update source in delete case obviously)
                .collect(Collectors.toList());

        final List<CompletableFuture<Boolean>> l3 = create_update_delete._3().stream().parallel()
                .<Tuple2<String, ManagementFuture<?>>>map(key -> Tuples._2T(key,
                        updateBucket(key, bucket_mgmt, underlying_bucket_status_mgmt, source_db)))
                .<CompletableFuture<Boolean>>map(
                        key_fres -> updateV1SourceStatus_top(key_fres._1(), key_fres._2(), false, source_db))
                .collect(Collectors.toList());

        List<CompletableFuture<?>> retval = Arrays.asList(l1, l2, l3).stream().flatMap(l -> l.stream())
                .collect(Collectors.toList());

        return CompletableFuture.allOf(retval.toArray(new CompletableFuture[0]));
    });
}
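Note how the three per-category lists (creates, deletes, updates) are flattened into a single list before the allOf call, so the returned CompletableFuture<Void> completes only once every bucket operation, including each follow-up source-status write, has finished.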
From source file:com.ikanow.aleph2.core.shared.services.ReadOnlyMultiCrudService.java
/**
 * @param spec
 * @return
 * @see com.ikanow.aleph2.data_model.interfaces.shared_services.ICrudService#countObjectsBySpec(com.ikanow.aleph2.data_model.utils.CrudUtils.QueryComponent)
 */
public CompletableFuture<Long> countObjectsBySpec(QueryComponent<T> spec) {
    final Stream<CompletableFuture<Long>> intermed_res1 = _services.stream()
            .map(s -> s.countObjectsBySpec(spec));
    @SuppressWarnings("unchecked")
    CompletableFuture<Long>[] intermed_res2 = (CompletableFuture<Long>[]) intermed_res1
            .toArray(CompletableFuture[]::new);
    return CompletableFuture.allOf(intermed_res2).thenApply(__ -> {
        return Arrays.stream(intermed_res2).map(res -> res.join()).reduce((a, b) -> a + b).orElse(0L);
    });
}
From source file:com.ikanow.aleph2.core.shared.services.ReadOnlyMultiCrudService.java
/**
 * @return
 * @see com.ikanow.aleph2.data_model.interfaces.shared_services.ICrudService#countObjects()
 */
public CompletableFuture<Long> countObjects() {
    final Stream<CompletableFuture<Long>> intermed_res1 = _services.stream().map(s -> s.countObjects());
    @SuppressWarnings("unchecked")
    CompletableFuture<Long>[] intermed_res2 = (CompletableFuture<Long>[]) intermed_res1
            .toArray(CompletableFuture[]::new);
    return CompletableFuture.allOf(intermed_res2).thenApply(__ -> {
        return Arrays.stream(intermed_res2).map(res -> res.join()).reduce((a, b) -> a + b).orElse(0L);
    });
}
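The reduce over boxed longs works, but the same aggregation can be expressed as a primitive sum; an equivalent final statement for the method above would be (a sketch, not from the project source):

return CompletableFuture.allOf(intermed_res2).thenApply(__ ->
        Arrays.stream(intermed_res2).mapToLong(CompletableFuture::join).sum());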
From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_TestBuckets.java
/** Top level logic for source synchronization
 * @param bucket_mgmt
 * @param source_db
 */
protected CompletableFuture<Void> synchronizeTestSources(
        final IManagementCrudService<DataBucketBean> bucket_mgmt,
        final IManagementCrudService<DataBucketStatusBean> underlying_bucket_status_mgmt,
        final ICrudService<TestQueueBean> source_test_db, final BucketTestService bucket_test_service) {
    final List<CompletableFuture<?>> new_results = new ArrayList<CompletableFuture<?>>(); // (not used for synchronization)
    final List<CompletableFuture<?>> existing_results = new ArrayList<CompletableFuture<?>>();

    // check for entries in the test db
    final CompletableFuture<List<TestQueueBean>> future_test_sources = getAllTestSources(source_test_db);

    return future_test_sources.thenCompose(test_sources -> {
        test_sources.forEach(Lambdas.wrap_consumer_u(test_source -> {
            _logger.debug("Looking at test source: " + test_source._id());
            try {
                final DataBucketBean to_test = Lambdas
                        .wrap_u(() -> getBucketFromV1Source(test_source.source())).get();
                if (test_source.status() != null && (test_source.status() == TestStatus.in_progress
                        || test_source.status() == TestStatus.completed
                        || test_source.status() == TestStatus.error)) {
                    existing_results.add(handleExistingTestSource(to_test, test_source, source_test_db));
                } else { // not started yet...
                    _logger.debug("Found a new entry, setting up test");
                    new_results.add(
                            handleNewTestSource(to_test, test_source, bucket_test_service, source_test_db));
                }
            } catch (Exception ex) {
                final String error = ErrorUtils.getLongForm("error: {0}", ex);
                _logger.error("Error when checking test source: " + error);
                // turn off this test source
                updateTestSourceStatus(test_source._id(), TestStatus.error, source_test_db, Optional.empty(),
                        Optional.empty(), Optional.of(error)).join();
            }
        }));
        if (existing_results.isEmpty()) {
            // Make sure at least that we don't start a new thread until we've got all the tests
            // from the previous sources
            existing_results.add(future_test_sources);
        }
        // combine the responses of the existing entries
        // (potentially block on existing results but not new tests 'cos those can take ages)
        List<CompletableFuture<?>> retval = Arrays.asList(existing_results).stream()
                .flatMap(l -> l.stream()).collect(Collectors.toList());

        return CompletableFuture.allOf(retval.toArray(new CompletableFuture[0]));
    });
}