Usage examples for java.util.function.BiConsumer#accept
void accept(T t, U u);
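BiConsumer<T, U> is the two-argument counterpart of Consumer<T>: accept performs a side effect on its two arguments and returns nothing. A minimal self-contained example using only the JDK (Map.of needs Java 9+):

import java.util.Map;
import java.util.function.BiConsumer;

public class BiConsumerBasics {
    public static void main(String[] args) {
        // A BiConsumer implemented as a lambda; accept() invokes it.
        BiConsumer<String, Integer> printEntry =
                (key, count) -> System.out.println(key + " -> " + count);
        printEntry.accept("answer", 42);

        // Map.forEach(BiConsumer) passes every (key, value) pair to the consumer.
        Map.of("a", 1, "b", 2).forEach(printEntry);

        // andThen() chains a second BiConsumer over the same two arguments.
        BiConsumer<String, Integer> thenAudit =
                printEntry.andThen((key, count) -> System.out.println("audited " + key));
        thenAudit.accept("c", 3);
    }
}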
From source file:com.devicehive.service.DeviceNotificationService.java
public Pair<String, CompletableFuture<List<DeviceNotification>>> subscribe(final Set<String> devices,
        final Set<String> names, final Date timestamp, final BiConsumer<DeviceNotification, String> callback) {
    final String subscriptionId = UUID.randomUUID().toString();
    Set<NotificationSubscribeRequest> subscribeRequests = devices.stream()
            .map(device -> new NotificationSubscribeRequest(subscriptionId, device, names, timestamp))
            .collect(Collectors.toSet());
    Collection<CompletableFuture<Collection<DeviceNotification>>> futures = new ArrayList<>();
    for (NotificationSubscribeRequest sr : subscribeRequests) {
        CompletableFuture<Collection<DeviceNotification>> future = new CompletableFuture<>();
        Consumer<Response> responseConsumer = response -> {
            String resAction = response.getBody().getAction();
            if (resAction.equals(Action.NOTIFICATION_SUBSCRIBE_RESPONSE.name())) {
                NotificationSubscribeResponse r = response.getBody().cast(NotificationSubscribeResponse.class);
                future.complete(r.getNotifications());
            } else if (resAction.equals(Action.NOTIFICATION_EVENT.name())) {
                NotificationEvent event = response.getBody().cast(NotificationEvent.class);
                // The BiConsumer callback receives each incoming notification plus the subscription id.
                callback.accept(event.getNotification(), subscriptionId);
            } else {
                logger.warn("Unknown action received from backend {}", resAction);
            }
        };
        futures.add(future);
        Request request = Request.newBuilder().withBody(sr).withPartitionKey(sr.getDevice())
                .withSingleReply(false).build();
        rpcClient.call(request, responseConsumer);
    }
    CompletableFuture<List<DeviceNotification>> future = CompletableFuture
            .allOf(futures.toArray(new CompletableFuture[futures.size()]))
            .thenApply(v -> futures.stream().map(CompletableFuture::join)
                    .flatMap(Collection::stream).collect(Collectors.toList()));
    return Pair.of(subscriptionId, future);
}
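In the example above the BiConsumer is an asynchronous event callback: the caller supplies it at subscription time, and the response handler later invokes accept with each notification and the owning subscription id. A stripped-down sketch of the same shape; EventBus, subscribe, and dispatch are hypothetical stand-ins, not part of the DeviceHive API:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.UUID;
import java.util.function.BiConsumer;

class EventBus {
    private final Map<String, BiConsumer<String, String>> callbacks = new LinkedHashMap<>();

    // The caller registers a BiConsumer; it later receives (notification, subscriptionId) pairs.
    String subscribe(BiConsumer<String, String> callback) {
        String subscriptionId = UUID.randomUUID().toString();
        callbacks.put(subscriptionId, callback);
        return subscriptionId;
    }

    void dispatch(String notification) {
        // Map.forEach itself takes a BiConsumer; each registered callback gets the event and its own id.
        callbacks.forEach((id, callback) -> callback.accept(notification, id));
    }

    public static void main(String[] args) {
        EventBus bus = new EventBus();
        bus.subscribe((notification, subId) -> System.out.println(subId + " got " + notification));
        bus.dispatch("device-42 online");
    }
}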
From source file:blusunrize.immersiveengineering.api.energy.wires.ImmersiveNetHandler.java
public void removeConnection(World world, Connection con, Vec3d vecStart, Vec3d vecEnd) {
    if (con == null || world == null)
        return;
    int dim = world.provider.getDimension();
    resetCachedIndirectConnections(world, con.start);
    Map<BlockPos, Set<Connection>> connsInDim = getMultimap(world.provider.getDimension());
    Set<Connection> reverseConns = connsInDim.get(con.end);
    Set<Connection> forwardConns = connsInDim.get(con.start);
    Optional<Connection> back = reverseConns.stream().filter(con::hasSameConnectors).findAny();
    reverseConns.removeIf(con::hasSameConnectors);
    forwardConns.removeIf(con::hasSameConnectors);
    Map<BlockPos, BlockWireInfo> mapForDim = blockWireMap.lookup(world.provider.getDimension());
    // Local BiConsumer so the same cleanup logic can be reused by both raytrace callbacks below.
    BiConsumer<BlockPos, Map<BlockPos, BlockWireInfo>> handle = (p, map) -> {
        if (mapForDim != null) {
            BlockWireInfo info = map.get(p);
            if (info != null) {
                for (int i = 0; i < 2; i++) {
                    Set<Triple<Connection, Vec3d, Vec3d>> s = i == 0 ? info.in : info.near;
                    s.removeIf((t) -> t.getLeft().hasSameConnectors(con));
                    if (s.isEmpty())
                        map.remove(p);
                }
                if (info.near.isEmpty() && info.in.isEmpty())
                    map.remove(p);
            }
        }
    };
    raytraceAlongCatenaryRelative(con, (p) -> {
        handle.accept(p.getLeft(), mapForDim);
        return false;
    }, (p) -> handle.accept(p.getLeft(), mapForDim), vecStart, vecEnd);
    IImmersiveConnectable iic = toIIC(con.end, world);
    if (iic != null) {
        iic.removeCable(con);
        back.ifPresent(iic::removeCable);
    }
    iic = toIIC(con.start, world);
    if (iic != null) {
        iic.removeCable(con);
        back.ifPresent(iic::removeCable);
    }
    if (world.isBlockLoaded(con.start))
        world.addBlockEvent(con.start, world.getBlockState(con.start).getBlock(), -1, 0);
    if (world.isBlockLoaded(con.end))
        world.addBlockEvent(con.end, world.getBlockState(con.end).getBlock(), -1, 0);
    IESaveData.setDirty(dim);
}
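The Immersive Engineering example declares the BiConsumer locally so the same two-argument cleanup logic can be invoked from two different callbacks without a named helper method. A minimal sketch of that local-reuse pattern, with hypothetical names:

import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;

class LocalReuse {
    public static void main(String[] args) {
        Map<String, Integer> scores = new HashMap<>(Map.of("a", 1, "b", 2));
        // Declared once, invoked from two separate callbacks, like handle.accept(...) above.
        BiConsumer<String, Map<String, Integer>> evict = (key, map) -> map.remove(key);
        Runnable onStart = () -> evict.accept("a", scores);
        Runnable onEnd = () -> evict.accept("b", scores);
        onStart.run();
        onEnd.run();
        System.out.println(scores); // {}
    }
}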
From source file:com.ikanow.aleph2.analytics.spark.utils.SparkTechnologyUtils.java
/**
 * Builds objects for all the aleph2 inputs and provides a method to use them in context-dependent ways
 * @param context
 * @param bucket
 * @param job
 * @param config
 * @param per_input_action - user lambda that determines how they are used
 */
public static final void buildAleph2Inputs(final IAnalyticsContext context, final DataBucketBean bucket,
        final AnalyticThreadJobBean job, final Optional<ProcessingTestSpecBean> maybe_test_spec,
        final Configuration config, final Set<String> exclude_names,
        BiConsumer<AnalyticThreadJobInputBean, Job> per_input_action) {
    transformInputBean(Optionals.ofNullable(job.inputs()).stream(), maybe_test_spec)
            .filter(input -> !exclude_names.contains(input.name()))
            .forEach(Lambdas.wrap_consumer_u(input_with_test_settings -> {
                final Optional<IBucketLogger> a2_logger = Optional
                        .ofNullable(context.getLogger(Optional.of(bucket)));
                final List<String> paths = context.getInputPaths(Optional.empty(), job, input_with_test_settings);
                if (!paths.isEmpty()) {
                    _logger.info(ErrorUtils.get("Adding storage paths for bucket {0}: {1}", bucket.full_name(),
                            paths.stream().collect(Collectors.joining(";"))));
                    a2_logger.ifPresent(l -> l.log(Level.INFO, true,
                            () -> ErrorUtils.get("Adding storage paths for bucket {0}: {1}", bucket.full_name(),
                                    paths.stream().collect(Collectors.joining(";"))),
                            () -> SparkTechnologyService.class.getSimpleName() + "."
                                    + Optional.ofNullable(job.name()).orElse("no_name"),
                            () -> "startAnalyticJobOrTest"));
                    final Job input_job = Job.getInstance(config);
                    input_job.setInputFormatClass(BeFileInputFormat_Pure.class);
                    paths.stream().forEach(Lambdas
                            .wrap_consumer_u(path -> FileInputFormat.addInputPath(input_job, new Path(path))));
                    // (Add the input config in)
                    input_job.getConfiguration().set(HadoopBatchEnrichmentUtils.BE_BUCKET_INPUT_CONFIG,
                            BeanTemplateUtils.toJson(input_with_test_settings).toString());
                    // Hand the fully configured Hadoop Job to the caller-supplied BiConsumer.
                    per_input_action.accept(input_with_test_settings, input_job);
                } else { // not easily available in HDFS directory format, try getting from the context
                    Optional<HadoopBatchEnrichmentUtils.HadoopAccessContext> input_format_info = context
                            .getServiceInput(HadoopBatchEnrichmentUtils.HadoopAccessContext.class,
                                    Optional.empty(), job, input_with_test_settings);
                    if (!input_format_info.isPresent()) {
                        _logger.warn(ErrorUtils.get("Tried but failed to get input format from {0}",
                                BeanTemplateUtils.toJson(input_with_test_settings)));
                        a2_logger.ifPresent(l -> l.log(Level.WARN, true,
                                () -> ErrorUtils.get("Tried but failed to get input format from {0}",
                                        BeanTemplateUtils.toJson(input_with_test_settings)),
                                () -> SparkTechnologyService.class.getSimpleName() + "."
                                        + Optional.ofNullable(job.name()).orElse("no_name"),
                                () -> "startAnalyticJobOrTest"));
                    } else {
                        _logger.info(ErrorUtils.get("Adding data service path for bucket {0}: {1}",
                                bucket.full_name(), input_format_info.get().describe()));
                        a2_logger.ifPresent(l -> l.log(Level.INFO, true,
                                () -> ErrorUtils.get("Adding data service path for bucket {0}: {1}",
                                        bucket.full_name(), input_format_info.get().describe()),
                                () -> SparkTechnologyService.class.getSimpleName() + "."
                                        + Optional.ofNullable(job.name()).orElse("no_name"),
                                () -> "startAnalyticJobOrTest"));
                        final Job input_job = Job.getInstance(config);
                        input_job.setInputFormatClass(
                                input_format_info.get().getAccessService().either(l -> l.getClass(), r -> r));
                        input_format_info.get().getAccessConfig().ifPresent(map -> {
                            map.entrySet().forEach(kv -> input_job.getConfiguration().set(kv.getKey(),
                                    kv.getValue().toString()));
                        });
                        per_input_action.accept(input_with_test_settings, input_job);
                    }
                }
            }));
}
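buildAleph2Inputs treats the BiConsumer as an injection point: the method performs the uniform setup for every input, then delegates the final context-dependent step to the caller's per_input_action. The same shape reduced to a sketch; InputDriver and configureAndRun are hypothetical, and a StringBuilder stands in for the configured Hadoop Job:

import java.util.List;
import java.util.function.BiConsumer;

class InputDriver {
    // Uniform setup happens here; what to do with each configured input is the caller's decision.
    static void configureAndRun(List<String> inputNames, BiConsumer<String, StringBuilder> perInputAction) {
        for (String name : inputNames) {
            StringBuilder job = new StringBuilder("configured:").append(name); // stands in for a Hadoop Job
            perInputAction.accept(name, job);
        }
    }

    public static void main(String[] args) {
        configureAndRun(List.of("logs", "metrics"),
                (name, job) -> System.out.println("running " + name + " with " + job));
    }
}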
From source file:gov.va.oia.terminology.converters.sharedUtils.EConceptUtility.java
/**
 * Utility method to build and store a metadata concept.
 * @param callback - optional - used to fire a callback if present. No impact on created concept.
 * @param dos - optional - does not store when not provided
 * @param secondParent - optional
 */
public TtkConceptChronicle createMetaDataConcept(UUID primordial, String fsnName, String preferredName,
        String altName, String definition, UUID relParentPrimordial, UUID secondParent,
        BiConsumer<TtkConceptChronicle, EConceptUtility> callback, DataOutputStream dos) throws Exception {
    TtkConceptChronicle concept = createConcept(primordial, fsnName);
    addRelationship(concept, relParentPrimordial);
    if (secondParent != null) {
        addRelationship(concept, secondParent);
    }
    if (StringUtils.isNotEmpty(preferredName)) {
        addDescription(concept, preferredName, DescriptionType.SYNONYM, true, null, null, Status.ACTIVE);
    }
    if (StringUtils.isNotEmpty(altName)) {
        addDescription(concept, altName, DescriptionType.SYNONYM, false, null, null, Status.ACTIVE);
    }
    if (StringUtils.isNotEmpty(definition)) {
        addDescription(concept, definition, DescriptionType.DEFINITION, true, null, null, Status.ACTIVE);
    }
    // Fire the callback, passing both the newly built concept and this utility instance.
    if (callback != null) {
        callback.accept(concept, this);
    }
    if (dos != null) {
        concept.writeExternal(dos);
    }
    return concept;
}
From source file:com.netflix.spinnaker.clouddriver.cloudfoundry.client.ServiceInstances.java
private <T extends AbstractCreateServiceInstance, S extends AbstractServiceInstance> ServiceInstanceResponse createServiceInstance(
        T command, Function<T, Resource<S>> create, BiFunction<String, T, Resource<S>> update,
        BiFunction<Integer, List<String>, Page<S>> getAllServices,
        Function<T, CloudFoundryServiceInstance> getServiceInstance,
        BiConsumer<T, CloudFoundryServiceInstance> updateValidation, boolean updatable,
        CloudFoundrySpace space) {
    LastOperation.Type operationType;
    List<String> serviceInstanceQuery = getServiceQueryParams(Collections.singletonList(command.getName()), space);
    List<Resource<? extends AbstractServiceInstance>> serviceInstances = new ArrayList<>();
    serviceInstances.addAll(
            collectPageResources("service instances", pg -> getAllServices.apply(pg, serviceInstanceQuery)));
    operationType = CREATE;
    if (serviceInstances.size() == 0) {
        safelyCall(() -> create.apply(command)).map(res -> res.getMetadata().getGuid())
                .orElseThrow(() -> new CloudFoundryApiException(
                        "service instance '" + command.getName() + "' could not be created"));
    } else if (updatable) {
        operationType = UPDATE;
        serviceInstances.stream().findFirst().map(r -> r.getMetadata().getGuid()).orElseThrow(
                () -> new CloudFoundryApiException("Service instance '" + command.getName() + "' not found"));
        CloudFoundryServiceInstance serviceInstance = getServiceInstance.apply(command);
        if (serviceInstance == null) {
            throw new CloudFoundryApiException("No service instances with name '" + command.getName()
                    + "' found in space " + space.getName());
        }
        // The BiConsumer validates the requested update against the existing instance before it is applied.
        updateValidation.accept(command, serviceInstance);
        safelyCall(() -> update.apply(serviceInstance.getId(), command));
    }
    return new ServiceInstanceResponse().setServiceInstanceName(command.getName()).setType(operationType);
}
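Here updateValidation is a pluggable validation hook: the BiConsumer either returns normally or throws, so createServiceInstance stays agnostic about the actual rules. A minimal sketch of that pattern, with hypothetical names:

import java.util.function.BiConsumer;

class Updater {
    static <T, S> void update(T command, S existing, BiConsumer<T, S> validation) {
        // The hook runs before the update and signals rejection by throwing.
        validation.accept(command, existing);
        System.out.println("applying " + command + " on top of " + existing);
    }

    public static void main(String[] args) {
        BiConsumer<String, String> rejectDowngrade = (requested, current) -> {
            if (requested.compareTo(current) < 0) {
                throw new IllegalArgumentException("downgrade not allowed");
            }
        };
        update("v2", "v1", rejectDowngrade); // passes validation, then applies
    }
}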
From source file:ijfx.core.batch.BatchService.java
public Boolean applyWorkflow(ProgressHandler progress, List<? extends BatchSingleInput> inputs, Workflow workflow) {
    final Timer t = timerService.getTimer("Workflow");
    if (progress == null) {
        progress = new SilentProgressHandler();
    }
    // Distinct lock object: each invocation serializes its own load/save phases.
    final Object lock = new Object();
    if (workflow == null) {
        logger.warning("No workflow was provided");
        return true;
    }
    int totalOps = inputs.size() * (2 + workflow.getStepList().size());
    progress.setStatus("Starting batch processing...");
    boolean success = true;
    BooleanProperty successProperty = new SimpleBooleanProperty();
    Exception error = null;
    setRunning(true);
    // BiConsumer used as a small logging helper: (step, message) -> one timestamped log line.
    BiConsumer<String, String> logTime = (step, msg) -> {
        t.elapsed(String.format("[%s][%s]%s", workflow.getName(), step, msg));
    };
    progress.setTotal(totalOps);
    for (int i = 0; i != inputs.size(); i++) {
        logger.info("Running...");
        final BatchSingleInput input = inputs.get(i);
        if (progress.isCancelled()) {
            progress.setStatus("Batch Processing cancelled");
            success = false;
            break;
        }
        t.start();
        synchronized (lock) {
            logger.info("Loading input...");
            progress.setStatus("Loading %s...", input.getName());
            try {
                getContext().inject(input);
            } catch (IllegalStateException ise) {
                logger.warning("Context already injected");
            }
            try {
                input.load();
            } catch (Exception e) {
                logger.log(Level.SEVERE, "Couldn't load input", e);
                error = e;
                success = false;
                break;
            }
            logger.info("Input loaded");
        }
        logTime.accept("loading", "done");
        progress.increment(1);
        if (i < inputs.size() - 1) {
            // loading the next one while processing the current one
            BatchSingleInput next = inputs.get(i + 1);
            ImageJFX.getThreadPool().execute(() -> {
                synchronized (lock) {
                    logger.info("Loading next input...");
                    next.load();
                    logger.info("Next input loaded.");
                }
            });
        }
        for (WorkflowStep step : workflow.getStepList()) {
            logger.info("Executing step : " + step.getId());
            String title;
            try {
                title = step.getModule().getInfo().getTitle();
                progress.setStatus(String.format("Processing %s with %s", input.getName(), title));
            } catch (NullPointerException e) {
                title = "???";
                progress.setStatus("...");
            }
            progress.increment(1);
            final Module module = moduleService.createModule(step.getModule().getInfo());
            try {
                getContext().inject(module.getDelegateObject());
            } catch (Exception e) {
                logger.warning("Context already injected in module ?");
            }
            logTime.accept("injection", "done");
            logger.info("Module created : " + module.getDelegateObject().getClass().getSimpleName());
            if (!executeModule(input, module, step.getParameters())) {
                progress.setStatus("Error :-(");
                progress.setProgress(0, 1);
                success = false;
                logger.info("Error when executing module : " + module.getInfo().getName());
                break;
            }
            logTime.accept(title, "done");
        }
        if (!success) {
            break;
        }
        synchronized (lock) {
            progress.setStatus("Saving %s...", input.getName());
            input.save();
            progress.increment(1);
        }
        logTime.accept("saving", "done");
        input.dispose();
    }
    if (success) {
        logger.info("Batch processing completed");
        progress.setStatus("Batch processing completed.");
        progress.setProgress(1.0);
    } else if (progress.isCancelled()) {
        progress.setStatus("Batch processing cancelled");
    } else {
        progress.setStatus("An error happened during the process.");
        progress.setProgress(1, 1);
    }
    setRunning(false);
    return success;
}
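logTime above packages timer logging as a BiConsumer so that every call site is a single accept call. The same helper in isolation, as a runnable sketch (the workflow name is a stand-in for workflow.getName()):

import java.util.function.BiConsumer;

class StepLogger {
    public static void main(String[] args) {
        String workflowName = "my-workflow"; // stands in for workflow.getName()
        // Two-argument logging helper: (step, message) -> one formatted line.
        BiConsumer<String, String> logTime = (step, msg) ->
                System.out.println(String.format("[%s][%s]%s", workflowName, step, msg));
        logTime.accept("loading", "done");
        logTime.accept("saving", "done");
    }
}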
From source file:com.evolveum.midpoint.repo.sql.helpers.ObjectDeltaUpdater.java
private void processAnyExtensionDeltaValues(Collection<PrismValue> values, RObject object,
        RObjectExtensionType objectOwnerType, RAssignmentExtension assignmentExtension,
        RAssignmentExtensionType assignmentExtensionType,
        BiConsumer<Collection<? extends RAnyValue>, Collection<PrismEntityPair<RAnyValue>>> processObjectValues) {
    RAnyConverter converter = new RAnyConverter(prismContext, extItemDictionary);
    if (values == null || values.isEmpty()) {
        return;
    }
    try {
        Collection<PrismEntityPair<RAnyValue>> extValues = new ArrayList<>();
        for (PrismValue value : values) {
            RAnyValue extValue = converter.convertToRValue(value, object == null);
            if (extValue == null) {
                continue;
            }
            extValues.add(new PrismEntityPair<>(value, extValue));
        }
        if (extValues.isEmpty()) {
            // no changes in indexed values
            return; // todo can't return if new "values" collection is empty, if it was REPLACE with "nothing" we have to remove proper attributes
        }
        Class type = null;
        if (!extValues.isEmpty()) {
            RAnyValue first = extValues.iterator().next().getRepository();
            type = first.getClass();
        }
        if (object != null) {
            extValues.stream().forEach(item -> {
                ROExtValue val = (ROExtValue) item.getRepository();
                val.setOwner(object);
                val.setOwnerType(objectOwnerType);
            });
            // The BiConsumer merges the new extension values into the existing collection.
            processObjectExtensionValues(object, type,
                    (existing) -> processObjectValues.accept(existing, extValues));
        } else {
            extValues.stream().forEach(item -> {
                RAExtValue val = (RAExtValue) item.getRepository();
                val.setAnyContainer(assignmentExtension);
                val.setExtensionType(assignmentExtensionType);
            });
            processAssignmentExtensionValues(assignmentExtension, type,
                    (existing) -> processObjectValues.accept(existing, extValues));
        }
    } catch (SchemaException ex) {
        throw new SystemException("Couldn't process extension attributes", ex);
    }
}
From source file:org.apache.bookkeeper.test.TestStatsProvider.java
public void forEachOpStatLogger(BiConsumer<String, TestOpStatsLogger> f) {
    for (Map.Entry<String, TestOpStatsLogger> entry : opStatLoggerMap.entrySet()) {
        f.accept(entry.getKey(), entry.getValue());
    }
}
From source file:org.apache.drill.exec.store.pcapng.PcapngRecordReader.java
private Integer iterateOverBlocks(BiConsumer<IPcapngType, Integer> consumer) {
    int counter = 0;
    while (it.hasNext() && counter < BATCH_SIZE) {
        IPcapngType block = it.next();
        if (block instanceof IEnhancedPacketBLock) {
            // Hand each enhanced packet block and its batch index to the consumer.
            consumer.accept(block, counter);
            counter++;
        }
    }
    return counter;
}
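The reader passes a running counter as the BiConsumer's second argument, so the consumer sees both the packet block and its position in the current batch. The pattern in isolation, with hypothetical names:

import java.util.List;
import java.util.function.BiConsumer;

class IndexedIteration {
    static int forEachLimited(List<String> blocks, int batchSize, BiConsumer<String, Integer> consumer) {
        int counter = 0;
        for (String block : blocks) {
            if (counter >= batchSize) {
                break;
            }
            consumer.accept(block, counter); // element plus its index, like (block, counter) above
            counter++;
        }
        return counter;
    }

    public static void main(String[] args) {
        int processed = forEachLimited(List.of("pkt-1", "pkt-2", "pkt-3"), 2,
                (block, index) -> System.out.println(index + ": " + block));
        System.out.println("processed " + processed);
    }
}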