List of usage examples for java.util.function.BiFunction#apply
R apply(T t, U u);
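apply evaluates the function against its two arguments, t of type T and u of type U, and returns a result of type R; the two argument types and the result type are all independent. Before the real-world excerpts below, a minimal self-contained sketch (class and variable names are illustrative, not from any of the projects quoted here):

import java.util.function.BiFunction;

public class BiFunctionApplyBasics {
    public static void main(String[] args) {
        // A BiFunction whose result type differs from both argument types is fine.
        BiFunction<String, Integer, String> truncate = (s, n) -> s.substring(0, Math.min(n, s.length()));
        System.out.println(truncate.apply("BiFunction", 2)); // prints "Bi"

        // andThen post-processes the result of apply with a Function.
        BiFunction<Integer, Integer, Integer> sum = (a, b) -> a + b;
        System.out.println(sum.andThen(r -> "sum=" + r).apply(2, 3)); // prints "sum=5"
    }
}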
From source file: sh.isaac.convert.rxnorm.standard.RxNormMojo.java

/**
 * Process SAT.
 *
 * @param itemToAnnotate the item to annotate
 * @param satRows the sat rows
 * @param itemCode the item code
 * @param itemSab the item sab
 * @param skipCheck the skip check
 * @throws SQLException the SQL exception
 * @throws PropertyVetoException the property veto exception
 */
private void processSAT(ComponentReference itemToAnnotate, List<RXNSAT> satRows, String itemCode, String itemSab,
        BiFunction<String, String, Boolean> skipCheck) throws SQLException, PropertyVetoException {
    for (final RXNSAT rxnsat : satRows) {
        if (skipCheck != null) {
            if (skipCheck.apply(rxnsat.atn, rxnsat.atv)) {
                continue;
            }
        }

        // for some reason, ATUI isn't always provided - don't know why. must gen differently in those cases...
        UUID stringAttrUUID;
        final UUID refsetUUID = this.ptTermAttributes.getProperty(rxnsat.atn).getUUID();

        if (rxnsat.atui != null) {
            stringAttrUUID = ConverterUUID.createNamespaceUUIDFromString("ATUI" + rxnsat.atui);
        } else {
            // need to put the aui in here, to keep it unique, as each AUI frequently specs the same CUI
            stringAttrUUID = ConverterUUID.createNamespaceUUIDFromStrings(
                    itemToAnnotate.getPrimordialUuid().toString(), rxnsat.rxaui, rxnsat.atv, refsetUUID.toString());
        }

        // You would expect that ptTermAttributes_.get() would be looking up sab, rather than having RxNorm
        // hardcoded... but this is an oddity of a hack we are doing within the RxNorm load.
        final ComponentReference attribute = ComponentReference.fromChronology(this.importUtil
                .addStringAnnotation(itemToAnnotate, stringAttrUUID, rxnsat.atv, refsetUUID, Status.ACTIVE),
                () -> "Attribute");

        if (StringUtils.isNotBlank(rxnsat.atui)) {
            this.importUtil.addStringAnnotation(attribute, rxnsat.atui,
                    this.ptUMLSAttributes.getProperty("ATUI").getUUID(), null);
        }
        if (StringUtils.isNotBlank(rxnsat.stype)) {
            this.importUtil.addUUIDAnnotation(attribute, this.sTypes.get(rxnsat.stype),
                    this.ptUMLSAttributes.getProperty("STYPE").getUUID());
        }
        if (StringUtils.isNotBlank(rxnsat.code) && StringUtils.isNotBlank(itemCode)
                && !rxnsat.code.equals(itemCode)) {
            throw new RuntimeException("oops");
            // if ()
            // {
            //     eConcepts_.addStringAnnotation(attribute, code, ptUMLSAttributes_.getProperty("CODE").getUUID(), Status.ACTIVE);
            // }
        }
        if (StringUtils.isNotBlank(rxnsat.satui)) {
            this.importUtil.addStringAnnotation(attribute, rxnsat.satui,
                    this.ptUMLSAttributes.getProperty("SATUI").getUUID(), Status.ACTIVE);
        }
        // only load the sab if it is different than the sab of the item we are putting this attribute on
        if (StringUtils.isNotBlank(rxnsat.sab) && !rxnsat.sab.equals(itemSab)) {
            throw new RuntimeException("Oops");
            // eConcepts_.addUuidAnnotation(attribute, ptSABs_.getProperty(sab).getUUID(), ptUMLSAttributes_.getProperty("SAB").getUUID());
        }
        if (StringUtils.isNotBlank(rxnsat.suppress)) {
            this.importUtil.addUUIDAnnotation(attribute, this.suppress.get(rxnsat.suppress),
                    this.ptUMLSAttributes.getProperty("SUPPRESS").getUUID());
        }
        if (StringUtils.isNotBlank(rxnsat.cvf)) {
            if (rxnsat.cvf.equals("4096")) {
                this.importUtil.addRefsetMembership(attribute, this.cpcRefsetConcept.getPrimordialUuid(),
                        Status.ACTIVE, null);
            } else {
                throw new RuntimeException("Unexpected value in RXNSAT cvf column '" + rxnsat.cvf + "'");
            }
        }
    }
}
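In this excerpt the nullable BiFunction<String, String, Boolean> acts as a skip predicate: each row is passed to apply and dropped when the caller's check returns true. A minimal sketch of the same pattern, with illustrative names that are not from the RxNorm codebase:

import java.util.List;
import java.util.function.BiFunction;

public class SkipCheckDemo {
    // Processes name/value pairs, skipping any pair the optional check rejects.
    static void process(List<String[]> rows, BiFunction<String, String, Boolean> skipCheck) {
        for (String[] row : rows) {
            if (skipCheck != null && skipCheck.apply(row[0], row[1])) {
                continue; // caller asked us to ignore this attribute
            }
            System.out.println(row[0] + " = " + row[1]);
        }
    }

    public static void main(String[] args) {
        List<String[]> rows = List.of(new String[] { "ATN", "keep" }, new String[] { "SKIP_ME", "x" });
        process(rows, (name, value) -> name.startsWith("SKIP")); // prints only "ATN = keep"
    }
}

Since the result is only ever tested for truth, a BiPredicate<String, String> would express the same intent without boxing the boolean.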
From source file: org.apache.nifi.processors.hive.TestPutHive3Streaming.java

private void configureComplex(final MockPutHive3Streaming processor, final int numUsers, final int failAfter,
        final BiFunction<Integer, MockRecordParser, Void> recordGenerator)
        throws IOException, InitializationException {
    final String avroSchema = IOUtils.toString(new FileInputStream("src/test/resources/array_of_records.avsc"),
            StandardCharsets.UTF_8);
    schema = new Schema.Parser().parse(avroSchema);
    processor.setFields(Arrays.asList(new FieldSchema("records",
            serdeConstants.LIST_TYPE_NAME + "<" + serdeConstants.MAP_TYPE_NAME + "<"
                    + serdeConstants.STRING_TYPE_NAME + "," + serdeConstants.STRING_TYPE_NAME + ">>",
            "")));

    runner = TestRunners.newTestRunner(processor);
    runner.setProperty(PutHive3Streaming.HIVE_CONFIGURATION_RESOURCES, TEST_CONF_PATH);
    MockRecordParser readerFactory = new MockRecordParser();
    final RecordSchema recordSchema = AvroTypeUtil.createSchema(schema);
    for (final RecordField recordField : recordSchema.getFields()) {
        readerFactory.addSchemaField(recordField.getFieldName(), recordField.getDataType().getFieldType(),
                recordField.isNullable());
    }

    if (recordGenerator == null) {
        Object[] mapArray = new Object[numUsers];
        for (int i = 0; i < numUsers; i++) {
            final int x = i;
            Map<String, Object> map = new HashMap<String, Object>() {
                {
                    put("name", "name" + x);
                    put("age", x * 5);
                }
            };
            mapArray[i] = map;
        }
        readerFactory.addRecord((Object) mapArray);
    } else {
        recordGenerator.apply(numUsers, readerFactory);
    }

    readerFactory.failAfter(failAfter);
    runner.addControllerService("mock-reader-factory", readerFactory);
    runner.enableControllerService(readerFactory);
    runner.setProperty(PutHive3Streaming.RECORD_READER, "mock-reader-factory");
}
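Here BiFunction<Integer, MockRecordParser, Void> is a two-argument callback whose result is deliberately ignored; using Void as R forces implementations to return null. The test falls back to a default generator when the callback is absent. A minimal sketch of that pattern with illustrative names (not NiFi APIs):

import java.util.ArrayList;
import java.util.List;
import java.util.function.BiFunction;

public class CallbackDemo {
    static void configure(int count, BiFunction<Integer, List<String>, Void> generator) {
        List<String> records = new ArrayList<>();
        if (generator == null) {
            for (int i = 0; i < count; i++) {
                records.add("default-" + i); // fallback when no generator is supplied
            }
        } else {
            generator.apply(count, records); // result is discarded; Void implementations return null
        }
        System.out.println(records);
    }

    public static void main(String[] args) {
        configure(2, null); // [default-0, default-1]
        configure(2, (n, list) -> {
            for (int i = 0; i < n; i++) {
                list.add("custom-" + i);
            }
            return null; // the Void result type obliges an explicit "return null"
        });
    }
}

When no result is needed at all, a BiConsumer<Integer, List<String>> is the more idiomatic choice; BiFunction<..., Void> is typically used when an existing generic signature demands a return type.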
From source file: org.jsweet.transpiler.Java2TypeScriptTranslator.java

private AbstractTreePrinter substituteAndPrintType(JCTree typeTree, boolean arrayComponent,
        boolean inTypeParameters, boolean completeRawTypes, boolean disableSubstitution) {
    if (typeTree.type.tsym instanceof TypeVariableSymbol) {
        if (getAdapter().typeVariablesToErase.contains(typeTree.type.tsym)) {
            return print("any");
        }
    }
    if (!disableSubstitution) {
        if (context.hasAnnotationType(typeTree.type.tsym, ANNOTATION_ERASED)) {
            return print("any");
        }
        if (context.hasAnnotationType(typeTree.type.tsym, ANNOTATION_OBJECT_TYPE)) {
            // TODO: in case of object types, we should replace with the org object type...
            return print("any");
        }
        String typeFullName = typeTree.type.getModelType().toString();
        // typeTree.type.tsym.getQualifiedName().toString();
        if (Runnable.class.getName().equals(typeFullName)) {
            if (arrayComponent) {
                print("(");
            }
            print("() => void");
            if (arrayComponent) {
                print(")");
            }
            return this;
        }
        if (typeTree instanceof JCTypeApply) {
            JCTypeApply typeApply = ((JCTypeApply) typeTree);
            String typeName = typeApply.clazz.toString();
            String mappedTypeName = context.getTypeMappingTarget(typeName);
            if (mappedTypeName != null && mappedTypeName.endsWith("<>")) {
                print(typeName.substring(0, mappedTypeName.length() - 2));
                return this;
            }
            if (typeFullName.startsWith(TUPLE_CLASSES_PACKAGE + ".")) {
                print("[");
                for (JCExpression argument : typeApply.arguments) {
                    substituteAndPrintType(argument, arrayComponent, inTypeParameters, completeRawTypes, false)
                            .print(",");
                }
                if (typeApply.arguments.length() > 0) {
                    removeLastChar();
                }
                print("]");
                return this;
            }
            if (typeFullName.startsWith(UNION_CLASS_NAME)) {
                print("(");
                for (JCExpression argument : typeApply.arguments) {
                    print("(");
                    substituteAndPrintType(argument, arrayComponent, inTypeParameters, completeRawTypes, false);
                    print(")");
                    print("|");
                }
                if (typeApply.arguments.length() > 0) {
                    removeLastChar();
                }
                print(")");
                return this;
            }
            if (typeFullName.startsWith(UTIL_PACKAGE + ".") || typeFullName.startsWith("java.util.function.")) {
                if (typeName.endsWith("Consumer") || typeName.startsWith("Consumer")) {
                    if (arrayComponent) {
                        print("(");
                    }
                    print("(");
                    if (typeName.startsWith("Int") || typeName.startsWith("Long") || typeName.startsWith("Double")) {
                        print("p0 : number");
                    } else {
                        printArguments(typeApply.arguments);
                    }
                    print(") => void");
                    if (arrayComponent) {
                        print(")");
                    }
                    return this;
                } else if (typeName.endsWith("Function") || typeName.startsWith("Function")) {
                    if (arrayComponent) {
                        print("(");
                    }
                    print("(");
                    if (typeName.startsWith("Int") || typeName.startsWith("Long") || typeName.startsWith("Double")) {
                        print("p0 : number");
                    } else {
                        printArguments(typeApply.arguments.subList(0, typeApply.arguments.length() - 1));
                    }
                    print(") => ");
                    substituteAndPrintType(typeApply.arguments.get(typeApply.arguments.length() - 1),
                            arrayComponent, inTypeParameters, completeRawTypes, false);
                    if (arrayComponent) {
                        print(")");
                    }
                    return this;
                } else if (typeName.endsWith("Supplier") || typeName.startsWith("Supplier")) {
                    if (arrayComponent) {
                        print("(");
                    }
                    print("(");
                    print(") => ");
                    if (typeName.startsWith("Int") || typeName.startsWith("Long") || typeName.startsWith("Double")) {
                        print("number");
                    } else {
                        substituteAndPrintType(typeApply.arguments.get(0), arrayComponent, inTypeParameters,
                                completeRawTypes, false);
                    }
                    if (arrayComponent) {
                        print(")");
                    }
                    return this;
                } else if (typeName.endsWith("Predicate")) {
                    if (arrayComponent) {
                        print("(");
                    }
                    print("(");
                    if (typeName.startsWith("Int") || typeName.startsWith("Long") || typeName.startsWith("Double")) {
                        print("p0 : number");
                    } else {
                        printArguments(typeApply.arguments);
                    }
                    print(") => boolean");
                    if (arrayComponent) {
                        print(")");
                    }
                    return this;
                } else if (typeName.endsWith("Operator")) {
                    if (arrayComponent) {
                        print("(");
                    }
                    print("(");
                    printArgument(typeApply.arguments.head, 1);
                    if (typeName.startsWith("Binary")) {
                        print(", ");
                        printArgument(typeApply.arguments.head, 2);
                    }
                    print(") => ");
                    substituteAndPrintType(typeApply.arguments.head, arrayComponent, inTypeParameters,
                            completeRawTypes, false);
                    if (arrayComponent) {
                        print(")");
                    }
                    return this;
                }
            }
            if (typeFullName.startsWith(Class.class.getName() + "<")) {
                return print("any");
            }
        } else {
            if (!(typeTree instanceof JCArrayTypeTree) && typeFullName.startsWith("java.util.function.")) {
                // case of a raw functional type (programmer's mistake)
                return print("any");
            }
            String mappedType = context.getTypeMappingTarget(typeFullName);
            if (mappedType != null) {
                if (mappedType.endsWith("<>")) {
                    print(mappedType.substring(0, mappedType.length() - 2));
                } else {
                    print(mappedType);
                    if (completeRawTypes && !typeTree.type.tsym.getTypeParameters().isEmpty()
                            && !context.getTypeMappingTarget(typeFullName).equals("any")) {
                        printAnyTypeArguments(typeTree.type.tsym.getTypeParameters().size());
                    }
                }
                return this;
            }
        }
        for (BiFunction<ExtendedElement, String, Object> mapping : context.getFunctionalTypeMappings()) {
            Object mapped = mapping.apply(new ExtendedElementSupport(typeTree), typeFullName);
            if (mapped instanceof String) {
                print((String) mapped);
                return this;
            } else if (mapped instanceof JCTree) {
                substituteAndPrintType((JCTree) mapped);
                return this;
            } else if (mapped instanceof TypeMirror) {
                print(getAdapter().getMappedType((TypeMirror) mapped));
                return this;
            }
        }
    }
    if (typeTree instanceof JCTypeApply) {
        JCTypeApply typeApply = ((JCTypeApply) typeTree);
        substituteAndPrintType(typeApply.clazz, arrayComponent, inTypeParameters, false, disableSubstitution);
        if (!typeApply.arguments.isEmpty() && !"any".equals(getLastPrintedString(3))
                && !"Object".equals(getLastPrintedString(6))) {
            print("<");
            for (JCExpression argument : typeApply.arguments) {
                substituteAndPrintType(argument, arrayComponent, false, completeRawTypes, false).print(", ");
            }
            if (typeApply.arguments.length() > 0) {
                removeLastChars(2);
            }
            print(">");
        }
        return this;
    } else if (typeTree instanceof JCWildcard) {
        JCWildcard wildcard = ((JCWildcard) typeTree);
        String name = context.getWildcardName(wildcard);
        if (name == null) {
            return print("any");
        } else {
            print(name);
            if (inTypeParameters) {
                print(" extends ");
                return substituteAndPrintType(wildcard.getBound(), arrayComponent, false, completeRawTypes,
                        disableSubstitution);
            } else {
                return this;
            }
        }
    } else {
        if (typeTree instanceof JCArrayTypeTree) {
            return substituteAndPrintType(((JCArrayTypeTree) typeTree).elemtype, true, inTypeParameters,
                    completeRawTypes, disableSubstitution).print("[]");
        }
        if (completeRawTypes && typeTree.type.tsym.getTypeParameters() != null
                && !typeTree.type.tsym.getTypeParameters().isEmpty()) {
            // raw type case (Java warning)
            print(typeTree);
            print("<");
            for (int i = 0; i < typeTree.type.tsym.getTypeParameters().length(); i++) {
                print("any, ");
            }
            removeLastChars(2);
            print(">");
            return this;
        } else {
            return print(typeTree);
        }
    }
}
From source file: org.openecomp.sdc.be.components.impl.CommonImportManager.java

protected <ElementTypeDefinition> Either<List<ImmutablePair<ElementTypeDefinition, Boolean>>, ResponseFormat> createElementTypesByDao(
        List<ElementTypeDefinition> elementTypesToCreate,
        Function<ElementTypeDefinition, Either<ActionStatus, ResponseFormat>> validator,
        Function<ElementTypeDefinition, ImmutablePair<ElementTypeEnum, String>> elementInfoGetter,
        Function<String, Either<ElementTypeDefinition, StorageOperationStatus>> elementFetcher,
        Function<ElementTypeDefinition, Either<ElementTypeDefinition, StorageOperationStatus>> elementAdder,
        BiFunction<ElementTypeDefinition, ElementTypeDefinition, Either<ElementTypeDefinition, StorageOperationStatus>> elementUpgrader) {

    List<ImmutablePair<ElementTypeDefinition, Boolean>> createdElementTypes = new ArrayList<>();
    Either<List<ImmutablePair<ElementTypeDefinition, Boolean>>, ResponseFormat> eitherResult = Either
            .left(createdElementTypes);

    Iterator<ElementTypeDefinition> elementTypeItr = elementTypesToCreate.iterator();
    try {
        while (elementTypeItr.hasNext()) {
            ElementTypeDefinition elementType = elementTypeItr.next();
            final ImmutablePair<ElementTypeEnum, String> elementInfo = elementInfoGetter.apply(elementType);
            ElementTypeEnum elementTypeEnum = elementInfo.left;
            String elementName = elementInfo.right;

            Either<ActionStatus, ResponseFormat> validateElementType = validator.apply(elementType);
            if (validateElementType.isRight()) {
                ResponseFormat responseFormat = validateElementType.right().value();
                log.debug("Failed in validation of element type {}. Response is {}", elementType,
                        responseFormat.getFormattedMessage());
                eitherResult = Either.right(responseFormat);
                break;
            }
            log.info("send {} : {} to dao for create", elementTypeEnum.name(), elementName);

            Either<ElementTypeDefinition, StorageOperationStatus> findElementType = elementFetcher.apply(elementName);
            if (findElementType.isRight()) {
                StorageOperationStatus status = findElementType.right().value();
                log.debug("searched {} finished with result:{}", elementTypeEnum.name(), status.name());
                if (status != StorageOperationStatus.NOT_FOUND) {
                    ResponseFormat responseFormat = getResponseFormatForElementType(
                            convertFromStorageResponseForElementType(status, elementTypeEnum), elementTypeEnum,
                            elementType);
                    eitherResult = Either.right(responseFormat);
                    break;
                } else {
                    Either<ElementTypeDefinition, StorageOperationStatus> dataModelResponse = elementAdder
                            .apply(elementType);
                    if (dataModelResponse.isRight()) {
                        try {
                            BeEcompErrorManager.getInstance().logBeFailedAddingNodeTypeError("Create {}",
                                    elementTypeEnum.name());
                            log.debug("failed to create {}: {}", elementTypeEnum.name(), elementName);
                            if (dataModelResponse.right().value() != StorageOperationStatus.SCHEMA_VIOLATION) {
                                ResponseFormat responseFormat = getResponseFormatForElementType(
                                        convertFromStorageResponseForElementType(dataModelResponse.right().value(),
                                                elementTypeEnum),
                                        elementTypeEnum, elementType);
                                eitherResult = Either.right(responseFormat);
                                break;
                            } else {
                                createdElementTypes
                                        .add(new ImmutablePair<ElementTypeDefinition, Boolean>(elementType, false));
                            }
                        } finally {
                            propertyOperation.getTitanGenericDao().rollback();
                        }
                    } else {
                        propertyOperation.getTitanGenericDao().commit();
                        createdElementTypes.add(new ImmutablePair<ElementTypeDefinition, Boolean>(elementType, true));
                        log.debug("{} : {} was created successfully.", elementTypeEnum.name(), elementName);
                    }
                    if (!elementTypeItr.hasNext()) {
                        log.info("all {} were created successfully!!!", elementTypeEnum.name());
                    }
                }
            } else {
                if (elementUpgrader != null) {
                    Either<ElementTypeDefinition, StorageOperationStatus> upgradeResponse = null;
                    try {
                        upgradeResponse = elementUpgrader.apply(elementType, findElementType.left().value());
                        if (upgradeResponse.isRight()) {
                            StorageOperationStatus status = upgradeResponse.right().value();
                            if (status == StorageOperationStatus.OK) {
                                createdElementTypes
                                        .add(new ImmutablePair<ElementTypeDefinition, Boolean>(elementType, false));
                            } else {
                                ResponseFormat responseFormat = getResponseFormatForElementType(
                                        convertFromStorageResponseForElementType(upgradeResponse.right().value(),
                                                elementTypeEnum),
                                        elementTypeEnum, elementType);
                                eitherResult = Either.right(responseFormat);
                                break;
                            }
                        } else {
                            log.debug("{} : {} was upgraded successfully.", elementTypeEnum.name(), elementName);
                            createdElementTypes
                                    .add(new ImmutablePair<ElementTypeDefinition, Boolean>(elementType, true));
                        }
                    } finally {
                        if (upgradeResponse == null || upgradeResponse.isRight()) {
                            propertyOperation.getTitanGenericDao().rollback();
                        } else {
                            propertyOperation.getTitanGenericDao().commit();
                        }
                    }
                } else {
                    // mshitrit Once GroupType Versions are supported add code here
                    createdElementTypes.add(new ImmutablePair<ElementTypeDefinition, Boolean>(elementType, false));
                    log.debug("{} : {} already exists.", elementTypeEnum.name(), elementName);
                }
            }
        }
    } finally {
        if (eitherResult.isRight()) {
            propertyOperation.getTitanGenericDao().rollback();
        }
    }
    return eitherResult;
}
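The elementUpgrader here is a BiFunction that merges the incoming definition with the one already stored; apply is only invoked when the fetch finds an existing element, making the whole routine an upsert. A minimal sketch of that shape, with the element types simplified to String and illustrative names:

import java.util.HashMap;
import java.util.Map;
import java.util.function.BiFunction;

public class UpsertDemo {
    static final Map<String, String> STORE = new HashMap<>();

    // Inserts newValue, or lets the upgrader combine it with the existing value when the key is taken.
    static void upsert(String key, String newValue, BiFunction<String, String, String> upgrader) {
        String existing = STORE.get(key);
        if (existing == null) {
            STORE.put(key, newValue);
        } else if (upgrader != null) {
            STORE.put(key, upgrader.apply(newValue, existing)); // (incoming, stored) -> merged
        } // else: keep the stored value untouched, mirroring the "already exists" branch above
    }

    public static void main(String[] args) {
        upsert("capability", "v2", null);
        upsert("capability", "v3", (incoming, stored) -> stored + "->" + incoming);
        System.out.println(STORE.get("capability")); // v2->v3
    }
}

The JDK's Map.merge(key, value, remappingFunction) relies on the same BiFunction shape for this purpose, though it passes the arguments as (existing, new).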
From source file: com.ikanow.aleph2.data_import_manager.analytics.actors.DataBucketAnalyticsChangeActor.java

/**
 * Make various requests of the analytics module based on the message type
 * @param bucket
 * @param tech_module
 * @param m
 * @return - a future containing the reply or an error (they're the same type at this point hence can discard the Validation finally)
 */
protected static CompletableFuture<BucketActionReplyMessage> talkToAnalytics(final DataBucketBean bucket,
        final BucketActionMessage m, final String source, final AnalyticsContext context,
        final DataImportActorContext dim_context, final Tuple2<ActorRef, ActorSelection> me_sibling,
        final Map<String, Tuple2<SharedLibraryBean, String>> libs, // (if we're here then must be valid)
        final Validation<BasicMessageBean, Tuple2<IAnalyticsTechnologyModule, ClassLoader>> err_or_tech_module, // "pipeline element"
        final ILoggingService _logging_service) {
    final List<AnalyticThreadJobBean> jobs = bucket.analytic_thread().jobs();

    final BiFunction<Stream<AnalyticThreadJobBean>, Tuple2<Boolean, Boolean>, Stream<AnalyticThreadJobBean>> perJobSetup =
            (job_stream, existingbucket_bucketactive) -> {
                return job_stream
                        .filter(job -> existingbucket_bucketactive._1() || Optional.ofNullable(job.enabled()).orElse(true))
                        .filter(job -> !isBatchJobWithDependencies(bucket, job, existingbucket_bucketactive))
                        .peek(job -> setPerJobContextParams(job, context, libs)); //(WARNING: mutates context)
            };

    final ClassLoader saved_current_classloader = Thread.currentThread().getContextClassLoader();
    try {
        return err_or_tech_module.<CompletableFuture<BucketActionReplyMessage>>validation(
                // Error:
                error -> CompletableFuture.completedFuture(new BucketActionHandlerMessage(source, error)),
                // Normal
                techmodule_classloader -> {
                    final IAnalyticsTechnologyModule tech_module = techmodule_classloader._1();

                    if (shouldLog(m))
                        _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                ErrorUtils.lazyBuildMessage(false,
                                        () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                        () -> "talkToAnalytics", () -> null,
                                        () -> "Set active classloader=" + techmodule_classloader._2()
                                                + " class=" + tech_module.getClass()
                                                + " message=" + m.getClass().getSimpleName()
                                                + " bucket=" + bucket.full_name(),
                                        () -> Collections.emptyMap()));

                    Thread.currentThread().setContextClassLoader(techmodule_classloader._2());
                    tech_module.onInit(context);

                    // One final check before we do anything: are we allowed to run multi-node if we're trying
                    // By construction, all the jobs have the same setting, so:
                    final boolean multi_node_enabled = jobs.stream().findFirst()
                            .map(j -> j.multi_node_enabled()).orElse(false);
                    if (multi_node_enabled) {
                        if (!tech_module.supportsMultiNode(bucket, jobs, context)) {
                            return CompletableFuture.completedFuture(new BucketActionHandlerMessage(source,
                                    SharedErrorUtils.buildErrorMessage(source, m,
                                            ErrorUtils.get(AnalyticsErrorUtils.TRIED_TO_RUN_MULTI_NODE_ON_UNSUPPORTED_TECH,
                                                    bucket.full_name(), tech_module.getClass().getSimpleName()))));
                        }
                    }

                    return Patterns.match(m).<CompletableFuture<BucketActionReplyMessage>>andReturn()
                            .when(BucketActionMessage.BucketActionOfferMessage.class, msg -> {
                                final boolean accept_or_ignore = NodeRuleUtils.canRunOnThisNode(
                                        jobs.stream().map(j -> Optional.ofNullable(j.node_list_rules())), dim_context)
                                        && tech_module.canRunOnThisNode(bucket, jobs, context);
                                return CompletableFuture.completedFuture(accept_or_ignore
                                        ? new BucketActionReplyMessage.BucketActionWillAcceptMessage(source)
                                        : new BucketActionReplyMessage.BucketActionIgnoredMessage(source));
                            })
                            .when(BucketActionMessage.DeleteBucketActionMessage.class, msg -> {
                                //(note have already told the sibling about this)
                                final CompletableFuture<BasicMessageBean> top_level_result = tech_module
                                        .onDeleteThread(bucket, jobs, context);
                                final List<Tuple2<AnalyticThreadJobBean, CompletableFuture<BasicMessageBean>>> job_results =
                                        perJobSetup.apply(jobs.stream(), Tuples._2T(true, false))
                                                .map(job -> Tuples._2T(job,
                                                        (CompletableFuture<BasicMessageBean>) tech_module
                                                                .stopAnalyticJob(bucket, jobs, job, context)))
                                                .collect(Collectors.toList());
                                //(no need to call the context.completeJobOutput since we're deleting the bucket)
                                sendOnTriggerEventMessages(job_results, msg.bucket(),
                                        __ -> Optional.of(JobMessageType.stopping), me_sibling, _logging_service);
                                return combineResults(top_level_result,
                                        job_results.stream().map(jf -> jf._2()).collect(Collectors.toList()), source);
                            })
                            .when(BucketActionMessage.NewBucketActionMessage.class, msg -> {
                                final CompletableFuture<BasicMessageBean> top_level_result = tech_module
                                        .onNewThread(bucket, jobs, context, !msg.is_suspended());
                                return top_level_result.thenCompose(ret_val -> {
                                    if (!ret_val.success()) {
                                        return combineResults(top_level_result, Arrays.asList(), source);
                                    } else { // success, carry on
                                        // Firstly, tell the sibling
                                        if (null != me_sibling)
                                            me_sibling._2().tell(msg, me_sibling._1());

                                        final boolean starting_thread = msg.is_suspended() ? false
                                                : perJobSetup.apply(jobs.stream(), Tuples._2T(false, true))
                                                        .anyMatch(job -> _batch_types.contains(job.analytic_type()));

                                        if (starting_thread) {
                                            BasicMessageBean thread_start_result = tech_module
                                                    .onThreadExecute(bucket, jobs, Collections.emptyList(), context)
                                                    .join(); // (wait for completion before doing anything else)
                                            _logging_service.getSystemLogger(bucket).log(
                                                    thread_start_result.success() ? Level.INFO : Level.WARN,
                                                    ErrorUtils.lazyBuildMessage(false,
                                                            () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                            () -> "talkToAnalytics", () -> null,
                                                            () -> ErrorUtils.get(
                                                                    "Executing thread for bucket {0}, success={1} (error={2})",
                                                                    bucket.full_name(), thread_start_result.success(),
                                                                    thread_start_result.success() ? "none"
                                                                            : thread_start_result.message()),
                                                            () -> Collections.emptyMap()));
                                        }

                                        final List<Tuple2<AnalyticThreadJobBean, CompletableFuture<BasicMessageBean>>> job_results =
                                                msg.is_suspended() ? Collections.emptyList()
                                                        : perJobSetup.apply(jobs.stream(), Tuples._2T(false, true))
                                                                .map(job -> Tuples._2T(job,
                                                                        (CompletableFuture<BasicMessageBean>) tech_module
                                                                                .startAnalyticJob(bucket, jobs, job, context)))
                                                                .collect(Collectors.toList());

                                        // Only send on trigger events for messages that started
                                        sendOnTriggerEventMessages(job_results, msg.bucket(), j_r -> {
                                            _logging_service.getSystemLogger(bucket).log(
                                                    j_r._2().success() ? Level.INFO : Level.WARN,
                                                    ErrorUtils.lazyBuildMessage(false,
                                                            () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                            () -> "talkToAnalytics", () -> null,
                                                            () -> ErrorUtils.get(
                                                                    "Starting bucket:job {0}:{1} success={2}{3}",
                                                                    bucket.full_name(), j_r._1().name(), j_r._2().success(),
                                                                    j_r._2().success() ? ""
                                                                            : (" error = " + j_r._2().message())),
                                                            () -> Collections.emptyMap()));
                                            return j_r._2().success() ? Optional.of(JobMessageType.starting)
                                                    : Optional.empty();
                                        }, me_sibling, _logging_service);

                                        return combineResults(top_level_result,
                                                job_results.stream().map(jf -> jf._2()).collect(Collectors.toList()),
                                                source);
                                    }
                                });
                            })
                            .when(BucketActionMessage.UpdateBucketActionMessage.class, msg -> {
                                final CompletableFuture<BasicMessageBean> top_level_result = tech_module
                                        .onUpdatedThread(msg.old_bucket(), bucket, jobs, msg.is_enabled(),
                                                Optional.empty(), context);
                                return top_level_result.thenCompose(ret_val -> {
                                    if (!ret_val.success()) {
                                        return combineResults(top_level_result, Arrays.asList(), source);
                                    } else { // success, carry on
                                        // Firstly, tell the sibling
                                        if (null != me_sibling)
                                            me_sibling._2().tell(msg, me_sibling._1());

                                        final boolean starting_thread = !msg.is_enabled() ? false
                                                : perJobSetup.apply(jobs.stream(), Tuples._2T(true, true))
                                                        .filter(job -> Optional.ofNullable(job.enabled()).orElse(true))
                                                        .anyMatch(job -> _batch_types.contains(job.analytic_type()));

                                        if (starting_thread) {
                                            BasicMessageBean thread_start_result = tech_module
                                                    .onThreadExecute(bucket, jobs, Collections.emptyList(), context)
                                                    .join(); // (wait for completion before doing anything else)
                                            _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                                    ErrorUtils.lazyBuildMessage(false,
                                                            () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                            () -> "talkToAnalytics", () -> null,
                                                            () -> ErrorUtils.get(
                                                                    "Executing thread for bucket {0}, success={1} (error={2})",
                                                                    bucket.full_name(), thread_start_result.success(),
                                                                    thread_start_result.success() ? "none"
                                                                            : thread_start_result.message()),
                                                            () -> Collections.emptyMap()));
                                        }
                                        //(don't need the analog for stopping because the trigger will give me the notification once all jobs are completed)

                                        final List<Tuple2<AnalyticThreadJobBean, CompletableFuture<BasicMessageBean>>> job_results =
                                                perJobSetup.apply(jobs.stream(), Tuples._2T(true, msg.is_enabled()))
                                                        .map(job -> Tuples._2T(job,
                                                                (CompletableFuture<BasicMessageBean>) ((msg.is_enabled()
                                                                        && Optional.ofNullable(job.enabled()).orElse(true))
                                                                                ? tech_module.resumeAnalyticJob(bucket, jobs, job, context)
                                                                                : tech_module.suspendAnalyticJob(bucket, jobs, job, context))))
                                                        .collect(Collectors.toList());

                                        // Send all stop messages, and start messages for jobs that succeeded
                                        sendOnTriggerEventMessages(job_results, msg.bucket(), j_r -> {
                                            if (msg.is_enabled() && Optional.ofNullable(j_r._1().enabled()).orElse(true)) {
                                                _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                                        ErrorUtils.lazyBuildMessage(false,
                                                                () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                () -> "talkToAnalytics", () -> null,
                                                                () -> ErrorUtils.get(
                                                                        "Starting bucket:job {0}:{1} success={2}{3}",
                                                                        bucket.full_name(), j_r._1().name(), j_r._2().success(),
                                                                        j_r._2().success() ? ""
                                                                                : (" error = " + j_r._2().message())),
                                                                () -> Collections.emptyMap()));
                                                return j_r._2().success() ? Optional.of(JobMessageType.starting)
                                                        : Optional.empty();
                                            } else { // either stopping all, or have disabled certain jobs
                                                _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                                        ErrorUtils.lazyBuildMessage(false,
                                                                () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                () -> "talkToAnalytics", () -> null,
                                                                () -> ErrorUtils.get("Stopping bucket:job {0}:{1}",
                                                                        bucket.full_name(), j_r._1().name()),
                                                                () -> Collections.emptyMap()));
                                                if (msg.is_enabled()) { //(else stopping the entire bucket)
                                                    context.completeJobOutput(msg.bucket(), j_r._1());
                                                }
                                                return Optional.of(JobMessageType.stopping);
                                            }
                                        }, me_sibling, _logging_service);

                                        return combineResults(top_level_result,
                                                job_results.stream().map(jf -> jf._2()).collect(Collectors.toList()),
                                                source);
                                    }
                                });
                            })
                            .when(BucketActionMessage.PurgeBucketActionMessage.class, msg -> {
                                final CompletableFuture<BasicMessageBean> top_level_result = tech_module
                                        .onPurge(bucket, jobs, context);
                                // (don't need to tell the sibling about this)
                                return combineResults(top_level_result, Collections.emptyList(), source);
                            })
                            .when(BucketActionMessage.TestBucketActionMessage.class, msg -> {
                                final CompletableFuture<BasicMessageBean> top_level_result = tech_module
                                        .onTestThread(bucket, jobs, msg.test_spec(), context);
                                return top_level_result.thenCompose(ret_val -> {
                                    if (!ret_val.success()) {
                                        return combineResults(top_level_result, Arrays.asList(), source);
                                    } else { // success, carry on
                                        // Firstly, tell the sibling
                                        if (null != me_sibling)
                                            me_sibling._2().tell(msg, me_sibling._1());

                                        final List<Tuple2<AnalyticThreadJobBean, CompletableFuture<BasicMessageBean>>> job_results =
                                                perJobSetup.apply(jobs.stream(), Tuples._2T(false, true))
                                                        .map(job -> Tuples._2T(job,
                                                                (CompletableFuture<BasicMessageBean>) tech_module
                                                                        .startAnalyticJobTest(bucket, jobs, job, msg.test_spec(), context)))
                                                        .collect(Collectors.toList());

                                        // Only send on trigger events for messages that started
                                        sendOnTriggerEventMessages(job_results, msg.bucket(), j_r -> {
                                            _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                                    ErrorUtils.lazyBuildMessage(false,
                                                            () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                            () -> "talkToAnalytics", () -> null,
                                                            () -> ErrorUtils.get(
                                                                    "Starting test bucket:job {0}:{1} success={2}{3}",
                                                                    bucket.full_name(), j_r._1().name(), j_r._2().success(),
                                                                    j_r._2().success() ? ""
                                                                            : (" error = " + j_r._2().message())),
                                                            () -> Collections.emptyMap()));
                                            return j_r._2().success() ? Optional.of(JobMessageType.starting)
                                                    : Optional.empty();
                                        }, me_sibling, _logging_service);

                                        return combineResults(top_level_result,
                                                job_results.stream().map(jf -> jf._2()).collect(Collectors.toList()),
                                                source);
                                    }
                                });
                            })
                            .when(BucketActionMessage.PollFreqBucketActionMessage.class, msg -> {
                                final CompletableFuture<BasicMessageBean> top_level_result = tech_module
                                        .onPeriodicPoll(bucket, jobs, context);
                                //(don't need to tell trigger sibling about this)
                                return combineResults(top_level_result, Collections.emptyList(), source);
                            })
                            // Finally, a bunch of analytic messages (don't tell trigger sibling about any of these)
                            .when(BucketActionMessage.BucketActionAnalyticJobMessage.class,
                                    msg -> (JobMessageType.check_completion == msg.type()), msg -> {
                                        // Check whether these jobs are complete, send message back to sibling asynchronously
                                        //(note: don't use perJobSetup for these explicitly analytic event messages)
                                        final List<Tuple2<AnalyticThreadJobBean, CompletableFuture<Boolean>>> job_results =
                                                Optionals.ofNullable(msg.jobs()).stream()
                                                        .peek(job -> setPerJobContextParams(job, context, libs)) //(WARNING: mutates context)
                                                        .map(job -> Tuples._2T(job,
                                                                (CompletableFuture<Boolean>) tech_module
                                                                        .checkAnalyticJobProgress(msg.bucket(), msg.jobs(), job, context)))
                                                        .collect(Collectors.toList());

                                        // In addition (for now) just log the management results
                                        job_results.stream().forEach(jr -> {
                                            if (jr._2() instanceof ManagementFuture) {
                                                ManagementFuture<Boolean> jr2 = (ManagementFuture<Boolean>) jr._2();
                                                jr2.thenAccept(result -> {
                                                    if (result) {
                                                        jr2.getManagementResults().thenAccept(mgmt_results -> {
                                                            List<String> errs = mgmt_results.stream()
                                                                    .filter(res -> !res.success())
                                                                    .map(res -> res.message())
                                                                    .collect(Collectors.toList());
                                                            if (!errs.isEmpty()) {
                                                                _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                                                        ErrorUtils.lazyBuildMessage(false,
                                                                                () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                                () -> "talkToAnalytics", () -> null,
                                                                                () -> ErrorUtils.get(
                                                                                        "Completed bucket:job {0}:{1} had errors: {2}",
                                                                                        bucket.full_name(), jr._1().name(),
                                                                                        errs.stream().collect(Collectors.joining(";"))),
                                                                                () -> Collections.emptyMap()));
                                                            }
                                                        });
                                                    }
                                                });
                                            } //(it will always be)
                                        });

                                        sendOnTriggerEventMessages(job_results, msg.bucket(), t2 -> {
                                            if (t2._2()) {
                                                _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                                        ErrorUtils.lazyBuildMessage(false,
                                                                () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                () -> "talkToAnalytics", () -> null,
                                                                () -> ErrorUtils.get("Completed: bucket:job {0}:{1}",
                                                                        bucket.full_name(), t2._1().name()),
                                                                () -> Collections.emptyMap()));
                                                context.completeJobOutput(msg.bucket(), t2._1());
                                            }
                                            return t2._2() ? Optional.of(JobMessageType.stopping) : Optional.empty();
                                        }, me_sibling, _logging_service);

                                        // Send a status message (which will be ignored)
                                        return CompletableFuture.completedFuture(
                                                new BucketActionReplyMessage.BucketActionNullReplyMessage());
                                    })
                            .when(BucketActionMessage.BucketActionAnalyticJobMessage.class,
                                    msg -> (JobMessageType.starting == msg.type()) && (null == msg.jobs()), msg -> {
                                        // Received a start notification for the bucket
                                        //TODO (ALEPH-12): get the matching triggers into the message
                                        final CompletableFuture<BasicMessageBean> top_level_result = tech_module
                                                .onThreadExecute(msg.bucket(), jobs, Collections.emptyList(), context);
                                        //(ignore the reply apart from logging - failures will be identified by triggers)
                                        top_level_result.thenAccept(reply -> {
                                            if (!reply.success()) {
                                                _logging_service.getSystemLogger(bucket).log(Level.WARN,
                                                        ErrorUtils.lazyBuildMessage(false,
                                                                () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                () -> "talkToAnalytics", () -> null,
                                                                () -> ErrorUtils.get(
                                                                        "Error starting analytic thread {0}: message={1}",
                                                                        bucket.full_name(), reply.message()),
                                                                () -> Collections.emptyMap()));
                                            } else {
                                                _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                                        ErrorUtils.lazyBuildMessage(true,
                                                                () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                () -> "talkToAnalytics", () -> null,
                                                                () -> ErrorUtils.get("Started analytic thread {0}",
                                                                        bucket.full_name()),
                                                                () -> Collections.emptyMap()));
                                            }
                                        });

                                        // Now start any enabled jobs that have no dependencies
                                        final List<Tuple2<AnalyticThreadJobBean, CompletableFuture<BasicMessageBean>>> job_results =
                                                jobs.stream()
                                                        .filter(job -> Optional.ofNullable(job.enabled()).orElse(true))
                                                        .filter(job -> Optionals.ofNullable(job.dependencies()).isEmpty())
                                                        .peek(job -> setPerJobContextParams(job, context, libs)) //(WARNING: mutates context)
                                                        .map(job -> Tuples._2T(job,
                                                                (CompletableFuture<BasicMessageBean>) tech_module
                                                                        .startAnalyticJob(msg.bucket(), jobs, job, context)))
                                                        .collect(Collectors.toList());

                                        // Only send on trigger events for messages that started
                                        sendOnTriggerEventMessages(job_results, msg.bucket(), j_r -> {
                                            _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                                    ErrorUtils.lazyBuildMessage(false,
                                                            () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                            () -> "talkToAnalytics", () -> null,
                                                            () -> ErrorUtils.get(
                                                                    "Starting bucket:job {0}:{1} success={2}{3}",
                                                                    bucket.full_name(), j_r._1().name(), j_r._2().success(),
                                                                    j_r._2().success() ? ""
                                                                            : (" error = " + j_r._2().message())),
                                                            () -> Collections.emptyMap()));
                                            return j_r._2().success() ? Optional.of(JobMessageType.starting)
                                                    : Optional.empty();
                                        }, me_sibling, _logging_service);

                                        // Send a status message (which will be ignored)
                                        return CompletableFuture.completedFuture(
                                                new BucketActionReplyMessage.BucketActionNullReplyMessage());
                                    })
                            .when(BucketActionMessage.BucketActionAnalyticJobMessage.class,
                                    msg -> (JobMessageType.starting == msg.type()) && (null != msg.jobs()), msg -> {
                                        // Received a start notification for 1+ of the jobs
                                        //(note: don't use perJobSetup for these explicitly analytic event messages)
                                        final List<Tuple2<AnalyticThreadJobBean, CompletableFuture<BasicMessageBean>>> job_results =
                                                msg.jobs().stream()
                                                        .peek(job -> setPerJobContextParams(job, context, libs)) //(WARNING: mutates context)
                                                        .map(job -> Tuples._2T(job,
                                                                (CompletableFuture<BasicMessageBean>) tech_module
                                                                        .startAnalyticJob(msg.bucket(), jobs, job, context)))
                                                        .collect(Collectors.toList());

                                        //(ignore the reply apart from logging - failures will be identified by triggers)
                                        job_results.forEach(job_res -> {
                                            job_res._2().thenAccept(res -> {
                                                if (!res.success()) {
                                                    _logging_service.getSystemLogger(bucket).log(Level.WARN,
                                                            ErrorUtils.lazyBuildMessage(false,
                                                                    () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                    () -> "talkToAnalytics", () -> null,
                                                                    () -> ErrorUtils.get(
                                                                            "Error starting analytic job {0}:{1}: message={2}",
                                                                            bucket.full_name(), job_res._1().name(), res.message()),
                                                                    () -> Collections.emptyMap()));
                                                } else {
                                                    _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                                            ErrorUtils.lazyBuildMessage(true,
                                                                    () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                    () -> "talkToAnalytics", () -> null,
                                                                    () -> ErrorUtils.get("Started analytic job {0}:{1}",
                                                                            bucket.full_name(), job_res._1().name()),
                                                                    () -> Collections.emptyMap()));
                                                }
                                            });
                                        });

                                        // Send a status message (which will be ignored)
                                        return CompletableFuture.completedFuture(
                                                new BucketActionReplyMessage.BucketActionNullReplyMessage());
                                    })
                            .when(BucketActionMessage.BucketActionAnalyticJobMessage.class,
                                    msg -> (JobMessageType.stopping == msg.type()) && (null == msg.jobs()), msg -> {
                                        // Received a stop notification for the bucket
                                        // Complete the job output
                                        context.completeBucketOutput(msg.bucket());

                                        final CompletableFuture<BasicMessageBean> top_level_result = tech_module
                                                .onThreadComplete(msg.bucket(), jobs, context);
                                        //(ignore the reply apart from logging - failures will be identified by triggers)
                                        top_level_result.thenAccept(reply -> {
                                            if (!reply.success()) {
                                                _logging_service.getSystemLogger(bucket).log(Level.WARN,
                                                        ErrorUtils.lazyBuildMessage(false,
                                                                () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                () -> "talkToAnalytics", () -> null,
                                                                () -> ErrorUtils.get(
                                                                        "Error stopping analytic thread {0}: message={1}",
                                                                        bucket.full_name(), reply.message()),
                                                                () -> Collections.emptyMap()));
                                            } else {
                                                _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                                        ErrorUtils.lazyBuildMessage(true,
                                                                () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                () -> "talkToAnalytics", () -> null,
                                                                () -> ErrorUtils.get("Stopping analytic thread {0}",
                                                                        bucket.full_name()),
                                                                () -> Collections.emptyMap()));
                                            }
                                        });

                                        // Send a status message (which will be ignored)
                                        return CompletableFuture.completedFuture(
                                                new BucketActionReplyMessage.BucketActionNullReplyMessage());
                                    })
                            .when(BucketActionMessage.BucketActionAnalyticJobMessage.class,
                                    msg -> (JobMessageType.stopping == msg.type()) && (null != msg.jobs()), msg -> {
                                        final List<Tuple2<AnalyticThreadJobBean, CompletableFuture<BasicMessageBean>>> job_results =
                                                msg.jobs().stream()
                                                        .peek(job -> setPerJobContextParams(job, context, libs)) //(WARNING: mutates context)
                                                        .map(job -> Tuples._2T(job,
                                                                (CompletableFuture<BasicMessageBean>) tech_module
                                                                        .suspendAnalyticJob(msg.bucket(), jobs, job, context)))
                                                        .collect(Collectors.toList());

                                        //(ignore the reply apart from logging - failures will be identified by triggers)
                                        job_results.forEach(job_res -> {
                                            job_res._2().thenAccept(res -> {
                                                if (!res.success()) {
                                                    _logging_service.getSystemLogger(bucket).log(Level.WARN,
                                                            ErrorUtils.lazyBuildMessage(false,
                                                                    () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                    () -> "talkToAnalytics", () -> null,
                                                                    () -> ErrorUtils.get(
                                                                            "Error stopping analytic job {0}:{1}: message={2}",
                                                                            bucket.full_name(), job_res._1().name(), res.message()),
                                                                    () -> Collections.emptyMap()));
                                                } else {
                                                    _logging_service.getSystemLogger(bucket).log(Level.INFO,
                                                            ErrorUtils.lazyBuildMessage(true,
                                                                    () -> DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                                    () -> "talkToAnalytics", () -> null,
                                                                    () -> ErrorUtils.get("Stopping analytic job {0}:{1}",
                                                                            bucket.full_name(), job_res._1().name()),
                                                                    () -> Collections.emptyMap()));
                                                }
                                            });
                                        });

                                        // Send a status message (which will be ignored)
                                        return CompletableFuture.completedFuture(
                                                new BucketActionReplyMessage.BucketActionNullReplyMessage());
                                    })
                            .when(BucketActionMessage.BucketActionAnalyticJobMessage.class,
                                    msg -> (JobMessageType.deleting == msg.type()), msg -> {
                                        // This is different because it happens as part of a user action related to buckets,
                                        // whereas stopping occurs based on trigger related actions
                                        final CompletableFuture<BasicMessageBean> top_level_result = CompletableFuture
                                                .completedFuture(ErrorUtils.buildSuccessMessage(
                                                        DataBucketAnalyticsChangeActor.class.getSimpleName(),
                                                        "BucketActionAnalyticJobMessage:deleting", ""));
                                        final List<CompletableFuture<BasicMessageBean>> job_results = Optionals
                                                .ofNullable(msg.jobs()).stream()
                                                .map(job -> tech_module.suspendAnalyticJob(bucket, jobs, job, context))
                                                .collect(Collectors.toList());
                                        // Hence do return a legit reply message here
                                        return combineResults(top_level_result, job_results, source);
                                    })
                            .otherwise(msg -> { // return "command not recognized" error
                                return CompletableFuture.completedFuture(new BucketActionHandlerMessage(source,
                                        SharedErrorUtils.buildErrorMessage(source, m,
                                                AnalyticsErrorUtils.MESSAGE_NOT_RECOGNIZED, bucket.full_name(),
                                                m.getClass().getSimpleName())));
                            });
                });
    } catch (Throwable e) { // (trying to use Validation to avoid this, but just in case...)
        return CompletableFuture.completedFuture(new BucketActionHandlerMessage(source,
                SharedErrorUtils.buildErrorMessage(source, m, ErrorUtils.getLongForm(
                        SharedErrorUtils.ERROR_LOADING_CLASS, e, err_or_tech_module.success()._1().getClass()))));
    } finally {
        Thread.currentThread().setContextClassLoader(saved_current_classloader);
    }
}
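In this excerpt the local perJobSetup BiFunction takes a Stream of jobs plus a Tuple2<Boolean, Boolean> of flags and returns a filtered stream, so the same setup logic can be reused with different flag combinations across the many message handlers. A minimal sketch of that stream-filtering pattern in plain Java (a record stands in for the Tuple2, so this needs Java 16+; all names are illustrative):

import java.util.List;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class PerJobSetupDemo {
    record Job(String name, boolean enabled) {}
    record Flags(boolean existingBucket, boolean bucketActive) {} // stands in for Tuple2<Boolean, Boolean>

    public static void main(String[] args) {
        List<Job> jobs = List.of(new Job("harvest", true), new Job("enrich", false));

        // Shared setup logic, parameterized by the flags each call site supplies.
        BiFunction<Stream<Job>, Flags, Stream<Job>> perJobSetup = (jobStream, flags) ->
                jobStream.filter(job -> flags.existingBucket() || job.enabled());

        // A "new bucket" handler only sees enabled jobs...
        System.out.println(perJobSetup.apply(jobs.stream(), new Flags(false, true))
                .map(Job::name).collect(Collectors.toList())); // [harvest]
        // ...while an "existing bucket" handler sees all of them.
        System.out.println(perJobSetup.apply(jobs.stream(), new Flags(true, false))
                .map(Job::name).collect(Collectors.toList())); // [harvest, enrich]
    }
}

Because apply returns a fresh Stream each time, the lambda can be invoked repeatedly; the original code relies on exactly this to apply the same peek/filter chain in a dozen different handlers.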