Example usage for java.util Optional filter

List of usage examples for java.util Optional filter

Introduction

In this page you can find the example usage for java.util Optional filter.

Prototype

public Optional<T> filter(Predicate<? super T> predicate) 

Source Link

Document

If a value is present, and the value matches the given predicate, returns an Optional describing the value; otherwise returns an empty Optional.

Usage

From source file:alfio.manager.CheckInManager.java

/**
 * Validates a scanned ticket against the given event and returns the appropriate check-in status.
 * <p>
 * Checks are applied in order: event exists, ticket exists, a non-empty ticket code was supplied,
 * the ticket category's check-in window includes "now" (in the event's time zone), the scanned
 * code matches the ticket's real code, and finally the ticket status permits check-in.
 *
 * @param maybeEvent       the event the ticket is being checked in for, if found
 * @param maybeTicket      the ticket matching {@code ticketIdentifier}, if found
 * @param ticketIdentifier the ticket uuid, used here only for error reporting
 * @param ticketCode       the code scanned from the QR code, if any
 * @return a {@link TicketAndCheckInResult} carrying either the failure reason or OK_READY_TO_BE_CHECKED_IN
 */
private TicketAndCheckInResult extractStatus(Optional<Event> maybeEvent, Optional<Ticket> maybeTicket,
        String ticketIdentifier, Optional<String> ticketCode) {

    if (!maybeEvent.isPresent()) {
        return new TicketAndCheckInResult(null, new DefaultCheckInResult(EVENT_NOT_FOUND, "Event not found"));
    }

    if (!maybeTicket.isPresent()) {
        return new TicketAndCheckInResult(null, new DefaultCheckInResult(TICKET_NOT_FOUND,
                "Ticket with uuid " + ticketIdentifier + " not found"));
    }

    // a non-empty code is mandatory: it is compared with the ticket's real code further down
    if (!ticketCode.filter(StringUtils::isNotEmpty).isPresent()) {
        return new TicketAndCheckInResult(null,
                new DefaultCheckInResult(EMPTY_TICKET_CODE, "Missing ticket code"));
    }

    Ticket ticket = maybeTicket.get();
    Event event = maybeEvent.get();
    String code = ticketCode.get();

    TicketCategory tc = ticketCategoryRepository.getById(ticket.getCategoryId());

    // all date handling is done in the event's own time zone
    ZonedDateTime now = ZonedDateTime.now(event.getZoneId());
    if (!tc.hasValidCheckIn(now, event.getZoneId())) {
        // FIX: "HH" (hour-of-day, 0-23) instead of "hh" (clock-hour, 1-12): the original pattern
        // had no am/pm marker, so afternoon times rendered ambiguously (e.g. 14:30 as "02:30")
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("dd/MM/yyyy - HH:mm");
        String from = tc.getValidCheckInFrom() == null ? ".."
                : formatter.format(tc.getValidCheckInFrom(event.getZoneId()));
        String to = tc.getValidCheckInTo() == null ? ".."
                : formatter.format(tc.getValidCheckInTo(event.getZoneId()));
        String formattedNow = formatter.format(now);
        return new TicketAndCheckInResult(ticket,
                new DefaultCheckInResult(INVALID_TICKET_CATEGORY_CHECK_IN_DATE, String.format(
                        "Invalid check-in date: valid range for category %s is from %s to %s, current time is: %s",
                        tc.getName(), from, to, formattedNow)));
    }

    log.trace("scanned code is {}", code);
    log.trace("true code    is {}", ticket.ticketCode(event.getPrivateKey()));

    if (!code.equals(ticket.ticketCode(event.getPrivateKey()))) {
        return new TicketAndCheckInResult(null,
                new DefaultCheckInResult(INVALID_TICKET_CODE, "Ticket qr code does not match"));
    }

    final TicketStatus ticketStatus = ticket.getStatus();

    if (ticketStatus == TicketStatus.TO_BE_PAID) {
        return new TicketAndCheckInResult(ticket, new OnSitePaymentResult(MUST_PAY, "Must pay for ticket",
                MonetaryUtil.centsToUnit(ticket.getFinalPriceCts()), event.getCurrency()));
    }

    if (ticketStatus == TicketStatus.CHECKED_IN) {
        return new TicketAndCheckInResult(ticket,
                new DefaultCheckInResult(ALREADY_CHECK_IN, "Error: already checked in"));
    }

    // reuse the status captured above instead of re-reading it, for consistency with the prior checks
    if (ticketStatus != TicketStatus.ACQUIRED) {
        return new TicketAndCheckInResult(ticket, new DefaultCheckInResult(INVALID_TICKET_STATE,
                "Invalid ticket state, expected ACQUIRED state, received " + ticketStatus));
    }

    return new TicketAndCheckInResult(ticket,
            new DefaultCheckInResult(OK_READY_TO_BE_CHECKED_IN, "Ready to be checked in"));
}

From source file:alfio.controller.api.ReservationApiController.java

/**
 * Validates an EU VAT number for a reservation and, when valid, persists the resulting billing data.
 * <p>
 * Returns 200 with the {@link VatDetail} when the VAT number is valid, 400 when the check ran but
 * the number is invalid, and 404 when the event/reservation lookup or the VAT-checking preconditions
 * (VAT status INCLUDED/NOT_INCLUDED, checking enabled for the organization) did not produce a result.
 */
@RequestMapping(value = "/event/{eventName}/reservation/{reservationId}/vat-validation", method = RequestMethod.POST)
@Transactional
public ResponseEntity<VatDetail> validateEUVat(@PathVariable("eventName") String eventName,
        @PathVariable("reservationId") String reservationId, PaymentForm paymentForm, Locale locale,
        HttpServletRequest request) {

    String country = paymentForm.getVatCountryCode();
    // resolve (event, reservation), gate on VAT status / checker enablement, then run the VAT check
    Optional<Triple<Event, TicketReservation, VatDetail>> vatDetail = eventRepository
            .findOptionalByShortName(eventName)
            .flatMap(e -> ticketReservationRepository.findOptionalReservationById(reservationId)
                    .map(r -> Pair.of(e, r)))
            .filter(e -> EnumSet.of(INCLUDED, NOT_INCLUDED).contains(e.getKey().getVatStatus()))
            .filter(e -> vatChecker.isVatCheckingEnabledFor(e.getKey().getOrganizationId()))
            .flatMap(e -> vatChecker.checkVat(paymentForm.getVatNr(), country, e.getKey().getOrganizationId())
                    .map(vd -> Triple.of(e.getLeft(), e.getRight(), vd)));

    // side effects run only for a VALID vat number: update billing data, regenerate the order
    // summary, refresh the reservation contact fields, and pre-assign tickets where possible
    vatDetail.filter(t -> t.getRight().isValid()).ifPresent(t -> {
        VatDetail vd = t.getRight();
        String billingAddress = vd.getName() + "\n" + vd.getAddress();
        PriceContainer.VatStatus vatStatus = determineVatStatus(t.getLeft().getVatStatus(),
                t.getRight().isVatExempt());
        ticketReservationRepository.updateBillingData(vatStatus, vd.getVatNr(), country,
                paymentForm.isInvoiceRequested(), reservationId);
        OrderSummary orderSummary = ticketReservationManager.orderSummaryForReservationId(reservationId,
                t.getLeft(), Locale.forLanguageTag(t.getMiddle().getUserLanguage()));
        ticketReservationRepository.addReservationInvoiceOrReceiptModel(reservationId,
                Json.toJson(orderSummary));
        ticketReservationRepository.updateTicketReservation(reservationId, t.getMiddle().getStatus().name(),
                paymentForm.getEmail(), paymentForm.getFullName(), paymentForm.getFirstName(),
                paymentForm.getLastName(), locale.getLanguage(), billingAddress, null,
                Optional.ofNullable(paymentForm.getPaymentMethod()).map(PaymentProxy::name).orElse(null));
        // pre-assign a ticket only when the owner data is complete enough (email + name)
        paymentForm.getTickets().forEach((ticketId, owner) -> {
            if (isNotEmpty(owner.getEmail())
                    && ((isNotEmpty(owner.getFirstName()) && isNotEmpty(owner.getLastName()))
                            || isNotEmpty(owner.getFullName()))) {
                ticketHelper.preAssignTicket(eventName, reservationId, ticketId, owner, Optional.empty(),
                        request, (tr) -> {
                        }, Optional.empty());
            }
        });
    });

    return vatDetail.map(Triple::getRight).map(vd -> {
        if (vd.isValid()) {
            return ResponseEntity.ok(vd);
        } else {
            return new ResponseEntity<VatDetail>(HttpStatus.BAD_REQUEST);
        }
    }).orElseGet(() -> new ResponseEntity<>(HttpStatus.NOT_FOUND));
}

From source file:com.ikanow.aleph2.analytics.hadoop.assets.SampleReduceEnrichmentModule.java

/**
 * Stage setup for this reduce-capable enrichment module: infers whether this instance is running
 * as the map, combine or reduce stage from the (previous, next) processing-stage pair, then
 * resolves the grouping key fields (from the framework-supplied fields, falling back to the
 * module config override).
 */
@Override
public void onStageInitialize(IEnrichmentModuleContext context, DataBucketBean bucket,
        EnrichmentControlMetadataBean control, Tuple2<ProcessingStage, ProcessingStage> previous_next,
        Optional<List<String>> next_grouping_fields) {

    _context.set(context);

    // Infer what the stage is from the grouping info

    // input -> ... -> chain (map) -> grouping -> chain (combine) -> grouping -> chain (reduce) -> ...
    // input -> ... -> chain (map) -> grouping -> chain (reduce) -> ...

    // (grouping, grouping) => combine; (grouping, *) => reduce; (*, grouping) => map
    _stage.set(Patterns.match(previous_next).<Stage>andReturn()
            .when(t2 -> t2.equals(Tuples._2T(ProcessingStage.grouping, ProcessingStage.grouping)),
                    __ -> Stage.combine)
            .when(t2 -> ProcessingStage.grouping == t2._1(), // (grouping,*)
                    __ -> Stage.reduce)
            .when(t2 -> ProcessingStage.grouping == t2._2(), // (*.grouping)
                    __ -> Stage.map)
            .otherwiseAssert());

    _logger.info("STAGE = " + _stage + "(from = " + previous_next);

    // missing config map is treated as an empty config
    final ConfigBean config = BeanTemplateUtils
            .from(Optional.ofNullable(control.config()).orElse(Collections.emptyMap()), ConfigBean.class).get();

    // prefer the framework-supplied grouping fields; an empty list means "auto" and falls
    // back to the config override (if neither is present, _key_fields is left untouched)
    next_grouping_fields.filter(ngf -> !ngf.isEmpty()) //if empty then in auto mode
            .map(Optional::of).orElseGet(() -> Optional.ofNullable(config.key_field_override))
            .ifPresent(kf -> _key_fields.set(kf));

    _logger.info("NEXT GROUPING FIELDS = " + _key_fields + "( from = " + next_grouping_fields);

}

From source file:com.ikanow.aleph2.search_service.elasticsearch.services.ElasticsearchIndexService.java

/**
 * Exposes underlying platform drivers: the raw Elasticsearch {@code Client}, or an
 * {@code IAnalyticsAccessContext} wrapping either a Hadoop {@code InputFormat} or a Spark
 * {@code DataFrame} accessor, built from a "owner:bucket:json_config" driver-options string.
 * Returns {@code Optional.empty()} for any unsupported driver class or malformed options.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public <T> Optional<T> getUnderlyingPlatformDriver(final Class<T> driver_class,
        final Optional<String> driver_options) {
    if (Client.class.isAssignableFrom(driver_class)) {
        return (Optional<T>) Optional.of(_crud_factory.getClient());
    }
    if (IAnalyticsAccessContext.class.isAssignableFrom(driver_class)) {
        // options are "owner:bucket:config" - the limit of 3 keeps any ':' inside the JSON config intact
        final String[] owner_bucket_config = driver_options.orElse("unknown:/unknown:{}").split(":", 3);

        if (InputFormat.class.isAssignableFrom(
                AnalyticsUtils.getTypeName((Class<? extends IAnalyticsAccessContext>) driver_class))) { // INPUT FORMAT
            // NOTE(review): this branch and the DataFrame one below differ only in the
            // getInputFormat/getDataFrame call - keep them in sync if either changes
            return (Optional<T>) driver_options.filter(__ -> 3 == owner_bucket_config.length)
                    .map(__ -> BeanTemplateUtils.from(owner_bucket_config[2],
                            AnalyticThreadJobBean.AnalyticThreadJobInputBean.class))
                    .map(job_input -> ElasticsearchHadoopUtils.getInputFormat(_crud_factory.getClient(),
                            job_input.get()))
                    .map(access_context -> AnalyticsUtils.injectImplementation(
                            (Class<? extends IAnalyticsAccessContext>) driver_class, access_context));
        } else if (DataFrame.class.isAssignableFrom(
                AnalyticsUtils.getTypeName((Class<? extends IAnalyticsAccessContext>) driver_class))) { // SCHEMA RDD
            return (Optional<T>) driver_options.filter(__ -> 3 == owner_bucket_config.length)
                    .map(__ -> BeanTemplateUtils.from(owner_bucket_config[2],
                            AnalyticThreadJobBean.AnalyticThreadJobInputBean.class))
                    .map(job_input -> ElasticsearchSparkUtils.getDataFrame(_crud_factory.getClient(),
                            job_input.get()))
                    .map(access_context -> AnalyticsUtils.injectImplementation(
                            (Class<? extends IAnalyticsAccessContext>) driver_class, access_context));
        }
    }
    return Optional.empty();
}

From source file:alfio.controller.EventController.java

/**
 * Renders the public page for an event: resolves special-price / promo codes from the session,
 * builds the list of saleable ticket categories (hiding access-restricted ones unless unlocked by
 * a special code), loads additional services, geo/maps configuration and descriptions, and
 * populates the model for the "show-event" template. Redirects to "/" when the event does not
 * exist or is disabled.
 */
@RequestMapping(value = "/event/{eventName}", method = { RequestMethod.GET, RequestMethod.HEAD })
public String showEvent(@PathVariable("eventName") String eventName, Model model, HttpServletRequest request,
        Locale locale) {

    return eventRepository.findOptionalByShortName(eventName)
            .filter(e -> e.getStatus() != Event.Status.DISABLED).map(event -> {
                // special-price and promo codes were stored in the session by earlier requests
                Optional<String> maybeSpecialCode = SessionUtil.retrieveSpecialPriceCode(request);
                Optional<SpecialPrice> specialCode = maybeSpecialCode
                        .flatMap((trimmedCode) -> specialPriceRepository.getByCode(trimmedCode));

                Optional<PromoCodeDiscount> promoCodeDiscount = SessionUtil
                        .retrievePromotionCodeDiscount(request).flatMap((code) -> promoCodeRepository
                                .findPromoCodeInEventOrOrganization(event.getId(), code));

                final ZonedDateTime now = ZonedDateTime.now(event.getZoneId());
                //hide access restricted ticket categories
                List<TicketCategory> ticketCategories = ticketCategoryRepository
                        .findAllTicketCategories(event.getId());
                Map<Integer, String> categoriesDescription = ticketCategoryDescriptionRepository
                        .descriptionsByTicketCategory(ticketCategories.stream().map(TicketCategory::getId)
                                .collect(Collectors.toList()), locale.getLanguage());

                // an access-restricted category is shown only when the session's special code unlocks it
                List<SaleableTicketCategory> saleableTicketCategories = ticketCategories.stream()
                        .filter((c) -> !c.isAccessRestricted() || (specialCode
                                .filter(sc -> sc.getTicketCategoryId() == c.getId()).isPresent()))
                        .map((m) -> new SaleableTicketCategory(m,
                                categoriesDescription.getOrDefault(m.getId(), ""), now, event,
                                ticketReservationManager.countAvailableTickets(event, m),
                                configurationManager.getIntConfigValue(
                                        Configuration.from(event.getOrganizationId(), event.getId(), m.getId(),
                                                ConfigurationKeys.MAX_AMOUNT_OF_TICKETS_BY_RESERVATION),
                                        5),
                                promoCodeDiscount.filter(promoCode -> shouldApplyDiscount(promoCode, m))
                                        .orElse(null)))
                        .collect(Collectors.toList());
                //

                // geo/maps configuration used to render the event location
                final int orgId = event.getOrganizationId();
                final int eventId = event.getId();
                Map<ConfigurationKeys, Optional<String>> geoInfoConfiguration = configurationManager
                        .getStringConfigValueFrom(
                                Configuration.from(orgId, eventId, ConfigurationKeys.MAPS_PROVIDER),
                                Configuration.from(orgId, eventId, ConfigurationKeys.MAPS_CLIENT_API_KEY),
                                Configuration.from(orgId, eventId, ConfigurationKeys.MAPS_HERE_APP_ID),
                                Configuration.from(orgId, eventId, ConfigurationKeys.MAPS_HERE_APP_CODE));

                LocationDescriptor ld = LocationDescriptor.fromGeoData(event.getLatLong(),
                        TimeZone.getTimeZone(event.getTimeZone()), geoInfoConfiguration);

                // decides whether the "enter promo code" UI element is displayed at all
                final boolean hasAccessPromotions = ticketCategoryRepository
                        .countAccessRestrictedRepositoryByEventId(event.getId()) > 0
                        || promoCodeRepository.countByEventAndOrganizationId(event.getId(),
                                event.getOrganizationId()) > 0;

                String eventDescription = eventDescriptionRepository
                        .findDescriptionByEventIdTypeAndLocale(event.getId(),
                                EventDescription.EventDescriptionType.DESCRIPTION, locale.getLanguage())
                        .orElse("");

                final EventDescriptor eventDescriptor = new EventDescriptor(event, eventDescription);
                List<SaleableTicketCategory> expiredCategories = saleableTicketCategories.stream()
                        .filter(SaleableTicketCategory::getExpired).collect(Collectors.toList());
                List<SaleableTicketCategory> validCategories = saleableTicketCategories.stream()
                        .filter(tc -> !tc.getExpired()).collect(Collectors.toList());
                List<SaleableAdditionalService> additionalServices = additionalServiceRepository
                        .loadAllForEvent(event.getId()).stream().map((as) -> getSaleableAdditionalService(event,
                                locale, as, promoCodeDiscount.orElse(null)))
                        .collect(Collectors.toList());
                Predicate<SaleableTicketCategory> waitingQueueTargetCategory = tc -> !tc.getExpired()
                        && !tc.isBounded();
                boolean validPaymentConfigured = isEventHasValidPaymentConfigurations(event,
                        configurationManager);

                List<SaleableAdditionalService> notExpiredServices = additionalServices.stream()
                        .filter(SaleableAdditionalService::isNotExpired).collect(Collectors.toList());

                // donations are indexed after supplements, hence the supplements.size() offset
                List<SaleableAdditionalService> supplements = adjustIndex(0,
                        notExpiredServices.stream()
                                .filter(a -> a.getType() == AdditionalService.AdditionalServiceType.SUPPLEMENT)
                                .collect(Collectors.toList()));
                List<SaleableAdditionalService> donations = adjustIndex(supplements.size(),
                        notExpiredServices.stream()
                                .filter(a -> a.getType() == AdditionalService.AdditionalServiceType.DONATION)
                                .collect(Collectors.toList()));

                // populate the model consumed by the show-event template
                model.addAttribute("event", eventDescriptor)//
                        .addAttribute("organization", organizationRepository.getById(event.getOrganizationId()))
                        .addAttribute("ticketCategories", validCategories)//
                        .addAttribute("expiredCategories", expiredCategories)//
                        .addAttribute("containsExpiredCategories", !expiredCategories.isEmpty())//
                        .addAttribute("showNoCategoriesWarning", validCategories.isEmpty())
                        .addAttribute("hasAccessPromotions", hasAccessPromotions)
                        .addAttribute("promoCode", specialCode.map(SpecialPrice::getCode).orElse(null))
                        .addAttribute("locationDescriptor", ld)
                        .addAttribute("pageTitle", "show-event.header.title")
                        .addAttribute("hasPromoCodeDiscount", promoCodeDiscount.isPresent())
                        .addAttribute("promoCodeDiscount", promoCodeDiscount.orElse(null))
                        .addAttribute("displayWaitingQueueForm",
                                EventUtil.displayWaitingQueueForm(event, saleableTicketCategories,
                                        configurationManager, eventStatisticsManager.noSeatsAvailable()))
                        .addAttribute("displayCategorySelectionForWaitingQueue",
                                saleableTicketCategories.stream().filter(waitingQueueTargetCategory)
                                        .count() > 1)
                        .addAttribute("unboundedCategories",
                                saleableTicketCategories.stream().filter(waitingQueueTargetCategory)
                                        .collect(Collectors.toList()))
                        .addAttribute("preSales", EventUtil.isPreSales(event, saleableTicketCategories))
                        .addAttribute("userLanguage", locale.getLanguage())
                        .addAttribute("showAdditionalServices", !notExpiredServices.isEmpty())
                        .addAttribute("showAdditionalServicesDonations", !donations.isEmpty())
                        .addAttribute("showAdditionalServicesSupplements", !supplements.isEmpty())
                        .addAttribute("enabledAdditionalServicesDonations", donations)
                        .addAttribute("enabledAdditionalServicesSupplements", supplements)
                        .addAttribute("forwardButtonDisabled",
                                (saleableTicketCategories.stream()
                                        .noneMatch(SaleableTicketCategory::getSaleable))
                                        || !validPaymentConfigured)
                        .addAttribute("useFirstAndLastName", event.mustUseFirstAndLastName())
                        .addAttribute("validityStart", event.getBegin())
                        .addAttribute("validityEnd", event.getEnd());

                model.asMap().putIfAbsent("hasErrors", false);//
                return "/event/show-event";
            }).orElse(REDIRECT + "/");
}

From source file:com.ikanow.aleph2.analytics.services.AnalyticsContext.java

/** Gets the secondary buffer (deletes any existing data, and switches to "ping" on an uninitialized index)
 *  NOTE: CAN HAVE SIDE EFFECTS IF UNINITIALIZED - on first use it creates the ping/pong buffers and
 *  switches the primary to "ping" (asynchronously; a failed switch is only logged)
 * @param bucket the bucket whose buffers are being managed
 * @param job - if present _and_ points to transient output, then returns the buffers for that transient output, else for the entire bucket
 * @param need_ping_pong_buffer - based on the job.output
 * @param data_service the data service that owns the primary/secondary buffer names
 * @return the name of the buffer to write into (the one that is NOT currently primary), or empty when no ping/pong buffering is needed
 */
protected Optional<String> getSecondaryBuffer(final DataBucketBean bucket,
        final Optional<AnalyticThreadJobBean> job, final boolean need_ping_pong_buffer,
        final IGenericDataService data_service) {
    if (need_ping_pong_buffer) {
        // per-job buffer names only apply to transient job output; otherwise operate bucket-wide
        final Optional<String> job_name = job
                .filter(j -> Optionals.of(() -> j.output().is_transient()).orElse(false)).map(j -> j.name());
        final Optional<String> write_buffer = data_service.getPrimaryBufferName(bucket, job_name)
                .map(Optional::of).orElseGet(() -> { // Two cases:

                    final Set<String> secondaries = data_service.getSecondaryBuffers(bucket, job_name);
                    final int ping_pong_count = (secondaries.contains(IGenericDataService.SECONDARY_PING) ? 1
                            : 0) + (secondaries.contains(IGenericDataService.SECONDARY_PONG) ? 1 : 0);

                    if (1 == ping_pong_count) { // 1) one of ping/pong exists but not the other ... this is the file case where we can't tell what the primary actually is
                        if (secondaries.contains(IGenericDataService.SECONDARY_PONG)) { //(eg pong is secondary so ping must be primary)
                            return Optional.of(IGenericDataService.SECONDARY_PING);
                        } else
                            return Optional.of(IGenericDataService.SECONDARY_PONG);
                    } else { // 2) all other cases: this is the ES case, where we just use an alias to switch ..
                        // So here there are side effects
                        if (_state_name == State.IN_MODULE) { // this should not happen (unless the data service doesn't support secondary buffers)
                            _logger.warn(ErrorUtils.get(
                                    "Startup case: no primary buffer for bucket:job {0}:{1} service {2}, number of secondary buffers = {3} (ping/pong={4}, secondaries={5})",
                                    bucket.full_name(), job_name.orElse("(none)"),
                                    data_service.getClass().getSimpleName(), ping_pong_count,
                                    need_ping_pong_buffer,
                                    secondaries.stream().collect(Collectors.joining(";"))));
                        } else {
                            _logger.info(ErrorUtils.get(
                                    "Startup case: no primary buffer for bucket:job {0}:{1} service {2}, number of secondary buffers = {3} (ping/pong={4})",
                                    bucket.full_name(), job_name.orElse("(none)"),
                                    data_service.getClass().getSimpleName(), ping_pong_count,
                                    need_ping_pong_buffer));
                        }

                        // ... but we don't currently have a primary so need to build that
                        if (0 == ping_pong_count) { // first time through, create the buffers:
                            data_service.getWritableDataService(JsonNode.class, bucket, Optional.empty(),
                                    Optional.of(IGenericDataService.SECONDARY_PONG));
                            data_service.getWritableDataService(JsonNode.class, bucket, Optional.empty(),
                                    Optional.of(IGenericDataService.SECONDARY_PING));
                        }
                        // arbitrarily promote "ping" to primary; the switch completes asynchronously
                        final Optional<String> curr_primary = Optional.of(IGenericDataService.SECONDARY_PING);
                        final CompletableFuture<BasicMessageBean> future_res = data_service
                                .switchCrudServiceToPrimaryBuffer(bucket, curr_primary, Optional.empty(),
                                        job_name);
                        future_res.thenAccept(res -> {
                            if (!res.success()) {
                                _logger.warn(ErrorUtils.get(
                                        "Error switching between ping/pong buffers (service {0}: ",
                                        data_service.getClass().getSimpleName()) + res.message());
                            }
                        });
                        return curr_primary;
                    }
                }).map(curr_pri -> { // then just pick the buffer that isn't the primary
                    if (IGenericDataService.SECONDARY_PING.equals(curr_pri)) {
                        return IGenericDataService.SECONDARY_PONG;
                    } else
                        return IGenericDataService.SECONDARY_PING;
                });

        return write_buffer;
    } else
        return Optional.empty();
}

From source file:com.ikanow.aleph2.search_service.elasticsearch.services.ElasticsearchIndexService.java

/**
 * Reacts to a bucket being published or updated: refreshes the Elasticsearch index mapping when
 * the search-index or document service is enabled, and (re)registers or removes the corresponding
 * Hive table when the data-warehouse service is enabled now or was enabled previously.
 * Errors are collected into the returned (completed) future rather than thrown; any unexpected
 * exception is converted into a single error message.
 */
@Override
public CompletableFuture<Collection<BasicMessageBean>> onPublishOrUpdate(DataBucketBean bucket,
        Optional<DataBucketBean> old_bucket, boolean suspended, Set<String> data_services,
        Set<String> previous_data_services) {
    try {

        final LinkedList<BasicMessageBean> mutable_errors = new LinkedList<>();

        // If search_index_service or document_service is enabled then update mapping

        if ((data_services.contains(DataSchemaBean.SearchIndexSchemaBean.name))
                || data_services.contains(DataSchemaBean.DocumentSchemaBean.name)) {

            final Tuple3<ElasticsearchIndexServiceConfigBean, String, Optional<String>> schema_index_type = getSchemaConfigAndIndexAndType(
                    bucket, _config);

            handlePotentiallyNewIndex(bucket, Optional.empty(), true, schema_index_type._1(),
                    schema_index_type._2());
        }

        // If data_warehouse_service is enabled then update Hive table (remove and reinsert super quick)
        // If data_warehouse_service _was_ enabled then remove Hive table

        final boolean old_data_service_matches_dw = previous_data_services
                .contains(DataSchemaBean.DataWarehouseSchemaBean.name);
        if ((data_services.contains(DataSchemaBean.DataWarehouseSchemaBean.name))
                || old_data_service_matches_dw) {
            final Configuration hive_config = ElasticsearchHiveUtils
                    .getHiveConfiguration(_service_context.getGlobalProperties());

            // drop the table defined by the OLD bucket if DW was previously enabled, else by the current one
            final DataBucketBean delete_bucket = old_bucket.filter(__ -> old_data_service_matches_dw)
                    .orElse(bucket);
            final String delete_string = ElasticsearchHiveUtils.deleteHiveSchema(delete_bucket,
                    delete_bucket.data_schema().data_warehouse_schema());

            // recreate only if DW is enabled _now_; otherwise this is a pure delete
            final Validation<String, String> maybe_recreate_string = data_services
                    .contains(DataSchemaBean.DataWarehouseSchemaBean.name)
                            ? ElasticsearchHiveUtils.generateFullHiveSchema(Optional.empty(), bucket,
                                    bucket.data_schema().data_warehouse_schema(),
                                    Optional.of(_crud_factory.getClient()), _config)
                            : Validation.success(null);

            final Validation<String, Boolean> ret_val = maybe_recreate_string
                    .bind(recreate_string -> ElasticsearchHiveUtils.registerHiveTable(Optional.empty(),
                            hive_config, Optional.of(delete_string), Optional.ofNullable(recreate_string)));

            if (ret_val.isFail()) {
                mutable_errors.add(ErrorUtils.buildErrorMessage(this.getClass().getSimpleName(),
                        "onPublishOrUpdate", ret_val.fail()));
            } else {
                _logger.info(ErrorUtils.get("Register/update/delete hive ({2}) table for bucket {0}: {1}",
                        bucket.full_name(), delete_string + "/" + maybe_recreate_string.success(),
                        ElasticsearchHiveUtils.getParamsFromHiveConfig(hive_config)));
            }
        }
        return CompletableFuture.completedFuture(mutable_errors);
    } catch (Throwable t) {
        return CompletableFuture
                .completedFuture(Arrays.asList(ErrorUtils.buildErrorMessage(this.getClass().getSimpleName(),
                        "onPublishOrUpdate", ErrorUtils.getLongForm("{0}", t))));
    }
}

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchIndexUtils.java

/** Creates a list of JsonNodes containing the mapping for fields that will _enable_ or _disable_ field data depending on fielddata_info is present
 *  (note this can convert a property to a dynamic template, but never the other way round)
 * @param instream the field names to process
 * @param f maps a field name to either a property name (left) or a (field, type) dynamic-template key (right)
 * @param field_lookups existing mappings, keyed like the output of {@code f}; matched entries are deep-copied and used as the starting point
 * @param fielddata_info 3tuple containing not_analyzed, analyzed, and override
 * @param search_index_schema_override supplies the tokenized/untokenized default field mappings
 * @param type_override per-field type overrides, applied over whatever the lookup provided
 * @param mapper JSON object mapper used for node creation/conversion
 * @param index_type the index type the mappings are generated for
 * @return a stream of (lookup-key, mapping) tuples; keys may have been promoted from property (left) to dynamic template (right)
 */
protected static Stream<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>> createFieldLookups(
        final Stream<String> instream, final Function<String, Either<String, Tuple2<String, String>>> f,
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> field_lookups,
        final Optional<Tuple3<JsonNode, JsonNode, Boolean>> fielddata_info,
        final SearchIndexSchemaDefaultBean search_index_schema_override,
        final Map<Either<String, Tuple2<String, String>>, String> type_override, final ObjectMapper mapper,
        final String index_type) {
    return instream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>map(Lambdas.wrap_u(fn -> {
        final Either<String, Tuple2<String, String>> either_tmp = f.apply(fn);
        final Optional<String> maybe_type = Optional.ofNullable(type_override.get(either_tmp));

        // add type if present
        final Either<String, Tuple2<String, String>> either = maybe_type
                .<Either<String, Tuple2<String, String>>>map(type -> {
                    return either_tmp.<Either<String, Tuple2<String, String>>>either(s -> Either.left(s),
                            t2 -> Either.right(Tuples._2T(t2._1(), type)));
                }).orElse(either_tmp);

        // start from the existing lookup (deep-copied so the shared map is never mutated),
        // falling back to the backup property/template mapping
        final ObjectNode mutable_field_metadata = (ObjectNode) Optional.ofNullable(field_lookups.get(either))
                .map(j -> j.deepCopy())
                .orElse(either.either(Lambdas.wrap_fj_u(__ -> mapper.readTree(BACKUP_FIELD_MAPPING_PROPERTIES)),
                        Lambdas.wrap_fj_u(__ -> mapper.readTree(BACKUP_FIELD_MAPPING_TEMPLATES))));
        //(note that these 2 mappings don't have "type"s - therefore they will result in default_templates not properties - you need the type to generate a property)

        // for dynamic templates the actual field mapping lives under "mapping"; for properties it IS the node
        final ObjectNode mutable_field_mapping_tmp = either.isLeft() ? mutable_field_metadata
                : (ObjectNode) mutable_field_metadata.get("mapping");

        //(override with type if set)
        maybe_type.ifPresent(type -> mutable_field_mapping_tmp.put("type", type));

        final boolean has_type = mutable_field_mapping_tmp.has("type");

        // a property with no type cannot stay a property: promote it to a dynamic template keyed (field, "*")
        final Tuple2<ObjectNode, Either<String, Tuple2<String, String>>> toplevel_eithermod = Lambdas
                .get(() -> {
                    if (either.isLeft() && !has_type) {
                        final ObjectNode top_level = (ObjectNode) mapper.createObjectNode().set("mapping",
                                mutable_field_metadata);
                        return Tuples._2T(top_level,
                                Either.<String, Tuple2<String, String>>right(Tuples._2T(fn, "*")));
                    } else { // right[dynamic] *OR* (left[properties] and has-type)
                        return Tuples._2T(mutable_field_metadata, either);
                    }
                });

        final ObjectNode mutable_field_mapping = toplevel_eithermod._2().isLeft() ? toplevel_eithermod._1()
                : (ObjectNode) toplevel_eithermod._1().get("mapping");

        // Special case ... if we're columnar and we're merging with tokenized and non-dual then convert to untokenized instead
        if (fielddata_info.filter(t3 -> t3._3()).isPresent() && mutable_field_mapping.equals(
                mapper.convertValue(search_index_schema_override.tokenized_string_field(), JsonNode.class))) {
            mutable_field_mapping.removeAll();
            mutable_field_mapping.setAll((ObjectNode) mapper
                    .convertValue(search_index_schema_override.untokenized_string_field(), ObjectNode.class));
        }

        // dynamic templates need path_match/match_mapping_type and a mandatory "type"
        if (toplevel_eithermod._2().isRight()) {
            if (!toplevel_eithermod._1().has(PATH_MATCH_NAME) && !toplevel_eithermod._1().has(RAW_MATCH_NAME)) {
                toplevel_eithermod._1().put(PATH_MATCH_NAME, toplevel_eithermod._2().right().value()._1());

                if (!toplevel_eithermod._1().has(TYPE_MATCH_NAME))
                    toplevel_eithermod._1().put(TYPE_MATCH_NAME, toplevel_eithermod._2().right().value()._2());
            }
            if (!has_type) {
                if (toplevel_eithermod._2().right().value()._2().equals("*")) { // type is mandatory
                    mutable_field_mapping.put("type", "{dynamic_type}");
                } else {
                    mutable_field_mapping.put("type", toplevel_eithermod._2().right().value()._2());
                }
            }
        }
        handleMappingFields(mutable_field_mapping, fielddata_info, mapper, index_type);
        setMapping(mutable_field_mapping, fielddata_info, mapper, index_type);
        return Tuples._2T(toplevel_eithermod._2(), toplevel_eithermod._1());
    }));

}

From source file:org.apache.james.jmap.model.MessagePreviewGenerator.java

/**
 * Builds a preview for an HTML message body: strips the markup to plain text, then shortens it.
 * Returns {@code NO_BODY} when the body is absent or empty.
 */
public String forHTMLBody(Optional<String> body) {
    if (!body.isPresent() || body.get().isEmpty()) {
        return NO_BODY;
    }
    return abbreviate(asText(body.get()));
}

From source file:org.apache.james.jmap.model.MessagePreviewGenerator.java

/**
 * Builds a preview for a plain-text message body by shortening it.
 * Returns {@code NO_BODY} when the body is absent or empty.
 */
public String forTextBody(Optional<String> body) {
    if (!body.isPresent() || body.get().isEmpty()) {
        return NO_BODY;
    }
    return abbreviate(body.get());
}