Example usage for java.util.Optional.map

List of usage examples for java.util.Optional.map

Introduction

This page lists example usages of java.util.Optional.map, collected from open source projects.

Prototype

public <U> Optional<U> map(Function<? super T, ? extends U> mapper) 

Document

If a value is present, returns an Optional describing (as if by Optional#ofNullable) the result of applying the given mapping function to the value; otherwise returns an empty Optional.
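
Before the project examples, here is a minimal, self-contained sketch of that contract (not taken from any of the projects below):

import java.util.Optional;

public class OptionalMapDemo {
    public static void main(String[] args) {
        Optional<String> present = Optional.of("hello");
        Optional<String> absent = Optional.empty();

        // the mapper runs only when a value is present
        System.out.println(present.map(String::length)); // Optional[5]
        System.out.println(absent.map(String::length));  // Optional.empty

        // a mapper that returns null yields an empty Optional (as if by ofNullable)
        System.out.println(present.map(s -> (String) null)); // Optional.empty
    }
}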

Usage

From source file:alfio.manager.TicketReservationManager.java

/**
 * Create a ticket reservation. It will create a reservation _only_ if it can find enough tickets. Note that it will not do date/validity validation. This must be ensured by the
 * caller.
 *
 * @param event the event for which tickets are being reserved
 * @param list the requested tickets, one entry per category
 * @param additionalServices additional services to reserve alongside the tickets
 * @param reservationExpiration when the reservation expires
 * @param specialPriceSessionId session id used to validate special-price tokens, if any
 * @param promotionCodeDiscount promo code to look up and apply, if any
 * @param locale the customer's locale
 * @param forWaitingQueue whether the reservation originates from the waiting queue
 * @return the id of the newly created reservation
 */
public String createTicketReservation(Event event, List<TicketReservationWithOptionalCodeModification> list,
        List<ASReservationWithOptionalCodeModification> additionalServices, Date reservationExpiration,
        Optional<String> specialPriceSessionId, Optional<String> promotionCodeDiscount, Locale locale,
        boolean forWaitingQueue)
        throws NotEnoughTicketsException, MissingSpecialPriceTokenException, InvalidSpecialPriceTokenException {
    String reservationId = UUID.randomUUID().toString();

    Optional<PromoCodeDiscount> discount = promotionCodeDiscount
            .flatMap((promoCodeDiscount) -> promoCodeDiscountRepository
                    .findPromoCodeInEventOrOrganization(event.getId(), promoCodeDiscount));

    ticketReservationRepository.createNewReservation(reservationId, reservationExpiration,
            discount.map(PromoCodeDiscount::getId).orElse(null), locale.getLanguage(), event.getId(),
            event.getVat(), event.isVatIncluded());
    list.forEach(t -> reserveTicketsForCategory(event, specialPriceSessionId, reservationId, t, locale,
            forWaitingQueue, discount.orElse(null)));

    int ticketCount = list.stream().map(TicketReservationWithOptionalCodeModification::getAmount)
            .mapToInt(Integer::intValue).sum();

    // apply every valid additional service whose supplement policy is MANDATORY_ONE_FOR_TICKET
    additionalServiceRepository
            .findAllInEventWithPolicy(event.getId(),
                    AdditionalService.SupplementPolicy.MANDATORY_ONE_FOR_TICKET)
            .stream().filter(AdditionalService::getSaleable).forEach(as -> {
                AdditionalServiceReservationModification asrm = new AdditionalServiceReservationModification();
                asrm.setAdditionalServiceId(as.getId());
                asrm.setQuantity(ticketCount);
                reserveAdditionalServicesForReservation(event.getId(), reservationId,
                        new ASReservationWithOptionalCodeModification(asrm, Optional.empty()),
                        discount.orElse(null));
            });

    additionalServices.forEach(as -> reserveAdditionalServicesForReservation(event.getId(), reservationId, as,
            discount.orElse(null)));

    TicketReservation reservation = ticketReservationRepository.findReservationById(reservationId);

    OrderSummary orderSummary = orderSummaryForReservationId(reservation.getId(), event,
            Locale.forLanguageTag(reservation.getUserLanguage()));
    ticketReservationRepository.addReservationInvoiceOrReceiptModel(reservationId, Json.toJson(orderSummary));

    auditingRepository.insert(reservationId, null, event.getId(), Audit.EventType.RESERVATION_CREATE,
            new Date(), Audit.EntityType.RESERVATION, reservationId);

    return reservationId;
}
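
The expression discount.map(PromoCodeDiscount::getId).orElse(null) above is a common bridge between Optional-returning code and persistence APIs that expect nullable arguments. A minimal sketch of the same idiom, with a hypothetical Discount class standing in for PromoCodeDiscount:

import java.util.Optional;

public class MapOrElseNullDemo {
    static class Discount {
        private final int id;
        Discount(int id) { this.id = id; }
        int getId() { return id; }
    }

    public static void main(String[] args) {
        Optional<Discount> discount = Optional.of(new Discount(42));
        // extract the id when present, otherwise hand null to the nullable-friendly API
        Integer idOrNull = discount.map(Discount::getId).orElse(null);
        System.out.println(idOrNull); // 42
    }
}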

From source file:com.rcn.controller.ResourceController.java

@RequestMapping(value = "/create-certificate", method = RequestMethod.POST)
public String createCertPost(@RequestParam("resourceType") String resourceType,
        @RequestParam("certName") String certName, @RequestParam("validDays") int validDays,
        @RequestParam("certDesc") String certDesc, @RequestParam("certType") String certType,
        @RequestParam("taPemCert") String taPemCert, @RequestParam("taPkcs10") String taPkcs10,
        @RequestParam("ca") String ca, @RequestParam("caPassword") String caPassword,
        @RequestParam("password1") String password1, @RequestParam("password2") String password2,
        Authentication principal, Model model) {

    Optional<String> optError = !password1.equals(password2) ? Optional.of(l("password.does.not.match"))
            : Optional.empty();

    if (!optError.isPresent()) {
        try {
            String type = ResoureMapping.computeIfAbsent(resourceType, a -> {
                throw new IllegalArgumentException("could not map a resource key:" + a);
            });

            RcnUserDetail user = (RcnUserDetail) principal.getPrincipal();
            Long targetUserId = user.getTargetUser().getId();

            Optional<String> caCert = ca.trim().length() > 0
                    ? Optional.ofNullable(
                            resourceRepository.certById(targetUserId, user.getId(), Long.valueOf(ca)))
                    : Optional.empty();

            Optional<String> clientCert = Optional.ofNullable("certImport".equals(certType) ? taPemCert : null);
            Optional<String> pkcs10Req = Optional
                    .ofNullable("certGeneratePkcs10".equals(certType) ? taPkcs10 : null);
            String cnName = certName.startsWith("cn=") ? certName : "cn=" + certName;
            String certPem = clientCert.orElseGet(() -> certificateService.generateCert(cnName, password1,
                    validDays, caCert, caPassword, TYPE_CA.equals(type), pkcs10Req));

            Long resourceId = resourceRepository.createResource(targetUserId, type, certName, certDesc);
            certificateService.storeCert(resourceId, certPem, password1);
        } catch (Exception e) {
            log.error("createCertPost", e);
            optError = Optional.of(e.getMessage());
        }
    }

    optError.ifPresent(e -> model.addAttribute("error", e));
    return optError.map(a -> "create-certificate").orElse("redirect:/resources");
}
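
The method's final line maps the presence of an error to a view name and falls back to a redirect when no error occurred. The same presence-to-value mapping as a standalone sketch:

import java.util.Optional;

public class PresenceToViewDemo {
    static String viewFor(Optional<String> error) {
        // when an error is present, stay on the form view; otherwise redirect
        return error.map(e -> "create-certificate").orElse("redirect:/resources");
    }

    public static void main(String[] args) {
        System.out.println(viewFor(Optional.of("password.does.not.match"))); // create-certificate
        System.out.println(viewFor(Optional.empty()));                       // redirect:/resources
    }
}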

From source file:com.ikanow.aleph2.search_service.elasticsearch.services.TestElasticsearchIndexService.java

public void test_endToEnd_autoTime(boolean test_not_create_mode, Optional<String> primary_name)
        throws IOException, InterruptedException, ExecutionException {
    final Calendar time_setter = GregorianCalendar.getInstance();
    time_setter.set(2015, 1, 1, 13, 0, 0);
    final String bucket_str = Resources.toString(
            Resources.getResource(
                    "com/ikanow/aleph2/search_service/elasticsearch/services/test_end_2_end_bucket.json"),
            Charsets.UTF_8);
    final DataBucketBean bucket = BeanTemplateUtils.build(bucket_str, DataBucketBean.class)
            .with("_id", "test_end_2_end")
            .with("full_name", "/test/end-end/auto-time" + primary_name.map(s -> "/" + s).orElse(""))
            .with("modified", time_setter.getTime()).done().get();

    final String template_name = ElasticsearchIndexUtils.getBaseIndexName(bucket, primary_name);

    // Check starting from clean

    {
        try {
            _crud_factory.getClient().admin().indices().prepareDeleteTemplate(template_name).execute()
                    .actionGet();
        } catch (Exception e) {
        } // (This is fine, just means it doesn't exist)      
        try {
            _crud_factory.getClient().admin().indices().prepareDelete(template_name + "*").execute()
                    .actionGet();
        } catch (Exception e) {
        } // (This is fine, just means it doesn't exist)      

        final GetIndexTemplatesRequest gt = new GetIndexTemplatesRequest().names(template_name);
        final GetIndexTemplatesResponse gtr = _crud_factory.getClient().admin().indices().getTemplates(gt)
                .actionGet();
        assertTrue("No templates to start with", gtr.getIndexTemplates().isEmpty());
    }

    // If the primary buffer is specified then create it and switch to it
    primary_name.ifPresent(primary -> {
        _index_service.getDataService()
                .flatMap(s -> s.getWritableDataService(JsonNode.class, bucket, Optional.empty(), primary_name))
                .flatMap(IDataWriteService::getCrudService).get();

        _index_service.getDataService().get().switchCrudServiceToPrimaryBuffer(bucket, primary_name,
                Optional.empty(), Optional.empty());
    });

    // (note pass Optional.empty() in regardless of primary, since it should return the non-default primary regardless)
    final ICrudService<JsonNode> index_service_crud = _index_service.getDataService()
            .flatMap(s -> s.getWritableDataService(JsonNode.class, bucket, Optional.empty(), Optional.empty()))
            .flatMap(IDataWriteService::getCrudService).get();

    // Check template added:

    {
        final GetIndexTemplatesRequest gt2 = new GetIndexTemplatesRequest().names(template_name);
        final GetIndexTemplatesResponse gtr2 = _crud_factory.getClient().admin().indices().getTemplates(gt2)
                .actionGet();
        assertTrue(
                "Cache should contain the template: " + _index_service._bucket_template_cache.asMap().keySet(),
                _index_service._bucket_template_cache.asMap()
                        .containsKey(bucket._id() + primary_name.map(s -> ":" + s).orElse("") + ":true"));
        assertEquals(1, gtr2.getIndexTemplates().size());
    }

    // Get batch sub-service

    @SuppressWarnings("unchecked")
    final Optional<ICrudService.IBatchSubservice<JsonNode>> batch_service = index_service_crud
            .getUnderlyingPlatformDriver(ICrudService.IBatchSubservice.class, Optional.empty())
            .map(t -> (IBatchSubservice<JsonNode>) t);

    {
        assertTrue("Batch service must exist", batch_service.isPresent());
    }

    // Get information about the crud service

    final ElasticsearchContext es_context = (ElasticsearchContext) index_service_crud
            .getUnderlyingPlatformDriver(ElasticsearchContext.class, Optional.empty()).get();

    {
        assertTrue("Read write index", es_context instanceof ElasticsearchContext.ReadWriteContext);
        assertTrue("Temporal index", es_context
                .indexContext() instanceof ElasticsearchContext.IndexContext.ReadWriteIndexContext.TimedRwIndexContext);
        assertTrue("Auto type", es_context
                .typeContext() instanceof ElasticsearchContext.TypeContext.ReadWriteTypeContext.AutoRwTypeContext);

        // Check that the context contains the expected fixed type fields

        final ElasticsearchContext.TypeContext.ReadWriteTypeContext.AutoRwTypeContext context = (ElasticsearchContext.TypeContext.ReadWriteTypeContext.AutoRwTypeContext) es_context
                .typeContext();

        assertEquals(Arrays.asList("@timestamp"),
                context.fixed_type_fields().stream().collect(Collectors.toList()));
    }

    // Write some docs out

    Arrays.asList(1, 2, 3, 4, 5).stream().map(i -> {
        time_setter.set(2015, i, 1, 13, 0, 0);
        return time_setter.getTime();
    }).map(d -> (ObjectNode) _mapper.createObjectNode().put("@timestamp", d.getTime())).forEach(o -> {
        ObjectNode o1 = o.deepCopy();
        o1.set("val1", _mapper.createObjectNode().put("val2", "test"));
        ObjectNode o2 = o.deepCopy();
        o2.put("val1", "test");
        batch_service.get().storeObject(o1, false);
        batch_service.get().storeObject(o2, false);
    });

    for (int i = 0; i < 30; ++i) {
        Thread.sleep(1000L);
        if (index_service_crud.countObjects().get() >= 10) {
            System.out.println("Test end 2 end: (Got all the records)");
            break;
        }
    }
    Thread.sleep(2100L); // sleep another 2s (plus epsilon) for the aliases to be created

    // Check an alias per time slice gets created also
    Arrays.asList("_2015.02.01", "_2015.03.01", "_2015.04.01", "_2015.05.01", "_2015.06.01").stream()
            .forEach(time_suffix -> {
                final List<String> aliases = getAliasedBuffers(bucket, Optional.of(time_suffix));
                assertEquals(Arrays.asList(template_name + time_suffix), aliases);
            });

    // Check the top level alias is created
    final List<String> aliases = this.getMainAliasedBuffers(bucket);
    assertEquals(Arrays.asList("_2015.02.01", "_2015.03.01", "_2015.04.01", "_2015.05.01", "_2015.06.01")
            .stream().map(x -> template_name + x).collect(Collectors.toList()), aliases);

    final GetMappingsResponse gmr = es_context.client().admin().indices()
            .prepareGetMappings(template_name + "*").execute().actionGet();

    // Should have 5 different indexes, each with 2 types + _default_

    assertEquals(5, gmr.getMappings().keys().size());
    final Set<String> expected_keys = Arrays.asList(1, 2, 3, 4, 5).stream()
            .map(i -> template_name + "_2015.0" + (i + 1) + ".01").collect(Collectors.toSet());
    final Set<String> expected_types = Arrays.asList("_default_", "type_1", "type_2").stream()
            .collect(Collectors.toSet());

    if (test_not_create_mode)
        StreamSupport.stream(gmr.getMappings().spliterator(), false).forEach(x -> {
            assertTrue(
                    "Is one of the expected keys: " + x.key + " vs  "
                            + expected_keys.stream().collect(Collectors.joining(":")),
                    expected_keys.contains(x.key));
            //DEBUG
            //System.out.println(" ? " + x.key);
            StreamSupport.stream(x.value.spliterator(), false).forEach(Lambdas.wrap_consumer_u(y -> {
                //DEBUG
                //System.out.println("?? " + y.key + " --- " + y.value.sourceAsMap().toString());
                // Size 3: _default_, type1 and type2
                assertTrue("Is expected type: " + y.key, expected_types.contains(y.key));
            }));
            // Size 3: _default_, type_1, type_2 
            assertEquals("Should have 3 indexes: " + x.value.toString(), 3, x.value.size());
        });

    //TEST DELETION:
    if (test_not_create_mode)
        test_handleDeleteOrPurge(bucket, primary_name, true);
}
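
This test repeatedly uses primary_name.map(s -> "/" + s).orElse("") to append a path or cache-key segment only when the optional buffer name is set. The idiom in isolation (path prefix borrowed from the test above):

import java.util.Optional;

public class OptionalSuffixDemo {
    static String fullName(Optional<String> primaryName) {
        // append "/<name>" only when a primary buffer name is present
        return "/test/end-end/auto-time" + primaryName.map(s -> "/" + s).orElse("");
    }

    public static void main(String[] args) {
        System.out.println(fullName(Optional.empty()));       // /test/end-end/auto-time
        System.out.println(fullName(Optional.of("buffer1"))); // /test/end-end/auto-time/buffer1
    }
}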

From source file:com.ikanow.aleph2.analytics.services.AnalyticsContext.java

@Override
public List<String> getInputPaths(final Optional<DataBucketBean> bucket, final AnalyticThreadJobBean job,
        final AnalyticThreadJobInputBean job_input) {

    final DataBucketBean my_bucket = bucket.orElseGet(() -> _mutable_state.bucket.get());

    final AuthorizationBean auth_bean = new AuthorizationBean(my_bucket.owner_id());
    final ICrudService<DataBucketBean> secured_bucket_crud = _core_management_db.readOnlyVersion()
            .getDataBucketStore().secured(_service_context, auth_bean);

    return Optional.of(job_input).filter(i -> null != i.data_service())
            .filter(i -> "batch".equalsIgnoreCase(i.data_service())
                    || DataSchemaBean.StorageSchemaBean.name.equalsIgnoreCase(i.data_service()))
            .map(Lambdas.wrap_u(i -> {
                if ("batch".equalsIgnoreCase(i.data_service())) {
                    final String[] bucket_subchannel = Lambdas.<String, String[]>wrap_u(s -> {

                        // 1) If the resource starts with "/" then it must point to an intermediate batch result of an external bucket
                        // 2) Otherwise the resource is a pointer to a (possibly empty) sub-channel of this bucket

                        if (s.startsWith("/")) { //1.*
                            if (s.endsWith(":")) {
                                return new String[] { s.substring(0, s.length() - 1), "" }; // (1.2a)
                            } else {
                                final String[] b_sc = s.split(":");
                                if (1 == b_sc.length) {
                                    return new String[] { my_bucket.full_name(), "" };
                                } else {
                                    return b_sc; //(1.1)
                                }
                            }
                        } else { //2.*
                            return new String[] { my_bucket.full_name(), s };
                        }
                    }).apply(Optional.ofNullable(i.resource_name_or_id()).orElse(""));

                    final Optional<DataBucketBean> bucket_to_check = Lambdas.get(Lambdas.wrap_u(() -> {
                        if (bucket_subchannel[0].equals(my_bucket.full_name())) {
                            return Optional.of(my_bucket);
                        } else {
                            return secured_bucket_crud.getObjectBySpec(CrudUtils.allOf(DataBucketBean.class)
                                    .when(DataBucketBean::full_name, bucket_subchannel[0])).get();
                        }
                    }));
                    return Lambdas.get(() -> {
                        if (!bucket_subchannel[0].equals(my_bucket.full_name())
                                || !bucket_subchannel[1].isEmpty()) {
                            bucket_to_check.map(input_bucket -> input_bucket.analytic_thread())
                                    .flatMap(
                                            a_thread -> Optional.ofNullable(a_thread.jobs()))
                                    .flatMap(jobs -> jobs.stream()
                                            .filter(j -> bucket_subchannel[1].equals(j.name()))
                                            .filter(j -> _batch_types
                                                    .contains(Optionals.of(() -> j.output().transient_type())
                                                            .orElse(MasterEnrichmentType.none)))
                                            .filter(j -> Optionals.of(() -> j.output().is_transient())
                                                    .orElse(false))
                                            .findFirst())
                                    .orElseThrow(() -> new RuntimeException(ErrorUtils.get(
                                            ErrorUtils.INPUT_PATH_NOT_A_TRANSIENT_BATCH, my_bucket.full_name(),
                                            job.name(), bucket_subchannel[0], bucket_subchannel[1])));

                            return Arrays.asList(_storage_service.getBucketRootPath() + bucket_subchannel[0]
                                    + IStorageService.TRANSIENT_DATA_SUFFIX_SECONDARY + bucket_subchannel[1]
                                    + IStorageService.PRIMARY_BUFFER_SUFFIX + "**/*");
                        } else { // This is my input directory
                            return Arrays.asList(_storage_service.getBucketRootPath() + my_bucket.full_name()
                                    + IStorageService.TO_IMPORT_DATA_SUFFIX + "*");
                        }
                    });
                } else { // storage service ... 3 options :raw, :json, :processed (defaults to :processed)
                    if (Optional.of(true).equals(
                            Optional.ofNullable(i.config()).map(cfg -> cfg.high_granularity_filter()))) {
                        throw new RuntimeException(ErrorUtils.get(
                                ErrorUtils.HIGH_GRANULARITY_FILTER_NOT_SUPPORTED, my_bucket.full_name(),
                                job.name(), Optional.ofNullable(i.name()).orElse("(no name)")));
                    }

                    final String bucket_name = i.resource_name_or_id().split(":")[0];

                    // Check we have authentication for this bucket:

                    final boolean found_bucket = secured_bucket_crud
                            .getObjectBySpec(
                                    CrudUtils.allOf(DataBucketBean.class).when(DataBucketBean::full_name,
                                            bucket_name),
                                    Collections.emptyList(), // (don't want any part of the bucket, just whether it exists or not)
                                    true)
                            .get().isPresent();

                    if (!found_bucket) {
                        throw new RuntimeException(
                                ErrorUtils.get(ErrorUtils.BUCKET_NOT_FOUND_OR_NOT_READABLE, bucket_name));
                    }
                    final String sub_service = Patterns.match(i.resource_name_or_id()).<String>andReturn()
                            .when(s -> s.endsWith(":raw"), __ -> "raw/current/") // (input paths are always from primary)
                            .when(s -> s.endsWith(":json"), __ -> "json/current/")
                            .otherwise(__ -> "processed/current/");

                    final String base_path = _storage_service.getBucketRootPath() + bucket_name
                            + IStorageService.STORED_DATA_SUFFIX + sub_service;
                    return Optional.ofNullable(i.config())
                            .filter(cfg -> (null != cfg.time_min()) || (null != cfg.time_max())).map(cfg -> {
                                try {
                                    final FileContext fc = _storage_service
                                            .getUnderlyingPlatformDriver(FileContext.class, Optional.empty())
                                            .get();

                                    //DEBUG
                                    //_logger.warn("Found1: " + Arrays.stream(fc.util().listStatus(new Path(base_path))).map(f -> f.getPath().toString()).collect(Collectors.joining(";")));                                                            
                                    //_logger.warn("Found2: " + TimeSliceDirUtils.annotateTimedDirectories(tmp_paths).map(t -> t.toString()).collect(Collectors.joining(";")));
                                    //_logger.warn("Found3: " + TimeSliceDirUtils.getQueryTimeRange(cfg, new Date()));

                                    final Stream<String> paths = Arrays
                                            .stream(fc.util().listStatus(new Path(base_path)))
                                            .filter(f -> f.isDirectory())
                                            .map(f -> f.getPath().toUri().getPath()) // (remove the hdfs:// bit, which seems to be breaking with HA)
                                    ;

                                    return TimeSliceDirUtils
                                            .filterTimedDirectories(
                                                    TimeSliceDirUtils.annotateTimedDirectories(paths),
                                                    TimeSliceDirUtils.getQueryTimeRange(cfg, new Date()))
                                            .map(s -> s + "/*").collect(Collectors.toList());
                                } catch (Exception e) {
                                    return null;
                                } // will fall through to...
                            }).orElseGet(() -> {
                                // No time based filtering possible
                                final String suffix = "**/*";
                                return Arrays.asList(base_path + suffix);
                            });
                }
            })).orElse(Collections.emptyList());

}
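
Structurally the whole method is one pipeline, Optional.of(job_input).filter(...).map(...).orElse(Collections.emptyList()): the mapping code runs only for inputs that pass the filters, and everything else collapses to an empty list. A stripped-down sketch of that shape, with a hypothetical Input class standing in for AnalyticThreadJobInputBean:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;

public class FilterMapPipelineDemo {
    static class Input {
        final String dataService;
        final String resource;
        Input(String dataService, String resource) { this.dataService = dataService; this.resource = resource; }
    }

    static List<String> inputPaths(Input input) {
        // the mapping only runs for "batch" inputs; everything else gets the default
        return Optional.of(input)
                .filter(i -> "batch".equalsIgnoreCase(i.dataService))
                .map(i -> Arrays.asList("/data/" + i.resource + "/**/*"))
                .orElse(Collections.emptyList());
    }

    public static void main(String[] args) {
        System.out.println(inputPaths(new Input("batch", "my_bucket")));  // [/data/my_bucket/**/*]
        System.out.println(inputPaths(new Input("stream", "my_bucket"))); // []
    }
}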

From source file:com.ikanow.aleph2.harvest.logstash.services.LogstashHarvestService.java

@Override
public CompletableFuture<BasicMessageBean> onUpdatedSource(DataBucketBean old_bucket, DataBucketBean new_bucket,
        boolean is_enabled, Optional<BucketDiffBean> diff, IHarvestContext context) {

    final LogstashBucketConfigBean config = Optionals.ofNullable(new_bucket.harvest_configs()).stream()
            .findFirst().map(cfg -> BeanTemplateUtils.from(cfg.config(), LogstashBucketConfigBean.class).get())
            .orElse(BeanTemplateUtils.build(LogstashBucketConfigBean.class).done().get());

    // Handle test case - use process utils to delete
    if (BucketUtils.isTestBucket(new_bucket)) {
        resetFilePointer(new_bucket, config, _globals.get());

        //kill/log
        final Tuple2<String, Boolean> kill_result = ProcessUtils.stopProcess(this.getClass().getSimpleName(),
                new_bucket, _global_propertes.get().local_root_dir() + LOCAL_RUN_DIR_SUFFIX, Optional.of(2));

        //log any output (don't crash if something goes wrong, this is just icing)
        try {
            final String log_file = System.getProperty("java.io.tmpdir") + File.separator
                    + BucketUtils.getUniqueSignature(new_bucket.full_name(), Optional.empty());
            final File log_file_handle = new File(log_file);
            LogstashUtils.sendOutputToLogger(context.getLogger(Optional.empty()), Level.INFO, log_file_handle,
                    Optional.empty());
            log_file_handle.delete();
        } catch (Exception ex) {
            context.getLogger(Optional.empty()).log(Level.ERROR,
                    ErrorUtils.lazyBuildMessage(false, () -> this.getClass().getSimpleName(),
                            () -> "onUpdatedSource", () -> null,
                            () -> ErrorUtils.getLongForm("Error getting logstash test output: {0}", ex),
                            () -> Collections.emptyMap()));
        }

        return CompletableFuture.completedFuture(ErrorUtils.buildMessage(true, this.getClass().getSimpleName(),
                "Bucket suspended: {0}", kill_result._1()));
    } else {
        if (diff.map(bdb -> bdb.diffs().isEmpty()).orElse(false)) { // if nothing's changed then do nothing
            //TODO: longer term could do better here, eg we don't care unless data_schema or harvest_configs have changed, right?            
            return CompletableFuture.completedFuture(ErrorUtils.buildSuccessMessage(
                    this.getClass().getSimpleName(), "onUpdatedSource", "No change to bucket"));
        }
        if (is_enabled) {
            return CompletableFuture
                    .completedFuture(startOrUpdateLogstash(new_bucket, config, _globals.get(), context));
        } else { // Just stop
            //(this does nothing if the bucket isn't actually running)
            return CompletableFuture.completedFuture(stopLogstash(new_bucket, config, _globals.get()));
        }
    }
}
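
The guard diff.map(bdb -> bdb.diffs().isEmpty()).orElse(false) reduces the optional diff to a plain boolean without null checks: an absent diff counts as "changed". The same idiom in isolation, with a hypothetical Diff class standing in for BucketDiffBean:

import java.util.Collections;
import java.util.Optional;
import java.util.Set;

public class OptionalBooleanGuardDemo {
    static class Diff {
        final Set<String> diffs;
        Diff(Set<String> diffs) { this.diffs = diffs; }
        Set<String> diffs() { return diffs; }
    }

    static boolean nothingChanged(Optional<Diff> diff) {
        // true only when a diff bean is present AND its change set is empty
        return diff.map(d -> d.diffs().isEmpty()).orElse(false);
    }

    public static void main(String[] args) {
        System.out.println(nothingChanged(Optional.empty()));                              // false
        System.out.println(nothingChanged(Optional.of(new Diff(Collections.emptySet())))); // true
        System.out.println(nothingChanged(Optional.of(new Diff(Collections.singleton("harvest_configs"))))); // false
    }
}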

From source file:alfio.controller.api.ReservationApiController.java

@RequestMapping(value = "/event/{eventName}/reservation/{reservationId}/vat-validation", method = RequestMethod.POST)
@Transactional
public ResponseEntity<VatDetail> validateEUVat(@PathVariable("eventName") String eventName,
        @PathVariable("reservationId") String reservationId, PaymentForm paymentForm, Locale locale,
        HttpServletRequest request) {

    String country = paymentForm.getVatCountryCode();
    Optional<Triple<Event, TicketReservation, VatDetail>> vatDetail = eventRepository
            .findOptionalByShortName(eventName)
            .flatMap(e -> ticketReservationRepository.findOptionalReservationById(reservationId)
                    .map(r -> Pair.of(e, r)))
            .filter(e -> EnumSet.of(INCLUDED, NOT_INCLUDED).contains(e.getKey().getVatStatus()))
            .filter(e -> vatChecker.isVatCheckingEnabledFor(e.getKey().getOrganizationId()))
            .flatMap(e -> vatChecker.checkVat(paymentForm.getVatNr(), country, e.getKey().getOrganizationId())
                    .map(vd -> Triple.of(e.getLeft(), e.getRight(), vd)));

    vatDetail.filter(t -> t.getRight().isValid()).ifPresent(t -> {
        VatDetail vd = t.getRight();
        String billingAddress = vd.getName() + "\n" + vd.getAddress();
        PriceContainer.VatStatus vatStatus = determineVatStatus(t.getLeft().getVatStatus(),
                t.getRight().isVatExempt());
        ticketReservationRepository.updateBillingData(vatStatus, vd.getVatNr(), country,
                paymentForm.isInvoiceRequested(), reservationId);
        OrderSummary orderSummary = ticketReservationManager.orderSummaryForReservationId(reservationId,
                t.getLeft(), Locale.forLanguageTag(t.getMiddle().getUserLanguage()));
        ticketReservationRepository.addReservationInvoiceOrReceiptModel(reservationId,
                Json.toJson(orderSummary));
        ticketReservationRepository.updateTicketReservation(reservationId, t.getMiddle().getStatus().name(),
                paymentForm.getEmail(), paymentForm.getFullName(), paymentForm.getFirstName(),
                paymentForm.getLastName(), locale.getLanguage(), billingAddress, null,
                Optional.ofNullable(paymentForm.getPaymentMethod()).map(PaymentProxy::name).orElse(null));
        paymentForm.getTickets().forEach((ticketId, owner) -> {
            if (isNotEmpty(owner.getEmail())
                    && ((isNotEmpty(owner.getFirstName()) && isNotEmpty(owner.getLastName()))
                            || isNotEmpty(owner.getFullName()))) {
                ticketHelper.preAssignTicket(eventName, reservationId, ticketId, owner, Optional.empty(),
                        request, (tr) -> {
                        }, Optional.empty());
            }
        });
    });

    return vatDetail.map(Triple::getRight).map(vd -> {
        if (vd.isValid()) {
            return ResponseEntity.ok(vd);
        } else {
            return new ResponseEntity<VatDetail>(HttpStatus.BAD_REQUEST);
        }
    }).orElseGet(() -> new ResponseEntity<>(HttpStatus.NOT_FOUND));
}
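
The lookup above combines two optionals (event and reservation) with a flatMap/map pair, so the result is present only when both lookups succeed. A self-contained version of the same combination, using java.util.AbstractMap.SimpleEntry in place of the Apache Commons Pair:

import java.util.AbstractMap.SimpleEntry;
import java.util.Map;
import java.util.Optional;

public class CombineOptionalsDemo {
    static Optional<Map.Entry<String, String>> combine(Optional<String> event, Optional<String> reservation) {
        // the result is present only when BOTH optionals are present
        return event.flatMap(e -> reservation.map(r -> new SimpleEntry<>(e, r)));
    }

    public static void main(String[] args) {
        System.out.println(combine(Optional.of("conf2017"), Optional.of("abc-123"))); // Optional[conf2017=abc-123]
        System.out.println(combine(Optional.of("conf2017"), Optional.empty()));       // Optional.empty
    }
}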

From source file:it.polimi.diceH2020.launcher.controller.view.MainFlowController.java

private List<Map<String, String>> getFolderList(List<SimulationsManager> smList) {
    List<Map<String, String>> returnList = new ArrayList<>();

    for (SimulationsManager simMan : smList) {

        Map<String, String> tmpMap = new HashMap<>();
        States state = SimulationsUtilities
                .getStateFromList(simulationsManagerRepository.findStatesByFolder(simMan.getFolder()));

        tmpMap.put("date", simMan.getDate());
        tmpMap.put("time", simMan.getTime());
        tmpMap.put("scenario", simMan.getScenario().getShortDescription());
        tmpMap.put("id", simMan.getId().toString());
        tmpMap.put("state", state.toString());
        tmpMap.put("input", simMan.getInput());
        tmpMap.put("folder", simMan.getFolder());
        tmpMap.put("num", String.valueOf(simulationsManagerRepository.countByFolder(simMan.getFolder())));
        tmpMap.put("completed", simMan.getNumCompletedSimulations().toString());
        Optional<Double> result = Optional.empty();
        Optional<Boolean> feasible = Optional.empty();
        Optional<String> provider = Optional.empty();
        Optional<String> vms = Optional.empty();
        String buildingVms = "";
        // I expect this list to be always one element long
        for (InteractiveExperiment experiment : simMan.getExperimentsList()) {
            logger.trace("Found experiment");
            if (experiment.isDone()) {
                logger.trace("Experiment is done");
                try {
                    Solution solution = experiment.getSol();
                    result = Optional.of(solution.getCost());
                    feasible = Optional.of(solution.getFeasible());
                    provider = Optional.of(solution.getProvider());
                    Map<String, Integer> vmsCounter = new HashMap<String, Integer>();
                    for (SolutionPerJob solutionPerJob : solution.getLstSolutions()) {
                        final String typeVMselected = solutionPerJob.getTypeVMselected().getId();
                        vmsCounter.put(typeVMselected,
                                (vmsCounter.get(typeVMselected) != null ? vmsCounter.get(typeVMselected) : 0)
                                        + solutionPerJob.getNumberVM());
                    }
                    for (Map.Entry<String, Integer> vm : vmsCounter.entrySet()) {
                        if (!buildingVms.isEmpty()) {
                            buildingVms += ", ";
                        }
                        buildingVms += vm.getKey() + "=" + String.valueOf(vm.getValue());
                    }
                    vms = Optional.of(buildingVms);
                } catch (JsonParseException | JsonMappingException e) {
                    logger.debug(String.format("Error while parsing the solution JSON for experiment no. %d",
                            experiment.getId()), e);
                } catch (IOException e) {
                    logger.debug(String.format("Error while reading the solution for experiment no. %d",
                            experiment.getId()), e);
                }
            }
        }
        tmpMap.put("result", result.map(value -> String.format("%.2f", value)).orElse("N/D"));
        tmpMap.put("feasible", feasible.map(Object::toString).orElse(""));
        tmpMap.put("provider", provider.map(Object::toString).orElse(""));
        tmpMap.put("vms", vms.map(Object::toString).orElse(""));

        returnList.add(tmpMap);
    }

    return returnList;
}
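
The map(...).orElse(...) calls at the end convert optional results into display strings, substituting a placeholder when no experiment has finished. The formatting idiom in isolation:

import java.util.Locale;
import java.util.Optional;

public class OptionalDisplayDemo {
    static String display(Optional<Double> cost) {
        // format the value when present, otherwise show a "no data" marker
        return cost.map(value -> String.format(Locale.ROOT, "%.2f", value)).orElse("N/D");
    }

    public static void main(String[] args) {
        System.out.println(display(Optional.of(3.14159))); // 3.14
        System.out.println(display(Optional.empty()));     // N/D
    }
}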

From source file:com.ikanow.aleph2.analytics.services.AnalyticsContext.java

@Override
public Validation<BasicMessageBean, JsonNode> sendObjectToStreamingPipeline(
        final Optional<DataBucketBean> bucket, final AnalyticThreadJobBean job,
        final Either<JsonNode, Map<String, Object>> object, final Optional<AnnotationBean> annotations) {
    if (annotations.isPresent()) {
        throw new RuntimeException(ErrorUtils.get(ErrorUtils.NOT_YET_IMPLEMENTED, "annotations"));
    }
    final JsonNode obj_json = object.either(__ -> __,
            map -> (JsonNode) _mapper.convertValue(map, JsonNode.class));

    return this.getOutputTopic(bucket, job).<Validation<BasicMessageBean, JsonNode>>map(topic -> {
        if (_distributed_services.doesTopicExist(topic)) {
            // (ie someone is listening in on our output data, so duplicate it for their benefit)
            _distributed_services.produce(topic, obj_json.toString());
            return Validation.success(obj_json);
        } else {
            return Validation.fail(ErrorUtils.buildSuccessMessage(this.getClass().getSimpleName(),
                    "sendObjectToStreamingPipeline", "Bucket:job {0}:{1} topic {2} has no listeners",
                    bucket.map(b -> b.full_name()).orElse("(unknown)"), job.name(), topic));
        }
    }).orElseGet(() -> {
        return Validation.fail(ErrorUtils.buildErrorMessage(this.getClass().getSimpleName(),
                "sendObjectToStreamingPipeline", "Bucket:job {0}:{1} has no output topic",
                bucket.map(b -> b.full_name()).orElse("(unknown)"), job.name()));
    });
}
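
Here map and orElseGet together act as a conditional with two typed branches: the map body runs when an output topic exists, and orElseGet lazily supplies the failure result. A minimal sketch of that branching shape (the topic and messages below are made up):

import java.util.Optional;

public class MapOrElseGetBranchDemo {
    static String route(Optional<String> topic) {
        // two typed branches: map handles the "topic exists" case,
        // orElseGet lazily builds the failure case
        return topic.<String>map(t -> "sent to " + t)
                .orElseGet(() -> "no output topic configured");
    }

    public static void main(String[] args) {
        System.out.println(route(Optional.of("events_stream"))); // sent to events_stream
        System.out.println(route(Optional.empty()));             // no output topic configured
    }
}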

From source file:com.ikanow.aleph2.analytics.services.AnalyticsContext.java

/** Returns a config object containing:
 *  - set up for any of the services described
 *  - all the rest of the configuration
 *  - the bucket bean ID
 *  SIDE EFFECT - SETS UP THE SERVICES SET
 * @param maybe_bucket the bucket whose data schema contributes extra underlying service providers, if present
 * @param services an optional set of additional underlying services to register
 * @return the static config, minus entries that should not be passed downstream
 */
protected Config setupServices(final Optional<DataBucketBean> maybe_bucket,
        final Optional<Set<Tuple2<Class<? extends IUnderlyingService>, Optional<String>>>> services) {
    // 
    // - set up for any of the services described
    // - all the rest of the configuration
    // - the bucket bean ID

    final Config full_config = ModuleUtils.getStaticConfig()
            .withoutPath(DistributedServicesPropertyBean.APPLICATION_NAME)
            .withoutPath("MongoDbManagementDbService.v1_enabled") // (special workaround for V1 sync service)
    ;

    final ImmutableSet<Tuple2<Class<? extends IUnderlyingService>, Optional<String>>> complete_services_set = ImmutableSet
            .<Tuple2<Class<? extends IUnderlyingService>, Optional<String>>>builder()
            .addAll(services.orElse(Collections.emptySet()))
            .add(Tuples._2T(ICoreDistributedServices.class, Optional.empty()))
            .add(Tuples._2T(IManagementDbService.class, Optional.empty()))
            .add(Tuples._2T(ISearchIndexService.class, Optional.empty())) //(need this because logging doesn't correctly report its service set, should fix at some point)
            .add(Tuples._2T(IStorageService.class, Optional.empty()))
            .add(Tuples._2T(ISecurityService.class, Optional.empty()))
            .add(Tuples._2T(ILoggingService.class, Optional.empty()))
            .add(Tuples._2T(IManagementDbService.class, IManagementDbService.CORE_MANAGEMENT_DB))
            .addAll(_mutable_state.extra_auto_context_libs)
            .addAll(maybe_bucket
                    .map(bucket -> DataServiceUtils.listUnderlyingServiceProviders(bucket.data_schema()))
                    .orElse(Collections.emptyList()))
            .build();

    if (_mutable_state.service_manifest_override.isSet()) {
        if (!complete_services_set.equals(_mutable_state.service_manifest_override.get())) {
            throw new RuntimeException(ErrorUtils.SERVICE_RESTRICTIONS);
        }
    } else {
        _mutable_state.service_manifest_override.set(complete_services_set);
    }
    return full_config;
}
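
The .addAll(maybe_bucket.map(...).orElse(Collections.emptyList())) call lets an optional source feed a collection builder without branching: an absent bucket simply contributes nothing. A small sketch of the pattern, with hypothetical service names:

import java.util.Arrays;
import java.util.Collections;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;

public class OptionalContributionDemo {
    static Set<String> services(Optional<String> schema) {
        Set<String> result = new TreeSet<>(Arrays.asList("core_db", "security"));
        // an absent schema maps to an empty list, so no if/else is needed
        result.addAll(schema.map(s -> Arrays.asList("search_index_" + s, "storage_" + s))
                .orElse(Collections.emptyList()));
        return result;
    }

    public static void main(String[] args) {
        System.out.println(services(Optional.empty()));        // [core_db, security]
        System.out.println(services(Optional.of("temporal"))); // [core_db, search_index_temporal, security, storage_temporal]
    }
}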

From source file:com.ikanow.aleph2.search_service.elasticsearch.services.TestElasticsearchIndexService.java

public void test_handleDeleteOrPurge(final DataBucketBean to_handle, Optional<String> primary_buffer,
        boolean delete_not_purge) throws InterruptedException, ExecutionException {
    System.out.println("****** Checking delete/purge");

    final String template_name = ElasticsearchIndexUtils.getBaseIndexName(to_handle, primary_buffer);
    final ICrudService<JsonNode> index_service_crud = _index_service.getDataService().flatMap(
            s -> s.getWritableDataService(JsonNode.class, to_handle, Optional.empty(), Optional.empty()))
            .flatMap(IDataWriteService::getCrudService).get();

    final ElasticsearchContext es_context = (ElasticsearchContext) index_service_crud
            .getUnderlyingPlatformDriver(ElasticsearchContext.class, Optional.empty()).get();

    // (Actually first off, check there's data and templates)
    // Data:
    {
        final GetMappingsResponse gmr = es_context.client().admin().indices()
                .prepareGetMappings(template_name + "*").execute().actionGet();
        assertTrue("There are indexes", gmr.getMappings().keys().size() > 0);
    }
    // Templates:
    {
        final GetIndexTemplatesRequest gt_pre = new GetIndexTemplatesRequest().names(template_name);
        final GetIndexTemplatesResponse gtr_pre = _crud_factory.getClient().admin().indices()
                .getTemplates(gt_pre).actionGet();
        assertTrue(
                "Cache should contain the template: " + _index_service._bucket_template_cache.asMap().keySet(),
                _index_service._bucket_template_cache.asMap()
                        .containsKey(to_handle._id() + primary_buffer.map(s -> ":" + s).orElse("") + ":true"));
        assertEquals(1, gtr_pre.getIndexTemplates().size());
    }

    // Then, perform request
    final BasicMessageBean result = _index_service.getDataService().get()
            .handleBucketDeletionRequest(to_handle, Optional.empty(), delete_not_purge).get();
    assertEquals("Deletion should succeed: " + result.message(), true, result.success());

    // Check templates gone iff deleting not purging

    if (delete_not_purge) {
        final GetIndexTemplatesRequest gt = new GetIndexTemplatesRequest().names(template_name);
        final GetIndexTemplatesResponse gtr = _crud_factory.getClient().admin().indices().getTemplates(gt)
                .actionGet();
        assertTrue("No templates after deletion", gtr.getIndexTemplates().isEmpty());
    } else {
        final GetIndexTemplatesRequest gt2 = new GetIndexTemplatesRequest().names(template_name);
        final GetIndexTemplatesResponse gtr2 = _crud_factory.getClient().admin().indices().getTemplates(gt2)
                .actionGet();
        assertEquals(1, _index_service._bucket_template_cache.size());
        assertEquals(1, gtr2.getIndexTemplates().size());
    }

    // Check all files deleted

    // Check via mappings
    {
        final GetMappingsResponse gmr = es_context.client().admin().indices()
                .prepareGetMappings(template_name + "*").execute().actionGet();
        assertEquals(0, gmr.getMappings().keys().size());
    }
    // Check via index size (recreates templates)

    final ICrudService<JsonNode> index_service_crud_2 = _index_service.getDataService().flatMap(
            s -> s.getWritableDataService(JsonNode.class, to_handle, Optional.empty(), Optional.empty()))
            .flatMap(IDataWriteService::getCrudService).get();

    assertEquals(0, index_service_crud_2.countObjects().get().intValue());
}