Example usage for java.util Optional map

List of usage examples for java.util Optional map

Introduction

On this page you can find example usages of java.util.Optional.map, collected from open-source projects.

Prototype

public <U> Optional<U> map(Function<? super T, ? extends U> mapper) 

Document

If a value is present, returns an Optional describing (as if by Optional.ofNullable) the result of applying the given mapping function to the value; otherwise returns an empty Optional.
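
A minimal, self-contained sketch of that contract (OptionalMapDemo and its values are illustrative, not taken from the projects below):

import java.util.Optional;

public class OptionalMapDemo {
    public static void main(String[] args) {
        Optional<String> present = Optional.of("hello");
        Optional<String> empty = Optional.empty();

        // A present value is transformed and re-wrapped
        System.out.println(present.map(String::length)); // Optional[5]

        // An empty Optional short-circuits: the mapper is never invoked
        System.out.println(empty.map(String::length)); // Optional.empty

        // A mapper that returns null yields an empty Optional (as if by ofNullable)
        System.out.println(present.map(s -> (Integer) null)); // Optional.empty
    }
}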

Usage

From source file:alfio.util.Validator.java

private static boolean containsAllRequiredTranslations(EventModification eventModification,
        List<EventModification.AdditionalServiceText> descriptions) {
    Optional<EventModification> optional = Optional.ofNullable(eventModification);
    return !optional.isPresent() || optional.map(e -> ContentLanguage.findAllFor(e.getLocales()))
            .filter(l -> l.stream().allMatch(
                    l1 -> descriptions.stream().anyMatch(d -> d.getLocale().equals(l1.getLanguage()))))
            .isPresent();
}
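
Note that map short-circuits on an empty Optional, so the mapping and filtering only run when eventModification is non-null; the leading !optional.isPresent() makes the check pass trivially in the null case.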

From source file:alfio.util.EventUtil.java

private static MapSqlParameterSource buildTicketParams(int eventId, Date creation, Optional<TicketCategory> tc,
        int srcPriceCts, MapSqlParameterSource ps, Ticket.TicketStatus ticketStatus) {
    return ps.addValue("uuid", UUID.randomUUID().toString()).addValue("creation", creation)
            .addValue("categoryId", tc.map(TicketCategory::getId).orElse(null)).addValue("eventId", eventId)
            .addValue("status", ticketStatus.name()).addValue("srcPriceCts", srcPriceCts);
}
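
Here tc.map(TicketCategory::getId).orElse(null) is the usual bridge from an Optional to a nullable SQL parameter: the category id when a category is present, and SQL NULL otherwise.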

From source file:com.ikanow.aleph2.analytics.services.BatchEnrichmentContext.java

/** Utility to mutate objects
 * @param original_json
 * @param mutations
 * @return
 */
protected static JsonNode handleMutations(JsonNode original_json, Optional<ObjectNode> mutations) {
    return mutations.map(o -> StreamSupport.<Map.Entry<String, JsonNode>>stream(
            Spliterators.spliteratorUnknownSize(o.fields(), Spliterator.ORDERED), false).reduce(original_json,
                    (acc, kv) -> ((ObjectNode) acc).set(kv.getKey(), kv.getValue()), (val1, val2) -> val2))
            .orElse(original_json);
}
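
When mutations is present, the reduce folds each field of the mutation object into original_json, casting the accumulator to ObjectNode and calling set; when it is empty, map never runs and orElse returns original_json unchanged.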

From source file:com.ikanow.aleph2.distributed_services.utils.KafkaUtils.java

/**
 * Creates a consumer for a single topic with the currently configured Kafka instance.
 * The consumer group.id will be set to: <topic>__<from|a2>__<consumer_name|random_uuid>
 *
 * WARNING: When a consumer is created, it starts reading from the current position, so anything
 * previously produced on the topic will not be visible to this consumer.
 * 
 * This consumer should be closed once you are done reading.
 * 
 * Note: if you set the consumer_name you must be careful about creating consumers:
 * 1. multiple consumers with different names pointed to same topic == everyone gets all data from the same topic
 * 2. multiple consumers with different names pointed to different topics == everyone reads their own topics (like normal)
 * 3. multiple consumers with same names pointed to same topic == should round-robin (partitioning is not currently setup in aleph2 (TODO))
 * 4. multiple consumers with same names pointed to different topics == BROKEN (kafka issue)
 *
 * @param topic
 * @param consumer_name - if set then uses a specific consumer group instead of the central "system" consumer - this has the effect of copying the data instead of round-robining it
 * @return
 */
public static KafkaConsumer<String, String> getKafkaConsumer(final String topic, final Optional<String> from,
        final Optional<String> consumer_name) {
    final String groupid = topic + "__" + from.map(f -> f).orElse("a2") + "__"
            + consumer_name.map(name -> name).orElse(UuidUtils.get().getRandomUuid());
    final Properties new_properties = addGroupIdToProps(groupid);
    final KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(new_properties);
    consumer.subscribe(Arrays.asList(topic));
    return consumer;
}
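
Note that from.map(f -> f) and consumer_name.map(name -> name) are identity mappings, so the groupid line is equivalent to calling orElse directly on each Optional (a simplification sketch, same behavior):

final String groupid = topic + "__" + from.orElse("a2") + "__"
        + consumer_name.orElse(UuidUtils.get().getRandomUuid());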

From source file:com.baasbox.service.scripting.ScriptingService.java

private static ODocument updateStorageLocked(String name, boolean before, JsonCallback updaterFn)
        throws ScriptException {
    final ScriptsDao dao = ScriptsDao.getInstance();
    ODocument script = null;
    try {
        script = dao.getByNameLocked(name);
        if (script == null)
            throw new ScriptException("Script not found");
        ODocument retScript = before ? script.copy() : script;

        ODocument storage = script.<ODocument>field(ScriptsDao.LOCAL_STORAGE);

        Optional<ODocument> storage1 = Optional.ofNullable(storage);

        JsonNode current = storage1.map(ODocument::toJSON).map(Json.mapper()::readTreeOrMissing)
                .orElse(NullNode.getInstance());
        if (current.isMissingNode())
            throw new ScriptEvalException("Error reading local storage as json");

        JsonNode updated = updaterFn.call(current);
        ODocument result;
        if (updated == null || updated.isNull()) {
            script.removeField(ScriptsDao.LOCAL_STORAGE);
        } else {
            result = new ODocument().fromJSON(updated.toString());
            script.field(ScriptsDao.LOCAL_STORAGE, result);
        }
        dao.save(script);
        ODocument field = retScript.field(ScriptsDao.LOCAL_STORAGE);
        return field;
    } finally {
        if (script != null) {
            script.unlock();
        }
    }
}
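
The two chained map calls run only when the LOCAL_STORAGE field is non-null: a null storage flows straight through to orElse(NullNode.getInstance()), while a document that fails to parse surfaces as a missing node and is rejected.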

From source file:nu.yona.server.device.rest.DeviceController.java

private static String optionalUuidToNullableString(Optional<UUID> optionalUuid) {
    return optionalUuid.map(UUID::toString).orElse(null);
}

From source file:com.ikanow.aleph2.aleph2_rest_utils.RestCrudFunctions.java

private static <T> Response handleCountRequest(final Optional<String> query_json,
        final ICrudService<T> crud_service, final Class<T> clazz)
        throws InterruptedException, ExecutionException {
    // get the query; if there is none, just return the total count
    return query_json.map(json -> {
        try {
            _logger.debug("query: " + json);
            final QueryComponent<T> query = RestUtils.convertStringToQueryComponent(json, clazz,
                    Optional.empty());
            return Response.ok(RestUtils
                    .convertSingleObjectToJson(crud_service.countObjectsBySpec(query).get(), COUNT_FIELD_NAME)
                    .toString()).build();
        } catch (Exception e) {
            return Response.status(Status.BAD_REQUEST)
                    .entity(ErrorUtils.getLongForm("Error converting input stream to string: {0}", e)).build();
        }
    }).orElse(Response.ok(
            RestUtils.convertSingleObjectToJson(crud_service.countObjects().get(), COUNT_FIELD_NAME).toString())
            .build());
}
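
Because both branches inside the lambda return a Response, map yields an Optional<Response>: the whole query-handling block, catch clause included, runs only when a query string is present, and orElse supplies the unfiltered count otherwise.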

From source file:com.ikanow.aleph2.analytics.spark.utils.SparkTechnologyUtils.java

/** Optional utility to respect the test spec's timeout
 * @param maybe_test_spec
 * @param on_timeout - mainly for testing
 */
public static void registerTestTimeout(final Optional<ProcessingTestSpecBean> maybe_test_spec,
        Runnable on_timeout) {

    maybe_test_spec.map(test_spec -> test_spec.max_run_time_secs()).ifPresent(max_run_time -> {
        CompletableFuture.runAsync(Lambdas.wrap_runnable_u(() -> {
            Thread.sleep(1500L * max_run_time); // (seconds, *1.5 for safety)
            System.out.println("Test timeout - exiting");
            on_timeout.run();
        }));
    });
}
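
The map(test_spec -> test_spec.max_run_time_secs()) step doubles as a null guard: if max_run_time_secs() returns null, the result is an empty Optional (as if by ofNullable) and ifPresent schedules nothing.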

From source file:com.ikanow.aleph2.analytics.spark.utils.SparkTechnologyUtils.java

/** Util function to tidy up input bean
 * @param in
 * @param maybe_test_spec
 * @return
 */
protected static Stream<AnalyticThreadJobInputBean> transformInputBean(
        final Stream<AnalyticThreadJobInputBean> in, final Optional<ProcessingTestSpecBean> maybe_test_spec) {
    final Optional<Long> debug_max = maybe_test_spec.map(t -> t.requested_num_objects());

    return in.filter(input -> Optional.ofNullable(input.enabled()).orElse(true)).map(Lambdas.wrap_u(input -> {
        // In the debug case, transform the input to add the max record limit
        // (also ensure the name is filled in)
        final AnalyticThreadJobInputBean input_with_test_settings = BeanTemplateUtils.clone(input)
                .with(AnalyticThreadJobInputBean::name, Optional.ofNullable(input.name()).orElseGet(() -> {
                    return Optional.ofNullable(input.resource_name_or_id()).orElse("") + ":"
                            + Optional.ofNullable(input.data_service()).orElse("");
                }))
                .with(AnalyticThreadJobInputBean::config, BeanTemplateUtils
                        .clone(Optional.ofNullable(input.config())
                                .orElseGet(() -> BeanTemplateUtils.build(AnalyticThreadJobInputConfigBean.class)
                                        .done().get()))
                        .with(AnalyticThreadJobInputConfigBean::test_record_limit_request, //(if not test, always null; else "input override" or "output default")
                                debug_max.map(max -> Optionals
                                        .of(() -> input.config().test_record_limit_request()).orElse(max))
                                        .orElse(null))
                        .done())
                .done();

        return input_with_test_settings;
    }));
}
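
The debug_max.map(...).orElse(null) at the end is deliberate: outside of test mode debug_max is empty, so the record limit resolves to null, exactly as the inline comment requires; in test mode the input's own test_record_limit_request wins over the test spec's default.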

From source file:com.ikanow.aleph2.storage_service_hdfs.services.HfdsDataWriteService.java

/** Wraps an output stream in one of the supported codecs
 * @param codec
 * @param original_output
 * @return
 */
public static OutputStream wrapOutputInCodec(final Optional<String> codec, final OutputStream original_output) {
    return codec.map(Lambdas.wrap_u(c -> {
        if (c.equals("gz")) {
            return new java.util.zip.GZIPOutputStream(original_output);
        } else if (c.equals("sz")) {
            return new org.xerial.snappy.SnappyOutputStream(original_output);
        } else if (c.equals("fr.sz")) {
            return new org.xerial.snappy.SnappyFramedOutputStream(original_output);
        } else
            return null; // (fallback to no codec)

    })).orElse(original_output);
}
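
Returning null from the mapper is what makes the "fallback to no codec" branch work: map wraps the mapper's result as if by ofNullable, so an unrecognized codec yields an empty Optional and orElse(original_output) returns the stream unwrapped.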