Example usage for java.util Optional map

List of usage examples for java.util Optional map

Introduction

On this page you can find example usages of java.util.Optional.map.

Prototype

public <U> Optional<U> map(Function<? super T, ? extends U> mapper) 

Document

If a value is present, returns an Optional describing (as if by #ofNullable) the result of applying the given mapping function to the value; otherwise returns an empty Optional.
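
A minimal sketch of this behavior (a standalone demo, not taken from the projects below):

import java.util.Optional;

public class OptionalMapDemo {
    public static void main(String[] args) {
        Optional<String> present = Optional.of("hello");
        Optional<String> empty = Optional.empty();

        // A present value: the function is applied and the result re-wrapped
        System.out.println(present.map(String::length)); // Optional[5]

        // An empty Optional: the function is never invoked
        System.out.println(empty.map(String::length)); // Optional.empty

        // A mapper returning null yields an empty Optional (as if by ofNullable)
        System.out.println(present.map(s -> (String) null)); // Optional.empty
    }
}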

Usage

From source file:com.bitbreeds.webrtc.sctp.impl.SCTPImpl.java

/**
 * @return message with acks
 */
public byte[] createSackMessage() {
    if (context == null) {
        return new byte[] {};
    }
    Optional<SCTPMessage> message = sackCreator.createSack(SCTPUtil.baseHeader(context));
    return message.map(SCTPMessage::toBytes).orElse(new byte[] {});
}

From source file:ai.grakn.engine.GraknEngineServer.java

public void startHTTP() {

    boolean passwordProtected = prop.getPropertyAsBool(GraknEngineConfig.PASSWORD_PROTECTED_PROPERTY, false);

    // TODO: Make sure controllers handle the null case
    Optional<String> secret = prop.tryProperty(GraknEngineConfig.JWT_SECRET_PROPERTY);
    @Nullable
    JWTHandler jwtHandler = secret.map(JWTHandler::create).orElse(null);
    UsersHandler usersHandler = UsersHandler.create(prop.getProperty(GraknEngineConfig.ADMIN_PASSWORD_PROPERTY),
            factory);

    configureSpark(spark, prop, jwtHandler);

    // Start the websocket for Graql
    RemoteSession graqlWebSocket = passwordProtected ? RemoteSession.passwordProtected(usersHandler)
            : RemoteSession.create();
    spark.webSocket(REST.WebPath.REMOTE_SHELL_URI, graqlWebSocket);

    String defaultKeyspace = prop.getProperty(GraknEngineConfig.DEFAULT_KEYSPACE_PROPERTY);
    int postProcessingDelay = prop.getPropertyAsInt(GraknEngineConfig.POST_PROCESSING_TASK_DELAY);

    // Start all the controllers
    new GraqlController(factory, spark);
    new ConceptController(factory, spark);
    new DashboardController(factory, spark);
    new SystemController(factory, spark);
    new AuthController(spark, passwordProtected, jwtHandler, usersHandler);
    new UserController(spark, usersHandler);
    new CommitLogController(spark, defaultKeyspace, postProcessingDelay, taskManager);
    new TasksController(spark, taskManager);

    // This method will block until all the controllers are ready to serve requests
    spark.awaitInitialization();
}
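
The jwtHandler assignment above is a common bridge between Optional-based and null-based APIs: map transforms the secret if present, and orElse(null) unwraps the result back into a plain nullable reference for code that expects null. A minimal sketch of the same round trip (variable and class names are illustrative):

import java.util.Optional;

class NullableBridge {
    static Integer secretLength() {
        String rawSecret = System.getenv("JWT_SECRET"); // may be null
        // ofNullable lifts a possibly-null value in; map(...).orElse(null) lowers it back out
        return Optional.ofNullable(rawSecret)
                .map(String::length)
                .orElse(null); // null when no secret is configured
    }
}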

From source file:com.ikanow.aleph2.analytics.hadoop.assets.SampleReduceEnrichmentModule.java

@Override
public void onObjectBatch(Stream<Tuple2<Long, IBatchRecord>> batch, Optional<Integer> batch_size,
        Optional<JsonNode> grouping_key) {

    // Just to make it simple 

    // 2 different cases:

    // 1) If I'm a combiner or a single-step reducer, then count the batches
    //    and emit (key, count)
    // 2) If I'm the second stage of a combine-reduce then sum the counts

    Patterns.match(_stage.get()).andAct().when(s -> s == Stage.map, __ -> {
        batch.forEach(obj -> {

            final JsonNode new_grouping_key = _key_fields.get().stream().reduce(_mapper.createObjectNode(),
                    (acc, v) -> {
                        final Optional<String> key_field = JsonUtils.getProperty(v, obj._2().getJson())
                                .filter(j -> j.isTextual()).map(j -> j.asText());
                        return key_field.map(kf -> acc.put(v.replaceAll("__+", "_").replace(".", "__"), kf))
                                .orElse(acc);
                    }, (acc1, acc2) -> acc1); // (combiner is never invoked on a sequential stream)

            final ObjectNode to_output = _mapper.createObjectNode().put("count", 1);

            _logger.info("OUTPUT FROM MAP = " + to_output + " key " + new_grouping_key);

            _context.get().emitMutableObject(obj._1(), to_output, Optional.empty(),
                    Optional.of(new_grouping_key));

        });
    }).otherwise(s -> { // combine or reduce

        final long count = batch.map(b -> Optional.ofNullable(b._2().getJson().get("count"))
                .filter(j -> j.isNumber()).map(j -> j.asLong()).orElse(0L))
                .collect(Collectors.summingLong(l -> l));

        final ObjectNode to_output = ((s == Stage.reduce) ? ((ObjectNode) grouping_key.get().deepCopy())
                : _mapper.createObjectNode()).put("count", count);

        _logger.info("OUTPUT FROM COMBINE/REDUCE = " + to_output + " (stage=" + s + " key " + grouping_key);

        _context.get().emitMutableObject(0L, to_output, Optional.empty(),
                (s == Stage.reduce) ? Optional.empty() : grouping_key);
    });
}
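
The count extraction in the combine/reduce branch is a handy null-safe pattern for Jackson trees: JsonNode.get returns null for a missing field, so ofNullable plus filter plus map collapses "absent, wrong type, or present" into a single chain with a default. A standalone sketch of just that pattern (assumes Jackson on the classpath):

import java.util.Optional;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

class CountReader {
    static long readCount(String json) throws Exception {
        JsonNode node = new ObjectMapper().readTree(json);
        return Optional.ofNullable(node.get("count")) // null when the field is absent
                .filter(JsonNode::isNumber)           // reject non-numeric values
                .map(JsonNode::asLong)
                .orElse(0L);
    }
    // readCount("{\"count\": 7}") -> 7; readCount("{}") -> 0
}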

From source file:com.bitbreeds.webrtc.sctp.impl.SCTPImpl.java

/**
 * Handle message and create a response
 * @param input the incoming message
 * @return a byte response; an empty array means no response
 */
public List<byte[]> handleRequest(byte[] input) {
    SCTPMessage inFullMessage = SCTPMessage.fromBytes(input);

    logger.debug("Input Parsed: " + inFullMessage);

    logger.debug("Flags: " + Hex.encodeHexString(new byte[] { input[13] }));

    SCTPHeader inHdr = inFullMessage.getHeader();
    List<SCTPChunk> inChunks = inFullMessage.getChunks();

    return inChunks.stream().map(chunk -> {
        MessageHandler handler = handlerMap.get(chunk.getType());
        if (handler != null) {

            Optional<SCTPMessage> out = handler.handleMessage(this, context, inHdr, chunk);
            return out.map(i -> SCTPUtil.addChecksum(i).toBytes()).orElse(new byte[] {});
        } else {
            logger.warn("Not handled messagetype: " + chunk.getType());
            return new byte[] {};
        }
    }).collect(Collectors.toList());
}
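
This handler nests Optional.map inside Stream.map: each chunk may or may not produce a response, and the map/orElse pair turns "maybe a message" into "always a byte array" so the surrounding stream pipeline stays uniform. A reduced sketch of the same shape (parseNumber and the ack format are invented for illustration):

import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

class ReplyBuilder {
    static List<String> replies(List<String> tokens) {
        return tokens.stream()
                .map(token -> parseNumber(token)  // Optional<Integer>
                        .map(n -> "ack:" + n)     // Optional<String>
                        .orElse(""))              // empty string == no reply
                .collect(Collectors.toList());
    }

    static Optional<Integer> parseNumber(String token) {
        try {
            return Optional.of(Integer.parseInt(token));
        } catch (NumberFormatException e) {
            return Optional.empty();
        }
    }
}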

From source file:nu.yona.server.util.HibernateStatisticsService.java

public boolean isStatisticsEnabled() {
    Optional<Statistics> stats = getHibernateStatistics();
    return stats.map(Statistics::isStatisticsEnabled).orElse(false);
}

From source file:alfio.manager.support.CustomMessageManager.java

public Map<String, Object> generatePreview(String eventName, Optional<Integer> categoryId,
        List<MessageModification> input, String username) {
    Map<String, Object> result = new HashMap<>();
    Event event = eventManager.getSingleEvent(eventName, username);
    result.put("affectedUsers", categoryId.map(id -> ticketRepository.countAssignedTickets(event.getId(), id))
            .orElseGet(() -> ticketRepository.countAllAssigned(event.getId())));
    result.put("preview", preview(event, input, username));
    return result;
}
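
Note the orElseGet rather than orElse here: orElse always evaluates its argument, even when the Optional is present, whereas orElseGet only invokes its Supplier on the empty path. That matters when the fallback is expensive, such as the countAllAssigned query above. A minimal illustration (cheapCount and expensiveFallback are invented stand-ins):

import java.util.Optional;

class FallbackDemo {
    static int cheapCount(int id) { return id; }
    static int expensiveFallback() {
        System.out.println("fallback evaluated");
        return -1;
    }

    public static void main(String[] args) {
        Optional<Integer> categoryId = Optional.of(42);

        // orElse: expensiveFallback() runs (and prints) even though a value is present
        int eager = categoryId.map(FallbackDemo::cheapCount).orElse(expensiveFallback());

        // orElseGet: the Supplier is invoked only when the Optional is empty, so nothing prints
        int lazy = categoryId.map(FallbackDemo::cheapCount).orElseGet(FallbackDemo::expensiveFallback);
    }
}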

From source file:alfio.controller.api.admin.UsersApiController.java

@RequestMapping(value = "/users/current", method = GET)
public UserModification loadCurrentUser(Principal principal) {
    User user = userManager.findUserByUsername(principal.getName());
    Optional<Organization> userOrganization = userManager.findUserOrganizations(user.getUsername()).stream()
            .findFirst();
    return new UserModification(user.getId(), userOrganization.map(Organization::getId).orElse(-1),
            userManager.getUserRole(user).name(), user.getUsername(), user.getFirstName(), user.getLastName(),
            user.getEmailAddress());
}

From source file:natalia.dymnikova.cluster.scheduler.impl.AkkaBackedRemoteObservable.java

private CompletableFuture<List<Optional<Address>>> findComputePool(final Remote operator,
        final Optional<InetSocketAddress> inetSocketAddress) {
    return inetSocketAddress.map(this::wrapAddressInFuture).orElseGet(() -> nodeSearcher.search(operator));
}

From source file:io.yields.math.framework.kpi.ExplorerJsonExporter.java

@Override
public void export(Explorer<?> explorer, File destinationFile) {

    ObjectMapper jsonMapper = getObjectMapper();

    try {

        // Note: get() assumes explorer.all() yields at least one element and throws otherwise
        List<String> variableNames = explorer.all().findFirst()
                .map(propertyVerification -> propertyVerification.getVariables().entrySet().stream()
                        .map(Map.Entry::getKey).sorted())
                .get().collect(toList());

        List<Values> variableValues = explorer.all().map(
                propertyVerifications -> new Values(propertyVerifications.getVariables().entrySet().stream()
                        .collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue()))))
                .collect(Collectors.toList());

        StandardResult variables = new StandardResult(buildDefinitions(variableNames, string -> ""),
                variableValues);

        PropertyVerifications<?> verifications = explorer.all().findAny()
                .orElseThrow(IllegalArgumentException::new);
        List<Definition> propertyDefinitions = buildDefinitions(
                verifications.getResults().stream().map(PropertyVerification::getName).collect(toList()),
                name -> verifications.getResults().stream()
                        .filter(verification -> verification.getName().equals(name))
                        .map(verification -> verification.getProperty().map(Property::getExplanation)
                                .orElse(""))
                        .findAny().orElse(""));

        List<Values> propertiesValues = explorer.all()
                .map(propertyVerifications -> toValues(propertyVerifications)).collect(toList());

        StandardResult properties = new StandardResult(propertyDefinitions, propertiesValues);

        Optional<Stats> statsTemplate = explorer.getStats().stream().findAny();
        List<Definition> descriptorDefinitions = buildDefinitions(
                statsTemplate.map(Stats::getDescriptorNames).orElse(Collections.emptyList()),
                name -> statsTemplate.map(stats -> stats.getDescriptorExplanation(name)).orElse(""));

        List<Values> descriptorValues = explorer.getStats().stream().map(stats -> toDescriptorValues(stats))
                .collect(Collectors.toList());

        StandardResult descriptors = new StandardResult(descriptorDefinitions, descriptorValues);

        List<StatDefinition> statsDefinitions = buildStatDefinitions(explorer.getStats(),
                name -> statsTemplate.map(stats -> stats.getStatsExplanation(name)).orElse(""));

        List<Values> statsValues = explorer.getStats().stream().map(stats -> toStatsValues(stats))
                .collect(Collectors.toList());

        StatsResult statsDefinition = new StatsResult(statsDefinitions, statsValues);

        ScoreResult scoreResult = explorer.getScore();

        ExplorerResult result = new ExplorerResult(explorer.getMetadata(), variables, statsDefinition,
                descriptors, properties, scoreResult);

        jsonMapper.writeValue(destinationFile, result);

    } catch (IOException ioe) {
        throw new IllegalStateException(
                format("Could not write explorer file at %s", destinationFile.getAbsolutePath()), ioe);
    }

}

From source file:com.ikanow.aleph2.analytics.services.GraphBuilderEnrichmentService.java

@Override
public void onStageInitialize(IEnrichmentModuleContext context, DataBucketBean bucket,
        EnrichmentControlMetadataBean control, Tuple2<ProcessingStage, ProcessingStage> previous_next,
        Optional<List<String>> next_grouping_fields) {

    _context.set(context);

    final GraphConfigBean dedup_config = BeanTemplateUtils
            .from(Optional.ofNullable(control.config()).orElse(Collections.emptyMap()), GraphConfigBean.class)
            .get();

    // Check if enabled
    final Optional<GraphSchemaBean> maybe_graph_schema = Optional
            .ofNullable(dedup_config.graph_schema_override()).map(Optional::of)
            .orElse(Optionals.of(() -> bucket.data_schema().graph_schema())); //(exists by construction)

    _enabled.set(maybe_graph_schema.map(gs -> Optional.ofNullable(gs.enabled()).orElse(true)).orElse(false));

    if (_enabled.get()) {
        // Get the configured graph db service's delegate and store it

        final GraphSchemaBean graph_schema = maybe_graph_schema.get(); //(exists by construction)

        context.getServiceContext()
                .getService(IGraphService.class, Optional.ofNullable(graph_schema.service_name()))
                .flatMap(graph_service -> graph_service.getUnderlyingPlatformDriver(
                        IEnrichmentBatchModule.class, Optional.of(this.getClass().getName())))
                .ifPresent(delegate -> _delegate.set(delegate));

        _delegate.optional().ifPresent(delegate -> delegate.onStageInitialize(context, bucket, control,
                previous_next, next_grouping_fields));
    }
}
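
Finally, the maybe_graph_schema chain above, ofNullable(x).map(Optional::of).orElse(fallback), is the standard Java 8 idiom for "this Optional, or else that one". On Java 9 and later, Optional.or expresses the same thing directly, and lazily. A small sketch (resolve and defaultValue are invented names):

import java.util.Optional;

class OrElseOptional {
    static Optional<String> resolve(String maybeOverride) {
        Optional<String> override = Optional.ofNullable(maybeOverride);

        // Java 8: re-wrap the present value, then fall back to another Optional (evaluated eagerly)
        Optional<String> resolved8 = override.map(Optional::of).orElse(defaultValue());

        // Java 9+: equivalent, but the Supplier runs only when 'override' is empty
        return override.or(OrElseOptional::defaultValue);
    }

    static Optional<String> defaultValue() { // invented fallback source
        return Optional.of("default");
    }
}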