Example usage for java.util.Optional.map

List of usage examples for java.util.Optional.map

Introduction

On this page you can find usage examples for java.util.Optional.map.

Prototype

public <U> Optional<U> map(Function<? super T, ? extends U> mapper) 

Document

If a value is present, returns an Optional describing (as if by ofNullable) the result of applying the given mapping function to the value; otherwise returns an empty Optional.
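
A minimal, self-contained sketch of the method in isolation (the class name and values here are illustrative, not taken from the examples below):

import java.util.Optional;

public class OptionalMapSketch {
    public static void main(String[] args) {
        // The mapper is applied only when a value is present
        Optional<String> present = Optional.of("hello");
        System.out.println(present.map(String::length)); // Optional[5]

        // On an empty Optional the mapper is never invoked; an empty Optional is returned
        Optional<String> absent = Optional.empty();
        System.out.println(absent.map(String::length)); // Optional.empty

        // A mapper that returns null yields an empty Optional (as if by ofNullable)
        System.out.println(present.map(s -> (Integer) null)); // Optional.empty
    }
}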

Usage

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.TestElasticsearchHadoopUtils.java

@Test
public void test_getTimedIndexes() {
    final LinkedHashMultimap<String, String> test_in = LinkedHashMultimap.create();
    test_in.put("test_time_sliced_index__4633d78cd58b_2015.04.01", "test");
    test_in.put("test_time_sliced_index__4633d78cd58b_2015.06.01", "test");
    test_in.put("r__test_time_sliced_index__4633d78cd58b_2015.06.03", "test");
    test_in.put("r__test_time_sliced_index__4633d78cd58b", "test");

    // min + max
    {
        final Date now = Date
                .from(LocalDateTime.of(2015, 6, 12, 2, 0).atZone(ZoneOffset.systemDefault()).toInstant());

        AnalyticThreadJobInputConfigBean test = BeanTemplateUtils.build(AnalyticThreadJobInputConfigBean.class)
                .with(AnalyticThreadJobInputConfigBean::time_min, "12 days")
                .with(AnalyticThreadJobInputConfigBean::time_max, "8 days").done().get();

        AnalyticThreadJobInputBean input = BeanTemplateUtils.build(AnalyticThreadJobInputBean.class)
                .with(AnalyticThreadJobInputBean::config, test).done().get();

        final Optional<Stream<String>> res = ElasticsearchHadoopUtils.getTimedIndexes(input, test_in, now);

        assertEquals(
                Arrays.asList("test_time_sliced_index__4633d78cd58b_2015.06.01",
                        "r__test_time_sliced_index__4633d78cd58b_2015.06.03"),
                res.map(s -> s.collect(Collectors.toList())).get());
    }
    // neither
    {
        AnalyticThreadJobInputConfigBean test = BeanTemplateUtils.build(AnalyticThreadJobInputConfigBean.class)
                .done().get();

        AnalyticThreadJobInputBean input = BeanTemplateUtils.build(AnalyticThreadJobInputBean.class)
                .with(AnalyticThreadJobInputBean::config, test).done().get();

        final Optional<Stream<String>> res = ElasticsearchHadoopUtils.getTimedIndexes(input, test_in,
                new Date());

        assertEquals(Optional.empty(), res);
    }

}

From source file:org.kie.appformer.ala.wildfly.executor.AppFormerProvisioningHelper.java

private String findInternalDeploymentId(WildflyDriverManagementClient driverManagementClient,
        String deploymentId, String driverClassName) throws Exception {

    Optional<WildflyDriverDef> optional = driverManagementClient.getDeployedDrivers().stream()
            .filter(wildflyDriverDef -> wildflyDriverDef.getDeploymentName().equals(deploymentId)
                    || wildflyDriverDef.getDeploymentName().startsWith(deploymentId + "_" + driverClassName))
            .findFirst();
    return optional.map(value -> value.getDeploymentName()).orElse(null);
}

From source file:ai.grakn.engine.backgroundtasks.taskstorage.GraknStateStorage.java

public String newState(String taskName, String createdBy, Date runAt, Boolean recurring, long interval,
        JSONObject configuration) {
    if (taskName == null || createdBy == null || runAt == null || recurring == null)
        return null;

    Var state = var(TASK_VAR).isa(SCHEDULED_TASK).has(STATUS, CREATED.toString()).has(TASK_CLASS_NAME, taskName)
            .has(CREATED_BY, createdBy).has(RUN_AT, runAt.getTime()).has(RECURRING, recurring)
            .has(RECUR_INTERVAL, interval);

    if (configuration != null)
        state.has(TASK_CONFIGURATION, configuration.toString());

    Optional<String> result = attemptCommitToSystemGraph((graph) -> {
        InsertQuery query = graph.graql().insert(state);
        String id = query.stream().findFirst().get().get(TASK_VAR).getId();

        LOG.debug("Created " + graph.getConcept(id));

        return id;
    }, true);

    return result.map(x -> x).orElse(null);
}

From source file:at.medevit.elexis.emediplan.core.internal.EMediplanServiceImpl.java

@Override
public void exportEMediplanPdf(Mandant author, Patient patient, List<Prescription> prescriptions,
        OutputStream output) {
    if (prescriptions != null && !prescriptions.isEmpty() && output != null) {
        Optional<String> jsonString = getJsonString(author, patient, prescriptions);
        Optional<Image> qrCode = jsonString.map(json -> getQrCode(json)).orElse(Optional.empty());

        Optional<at.medevit.elexis.emediplan.core.model.print.Medication> jaxbModel = getJaxbModel(author,
                patient, prescriptions);
        jaxbModel.ifPresent(model -> {
            createPdf(qrCode, model, output);
        });
    }
}

From source file:ai.grakn.engine.controller.TasksController.java

private TaskState processTask(String className, String createdBy, String runAtTime, String intervalParam,
        String priorityParam) {
    TaskSchedule schedule;
    TaskState.Priority priority;
    try {
        // Get the schedule of the task
        Optional<Duration> optionalInterval = Optional.ofNullable(intervalParam).map(Long::valueOf)
                .map(Duration::ofMillis);
        Instant time = ofEpochMilli(parseLong(runAtTime));
        schedule = optionalInterval.map(interval -> recurring(time, interval)).orElse(TaskSchedule.at(time));

        // Get the priority of a task (default is low)
        priority = Optional.ofNullable(priorityParam).map(TaskState.Priority::valueOf)
                .orElse(DEFAULT_TASK_PRIORITY);
    } catch (Exception e) {
        throw GraknServerException.serverException(400, e);
    }

    // Get the class of this background task
    Class<?> clazz = getClass(className);
    return TaskState.of(clazz, createdBy, schedule, priority);
}

From source file:nu.yona.server.subscriptions.service.BuddyService.java

private void disconnectBuddyIfConnected(UserAnonymizedDto buddyUserAnonymized, UUID userAnonymizedId) {
    Optional<BuddyAnonymizedDto> buddyAnonymized = buddyUserAnonymized.getBuddyAnonymized(userAnonymizedId);
    buddyAnonymized.map(ba -> buddyAnonymizedRepository.findOne(ba.getId())).ifPresent(bae -> {
        bae.setDisconnected();
        userAnonymizedService.updateUserAnonymized(buddyUserAnonymized.getId());
        // Notice: last status change time will not be set, as we are not able to reach the Buddy entity from here
        // Buddy will be removed anyway the first time the other user logs in
    });
    // Else: user who requested buddy relationship didn't process the accept message yet
}

From source file:net.pkhsolutions.pecsapp.control.ScalingPictureFileStorage.java

@NotNull
public Optional<BufferedImage> loadForLayout(@NotNull PictureDescriptor descriptor, @NotNull PageLayout layout)
        throws IOException {
    @NotNull
    Optional<BufferedImage> image = pictureFileStorage.load(descriptor, Optional.of(layout));
    if (!image.isPresent()) {
        LOGGER.debug("Could not find image for descriptor {} and layout {}, trying to find raw image",
                descriptor, layout);
        // Try with the raw image
        image = pictureFileStorage.load(descriptor, Optional.empty());
        if (image.isPresent()) {
            LOGGER.debug("Found raw image, scaling it to layout {} and storing it", layout);
            // We don't have a size for this layout, so let's create one
            image = image.map(img -> pictureTransformer.scaleIfNecessary(img, layout));
            pictureFileStorage.store(descriptor, Optional.of(layout), image.get());
        }
    }
    return image;
}

From source file:natalia.dymnikova.cluster.scheduler.impl.FlowMerger.java

public Stage createMergeStages(final List<Stage> previous, final StageContainer currentStage,
        final Optional<Address> nextAddress) {
    // TODO: doesn't create two mergers in one host
    final List<Stage> children = new ArrayList<>();
    previous.stream().map(Stage::getAddress).distinct().forEach(address -> children.add(Stage.newBuilder()
            .setOperator(currentStage.remoteBytes).setAddress(address).setType(currentStage.stageType)
            .setId(-previous.stream().filter(s -> s.getAddress().equals(address)).findFirst().get().getId())
            .addAllStages(
                    previous.stream().filter(stage -> stage.getAddress().equals(address)).collect(toList()))
            .build()));

    return nextAddress
            .map(addr -> Stage.newBuilder().setOperator(currentStage.remoteBytes).setAddress(addr.toString())
                    .setType(currentStage.stageType).setId(currentStage.id).addAllStages(children).build())
            .orElseThrow(() -> new NoSuchElementException("No candidate for stage " + currentStage.action));
}

From source file:com.example.app.profile.ui.resource.AbstractProfileResourcePropertyEditor.java

/**
 * Configure this component
 *
 * @param value the value
 * @param resourceType the resource type
 * @param repo the repo
 * @param owner the owner
 */
protected void configure(@Nullable ResourceRepositoryItem value, @Nullable ResourceType resourceType,
        @Nullable Repository repo, @Nullable Profile owner) {
    Optional<Profile> oOwner = Optional.ofNullable(owner);
    if (value != null && !_repositoryDAO.isTransient(value)) {
        resourceType = value.getResource().getResourceType();
        oOwner = _repositoryDAO.getOwnerOfRepository(_repositoryDAO.getOwnerOfRepositoryItem(value));
    }
    assert resourceType != null : "ResourceType was null.  This should not happen unless the URL params are screwed up.";
    getValueEditor().setResourceType(resourceType);
    repo = oOwner.map(Profile::getRepository).orElse(repo);
    getValueEditor().setOwner(repo);
    final TimeZone tz = Event.getRequest().getTimeZone();
    _currentUser = _userDAO.getAssertedCurrentUser();
    _canEdit = oOwner
            .map(profile -> _profileDAO.canOperate(_currentUser, profile, tz, _mop.viewRepositoryResources()))
            .orElse(false)
            && oOwner.map(profile -> _profileDAO.canOperate(_currentUser, profile, tz,
                    _mop.modifyRepositoryResources())).orElse(false);

    final Repository fRepo = repo;
    _relation = value != null && value.getId() != null && value.getId() > 0
            ? _repositoryDAO.getRelation(repo, value).orElse(null)
            : ((Supplier<RepositoryItemRelation>) () -> {
                RepositoryItemRelation relation = new RepositoryItemRelation();
                relation.setRelationType(RepositoryItemRelationType.owned);
                relation.setRepository(fRepo);
                return relation;
            }).get();

    if (_canEdit && _relation != null) {
        getValueEditor().setValue(value);
        setSaved(value);
    } else {
        throw new IllegalArgumentException("Invalid Permissions To View Page");
    }
}

From source file:com.ikanow.aleph2.graph.titan.services.TitanGraphService.java

@SuppressWarnings("unchecked")
    @Override
public <T> Optional<T> getUnderlyingPlatformDriver(final Class<T> driver_class,
        final Optional<String> maybe_driver_options) {

    return Patterns.match(driver_class).<Optional<T>>andReturn()
            .when(clazz -> IEnrichmentBatchModule.class.isAssignableFrom(clazz) && maybe_driver_options
                    .map(driver_opts -> driver_opts
                            .equals("com.ikanow.aleph2.analytics.services.GraphBuilderEnrichmentService"))
                    .orElse(false), __ -> Optional.<T>of((T) new TitanGraphBuilderEnrichmentService()))
            .when(clazz -> TitanGraph.class.isAssignableFrom(clazz), __ -> Optional.<T>of((T) _titan))
            .otherwise(__ -> Optional.empty());
}