Example usage for java.util Optional orElse

List of usage examples for java.util Optional orElse

Introduction

On this page you can find example usages of java.util Optional orElse.

Prototype

public T orElse(T other) 

Document

If a value is present, returns the value; otherwise returns other.
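
For a quick, self-contained illustration of both branches, here is a minimal sketch (not drawn from the projects listed below):

import java.util.Optional;

public class OrElseDemo {
    public static void main(String[] args) {
        // Value present: orElse returns the wrapped value and ignores the fallback
        String present = Optional.of("value").orElse("fallback");    // "value"

        // Value absent: orElse returns the supplied default
        String absent = Optional.<String>empty().orElse("fallback"); // "fallback"

        // A pattern that recurs in the examples below: unwrap to a nullable reference
        String nullable = Optional.<String>empty().orElse(null);     // null

        System.out.println(present + " / " + absent + " / " + nullable);
    }
}

Note that the argument to orElse is evaluated eagerly even when a value is present; when computing the fallback is expensive, orElseGet(Supplier) is usually preferred.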

Usage

From source file:com.yahoo.bullet.storm.FilterBoltTest.java

private byte[] getRawPayloadOfNthTuple(int tupleN) {
    // Position 1 is the raw data
    Optional<Object> data = collector.getMthElementFromNthTupleEmittedTo(TopologyConstants.DATA_STREAM, tupleN,
            1);
    return (byte[]) data.orElse(null);
}

From source file:com.epam.ta.reportportal.core.widget.impl.GetWidgetHandler.java

/**
 * Load widget content according to the filter type.
 *
 * @param userFilter     user filter to load content for (may be absent)
 * @param projectName    project name
 * @param contentOptions widget content options
 * @return widget content as a map of chart objects
 */
Map<String, List<ChartObject>> loadContentByFilterType(Optional<UserFilter> userFilter, String projectName,
        ContentOptions contentOptions) {
    // Log doesn't have any statistics, so currently unable to create any
    // widget with valid content for log
    Map<String, List<ChartObject>> content;
    if (userFilter.isPresent() && Log.class.equals(userFilter.get().getFilter().getTarget())) {
        content = new HashMap<>();
    } else {
        BuildFilterStrategy filterStrategy = buildFilterStrategy
                .get(GadgetTypes.findByName(contentOptions.getGadgetType()).get());
        expect(filterStrategy, notNull()).verify(UNABLE_LOAD_WIDGET_CONTENT,
                Suppliers.formattedSupplier("Unknown gadget type: '{}'.", contentOptions.getGadgetType()));
        content = filterStrategy.buildFilterAndLoadContent(userFilter.orElse(null), contentOptions,
                projectName);
    }
    return content;
}

From source file:org.zanata.page.DswidParamChecker.java

private Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
    if (insideInvoke) {
        return null;
    }
    insideInvoke = true;
    try {
        String url = driver.getCurrentUrl();
        String query = new URL(url).getQuery();
        Optional<String> dswid;
        if (query == null) {
            dswid = Optional.empty();
        } else {
            dswid = URLEncodedUtils.parse(query, UTF_8).stream().filter(p -> p.getName().equals("dswid"))
                    .map(NameValuePair::getValue).findFirst();
        }
        if (checkingDswids && oldDswid != null) {
            assert oldUrl != null;
            if (!dswid.isPresent()) {
                String msg = "missing dswid on transition from " + oldUrl + " to " + url;
                //                    throw new AssertionError(msg);
                log.warn(msg);
            } else {
                if (!oldDswid.equals(dswid.get())) {
                    throw new AssertionError("changed dswid on transition from " + oldUrl + " to " + url);
                }
            }
        }
        oldDswid = dswid.orElse(null);
        oldUrl = url;
        return null;
    } catch (MalformedURLException e) {
        // just ignore this URL entirely
        return null;
    } finally {
        insideInvoke = false;
    }
}

From source file:com.spotify.heroic.metadata.elasticsearch.ElasticsearchMetadataModule.java

@JsonCreator
public ElasticsearchMetadataModule(@JsonProperty("id") Optional<String> id,
        @JsonProperty("groups") Optional<Groups> groups,
        @JsonProperty("connection") Optional<ConnectionModule> connection,
        @JsonProperty("writesPerSecond") Optional<Double> writesPerSecond,
        @JsonProperty("rateLimitSlowStartSeconds") Optional<Long> rateLimitSlowStartSeconds,
        @JsonProperty("writeCacheDurationMinutes") Optional<Long> writeCacheDurationMinutes,
        @JsonProperty("deleteParallelism") Optional<Integer> deleteParallelism,
        @JsonProperty("templateName") Optional<String> templateName,
        @JsonProperty("backendType") Optional<String> backendType,
        @JsonProperty("configure") Optional<Boolean> configure) {
    this.id = id;
    this.groups = groups.orElseGet(Groups::empty).or(DEFAULT_GROUP);
    this.connection = connection.orElseGet(ConnectionModule::buildDefault);
    this.writesPerSecond = writesPerSecond.orElse(DEFAULT_WRITES_PER_SECOND);
    this.rateLimitSlowStartSeconds = rateLimitSlowStartSeconds.orElse(DEFAULT_RATE_LIMIT_SLOW_START_SECONDS);
    this.writeCacheDurationMinutes = writeCacheDurationMinutes.orElse(DEFAULT_WRITE_CACHE_DURATION_MINUTES);
    this.deleteParallelism = deleteParallelism.orElse(DEFAULT_DELETE_PARALLELISM);
    this.templateName = templateName.orElse(DEFAULT_TEMPLATE_NAME);
    this.backendTypeBuilder = backendType.flatMap(bt -> ofNullable(backendTypes.get(bt))).orElse(defaultSetup);
    this.configure = configure.orElse(false);
}

From source file:org.ow2.proactive.connector.iaas.cloud.provider.azure.AzureProvider.java

private VirtualMachine.DefinitionStages.WithWindowsCreateManaged configureWindowsVirtualMachine(
        Azure azureService, String instanceTag, Region region, ResourceGroup resourceGroup,
        InstanceCredentials instanceCredentials, VirtualMachineCustomImage image,
        Creatable<NetworkInterface> creatableNetworkInterface) {
    // Retrieve optional credentials
    Optional<String> optionalUsername = Optional.ofNullable(instanceCredentials)
            .map(InstanceCredentials::getUsername);
    Optional<String> optionalPassword = Optional.ofNullable(instanceCredentials)
            .map(InstanceCredentials::getPassword);

    // Prepare the VM with credentials
    return azureService.virtualMachines().define(instanceTag).withRegion(region)
            .withExistingResourceGroup(resourceGroup).withNewPrimaryNetworkInterface(creatableNetworkInterface)
            .withWindowsCustomImage(image.id()).withAdminUsername(optionalUsername.orElse(DEFAULT_USERNAME))
            .withAdminPassword(optionalPassword.orElse(DEFAULT_PASSWORD));
}

From source file:com.ikanow.aleph2.shared.crud.mongodb.services.MongoDbCrudService.java

@Override
public CompletableFuture<Boolean> updateObjectBySpec(final QueryComponent<O> unique_spec,
        final Optional<Boolean> upsert, final UpdateComponent<O> update) {
    try {
        final Tuple2<DBObject, DBObject> query_and_meta = MongoDbUtils.convertToMongoQuery(unique_spec);
        final DBObject update_object = MongoDbUtils.createUpdateObject(update);

        final WriteResult<O, K> wr = _state.coll.update(query_and_meta._1(), update_object,
                upsert.orElse(false), false);

        return CompletableFuture.completedFuture(wr.getN() > 0);
    } catch (Exception e) {
        return FutureUtils.<Boolean>returnError(e);
    }
}

From source file:org.springframework.web.servlet.mvc.method.annotation.ReactiveTypeHandler.java

/**
 * Process the given reactive return value and decide whether to adapt it
 * to a {@link ResponseBodyEmitter} or a {@link DeferredResult}.
 * @return an emitter for streaming or {@code null} if handled internally
 * with a {@link DeferredResult}.
 */
@Nullable
public ResponseBodyEmitter handleValue(Object returnValue, MethodParameter returnType,
        ModelAndViewContainer mav, NativeWebRequest request) throws Exception {

    Assert.notNull(returnValue, "Expected return value");
    ReactiveAdapter adapter = this.reactiveRegistry.getAdapter(returnValue.getClass());
    Assert.state(adapter != null, "Unexpected return value: " + returnValue);

    ResolvableType elementType = ResolvableType.forMethodParameter(returnType).getGeneric(0);
    Class<?> elementClass = elementType.resolve(Object.class);

    Collection<MediaType> mediaTypes = getMediaTypes(request);
    Optional<MediaType> mediaType = mediaTypes.stream().filter(MimeType::isConcrete).findFirst();

    if (adapter.isMultiValue()) {
        if (mediaTypes.stream().anyMatch(MediaType.TEXT_EVENT_STREAM::includes)
                || ServerSentEvent.class.isAssignableFrom(elementClass)) {
            SseEmitter emitter = new SseEmitter(STREAMING_TIMEOUT_VALUE);
            new SseEmitterSubscriber(emitter, this.taskExecutor).connect(adapter, returnValue);
            return emitter;
        }
        if (CharSequence.class.isAssignableFrom(elementClass)) {
            ResponseBodyEmitter emitter = getEmitter(mediaType.orElse(MediaType.TEXT_PLAIN));
            new TextEmitterSubscriber(emitter, this.taskExecutor).connect(adapter, returnValue);
            return emitter;
        }
        if (mediaTypes.stream().anyMatch(MediaType.APPLICATION_STREAM_JSON::includes)) {
            ResponseBodyEmitter emitter = getEmitter(MediaType.APPLICATION_STREAM_JSON);
            new JsonEmitterSubscriber(emitter, this.taskExecutor).connect(adapter, returnValue);
            return emitter;
        }
    }

    // Not streaming...
    DeferredResult<Object> result = new DeferredResult<>();
    new DeferredResultSubscriber(result, adapter, elementType).connect(adapter, returnValue);
    WebAsyncUtils.getAsyncManager(request).startDeferredResultProcessing(result, mav);

    return null;
}

From source file:com.ikanow.aleph2.storage_service_hdfs.services.TestHdfsDataWriteService.java

public void test_writerService_end2end(Optional<String> secondary, boolean is_transient)
        throws InterruptedException, ExecutionException {
    final String temp_dir = System.getProperty("java.io.tmpdir") + File.separator;
    HfdsDataWriteService<TestBean> write_service = getWriter(
            "/test/writer/end2end/" + secondary.orElse("current") + "/", secondary, is_transient);

    // (Tidy up)
    try {
        FileUtils.deleteDirectory(new File(temp_dir + "/data/" + write_service._bucket.full_name()));
    } catch (Exception e) {
    }

    // Check lazy initialization only kicks in once      
    Optional<IBatchSubservice<TestBean>> x = write_service.getBatchWriteSubservice();
    assertEquals(x.get(), write_service._writer.get());
    Optional<IBatchSubservice<TestBean>> y = write_service.getBatchWriteSubservice();
    assertEquals(x.get(), y.get());

    IBatchSubservice<TestBean> batch = x.get();

    // Set up properties for testing:
    batch.setBatchProperties(Optional.of(1000), Optional.of(1000L), Optional.of(Duration.ofSeconds(2L)),
            Optional.of(3));

    Thread.sleep(1000L);
    // Check there are now 3 threads
    assertEquals(3, write_service._writer.get()._state._workers.getActiveCount());

    for (int i = 0; i < 20; ++i) {
        TestBean emit = new TestBean("id" + i, "val" + i);
        if (0 == (i % 2)) {
            if (0 == ((i / 2) % 2)) {
                batch.storeObject(emit);
            } else {
                CompletableFuture<Supplier<Object>> cf = write_service.storeObject(emit);
                assertEquals(null, cf.get().get());
            }
        } else {
            if (0 == ((i / 2) % 2)) {
                batch.storeObjects(Arrays.asList(emit));
            } else {
                CompletableFuture<Tuple2<Supplier<List<Object>>, Supplier<Long>>> cf = write_service
                        .storeObjects(Arrays.asList(emit));
                assertEquals(Collections.emptyList(), cf.get()._1().get());
                assertEquals(1L, cf.get()._2().get().longValue());
            }
        }
    }
    final String infix = is_transient ? IStorageService.TRANSIENT_DATA_SUFFIX_SECONDARY
            : IStorageService.STORED_DATA_SUFFIX_PROCESSED_SECONDARY;
    final String infix_name = is_transient ? "testj-testm" : "";

    // Check that initially the files are stored locally
    File init_dir = new File((temp_dir + "/data/" + write_service._bucket.full_name() + infix
            + secondary.orElse("current") + "/" + infix_name + "/.spooldir/").replace("/", File.separator));
    File final_dir = new File((temp_dir + "/data/" + write_service._bucket.full_name() + infix
            + secondary.orElse("current") + "/" + infix_name + "/all_time/").replace("/", File.separator));

    {
        int ii = 1;
        for (; ii <= 50; ++ii) {
            Thread.sleep(250L);
            if (6 == init_dir.list().length) {
                break;
            }
        }
        System.out.println("(exited from file system check after " + ii * 2.5 + " s)");
    }

    assertEquals("Needs to have 6 files, including 3x .crc: " + Arrays.toString(init_dir.list()), 6,
            init_dir.list().length); //*2 because CRC
    assertTrue(
            "Nothing in final dir: " + (final_dir.exists() ? Arrays.toString(final_dir.list()) : "(non-exist)"),
            !final_dir.exists() || final_dir.list().length == 0);

    {
        int ii = 1;
        for (; ii <= 50; ++ii) {
            Thread.sleep(2500L);
            if (0 == init_dir.list().length) {
                break;
            }
        }
        System.out.println("(exited from file system check after " + ii * 2.5 + " s)");
    }

    assertEquals(0, init_dir.list().length); //*2 because CRC
    assertEquals(6, final_dir.list().length); //*2 because CRC      

    // Change batch properties so that will segment (also check number of threads reduces)
    batch.setBatchProperties(Optional.of(10), Optional.of(1000L), Optional.of(Duration.ofSeconds(5L)),
            Optional.of(1));
    List<TestBean> l1 = IntStream.range(0, 8).boxed().map(i -> new TestBean("id" + i, "val" + i))
            .collect(Collectors.toList());
    List<TestBean> l2 = IntStream.range(8, 15).boxed().map(i -> new TestBean("id" + i, "val" + i))
            .collect(Collectors.toList());

    batch.storeObjects(l1);
    Thread.sleep(750L);
    assertEquals(6, final_dir.list().length); //*2 because CRC      
    System.out.println("Found: 6 files: " + Arrays.stream(final_dir.list()).collect(Collectors.joining(";")));

    batch.storeObjects(l2);
    System.out.println("Added 7 more objects at " + new Date());
    for (int jj = 0; jj < 5; ++jj) {
        Thread.sleep(1500L);
        if (final_dir.list().length > 6)
            break;
    }
    System.out.println("(Check init dir cleared: "
            + Arrays.stream(init_dir.list()).collect(Collectors.joining(";")) + ")");
    assertEquals("Should have 8 files: " + Arrays.stream(final_dir.list()).collect(Collectors.joining(";")), 8,
            final_dir.list().length); //*2 because CRC   

    System.out.println("(Deleting datastore and checking it's empty)");
    assertTrue("Deleted datastore: ", write_service.deleteDatastore().get()); // (just quick test since this uses handleBucketDeletion which is tested elsewhere...)
    String[] final_dir_list = Optional.ofNullable(final_dir.list()).orElse(new String[0]);
    assertEquals("Should have 0 files: " + Arrays.stream(final_dir_list).collect(Collectors.joining(";")), 0,
            final_dir_list.length); //*2 because CRC   
}

From source file:com.ikanow.aleph2.shared.crud.mongodb.services.MongoDbCrudService.java

@Override
public CompletableFuture<Long> updateObjectsBySpec(final QueryComponent<O> spec, final Optional<Boolean> upsert,
        final UpdateComponent<O> update) {
    try {
        final Tuple2<DBObject, DBObject> query_and_meta = MongoDbUtils.convertToMongoQuery(spec);
        final DBObject update_object = MongoDbUtils.createUpdateObject(update);

        final WriteResult<O, K> wr = _state.coll.update(query_and_meta._1(), update_object,
                upsert.orElse(false), true);

        return CompletableFuture.completedFuture((Long) (long) wr.getN());
    } catch (Exception e) {
        return FutureUtils.<Long>returnError(e);
    }
}

From source file:com.courtalon.gigaMvcGalerie.web.ImageController.java

@RequestMapping(value = "/images/data", method = RequestMethod.POST, produces = "application/json")
@ResponseBody
@JsonView(AssetOnly.class)
public Image upload(@RequestParam("file") MultipartFile file,
        @RequestParam("licenseId") Optional<Integer> licenseId,
        @RequestParam("sourceId") Optional<Integer> sourceId,
        @RequestParam("tagsId") Optional<List<Integer>> tagsId) {
    Image img = null;
    try {
        img = getImageRepository()
                .save(new Image(0, file.getOriginalFilename(), "", new Date(), file.getOriginalFilename(),
                        file.getContentType(), file.getSize(), DigestUtils.md5Hex(file.getInputStream())));

        getImageRepository().saveImageFile(img.getId(), file.getInputStream());
        img.addTag(getTagRepository().findByLibelleAndSystemTag(TagRepository.UPLOADED, true));
        img.setLicense(getLicenseTypeRepository().findOne(licenseId.orElse(LicenseType.NO_LICENSE_ID)));
        img.setSource(getAssetSourceRepository().findOne(sourceId.orElse(AssetSource.UNKOWN_SOURCE_ID)));
        final Image image = img;
        if (tagsId.isPresent()) {
            tagsId.get().forEach(id -> image.addTag(getTagRepository().findByIdAndSystemTag(id, false)));
        }
        getImageRepository().save(img);

    } catch (IOException e) {
        log.error(e);
        throw new HttpClientErrorException(HttpStatus.INTERNAL_SERVER_ERROR, "could not save uploaded image");
    }
    return img;
}