Example usage for java.util.Optional.of

List of usage examples for java.util.Optional.of

Introduction

On this page you can find example usage for java.util.Optional.of.

Prototype

public static <T> Optional<T> of(T value) 

Document

Returns an Optional describing the given non-null value.
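
If the value passed to of is null, a NullPointerException is thrown. Below is a minimal, self-contained sketch (not taken from any of the projects listed under Usage) illustrating that contract and how Optional.of differs from Optional.ofNullable:

import java.util.Optional;

public class OptionalOfExample {
    public static void main(String[] args) {
        // Wrap a value that is known to be non-null
        Optional<String> name = Optional.of("example");
        System.out.println(name.isPresent()); // true
        System.out.println(name.get());       // example

        // Optional.of(null) would throw a NullPointerException;
        // Optional.ofNullable accepts values that may be null
        Optional<String> maybe = Optional.ofNullable(null);
        System.out.println(maybe.isPresent());       // false
        System.out.println(maybe.orElse("default")); // default
    }
}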

Usage

From source file:com.uber.hoodie.common.util.TestCompactionUtils.java

@Test
public void testBuildFromFileSlice() {
    // Empty File-Slice with no data and log files
    FileSlice emptyFileSlice = new FileSlice("000", "empty1");
    HoodieCompactionOperation op = CompactionUtils.buildFromFileSlice(DEFAULT_PARTITION_PATHS[0],
            emptyFileSlice, Optional.of(metricsCaptureFn));
    testFileSliceCompactionOpEquality(emptyFileSlice, op, DEFAULT_PARTITION_PATHS[0]);

    // File Slice with data-file but no log files
    FileSlice noLogFileSlice = new FileSlice("000", "noLog1");
    noLogFileSlice.setDataFile(new TestHoodieDataFile("/tmp/noLog.parquet"));
    op = CompactionUtils.buildFromFileSlice(DEFAULT_PARTITION_PATHS[0], noLogFileSlice,
            Optional.of(metricsCaptureFn));
    testFileSliceCompactionOpEquality(noLogFileSlice, op, DEFAULT_PARTITION_PATHS[0]);

    //File Slice with no data-file but log files present
    FileSlice noDataFileSlice = new FileSlice("000", "noData1");
    noDataFileSlice
            .addLogFile(new HoodieLogFile(new Path(FSUtils.makeLogFileName("noData1", ".log", "000", 1))));
    noDataFileSlice
            .addLogFile(new HoodieLogFile(new Path(FSUtils.makeLogFileName("noData1", ".log", "000", 2))));
    op = CompactionUtils.buildFromFileSlice(DEFAULT_PARTITION_PATHS[0], noDataFileSlice,
            Optional.of(metricsCaptureFn));
    testFileSliceCompactionOpEquality(noDataFileSlice, op, DEFAULT_PARTITION_PATHS[0]);

    //File Slice with data-file and log files present
    FileSlice fileSlice = new FileSlice("000", "noData1");
    fileSlice.setDataFile(new TestHoodieDataFile("/tmp/noLog.parquet"));
    fileSlice.addLogFile(new HoodieLogFile(new Path(FSUtils.makeLogFileName("noData1", ".log", "000", 1))));
    fileSlice.addLogFile(new HoodieLogFile(new Path(FSUtils.makeLogFileName("noData1", ".log", "000", 2))));
    op = CompactionUtils.buildFromFileSlice(DEFAULT_PARTITION_PATHS[0], fileSlice,
            Optional.of(metricsCaptureFn));
    testFileSliceCompactionOpEquality(fileSlice, op, DEFAULT_PARTITION_PATHS[0]);
}

From source file:enmasse.controller.api.v3.amqp.AmqpFlavorsApiTest.java

@Test(expected = IllegalArgumentException.class)
public void testGetUnknown() throws IOException {
    doRequest("GET", "", Optional.of("unknown"));
}

From source file:org.lendingclub.mercator.aws.VPCScanner.java

@Override
public Optional<String> computeArn(JsonNode n) {

    String region = n.get(AWSScanner.AWS_REGION_ATTRIBUTE).asText();

    return Optional.of(String.format("arn:aws:ec2:%s:%s:vpc/%s", region,
            n.get(AccountScanner.ACCOUNT_ATTRIBUTE).asText(), n.get("aws_vpcId").asText()));
}

From source file:org.lendingclub.mercator.aws.NetworkInterfaceScanner.java

@Override
public Optional<String> computeArn(JsonNode n) {
    return Optional.of(createEc2Arn("network-interface", n.path("aws_networkInterfaceId").asText()));
}

From source file:org.openmhealth.shim.googlefit.mapper.GoogleFitPhysicalActivityDataPointMapper.java

/**
 * Maps a JSON response node from the Google Fit API to a {@link PhysicalActivity} measure.
 *
 * @param listNode an individual datapoint from the array in the Google Fit response
 * @return a {@link DataPoint} object containing a {@link PhysicalActivity} measure with the appropriate values from
 * the JSON node parameter, wrapped as an {@link Optional}
 */
@Override
protected Optional<DataPoint<PhysicalActivity>> asDataPoint(JsonNode listNode) {

    JsonNode listValueNode = asRequiredNode(listNode, "value");
    long activityTypeId = asRequiredLong(listValueNode.get(0), "intVal");

    // This means that the activity was actually sleep, which should be captured using sleep duration, or
    // stationary, which should not be captured as it is the absence of activity
    if (sleepActivityTypes.contains((int) activityTypeId)
            || stationaryActivityTypes.contains((int) activityTypeId)) {

        return Optional.empty();
    }

    String activityName = googleFitDataTypes.get((int) activityTypeId);
    PhysicalActivity.Builder physicalActivityBuilder = new PhysicalActivity.Builder(activityName);
    setEffectiveTimeFrameIfPresent(physicalActivityBuilder, listNode);
    PhysicalActivity physicalActivity = physicalActivityBuilder.build();
    Optional<String> originSourceId = asOptionalString(listNode, "originDataSourceId");
    return Optional.of(newDataPoint(physicalActivity, originSourceId.orElse(null)));

}

From source file:com.formkiq.core.service.propertystore.PropertyStoreDatabase.java

@Override
public Optional<Certificate> retrieveCertificate() throws CertificateException {

    Optional<Certificate> result = Optional.empty();
    String certstring = this.propertyService.getProperty(null, CERTIFICATE_KEY);

    if (!isEmpty(certstring)) {

        CertificateFactory cf = CertificateFactory.getInstance("X.509");
        result = Optional.of(cf.generateCertificate(new ByteArrayInputStream(Strings.getBytes(certstring))));
    }

    return result;
}

From source file:com.ikanow.aleph2.storm.samples.topology.JavaScriptTopology2.java

@Override
public Tuple2<Object, Map<String, String>> getTopologyAndConfiguration(DataBucketBean bucket,
        IEnrichmentModuleContext context) {
    TopologyBuilder builder = new TopologyBuilder();

    String contextSignature = context.getEnrichmentContextSignature(Optional.of(bucket), Optional.empty());

    builder.setSpout("timer", new TimerSpout(3000L));
    JavaScriptProviderBean providerBean = BeanTemplateUtils
            .from(bucket.streaming_enrichment_topology().config(), JavaScriptProviderBean.class).get();
    if (providerBean == null) {
        providerBean = new JavaScriptProviderBean();
    }
    if (null == providerBean.getGlobalScript()) {
        providerBean.setGlobalScript(
                new PropertyBasedScriptProvider("/com/ikanow/aleph2/storm/samples/script/js/scripts.properties")
                        .getGlobalScript());
    }
    BeanBasedScriptProvider mapperScriptProvider = new BeanBasedScriptProvider(providerBean);
    BeanBasedScriptProvider folderScriptProvider = new BeanBasedScriptProvider(providerBean);

    final Collection<Tuple2<BaseRichSpout, String>> entry_points = context
            .getTopologyEntryPoints(BaseRichSpout.class, Optional.of(bucket));
    entry_points.forEach(spout_name -> builder.setSpout(spout_name._2(), spout_name._1()));
    entry_points.stream().reduce(
            builder.setBolt("mapperBolt", new JavaScriptMapperBolt(contextSignature, mapperScriptProvider)),
            (acc, v) -> acc.shuffleGrouping(v._2()), (acc1, acc2) -> acc1 // (not possible in practice)
    );
    builder.setBolt("folderBolt", new JavaScriptFolderBolt(contextSignature, folderScriptProvider))
            .shuffleGrouping("mapperBolt").shuffleGrouping("timer");
    builder.setBolt("out", context.getTopologyStorageEndpoint(BaseRichBolt.class, Optional.of(bucket)))
            .localOrShuffleGrouping("folderBolt");
    return new Tuple2<Object, Map<String, String>>(builder.createTopology(), new HashMap<String, String>());
}

From source file:io.syndesis.model.integration.Integration.java

@JsonIgnore
default Optional<IntegrationStatus> getStatus() {
    Optional<IntegrationRevision> deployedRevision = getDeployedRevision();

    return Optional.of(new IntegrationStatus.Builder()
            .currentState(
                    deployedRevision.map(r -> r.getCurrentState()).orElse(IntegrationRevisionState.Pending))
            .build());
}

From source file:com.teradata.tempto.fulfillment.table.hive.HiveTableDefinition.java

public static HiveTableDefinition hiveTableDefinition(TableHandle handle, String createTableDDLTemplate,
        HiveDataSource dataSource) {
    return new HiveTableDefinition(handle, createTableDDLTemplate, Optional.of(dataSource), Optional.empty());
}

From source file:org.lendingclub.mercator.aws.KinesisScanner.java

@Override
public Optional<Double> getDefaultRateLimitPerSecond() {
    // kinesis describe operations are heavily rate limited on the AWS side
    return Optional.of(1d);
}