Example usage for java.util Optional ifPresent

List of usage examples for java.util Optional ifPresent

Introduction

On this page you can find usage examples for java.util Optional ifPresent.

Prototype

public void ifPresent(Consumer<? super T> action) 

Document

If a value is present, performs the given action with the value, otherwise does nothing.
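
A minimal, self-contained sketch of the behavior described above (the values here are illustrative):

import java.util.Optional;

public class IfPresentDemo {
    public static void main(String[] args) {
        Optional<String> present = Optional.of("hello");
        Optional<String> empty = Optional.empty();

        present.ifPresent(System.out::println); // a value is present: prints "hello"
        empty.ifPresent(System.out::println); // no value: the action is skipped
    }
}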

Usage

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkJsInterpreterTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    final SetOnce<IBucketLogger> bucket_logger = new SetOnce<>();
    final SetOnce<String> job_name = new SetOnce<>(); // (the string we'll use in logging activities)
    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        bucket_logger.set(context.getLogger(context.getBucket()));
        job_name.set(context.getJob().map(j -> j.name()).orElse("no_name"));

        // Optional: make really sure the job exits after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        final SparkTopologyConfigBean config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();

        final String js_script = Optional.ofNullable(config.script()).orElse("");

        //INFO:
        System.out.println("Starting " + job_name.get());

        SparkConf spark_context = new SparkConf().setAppName(job_name.get());

        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc, Collections.emptySet());
            final JavaPairRDD<Object, Tuple2<Long, IBatchRecord>> all_inputs = inputs.values().stream()
                    .reduce((acc1, acc2) -> acc1.union(acc2)).orElse(null);

            // Load globals:
            ScriptEngineManager manager = new ScriptEngineManager();
            ScriptEngine engine = manager.getEngineByName("JavaScript");
            engine.put("_a2_global_context", context);
            engine.put("_a2_global_bucket", context.getBucket().get());
            engine.put("_a2_global_job", context.getJob().get());
            engine.put("_a2_global_config",
                    BeanTemplateUtils.configureMapper(Optional.empty()).convertValue(config, JsonNode.class));
            engine.put("_a2_global_mapper", BeanTemplateUtils.configureMapper(Optional.empty()));
            //TODO (until bucket logger is serializable, don't allow anywhere)
            //engine.put("_a2_bucket_logger", bucket_logger.optional().orElse(null));
            engine.put("_a2_enrichment_name", job_name.get());
            engine.put("_a2_spark_inputs", inputs);
            engine.put("_a2_spark_inputs_all", all_inputs);
            engine.put("_a2_spark_context", jsc);

            Stream.concat(config.uploaded_lang_files().stream(),
                    Stream.of("aleph2_sparkjs_globals_before.js", ""))
                    .flatMap(Lambdas.flatWrap_i(import_path -> {
                        try {
                            if (import_path.equals("")) { // also import the user script just before here
                                return js_script;
                            } else
                                return IOUtils.toString(SparkJsInterpreterTopology.class.getClassLoader()
                                        .getResourceAsStream(import_path), "UTF-8");
                        } catch (Throwable e) {
                            bucket_logger.optional()
                                    .ifPresent(l -> l.log(Level.ERROR,
                                            ErrorUtils.lazyBuildMessage(false,
                                                    () -> SparkJsInterpreterTopology.class.getSimpleName(),
                                                    () -> job_name.get() + ".main", () -> null,
                                                    () -> ErrorUtils.get(
                                                            "Error initializing stage {0} (script {1}): {2}",
                                                            job_name.get(), import_path, e.getMessage()),
                                                    () -> ImmutableMap.<String, Object>of("full_error",
                                                            ErrorUtils.getLongForm("{0}", e)))));

                            System.out.println(ErrorUtils.getLongForm("onStageInitialize: {0}", e));
                            throw e; // ignored
                        }
                    })).forEach(Lambdas.wrap_consumer_i(script -> {
                        try {
                            engine.eval(script);
                        } catch (Throwable e) {
                            bucket_logger.optional()
                                    .ifPresent(l -> l.log(Level.ERROR,
                                            ErrorUtils.lazyBuildMessage(false,
                                                    () -> SparkJsInterpreterTopology.class.getSimpleName(),
                                                    () -> job_name.get() + ".main", () -> null,
                                                    () -> ErrorUtils.get(
                                                            "Error initializing stage {0} (main script): {1}",
                                                            job_name.get(), e.getMessage()),
                                                    () -> ImmutableMap.<String, Object>of("full_error",
                                                            ErrorUtils.getLongForm("{0}", e)))));

                            System.out.println(ErrorUtils.getLongForm("onStageInitialize: {0}", e));
                            throw e; // ignored
                        }
                    }));

            jsc.stop();

            //INFO:
            System.out.println("Finished " + job_name.get());
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));

        bucket_logger.optional().ifPresent(l -> l.log(Level.ERROR, ErrorUtils.lazyBuildMessage(false,
                () -> SparkJsInterpreterTopology.class.getSimpleName()
                        + job_name.optional().map(j -> "." + j).orElse(""),
                () -> job_name.optional().orElse("global") + ".main", () -> null,
                () -> ErrorUtils.get("Error on batch in job {0}: {1}",
                        job_name.optional().orElse("global") + ".main", t.getMessage()),
                () -> ImmutableMap.<String, Object>of("full_error", ErrorUtils.getLongForm("{0}", t)))));
    }
}

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkScalaInterpreterTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    final SetOnce<IBucketLogger> logger = new SetOnce<>();

    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        logger.set(context.getLogger(context.getBucket()));

        // Optional: make really sure the job exits after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        //INFO:
        System.out.println("Starting SparkScalaInterpreterTopology logging=" + logger.optional().isPresent());

        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.INFO,
                    ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                            "Starting SparkScalaInterpreterTopology.{0}",
                            Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
        });

        final SparkTopologyConfigBean job_config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();
        final String scala_script = Optional.ofNullable(job_config.script()).orElse("");

        final String wrapper_script = IOUtils.toString(
                SparkScalaInterpreterTopology.class.getClassLoader().getResourceAsStream("ScriptRunner.scala"),
                "UTF-8");
        final String to_compile = wrapper_script.replace("USER_SCRIPT", scala_script);
        final SparkCompilerService scs = new SparkCompilerService();
        final Tuple2<ClassLoader, Object> o = scs.buildClass(to_compile, "ScriptRunner", logger.optional());

        Thread.currentThread().setContextClassLoader(o._1());

        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        SparkConf spark_context = new SparkConf().setAppName("SparkPassthroughTopology");

        final long streaming_batch_interval = (long) spark_context
                .getInt(SparkTopologyConfigBean.STREAMING_BATCH_INTERVAL, 10);

        // MAIN PROCESSING

        final Method m = o._2().getClass().getMethod("runScript", SparkScriptEngine.class);

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        boolean is_streaming = context.getJob().map(j -> j.analytic_type())
                .map(t -> MasterEnrichmentType.streaming == t).orElse(false);
        final Either<JavaSparkContext, JavaStreamingContext> jsc = Lambdas.get(() -> {
            return is_streaming
                    ? Either.<JavaSparkContext, JavaStreamingContext>right(new JavaStreamingContext(
                            spark_context, Durations.seconds(streaming_batch_interval)))
                    : Either.<JavaSparkContext, JavaStreamingContext>left(new JavaSparkContext(spark_context));
        });
        try {
            final JavaSparkContext jsc_batch = jsc.either(l -> l, r -> r.sparkContext());

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc_batch, Collections.emptySet());

            final Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>> streaming_inputs = jsc
                    .<Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>>either(
                            l -> HashMultimap
                                    .<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>create(),
                            r -> SparkTechnologyUtils.buildStreamingSparkInputs(context, test_spec, r,
                                    Collections.emptySet()));

            final SparkScriptEngine script_engine_bridge = new SparkScriptEngine(context, inputs,
                    streaming_inputs, test_spec, jsc_batch, jsc.either(l -> null, r -> r), job_config);

            // Add driver and generated JARs to path:
            jsc_batch.addJar(LiveInjector.findPathJar(o._2().getClass()));

            m.invoke(o._2(), script_engine_bridge);

            jsc.either(l -> {
                l.stop();
                return null;
            }, r -> {
                r.stop();
                return null;
            });

            logger.optional().ifPresent(l -> {
                l.inefficientLog(Level.INFO,
                        ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                                "Stopping SparkScalaInterpreterTopology.{0}",
                                Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
            });

            //INFO:
            System.out.println("Finished interpreter");
        } finally {
            jsc.either(l -> {
                l.close();
                return null;
            }, r -> {
                r.close();
                return null;
            });
        }
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
    } catch (Throwable t) {
        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.ERROR, ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology",
                    "main",
                    ErrorUtils.getLongForm("Error executing SparkScalaInterpreterTopology.unknown: {0}", t)));
        });

        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
        System.exit(-1);
    }
}

From source file:Main.java

/**
 * @return an ArrayList containing the value of each present element; empty Optionals are skipped
 */
@SafeVarargs
public static <E> List<E> asArrayList(Optional<E>... elements) {
    List<E> newArrayList = new ArrayList<>();
    for (Optional<E> element : elements) {
        element.ifPresent(newArrayList::add);
    }
    return newArrayList;
}
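
A hypothetical call site (the literal values are illustrative); empty Optionals contribute nothing to the result:

    // values contains exactly "a" and "b"
    List<String> values = asArrayList(Optional.of("a"), Optional.empty(), Optional.of("b"));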

From source file:com.github.blindpirate.gogradle.util.CollectionUtils.java

@SafeVarargs
public static <T> List<T> collectOptional(Optional<T>... optionals) {
    List<T> ret = new ArrayList<>();
    for (Optional<T> optional : optionals) {
        optional.ifPresent(ret::add);
    }
    return ret;
}

From source file:fi.hsl.parkandride.core.domain.PredictionResult.java

public static List<PredictionResult> from(Optional<PredictionBatch> batch) {
    List<PredictionResult> results = new ArrayList<>();
    batch.ifPresent(pb -> results.addAll(from(pb)));
    return results;
}

From source file:net.hamnaberg.json.Error.java

public static Error create(Optional<String> title, Optional<String> code, Optional<String> message) {
    final ObjectNode obj = JsonNodeFactory.instance.objectNode();
    title.ifPresent(value -> obj.put("title", value));
    code.ifPresent(value -> obj.put("code", value));
    message.ifPresent(value -> obj.put("message", value));
    return new Error(obj);
}
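
A hypothetical call (the field values are illustrative); because code is empty, the resulting JSON object carries only "title" and "message":

    Error notFound = Error.create(Optional.of("Not Found"), Optional.empty(),
            Optional.of("No such resource"));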

From source file:io.devcon5.pageobjects.tx.TransactionHelper.java

/**
 * Adds transaction support to the page. The transaction support captures the execution time of methods
 * annotated with {@link io.devcon5.pageobjects.tx.Transaction}.
 *
 * @param <T>
 *         the concrete type of the enhanced element
 * @param transactionSupport
 *         the transactionSupport element to be enhanced.
 * @return the enhanced (proxied) element
 */
@SuppressWarnings("unchecked")
public static <T extends TransactionSupport> T addTransactionSupport(TransactionSupport transactionSupport) {
    return (T) Enhancer.create(transactionSupport.getClass(),
            (MethodInterceptor) (obj, method, args, proxy) -> {
                final Optional<String> txName = getTxName(transactionSupport, method);
                try {
                    txName.ifPresent(transactionSupport::txBegin);
                    Object result = method.invoke(transactionSupport, args);
                    //dynamically enhance return values, if they are transactionSupport and not yet enhanced
                    //this is required, e.g. if the method returns 'this' or creates new objects that would
                    //otherwise not be enhanced
                    if (!isCGLibProxy(result) && result instanceof TransactionSupport) {
                        result = addTransactionSupport((TransactionSupport) result);
                    }
                    return result;
                } finally {
                    txName.ifPresent(transactionSupport::txEnd);
                }
            });
}
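
A hedged usage sketch; LoginPage and its @Transaction-annotated login method are assumptions, not part of the shown source:

    // Hypothetical page object implementing TransactionSupport
    LoginPage page = addTransactionSupport(new LoginPage());
    page.login("user", "secret"); // txBegin/txEnd fire around the annotated method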

From source file:edu.jhu.hlt.concrete.stanford.ConcreteStanfordRunner.java

public static void prepareInputOutput(Path in, Path out) throws IOException {
    if (!Files.exists(in))
        throw new IOException(in.toString() + " does not exist. Ensure it exists and re-run this program.");

    Optional<Path> outPath = Optional.ofNullable(out.getParent());
    outPath.ifPresent(p -> {
        if (!Files.exists(p)) {
            LOGGER.debug("Attempting to create output directory: {}", p);
            try {
                Files.createDirectories(p);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
    });
}

From source file:net.hamnaberg.json.Property.java

public static Property value(String name, Optional<String> prompt, Optional<Value> value) {
    ObjectNode node = makeObject(name, prompt);
    value.ifPresent(val -> node.set("value", val.asJson()));
    return new Property(node);
}

From source file:gov.ca.cwds.cals.util.PlacementHomeUtil.java

private static StringBuilder composeFirstPartOfFacilityName(ApplicantDTO applicant) {
    StringBuilder sb = new StringBuilder();
    Optional<String> lastName = Optional.ofNullable(applicant.getLastName());
    Optional<String> firstName = Optional.ofNullable(applicant.getFirstName());
    lastName.ifPresent(ln -> {
        sb.append(ln);
        if (firstName.isPresent()) {
            sb.append(", ");
        }
    });
    firstName.ifPresent(sb::append);
    return sb;
}