List of usage examples for java.util.Optional.ifPresent
public void ifPresent(Consumer<? super T> action)
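If a value is present, ifPresent performs the given action with it; otherwise it does nothing. As a rough mental model (a sketch of the contract, not the actual JDK source), a call behaves like:

    if (optional.isPresent()) {
        action.accept(optional.get());
    }

The examples below show the method in practice, from simple stream reductions to full applications.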
From source file:Main.java
import java.util.Arrays;
import java.util.List;
import java.util.Optional;

public static void main(String... args) {
    List<Integer> numbers = Arrays.asList(3, 4, 5, 1, 2);
    Optional<Integer> min = numbers.stream().reduce(Integer::min);
    min.ifPresent(System.out::println); // prints 1
}
From source file:Main.java
import java.util.Optional;
import java.util.stream.Stream;

public static void main(String[] args) {
    // reduce numbers to their sum; without an identity value the result is an Optional
    Stream<Integer> numbers = Stream.of(3, 5, 7, 9, 11);
    Optional<Integer> sum = numbers.reduce((x, y) -> x + y);
    sum.ifPresent(System.out::println); // prints 35

    // reduce numbers to their sum with an identity value (no Optional involved)
    numbers = Stream.of(3, 5, 7, 9, 11);
    Integer sumWithSeed = numbers.reduce(0, (x, y) -> x + y);
    System.out.println(sumWithSeed); // 35

    // three-argument reduce: the combiner merges partial totals,
    // so this form also works on parallel streams
    Stream<String> words = Stream.of("All", "men", "are", "created", "equal");
    Integer lengthOfAllWords = words.reduce(0,
            (total, word) -> total + word.length(),
            (total1, total2) -> total1 + total2);
    System.out.println(lengthOfAllWords); // 21
}
From source file:Main.java
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

public static void main(String[] args) {
    Optional<String> value = Optional.of("some value");
    System.out.println(value.isPresent()); // true
    System.out.println(value.get()); // some value

    String str = null;
    // Optional.of(str); // would throw NullPointerException; use Optional.ofNullable for nullable values

    Optional<Integer> o = Optional.empty();
    System.out.println(o.isPresent()); // false
    System.out.println(o.orElse(42)); // 42

    List<Integer> results = new ArrayList<>();
    Optional<Integer> second = Optional.of(3);
    second.ifPresent(results::add); // must operate via side-effects, unfortunately...
    System.out.println(results); // [3]

    o = Optional.empty();
    System.out.println(o.orElse(42)); // 42
    o = Optional.of(42);
    System.out.println(o.get()); // 42

    o = Optional.empty();
    o.get(); // throws NoSuchElementException: the Optional is empty
}
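One aside on the "side-effects" comment in the snippet above: it reflects Java 8. From Java 9 on, Optional also offers ifPresentOrElse(Consumer, Runnable), which additionally takes an action to run when no value is present:

    // Java 9+ variant of the results::add line above
    Optional<Integer> second = Optional.of(3);
    second.ifPresentOrElse(results::add,
            () -> System.out.println("no value present"));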
From source file:Main.java
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

public static void main(final String[] args) {
    List<String> stringCollection = new ArrayList<>();
    stringCollection.add("ddd2");
    stringCollection.add("aaa2");
    stringCollection.add("bbb1");
    stringCollection.add("aaa1");
    stringCollection.add("bbb3");
    stringCollection.add("ccc");
    stringCollection.add("bbb2");
    stringCollection.add("ddd1");

    // sort, then join all elements with "#"; the Optional is empty only if the list is empty
    Optional<String> reduced = stringCollection.stream()
            .sorted()
            .reduce((s1, s2) -> s1 + "#" + s2);
    reduced.ifPresent(System.out::println); // aaa1#aaa2#bbb1#bbb2#bbb3#ccc#ddd1#ddd2
}
From source file:Main.java
import java.util.Arrays;
import java.util.List;
import java.util.Optional;

public static void main(String[] args) throws Exception {
    // Dish is a helper class (name, vegetarian flag, calories, type) defined elsewhere
    List<Dish> menu = Arrays.asList(
            new Dish("pork", false, 800, Dish.Type.MEAT),
            new Dish("beef", false, 700, Dish.Type.MEAT),
            new Dish("chicken", false, 400, Dish.Type.MEAT),
            new Dish("rice", true, 350, Dish.Type.OTHER),
            new Dish("pizza", true, 550, Dish.Type.OTHER),
            new Dish("prawns", false, 400, Dish.Type.FISH),
            new Dish("salmon", false, 450, Dish.Type.FISH));

    // Filtering with a predicate; findAny returns an Optional since there may be no match
    Optional<Dish> vegetarianMenu = menu.stream().filter(Dish::isVegetarian).findAny();
    vegetarianMenu.ifPresent(d -> System.out.println(d.getName()));
}
From source file:Main.java
import java.util.Optional;

public static void main(String[] args) {
    Optional<String> optional = Optional.of("bam");

    optional.isPresent(); // true
    optional.get(); // "bam"
    optional.orElse("fallback"); // "bam"

    optional.ifPresent((s) -> System.out.println(s.charAt(0))); // "b"
}
From source file:Main.java
import java.util.Optional;

public static void main(String[] args) {
    Optional<String> optional1 = Optional.empty();
    Optional<String> optional2 = Optional.of("DEMO");

    System.out.println("optional2.get = " + optional2.get()); // safe only because the value is known to be present
    System.out.println("optional1.orElse = " + optional1.orElse("Something else"));
    optional2.ifPresent(System.out::println); // DEMO
    System.out.println("optional1.isPresent = " + optional1.isPresent()); // false
}
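Note how the example mixes styles: optional2.get() is only safe because the value is known to be present. When the action should simply be skipped for an empty Optional, ifPresent says so directly, with no isPresent()/get() pair (a minimal sketch):

    Optional<String> maybe = Optional.empty();
    maybe.ifPresent(s -> System.out.println("value = " + s)); // prints nothing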
From source file:com.ethercamp.harmony.Application.java
/**
 * Does one of:
 *  - start Harmony peer;
 *  - perform action and exit on completion.
 */
public static void main(String[] args) throws Exception {
    final List<String> actions = asList("importBlocks");

    final Optional<String> foundAction = asList(args).stream()
            .filter(arg -> actions.contains(arg))
            .findFirst();

    if (foundAction.isPresent()) {
        // presence was already checked, so this ifPresent always runs its logging action;
        // the explicit isPresent() test is only needed because of the else branch below
        foundAction.ifPresent(action -> System.out.println("Performing action: " + action));
        Start.main(args);
        // system is expected to exit after action performed
    } else {
        if (!SystemProperties.getDefault().blocksLoader().equals("")) {
            SystemProperties.getDefault().setSyncEnabled(false);
            SystemProperties.getDefault().setDiscoveryEnabled(false);
        }

        ConfigurableApplicationContext context = SpringApplication.run(new Object[] { Application.class }, args);
        Ethereum ethereum = context.getBean(Ethereum.class);

        if (!SystemProperties.getDefault().blocksLoader().equals("")) {
            ethereum.getBlockLoader().loadBlocks();
        }
    }
}
From source file:com.ikanow.aleph2.analytics.spark.assets.SparkPassthroughTopology.java
public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {
    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        //INFO:
        System.out.println("Starting SparkPassthroughTopology");

        SparkConf spark_context = new SparkConf().setAppName("SparkPassthroughTopology");

        final Optional<Double> sub_sample = test_spec
                .map(__ -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_TEST, -1)))
                .orElseGet(() -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_NORMAL, -1)))
                .filter(d -> d > 0);

        //INFO:
        sub_sample.ifPresent(d -> System.out.println("OPTIONS: sub_sample = " + d));
        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc, Collections.emptySet());

            final Optional<JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> input = inputs.values().stream()
                    .reduce((acc1, acc2) -> acc1.union(acc2));

            long written = input.map(in -> in.values())
                    .map(rdd -> sub_sample.map(sample -> rdd.sample(true, sample)).orElse(rdd))
                    .map(rdd -> {
                        return rdd.map(t2 -> {
                            final Validation<BasicMessageBean, JsonNode> ret_val = context.emitObject(
                                    Optional.empty(), context.getJob().get(),
                                    Either.left(t2._2().getJson()), Optional.empty());
                            return ret_val; // (doesn't matter what I return, just want to count it up)
                        })
                        //DEBUG: (print the output JSON on success and the error message on fail)
                        //.map(val -> test_mode ? val.f().bind(f -> Validation.fail("FAIL: " + f.message())) : val)
                        .count();
                    }).orElse(-1L);

            jsc.stop();

            //INFO:
            System.out.println("Wrote: data_objects=" + written);
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
    }
}
From source file:com.ikanow.aleph2.analytics.spark.assets.SparkSqlTopology.java
public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {
    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        final SparkTopologyConfigBean config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();
        final String sql_string = Optional.ofNullable(config.script()).orElse("");

        //INFO:
        System.out.println("Starting SparkSqlTopology");

        SparkConf spark_context = new SparkConf().setAppName("SparkSqlTopology");

        final Optional<Double> sub_sample = test_spec
                .map(__ -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_TEST, -1)))
                .orElseGet(() -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_NORMAL, -1)))
                .filter(d -> d > 0);

        //INFO:
        sub_sample.ifPresent(d -> System.out.println("OPTIONS: sub_sample = " + d));
        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));
        System.out.println("OPTIONS: sql = " + sql_string);

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {
            SQLContext sql_context = new SQLContext(jsc);

            final Multimap<String, DataFrame> inputs = SparkTechnologyUtils.buildBatchSparkSqlInputs(context,
                    test_spec, sql_context, Collections.emptySet());

            //INFO
            System.out.println("Registered tables = " + inputs.keySet().toString());

            final DataFrame filtered_df = sql_context.sql(sql_string);
            final String[] columns = filtered_df.columns(); // (have to do this here because columns() depends on transient code)

            final long written = filtered_df.javaRDD().map(row -> {
                final ObjectNode j = _mapper.createObjectNode();
                //.put("message", row.toString()); (Don't think we want this now that we're using the columns)
                for (int ii = 0; ii < row.length(); ++ii) {
                    j.set(columns[ii], _mapper.convertValue(row.get(ii), JsonNode.class));
                }
                return context.emitObject(Optional.empty(), context.getJob().get(), Either.left(j),
                        Optional.empty());
            }).count();

            //INFO:
            System.out.println("Wrote: data_objects=" + written);
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
    }
}