List of usage examples for java.util.Optional.map
public <U> Optional<U> map(Function<? super T, ? extends U> mapper)
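For reference: map applies the mapper to the contained value only when one is present and re-wraps a non-null result; an empty receiver skips the mapper entirely, and a null mapper result collapses to Optional.empty() (via Optional.ofNullable) rather than throwing. A minimal, self-contained sketch of those three behaviors:

import java.util.Optional;

public class OptionalMapDemo {
    public static void main(String[] args) {
        // Present value: the mapper runs and the result is re-wrapped.
        System.out.println(Optional.of("hello").map(String::length)); // Optional[5]

        // Empty receiver: the mapper is never invoked.
        System.out.println(Optional.<String>empty().map(String::length)); // Optional.empty

        // Mapper returning null: collapses to empty, no NullPointerException.
        System.out.println(Optional.of("hello").map(s -> (String) null)); // Optional.empty
    }
}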
From source file:com.ikanow.aleph2.analytics.spark.assets.SparkPassthroughTopology.java
public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {
    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        //INFO:
        System.out.println("Starting SparkPassthroughTopology");

        SparkConf spark_context = new SparkConf().setAppName("SparkPassthroughTopology");

        final Optional<Double> sub_sample = test_spec
                .map(__ -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_TEST, -1)))
                .orElseGet(() -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_NORMAL, -1)))
                .filter(d -> d > 0);

        //INFO:
        sub_sample.ifPresent(d -> System.out.println("OPTIONS: sub_sample = " + d));
        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {
            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc, Collections.emptySet());

            final Optional<JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> input = inputs.values().stream()
                    .reduce((acc1, acc2) -> acc1.union(acc2));

            long written = input.map(in -> in.values())
                    .map(rdd -> sub_sample.map(sample -> rdd.sample(true, sample)).orElse(rdd))
                    .map(rdd -> {
                        return rdd.map(t2 -> {
                            final Validation<BasicMessageBean, JsonNode> ret_val = context.emitObject(
                                    Optional.empty(), context.getJob().get(),
                                    Either.left(t2._2().getJson()), Optional.empty());
                            return ret_val; // (doesn't matter what I return, just want to count it up)
                        })
                        //DEBUG: (print the output JSON on success and the error message on fail)
                        //.map(val -> test_mode ? val.f().bind(f -> Validation.fail("FAIL: " + f.message())) : val)
                        .count();
                    }).orElse(-1L);

            jsc.stop();

            //INFO:
            System.out.println("Wrote: data_objects=" + written);
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
    }
}
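Worth isolating from the example above is the sub-sample selection idiom: map the test spec to a test-mode lookup, fall back to the normal lookup via orElseGet, then filter out the sentinel value. A distilled sketch of just that pattern, using a plain Map and hypothetical key names in place of SparkConf:

import java.util.Map;
import java.util.Optional;

class SubSampleDemo {
    // Key names are hypothetical stand-ins for SUBSAMPLE_TEST / SUBSAMPLE_NORMAL.
    static Optional<Double> subSample(Optional<?> testSpec, Map<String, Double> conf) {
        return testSpec
                .map(__ -> Optional.ofNullable(conf.getOrDefault("subsample.test", -1.0)))
                .orElseGet(() -> Optional.ofNullable(conf.getOrDefault("subsample.normal", -1.0)))
                .filter(d -> d > 0); // -1 is the "not configured" sentinel, so drop it

    }

    public static void main(String[] args) {
        System.out.println(subSample(Optional.empty(), Map.of("subsample.normal", 0.5))); // Optional[0.5]
    }
}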
From source file:com.ikanow.aleph2.analytics.spark.assets.SparkSqlTopology.java
public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {
    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        final SparkTopologyConfigBean config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();
        final String sql_string = Optional.ofNullable(config.script()).orElse("");

        //INFO:
        System.out.println("Starting SparkSqlTopology");

        SparkConf spark_context = new SparkConf().setAppName("SparkSqlTopology");

        final Optional<Double> sub_sample = test_spec
                .map(__ -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_TEST, -1)))
                .orElseGet(() -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_NORMAL, -1)))
                .filter(d -> d > 0);

        //INFO:
        sub_sample.ifPresent(d -> System.out.println("OPTIONS: sub_sample = " + d));
        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));
        System.out.println("OPTIONS: sql = " + sql_string);

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {
            SQLContext sql_context = new SQLContext(jsc);

            final Multimap<String, DataFrame> inputs = SparkTechnologyUtils.buildBatchSparkSqlInputs(context,
                    test_spec, sql_context, Collections.emptySet());

            //INFO
            System.out.println("Registered tables = " + inputs.keySet().toString());

            final DataFrame filtered_df = sql_context.sql(sql_string);
            final String[] columns = filtered_df.columns(); // (have to do this here because columns() depends on transient code)

            final long written = filtered_df.javaRDD().map(row -> {
                final ObjectNode j = _mapper.createObjectNode();
                //.put("message", row.toString()); (Don't think we want this now that we're using the columns)
                for (int ii = 0; ii < row.length(); ++ii) {
                    j.set(columns[ii], _mapper.convertValue(row.get(ii), JsonNode.class));
                }
                return context.emitObject(Optional.empty(), context.getJob().get(), Either.left(j),
                        Optional.empty());
            }).count();

            //INFO:
            System.out.println("Wrote: data_objects=" + written);
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
    }
}
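New relative to the previous example is reaching into a nested bean with a default: context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()). A distilled sketch under the assumption of a minimal Job interface (a hypothetical stand-in for the Aleph2 job bean):

import java.util.Collections;
import java.util.Map;
import java.util.Optional;

class JobConfigDemo {
    // Hypothetical stand-in for the job bean; only the config accessor matters here.
    interface Job {
        Map<String, Object> config();
    }

    // map() dereferences the nested bean only when a job is present;
    // orElse() supplies an empty map so callers never have to null-check.
    static Map<String, Object> configOf(Optional<Job> job) {
        return job.map(Job::config).orElse(Collections.emptyMap());
    }

    public static void main(String[] args) {
        System.out.println(configOf(Optional.empty())); // {}
    }
}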
From source file:com.ikanow.aleph2.enrichment.utils.services.JsScriptEngineTestService.java
/** Entry point
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    if (args.length < 3) {
        System.out
                .println("ARGS: <script-file> <input-file> <output-prefix> [{[len: <LEN>], [group: <GROUP>]}]");
    }

    // STEP 1: load script file
    final String user_script = Files.toString(new File(args[0]), Charsets.UTF_8);

    // STEP 2: get a stream for the JSON file
    final InputStream io_stream = new FileInputStream(new File(args[1]));

    // STEP 3: set up control if applicable
    Optional<JsonNode> json = Optional.of("").filter(__ -> args.length > 3).map(__ -> args[3])
            .map(Lambdas.wrap_u(j -> _mapper.readTree(j)));

    // STEP 4: set up the various objects
    final DataBucketBean bucket = Mockito.mock(DataBucketBean.class);

    final JsScriptEngineService service_under_test = new JsScriptEngineService();

    final LinkedList<ObjectNode> emitted = new LinkedList<>();
    final LinkedList<JsonNode> grouped = new LinkedList<>();
    final LinkedList<JsonNode> externally_emitted = new LinkedList<>();

    final IEnrichmentModuleContext context = Mockito.mock(IEnrichmentModuleContext.class,
            new Answer<Void>() {
                @SuppressWarnings("unchecked")
                public Void answer(InvocationOnMock invocation) {
                    try {
                        Object[] args = invocation.getArguments();
                        if (invocation.getMethod().getName().equals("emitMutableObject")) {
                            final Optional<JsonNode> grouping = (Optional<JsonNode>) args[3];
                            if (grouping.isPresent()) {
                                grouped.add(grouping.get());
                            }
                            emitted.add((ObjectNode) args[1]);
                        } else if (invocation.getMethod().getName().equals("externalEmit")) {
                            final DataBucketBean to = (DataBucketBean) args[0];
                            final Either<JsonNode, Map<String, Object>> out = (Either<JsonNode, Map<String, Object>>) args[1];
                            externally_emitted
                                    .add(((ObjectNode) out.left().value()).put("__a2_bucket", to.full_name()));
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    return null;
                }
            });

    final EnrichmentControlMetadataBean control = BeanTemplateUtils.build(EnrichmentControlMetadataBean.class)
            .with(EnrichmentControlMetadataBean::config, new LinkedHashMap<String, Object>(
                    ImmutableMap.<String, Object>builder().put("script", user_script).build()))
            .done().get();

    service_under_test.onStageInitialize(context, bucket, control,
            Tuples._2T(ProcessingStage.batch, ProcessingStage.grouping), Optional.empty());

    final BeJsonParser json_parser = new BeJsonParser();

    // Run the file through
    final Stream<Tuple2<Long, IBatchRecord>> json_stream = StreamUtils
            .takeUntil(Stream.generate(() -> json_parser.getNextRecord(io_stream)), i -> null == i)
            .map(j -> Tuples._2T(0L, new BatchRecord(j)));

    service_under_test.onObjectBatch(json_stream, json.map(j -> j.get("len")).map(j -> (int) j.asLong(0L)),
            json.map(j -> j.get("group")));

    System.out.println("RESULTS: ");
    System.out.println("emitted: " + emitted.size());
    System.out.println("grouped: " + grouped.size());
    System.out.println("externally emitted: " + externally_emitted.size());

    Files.write(emitted.stream().map(j -> j.toString()).collect(Collectors.joining(";")),
            new File(args[2] + "emit.json"), Charsets.UTF_8);
    Files.write(grouped.stream().map(j -> j.toString()).collect(Collectors.joining(";")),
            new File(args[2] + "group.json"), Charsets.UTF_8);
    Files.write(externally_emitted.stream().map(j -> j.toString()).collect(Collectors.joining(";")),
            new File(args[2] + "external_emit.json"), Charsets.UTF_8);
}
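STEP 3 shows a compact idiom for an optional trailing CLI argument: start from a dummy present Optional, filter on the argument count, then map to the argument and parse it. A standalone version using Jackson directly (the checked-exception wrapping that Lambdas.wrap_u provides in the original is inlined here):

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Optional;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

class OptionalArgDemo {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Empty when fewer than 4 args were passed; otherwise args[3] parsed as JSON.
    static Optional<JsonNode> optionalJsonArg(String[] args) {
        return Optional.of("")
                .filter(__ -> args.length > 3)
                .map(__ -> args[3])
                .map(j -> {
                    try {
                        return MAPPER.readTree(j);
                    } catch (IOException e) {
                        throw new UncheckedIOException(e); // stands in for Lambdas.wrap_u
                    }
                });
    }

    public static void main(String[] args) {
        System.out.println(optionalJsonArg(new String[] { "a", "b", "c", "{\"len\": 10}" }));
        // Optional[{"len":10}]; with only 3 args the result would be Optional.empty
    }
}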
From source file:io.github.jhipster.web.util.ResponseUtil.java
/**
 * Wrap the optional into a {@link ResponseEntity} with an {@link HttpStatus#OK} status and the given headers,
 * or, if it is empty, return a {@link ResponseEntity} with {@link HttpStatus#NOT_FOUND}.
 *
 * @param <X> type of the response
 * @param maybeResponse response to return if present
 * @param header headers to be added to the response
 * @return response containing {@code maybeResponse} if present, or {@link HttpStatus#NOT_FOUND} otherwise
 */
public static <X> ResponseEntity<X> wrapOrNotFound(Optional<X> maybeResponse, HttpHeaders header) {
    return maybeResponse.map(response -> ResponseEntity.ok().headers(header).body(response))
            .orElse(new ResponseEntity<>(HttpStatus.NOT_FOUND));
}
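A typical call site, sketched under assumptions: UserDTO, UserService, and the header content are hypothetical stand-ins. The point is that an Optional-returning service layer maps straight onto a 200-or-404 response:

import org.springframework.http.HttpHeaders;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;

// Hypothetical Spring MVC endpoint; UserDTO and UserService are illustrative.
@RestController
public class UserResource {

    private final UserService userService; // assumed to return Optional<UserDTO>

    public UserResource(UserService userService) {
        this.userService = userService;
    }

    @GetMapping("/api/users/{id}")
    public ResponseEntity<UserDTO> getUser(@PathVariable Long id) {
        HttpHeaders headers = new HttpHeaders();
        headers.add("X-Demo-Alert", "user found"); // header name/value are illustrative
        // Present -> 200 with body and headers; empty -> 404.
        return ResponseUtil.wrapOrNotFound(userService.findOne(id), headers);
    }
}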
From source file:info.archinnov.achilles.internals.strategy.naming.InternalNamingStrategy.java
static InternalNamingStrategy inferNamingStrategy(Optional<Strategy> strategy,
        InternalNamingStrategy defaultStrategy) {
    return strategy.map(x -> getNamingStrategy(x.naming())).orElse(defaultStrategy);
}
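A small design point: orElse(defaultStrategy) evaluates its argument eagerly, which is fine here because the default is an already-constructed instance. If the fallback were expensive to compute, orElseGet(() -> ...) would be the better fit, as in the testSignature example below.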
From source file:com.github.horrorho.inflatabledonkey.file.FileStreamWriter.java
static InputStream decryptStream(InputStream in, Optional<XFileKey> keyCipher) {
    return keyCipher.map(kc -> decryptStream(in, kc)).orElse(in);
}
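Note the pass-through shape: map wraps the stream in a decrypting layer only when a key/cipher pair is present, and orElse(in) hands back the original stream untouched otherwise, so callers read from the result either way.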
From source file:com.github.horrorho.inflatabledonkey.file.FileStreamWriter.java
static boolean testSignature(Digest digest, Optional<byte[]> signature) {
    return signature.map(c -> testSignature(digest, c)).orElseGet(() -> {
        byte[] out = signature(digest);
        logger.debug("-- testSignature() - signature: 0x{}", Hex.toHexString(out));
        return true;
    });
}
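Here orElseGet (rather than orElse) is the right choice: the fallback computes and logs a digest, and that work should only happen when no expected signature was supplied.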
From source file:io.github.carlomicieli.footballdb.starter.parsers.PlayerProfileParser.java
protected static String extractCollege(Optional<String> str) {
    return str.map(val -> {
        Matcher matcher = patternMatchString(collegePattern(), val);
        if (matcher.find()) {
            return matcher.group(1);
        }
        return null;
    }).orElse(null);
}
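Returning null from inside the mapper is deliberate and safe: map wraps the mapper's result with Optional.ofNullable, so a non-matching pattern collapses to an empty Optional, which orElse(null) then turns back into a plain null for the caller. The same idiom drives the next two extractors (extractBirth swaps in an empty map as the default).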
From source file:io.github.carlomicieli.footballdb.starter.parsers.PlayerProfileParser.java
protected static String extractExp(Optional<String> str) {
    return str.map(val -> {
        Matcher matcher = patternMatchString(expPattern(), val);
        if (matcher.find()) {
            return matcher.group(1);
        }
        return null;
    }).orElse(null);
}
From source file:io.github.carlomicieli.footballdb.starter.parsers.PlayerProfileParser.java
protected static Map<String, String> extractBirth(Optional<String> str) {
    return str.map(val -> {
        Matcher matcher = patternMatchString(birthPattern(), val);
        Map<String, String> v = newMap();
        if (matcher.find()) {
            v.put("birth_date", matcher.group(1));
            v.put("city", matcher.group(2));
            v.put("state", matcher.group(3));
        }
        return unmodifiableMap(v);
    }).orElse(Collections.emptyMap());
}
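All three extractors share one idiom: map over the optional raw string, run a regex, and return null (or a neutral default) on no-match. A self-contained distillation, with a hypothetical pattern and default value:

import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class ExtractDemo {
    // The mapper may return null on no-match; map() converts that to empty,
    // and orElse() supplies the caller-facing default.
    static String extractFirstGroup(Optional<String> raw, Pattern pattern) {
        return raw.map(val -> {
            Matcher matcher = pattern.matcher(val);
            return matcher.find() ? matcher.group(1) : null;
        }).orElse("unknown"); // default value is illustrative

    }

    public static void main(String[] args) {
        Pattern college = Pattern.compile("College: (\\w+)"); // hypothetical pattern
        System.out.println(extractFirstGroup(Optional.of("College: Stanford"), college)); // Stanford
        System.out.println(extractFirstGroup(Optional.empty(), college)); // unknown
    }
}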