List of usage examples for java.util Spliterator ORDERED
public static final int ORDERED
Characteristic value signifying that an encounter order is defined for the elements.
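Before the per-project examples, here is a minimal self-contained sketch of the pattern nearly all of them share: wrapping an Iterator in an ordered Spliterator and exposing it as a Stream. The class and variable names are illustrative, not taken from any of the sources below.

import java.util.Iterator;
import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

public class OrderedSpliteratorDemo {
    public static void main(String[] args) {
        Iterator<String> it = List.of("a", "b", "c").iterator();
        // ORDERED declares that the source has a defined encounter order, so
        // order-sensitive operations (findFirst, limit, forEachOrdered) behave predictably.
        Stream<String> stream = StreamSupport.stream(
                Spliterators.spliteratorUnknownSize(it, Spliterator.ORDERED), false);
        stream.forEachOrdered(System.out::println); // prints a, b, c in order
    }
}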
From source file:com.ikanow.aleph2.analytics.storm.services.StreamingEnrichmentContextService.java
@Override
public Validation<BasicMessageBean, JsonNode> emitImmutableObject(final long id,
        final JsonNode original_json, final Optional<ObjectNode> mutations,
        final Optional<AnnotationBean> annotations, final Optional<JsonNode> grouping_fields) {
    if (annotations.isPresent()) {
        throw new RuntimeException(ErrorUtils.NOT_YET_IMPLEMENTED);
    }
    if (grouping_fields.isPresent()) {
        throw new RuntimeException(ErrorUtils.NOT_YET_IMPLEMENTED);
    }
    final JsonNode to_emit = mutations.map(o -> StreamSupport
            .<Map.Entry<String, JsonNode>>stream(
                    Spliterators.spliteratorUnknownSize(o.fields(), Spliterator.ORDERED), false)
            .reduce(original_json,
                    (acc, kv) -> ((ObjectNode) acc).set(kv.getKey(), kv.getValue()),
                    (val1, val2) -> val2))
            .orElse(original_json);
    emitMutableObject(0L, (ObjectNode) to_emit, annotations, Optional.empty());
    return Validation.success(to_emit);
}
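The heart of the example above is the mutation merge: streaming an ObjectNode's fields in encounter order and folding them onto a base object. A standalone sketch of just that step (OrderedFieldMerge, base, and overrides are illustrative names, not from the source):

import java.util.Map;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.StreamSupport;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class OrderedFieldMerge {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode base = (ObjectNode) mapper.readTree("{\"a\":1,\"b\":2}");
        ObjectNode overrides = (ObjectNode) mapper.readTree("{\"b\":3,\"c\":4}");

        // ORDERED preserves the field iterator's order in the stream, so later
        // fields overwrite earlier ones deterministically during the fold.
        JsonNode merged = StreamSupport.<Map.Entry<String, JsonNode>>stream(
                Spliterators.spliteratorUnknownSize(overrides.fields(), Spliterator.ORDERED), false)
            .reduce((JsonNode) base,
                    (acc, kv) -> ((ObjectNode) acc).set(kv.getKey(), kv.getValue()),
                    (v1, v2) -> v2);

        System.out.println(merged); // {"a":1,"b":3,"c":4}
    }
}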
From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchIndexUtils.java
/** Get a set of field mappings from the "properties" section of a mapping * @param index//ww w. ja v a 2 s . co m * @return */ protected static LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> getProperties( final JsonNode index) { return Optional.ofNullable(index.get("properties")).filter(p -> !p.isNull()).map(p -> { if (!p.isObject()) throw new RuntimeException("properties must be object"); return p; }).map(p -> { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(p.fields(), Spliterator.ORDERED), false) .map(kv -> { if (!kv.getValue().has("type") && !kv.getValue().has("properties")) throw new RuntimeException(SearchIndexErrorUtils .get("field {0} must have a 'type' or 'properties' sub-field", kv.getKey())); return kv; }).collect( Collectors.<Map.Entry<String, JsonNode>, Either<String, Tuple2<String, String>>, JsonNode, LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>>toMap( kv -> Either.<String, Tuple2<String, String>>left(kv.getKey()), kv -> kv.getValue(), (v1, v2) -> v1, // (should never happen) () -> new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>())); }).orElse(new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>()); }
From source file:ubicrypt.core.Utils.java
public static <T> Stream<T> toStream(final Iterator<T> iterator) {
    return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false);
}
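A possible call site for this one-line adapter (the data and surrounding code are hypothetical; only Utils.toStream comes from the source above):

// Hypothetical usage, assuming ubicrypt.core.Utils is on the classpath:
Iterator<String> lines = List.of("alpha", "", "beta").iterator();
long nonEmpty = Utils.toStream(lines)
        .filter(s -> !s.isEmpty())
        .count(); // 2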
From source file:com.simiacryptus.util.Util.java
/**
 * To iterator stream.
 *
 * @param <T>      the type parameter
 * @param iterator the iterator
 * @return the stream
 */
public static <T> Stream<T> toIterator(@javax.annotation.Nonnull final Iterator<T> iterator) {
    // Note: the fixed size estimate of 1 makes the spliterator report SIZED/SUBSIZED
    // with a size of 1, which is inaccurate whenever the iterator holds more elements.
    return StreamSupport.stream(Spliterators.spliterator(iterator, 1, Spliterator.ORDERED), false);
}
From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchIndexUtils.java
/** Get a set of field mappings from the "dynamic_templates" section of a mapping * @param index/*from ww w .j ava 2 s .c o m*/ * @return */ protected static LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> getTemplates( final JsonNode index, final JsonNode default_string_mapping, final Set<Either<String, Tuple2<String, String>>> already_processed) { return Optional.ofNullable(index.get("dynamic_templates")).filter(p -> !p.isNull()).map(p -> { if (!p.isArray()) throw new RuntimeException("dynamic_templates must be object"); return p; }).map(p -> { return StreamSupport .stream(Spliterators.spliteratorUnknownSize(p.elements(), Spliterator.ORDERED), false) .map(pf -> { if (!pf.isObject()) throw new RuntimeException("dynamic_templates[*] must be object"); return pf; }) .flatMap(pp -> StreamSupport .stream(Spliterators.spliteratorUnknownSize(pp.fields(), Spliterator.ORDERED), false)) .filter(kv -> !kv.getKey().equals(STRING_OVERRIDE_NAME) || !already_processed.contains(Either.right(Tuples._2T("*", "string")))) // (don't override a specified string) .map(kv -> !kv.getKey().equals(STRING_OVERRIDE_NAME) ? kv : Maps.immutableEntry(kv.getKey(), default_string_mapping)) //(special case - overwrite with system default) .collect( Collectors.<Map.Entry<String, JsonNode>, Either<String, Tuple2<String, String>>, JsonNode, LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>>toMap( kv -> Either.right(buildMatchPair(kv.getValue())), kv -> kv.getValue(), (v1, v2) -> v1, // (should never happen) () -> new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>())); }).orElse(new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>()); }
From source file:com.simiacryptus.util.Util.java
/**
 * To stream stream.
 *
 * @param <T>      the type parameter
 * @param iterator the iterator
 * @param size     the size
 * @param parallel the parallel
 * @return the stream
 */
public static <T> Stream<T> toStream(@javax.annotation.Nonnull final Iterator<T> iterator, final int size,
        final boolean parallel) {
    return StreamSupport.stream(Spliterators.spliterator(iterator, size, Spliterator.ORDERED), parallel);
}
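Passing the real element count matters here: Spliterators.spliterator(Iterator, long, int) additionally reports SIZED and SUBSIZED, which lets the framework split work evenly when parallel is true. A hypothetical call site (everything other than Util.toStream is illustrative):

List<Integer> data = List.of(1, 2, 3, 4);
// Known size + parallel=true: the spliterator reports SIZED/SUBSIZED and can split.
int sum = Util.toStream(data.iterator(), data.size(), true)
        .mapToInt(Integer::intValue)
        .sum(); // 10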
From source file:enumj.Enumerator.java
/**
 * Returns a sequential {@code Spliterator} iterating over the current
 * enumerator.
 *
 * @return the new {@link Spliterator}.
 * @see #asStream()
 */
public default Spliterator<E> asSpliterator() {
    Checks.ensureNonEnumerating(this);
    return Spliterators.spliteratorUnknownSize(this, Spliterator.ORDERED);
}
From source file:com.simiacryptus.mindseye.lang.Tensor.java
/**
 * Coord stream stream.
 *
 * @param parallel whether the returned stream may be parallel
 * @return the stream
 */
@Nonnull
public Stream<Coordinate> coordStream(boolean parallel) {
    //ConcurrentHashSet<Object> distinctBuffer = new ConcurrentHashSet<>();
    //assert distinctBuffer.add(coordinate.copy()) : String.format("Duplicate: %s in %s", coordinate, distinctBuffer);
    return StreamSupport.stream(Spliterators.spliterator(new Iterator<Coordinate>() {
        int cnt = 0;
        @Nonnull
        Coordinate coordinate = new Coordinate();
        @Nonnull
        int[] val = new int[dimensions.length];
        @Nonnull
        int[] safeCopy = new int[dimensions.length];

        @Override
        public boolean hasNext() {
            return cnt < length();
        }

        @Nonnull
        @Override
        public synchronized Coordinate next() {
            if (0 < cnt) {
                // Increment the multi-dimensional index with carry (odometer-style).
                for (int i = 0; i < val.length; i++) {
                    if (++val[i] >= dimensions[i]) {
                        val[i] = 0;
                    } else {
                        break;
                    }
                }
            }
            System.arraycopy(val, 0, safeCopy, 0, val.length);
            coordinate.setIndex(cnt++);
            coordinate.setCoords(safeCopy);
            // In parallel mode, hand out defensive copies because the backing
            // Coordinate instance is reused between calls.
            return parallel ? coordinate.copy() : coordinate;
        }
    }, length(), Spliterator.ORDERED), parallel);
}
From source file:com.intuit.wasabi.repository.cassandra.impl.CassandraAssignmentsRepository.java
@Override
@Timed
public Assignment getAssignment(User.ID userID, Application.Name appName, Experiment.ID experimentID,
        Context context) {
    ListenableFuture<Result<ExperimentUserByUserIdContextAppNameExperimentId>> resultFuture =
            experimentUserIndexAccessor.asyncSelectBy(userID.toString(), appName.toString(),
                    experimentID.getRawID(), context.toString());
    Result<ExperimentUserByUserIdContextAppNameExperimentId> assignmentResult =
            UninterruptibleUtil.getUninterruptibly(resultFuture);
    Stream<ExperimentUserByUserIdContextAppNameExperimentId> assignmentResultStream = StreamSupport.stream(
            Spliterators.spliteratorUnknownSize(assignmentResult.iterator(), Spliterator.ORDERED), false);
    final Stream<Assignment.Builder> assignmentBuilderStream = assignmentResultStream.map(t -> {
        Assignment.Builder builder = Assignment.newInstance(Experiment.ID.valueOf(t.getExperimentId()))
                .withUserID(User.ID.valueOf(t.getUserId()))
                .withContext(Context.valueOf(t.getContext()));
        if (nonNull(t.getBucket()) && !t.getBucket().trim().isEmpty()) {
            builder.withBucketLabel(Bucket.Label.valueOf(t.getBucket()));
        }
        return builder;
    });
    Optional<Assignment> assignmentOptional = getAssignmentFromStream(experimentID, userID, context,
            assignmentBuilderStream);
    return assignmentOptional.orElse(null);
}
From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_Buckets.java
/** Builds a V2 bucket out of a V1 source
 * @param src_json
 * @return
 * @throws JsonParseException
 * @throws JsonMappingException
 * @throws IOException
 * @throws ParseException
 */
protected static DataBucketBean getBucketFromV1Source(final JsonNode src_json)
        throws JsonParseException, JsonMappingException, IOException, ParseException {
    // (think we'll use key instead of _id):
    //final String _id = safeJsonGet(JsonUtils._ID, src_json).asText();
    final String key = safeJsonGet("key", src_json).asText();
    final String created = safeJsonGet("created", src_json).asText();
    final String modified = safeJsonGet("modified", src_json).asText();
    final String title = safeJsonGet("title", src_json).asText();
    final String description = safeJsonGet("description", src_json).asText();
    final String owner_id = safeJsonGet("ownerId", src_json).asText();
    final JsonNode tags = safeJsonGet("tags", src_json); // collection of strings
    //final JsonNode comm_ids = safeJsonGet("communityIds", src_json); // collection of strings
    // collection of JSON objects, first one should have data_bucket:
    final JsonNode px_pipeline = safeJsonGet("processingPipeline", src_json);
    final JsonNode px_pipeline_first_el = ((ObjectNode) px_pipeline.get(0))
            .without(Arrays.asList("test_params"));
    final JsonNode data_bucket_tmp = safeJsonGet("data_bucket", px_pipeline_first_el); // (WARNING: mutable, see below)
    final JsonNode scripting = safeJsonGet("scripting", data_bucket_tmp);

    // HANDLE SUBSTITUTION
    final String sub_prefix = Optional.ofNullable(scripting.get("sub_prefix")).map(x -> x.asText())
            .orElse("$$SCRIPT_");
    final String sub_suffix = Optional.ofNullable(scripting.get("sub_suffix")).map(x -> x.asText())
            .orElse("$$");
    final List<UnaryOperator<String>> search_replace = StreamSupport
            .stream(Spliterators.spliteratorUnknownSize(scripting.fieldNames(), Spliterator.ORDERED), false)
            .filter(f -> !f.equals("sub_prefix") && !f.equals("sub_suffix")) // (remove non language fields)
            .map(lang -> Tuples._2T(scripting.get(lang), lang))
            // Get (separator regex, entire script, sub prefix)
            .map(scriptobj_lang -> Tuples._3T(safeJsonGet("separator_regex", scriptobj_lang._1()).asText(),
                    safeJsonGet("script", scriptobj_lang._1()).asText(), sub_prefix + scriptobj_lang._2()))
            // Split each "entire script" up into blocks of format (block, lang)
            .<Stream<Tuple2<String, String>>>map(regex_script_lang -> Stream.concat(
                    Stream.of(Tuples._2T(regex_script_lang._2(), regex_script_lang._3())),
                    regex_script_lang._1().isEmpty()
                            ? Stream.of(Tuples._2T(regex_script_lang._2(), regex_script_lang._3()))
                            : Arrays.stream(regex_script_lang._2().split(regex_script_lang._1()))
                                    .<Tuple2<String, String>>map(s -> Tuples._2T(s, regex_script_lang._3()))))
            // Associate a per-lang index with each script block -> (replacement, string_sub)
            .<Tuple2<String, String>>flatMap(stream -> StreamUtils.zip(stream,
                    Stream.iterate(0, i -> i + 1),
                    (script_lang, i) -> Tuples._2T(
                            // (need to escape "s and newlines)
                            script_lang._1().replace("\"", "\\\"").replace("\n", "\\n").replace("\r", "\\r"),
                            i == 0 ? script_lang._2() + sub_suffix // (entire thing)
                                    : script_lang._2() + "_" + i + sub_suffix))) // (broken down components)
            .<UnaryOperator<String>>map(t2 -> (String s) -> s.replace(t2._2(), t2._1()))
            .collect(Collectors.toList());

    // Apply the list of transforms to the string
    ((ObjectNode) data_bucket_tmp).remove("scripting"); // (WARNING: mutable)
    final String data_bucket_str = search_replace.stream().reduce(data_bucket_tmp.toString(),
            (acc, s) -> s.apply(acc), (acc1, acc2) -> acc1);

    // Convert back to the bucket JSON
    final JsonNode data_bucket = ((ObjectNode) _mapper.readTree(data_bucket_str))
            .without(Arrays.asList("test_params"));

    final DataBucketBean bucket = BeanTemplateUtils.build(data_bucket, DataBucketBean.class)
            .with(DataBucketBean::_id, getBucketIdFromV1SourceKey(key))
            .with(DataBucketBean::created, parseJavaDate(created))
            .with(DataBucketBean::modified, parseJavaDate(modified))
            .with(DataBucketBean::display_name, title)
            .with(DataBucketBean::description, description)
            .with(DataBucketBean::owner_id, owner_id)
            .with(DataBucketBean::tags, StreamSupport.stream(tags.spliterator(), false)
                    .map(jt -> jt.asText()).collect(Collectors.toSet()))
            .done().get();
    return bucket;
}
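Stripped of the Aleph2 specifics, the scripting-substitution step above is: stream an object's field names in encounter order, derive a string transform per field, and fold the transforms over a template. A compact sketch with illustrative names (the $$SCRIPT_...$$ token format follows the defaults in the source):

import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class OrderedSubstitution {
    public static void main(String[] args) throws Exception {
        JsonNode scripts = new ObjectMapper().readTree("{\"js\":\"print(1)\",\"py\":\"print(2)\"}");
        // Build one substitution operator per language field, in field order.
        List<UnaryOperator<String>> substitutions = StreamSupport
                .stream(Spliterators.spliteratorUnknownSize(scripts.fieldNames(), Spliterator.ORDERED), false)
                .<UnaryOperator<String>>map(lang ->
                        s -> s.replace("$$SCRIPT_" + lang + "$$", scripts.get(lang).asText()))
                .collect(Collectors.toList());
        // Fold the operators over a template string.
        String template = "{\"run\":\"$$SCRIPT_js$$\"}";
        String result = substitutions.stream().reduce(template, (acc, f) -> f.apply(acc), (a, b) -> a);
        System.out.println(result); // {"run":"print(1)"}
    }
}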