List of usage examples for java.util.Collection.stream()
default Stream<E> stream()
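Returns a sequential Stream with this collection as its source. Before the real-world examples below, here is a minimal, self-contained sketch (hypothetical data, not taken from any of the source files that follow) of the typical filter/map/collect pipeline:

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;

public class StreamDemo {
    public static void main(String[] args) {
        Collection<String> names = Arrays.asList("ada", "grace", "alan", "edsger");
        // stream() yields a sequential Stream<String> over the collection's elements
        List<String> upper = names.stream()
                .filter(n -> n.startsWith("a"))   // keep names starting with "a"
                .map(String::toUpperCase)         // transform each element
                .collect(Collectors.toList());    // collect back into a List
        System.out.println(upper);                // prints [ADA, ALAN]
    }
}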
From source file:de.bund.bfr.knime.gis.views.canvas.CanvasUtils.java
public static List<HighlightCondition> createCategorialHighlighting(Collection<? extends Element> elements,
        String property) {
    Set<Object> categories = elements.stream().map(e -> e.getProperties().get(property))
            .filter(Objects::nonNull).collect(Collectors.toCollection(LinkedHashSet::new));
    List<HighlightCondition> conditions = new ArrayList<>();
    int index = 0;

    for (Object category : KnimeUtils.ORDERING.sortedCopy(categories)) {
        Color color = COLORS[index++ % COLORS.length];
        LogicalHighlightCondition condition = new LogicalHighlightCondition(property,
                LogicalHighlightCondition.Type.EQUAL, category.toString());

        conditions.add(new AndOrHighlightCondition(condition, property + " = " + category, true, color, false,
                false, null, null));
    }

    return conditions;
}
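Note: collecting with Collectors.toCollection(LinkedHashSet::new) de-duplicates the property values while preserving encounter order; the categories are then sorted before colors are assigned.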
From source file:com.samsung.sjs.theorysolver.TheorySolver.java
public static <Constraint, Model> void enumerateFixingSets(FixingSetFinder<Constraint> fixingSetFinder,
        Theory<Constraint, Model> theorySolver, Collection<Constraint> hardConstraints,
        Collection<Constraint> softConstraints, FixingSetListener<Constraint, Model> listener) {
    Collection<Constraint> constraints = new ArrayList<>();
    Collection<Constraint> core = new ArrayList<>();
    Collection<Constraint> fixingSet = new LinkedHashSet<>();

    for (;;) {
        if (fixingSetFinder.currentFixingSet(fixingSet, listener) == FixingSetListener.Action.STOP) {
            return;
        }

        constraints.addAll(hardConstraints);
        softConstraints.stream().filter(c -> !fixingSet.contains(c)).forEach(constraints::add);

        Either<Model, Collection<Constraint>> result = theorySolver.check(constraints);

        if (result.left != null) {
            if (listener.onFixingSet(result.left, fixingSet) == FixingSetListener.Action.STOP) {
                return;
            }
            fixingSetFinder.addCore(
                    constraints.stream().filter(softConstraints::contains).collect(Collectors.toList()));
        } else {
            result.right.stream().filter(softConstraints::contains).forEach(core::add);
            if (listener.onCore(core) == FixingSetListener.Action.STOP) {
                return;
            }
            assert core.stream().allMatch(c -> !fixingSet.contains(c));
            fixingSetFinder.addCore(core);
        }

        core.clear();
        constraints.clear();
        fixingSet.clear();
    }
}
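Note: the filter(...).forEach(...) pattern copies only the soft constraints not already in the current fixing set, and the later filter(softConstraints::contains) narrows an unsatisfiable core back down to soft constraints before reporting it.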
From source file:com.evolveum.midpoint.prism.PrismReferenceValue.java
@NotNull
public static List<Referencable> asReferencables(@NotNull Collection<PrismReferenceValue> values) {
    return values.stream().map(prv -> prv.asReferencable()).collect(Collectors.toList());
}
From source file:com.evolveum.midpoint.prism.PrismReferenceValue.java
@NotNull
public static List<PrismReferenceValue> asReferenceValues(
        @NotNull Collection<? extends Referencable> referencables) {
    return referencables.stream().map(ref -> ref.asReferenceValue()).collect(Collectors.toList());
}
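Note: the lambdas in these two helpers could equivalently be written as method references (PrismReferenceValue::asReferencable and Referencable::asReferenceValue).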
From source file:com.evolveum.midpoint.prism.PrismReferenceValue.java
public static boolean containsOid(Collection<PrismReferenceValue> values, @NotNull String oid) {
    return values.stream().anyMatch(v -> oid.equals(v.getOid()));
}
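Note: anyMatch is short-circuiting, so iteration stops as soon as a value with the given OID is found.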
From source file:com.ikanow.aleph2.analytics.spark.utils.SparkTechnologyUtils.java
/** Validate the job
 * @param new_analytic_bucket
 * @param jobs
 * @return
 */
public static BasicMessageBean validateJobs(final DataBucketBean new_analytic_bucket,
        final Collection<AnalyticThreadJobBean> jobs) {
    //TODO (ALEPH-63): validate batch enrichment

    final LinkedList<String> mutable_errs = new LinkedList<>();

    jobs.stream().forEach(job -> {
        if (null == job.config()) {
            mutable_errs.push(ErrorUtils.get(SparkErrorUtils.MISSING_PARAM, new_analytic_bucket.full_name(),
                    job.name(), "config"));
        } else {
            final SparkTopologyConfigBean config = BeanTemplateUtils
                    .from(job.config(), SparkTopologyConfigBean.class).get();

            if (SparkType.jvm == Optional.ofNullable(config.language()).orElse(SparkType.jvm)) {
                // JVM validation
                if (null == config.entry_point()) {
                    mutable_errs.push(ErrorUtils.get(SparkErrorUtils.MISSING_PARAM,
                            new_analytic_bucket.full_name(), job.name(), "config.entry_point"));
                }
            } else if (SparkType.python == Optional.ofNullable(config.language()).orElse(SparkType.jvm)) {
                // Python validation
                if ((null == config.entry_point()) && (null == config.script())) {
                    mutable_errs.push(ErrorUtils.get(SparkErrorUtils.MISSING_PARAM,
                            new_analytic_bucket.full_name(), job.name(), "config.entry_point|config.script"));
                }
            }
        }
    });

    return ErrorUtils.buildMessage(mutable_errs.isEmpty(), SparkTechnologyUtils.class, "validateJobs",
            mutable_errs.stream().collect(Collectors.joining(";")));
}
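Note: jobs.stream().forEach(...) is used purely for its side effect of accumulating error strings; Collection.forEach(...) would behave identically here.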
From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_Buckets.java
/** Takes a collection of results from the management side-channel, and uses it to update a harvest node
 * @param key - source key / bucket id
 * @param status_messages
 * @param source_db
 */
protected static CompletableFuture<Boolean> updateV1SourceStatus(final Date main_date, final String key,
        final Collection<BasicMessageBean> status_messages, final boolean set_approved_state,
        final ICrudService<JsonNode> source_db) {
    final String message_block = status_messages.stream().map(msg -> {
        return "[" + msg.date() + "] " + msg.source() + " (" + msg.command() + "): "
                + (msg.success() ? "INFO" : "ERROR") + ": " + msg.message();
    }).collect(Collectors.joining("\n"));

    final boolean any_errors = status_messages.stream().anyMatch(msg -> !msg.success());

    @SuppressWarnings("deprecation")
    final CommonUpdateComponent<JsonNode> update_1 = CrudUtils.update()
            .set("harvest.harvest_status", (any_errors ? "error" : "success"))
            .set("harvest.harvest_message", "[" + main_date.toGMTString() + "] Bucket synchronization:\n"
                    + (message_block.isEmpty() ? "(no messages)" : message_block));

    final UpdateComponent<JsonNode> update = set_approved_state ? update_1.set("isApproved", !any_errors)
            : update_1;

    final SingleQueryComponent<JsonNode> v1_query = CrudUtils.allOf().when("key", key);

    final CompletableFuture<Boolean> update_res = source_db.updateObjectBySpec(v1_query, Optional.empty(),
            update);

    return update_res;
}
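Note: Collectors.joining("\n") concatenates the formatted status messages with newline separators, and the second pass with anyMatch checks whether any message reported a failure.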
From source file:de.bund.bfr.knime.gis.views.canvas.CanvasUtils.java
public static <V extends Node> void applyNodeHighlights(RenderContext<V, Edge<V>> renderContext,
        Collection<V> nodes, HighlightConditionList nodeHighlightConditions, int nodeSize, Integer nodeMaxSize,
        String metaNodeProperty) {
    HighlightResult<V> result = getResult(nodes, nodeHighlightConditions);
    Set<V> metaNodes = nodes.stream()
            .filter(n -> Boolean.TRUE.equals(n.getProperties().get(metaNodeProperty)))
            .collect(Collectors.toCollection(LinkedHashSet::new));

    renderContext.setVertexShapeTransformer(
            JungUtils.newNodeShapeTransformer(nodeSize, nodeMaxSize, result.thicknessValues, result.shapes));
    renderContext.setVertexFillPaintTransformer(JungUtils.newNodeFillTransformer(renderContext, result.colors));
    renderContext.setVertexLabelTransformer(node -> result.labels.get(node));
    renderContext.setVertexStrokeTransformer(JungUtils.newNodeStrokeTransformer(renderContext, metaNodes));
}
From source file:com.ikanow.aleph2.analytics.spark.services.EnrichmentPipelineService.java
/** Selects a set of enrichment/transform pipeline elements from the current job, and creates a service out of them
 * @param aleph2_context
 * @param names
 * @return
 */
public static EnrichmentPipelineService select(final IAnalyticsContext aleph2_context, boolean emit_when_done,
        final Collection<String> names) {
    return new EnrichmentPipelineService(aleph2_context, emit_when_done, names.stream().toArray(String[]::new));
}
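Note: names.stream().toArray(String[]::new) converts the Collection<String> to a String[]; names.toArray(new String[0]) is an equivalent non-stream alternative.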
From source file:org.eclipse.winery.repository.Utils.java
public static boolean containsNodeTypes(TServiceTemplate serviceTemplate, Collection<QName> nodeTypes) {
    return nodeTypes.stream().allMatch(nodeType -> Utils.containsNodeType(serviceTemplate, nodeType));
}
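Note: allMatch returns true for an empty stream, so an empty nodeTypes collection is (vacuously) reported as contained.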