List of usage examples for com.google.common.collect.Sets.cartesianProduct
public static <B> Set<List<B>> cartesianProduct(Set<? extends B>... sets)
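A minimal self-contained sketch of what the method computes (the class name and example sets here are illustrative, not taken from the source files below): given n input sets, cartesianProduct returns every ordered n-element list formed by picking one element from each set, in order. The returned set is an unmodifiable, lazily computed view.

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.List;
import java.util.Set;

public class CartesianProductDemo {
    public static void main(String[] args) {
        Set<String> colors = ImmutableSet.of("red", "green");
        Set<String> sizes = ImmutableSet.of("S", "M", "L");

        // 2 x 3 = 6 combinations:
        // [red, S], [red, M], [red, L], [green, S], [green, M], [green, L]
        Set<List<String>> product = Sets.cartesianProduct(colors, sizes);
        for (List<String> combination : product) {
            System.out.println(combination);
        }
    }
}

An overload taking a List<? extends Set<? extends B>> also exists; most of the examples below build such a list dynamically and pass it to that overload.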
From source file:com.facebook.presto.cassandra.CassandraSplitManager.java
private static Set<List<Object>> getPartitionKeysSet(CassandraTable table, TupleDomain<ColumnHandle> tupleDomain) {
    ImmutableList.Builder<Set<Object>> partitionColumnValues = ImmutableList.builder();
    for (CassandraColumnHandle columnHandle : table.getPartitionKeyColumns()) {
        Domain domain = tupleDomain.getDomains().get().get(columnHandle);

        // if there is no constraint on a partition key, return an empty set
        if (domain == null) {
            return ImmutableSet.of();
        }

        // todo does cassandra allow null partition keys?
        if (domain.isNullAllowed()) {
            return ImmutableSet.of();
        }

        Set<Object> values = domain.getValues().getValuesProcessor().transform(ranges -> {
            ImmutableSet.Builder<Object> columnValues = ImmutableSet.builder();
            for (Range range : ranges.getOrderedRanges()) {
                // if the range is not a single value, we can not perform partition pruning
                if (!range.isSingleValue()) {
                    return ImmutableSet.of();
                }
                Object value = range.getSingleValue();
                CassandraType valueType = columnHandle.getCassandraType();
                columnValues.add(valueType.validatePartitionKey(value));
            }
            return columnValues.build();
        }, discreteValues -> {
            if (discreteValues.isWhiteList()) {
                return ImmutableSet.copyOf(discreteValues.getValues());
            }
            return ImmutableSet.of();
        }, allOrNone -> ImmutableSet.of());
        partitionColumnValues.add(values);
    }
    return Sets.cartesianProduct(partitionColumnValues.build());
}
From source file:org.sosy_lab.cpachecker.util.predicates.RCNFManager.java
private BooleanFormula expandClause(final BooleanFormula input) {
    return bfmgr.visit(input, new DefaultBooleanFormulaVisitor<BooleanFormula>() {
        @Override
        protected BooleanFormula visitDefault() {
            return input;
        }

        @Override
        public BooleanFormula visitOr(List<BooleanFormula> operands) {
            long sizeAfterExpansion = 1;
            List<Set<BooleanFormula>> asConjunctions = new ArrayList<>();
            for (BooleanFormula op : operands) {
                Set<BooleanFormula> out = bfmgr.toConjunctionArgs(op, true);
                try {
                    sizeAfterExpansion = LongMath.checkedMultiply(sizeAfterExpansion, out.size());
                } catch (ArithmeticException ex) {
                    sizeAfterExpansion = expansionResultSizeLimit + 1;
                    break;
                }
                asConjunctions.add(out);
            }

            if (sizeAfterExpansion <= expansionResultSizeLimit) {
                // Perform recursive expansion.
                Set<List<BooleanFormula>> product = Sets.cartesianProduct(asConjunctions);
                Set<BooleanFormula> newArgs = new HashSet<>(product.size());
                for (List<BooleanFormula> l : product) {
                    newArgs.add(bfmgr.or(l));
                }
                return bfmgr.and(newArgs);
            } else {
                return bfmgr.or(operands);
            }
        }
    });
}
From source file:com.ge.research.semtk.sparqlX.parallel.SparqlParallelQueries.java
public JSONObject returnFusedResults() throws Throwable {
    // Make sure that all the queries returned successfully, else rethrow whatever exception occurred
    for (SparqlSubquery subquery : subqueries) {
        Throwable t = subquery.getException();
        if (t != null) {
            throw t;
        }
    }

    // Collect all the vars from the subqueries into a multiset so we will know which vars are unique and won't need to be renamed
    HashMultiset<String> multiSet = HashMultiset.create();
    for (SparqlSubquery subquery : subqueries) {
        ArrayList<String> columnsInResponse = subquery.getColumnNamesInResponse();
        multiSet.addAll(columnsInResponse);
    }

    // Build unique var names from the columns to return and the subqueries' suffixes
    JSONArray vars = new JSONArray();
    Set<String> distinctVars = new LinkedHashSet<>();
    for (SparqlSubquery subquery : subqueries) {
        String suffix = subquery.getResultsColumnNameSuffix();
        ArrayList<String> columnsInResponse = subquery.getColumnNamesInResponse();
        for (String column : columnsToReturn) {
            if (columnsInResponse.contains(column)) {
                if (multiSet.count(column) == 1) {
                    distinctVars.add(column);
                } else if (columnsToFuseOn.contains(column)) {
                    distinctVars.add(column);
                } else {
                    distinctVars.add(column + suffix);
                }
            }
        }
    }
    for (String var : distinctVars) {
        vars.add(var);
    }

    // Collect the distinct bindings from the subqueries into sets by the columns to fuse on.
    Map<String/*fusedCols*/, Map<String/*suffix*/, Set<Pair<SparqlSubquery, ArrayList<String>/*binding*/>>>> fusedColsToBindings = new LinkedHashMap<>();
    StringBuilder sb = new StringBuilder();
    for (SparqlSubquery subquery : subqueries) {
        // Get the subquery's bindings
        String suffix = subquery.getResultsColumnNameSuffix();
        Table resultsTable = subquery.getResponseTable();
        if ((resultsTable != null) && (resultsTable.getNumRows() != 0)) {
            System.out.println("Query " + suffix + " has " + resultsTable.getNumRows() + " rows");
            ArrayList<String> columnsInResponse = subquery.getColumnNamesInResponse();
            for (ArrayList<String> row : resultsTable.getRows()) {
                // Compose the lookup key from the columns to fuse on
                sb.setLength(0);
                for (String column : columnsToFuseOn) {
                    if (columnsInResponse.contains(column)) {
                        String value = (String) row.get(resultsTable.getColumnIndex(column));
                        if (sb.length() > 0)
                            sb.append("\t");
                        sb.append(value);
                    }
                }
                String fusedCols = sb.toString();

                // Add the binding to the set of distinct bindings for the same fusedCols and same suffix
                Map<String, Set<Pair<SparqlSubquery, ArrayList<String>>>> suffixToBindings = fusedColsToBindings.get(fusedCols);
                if (suffixToBindings == null) {
                    suffixToBindings = new LinkedHashMap<>();
                    fusedColsToBindings.put(fusedCols, suffixToBindings);
                }
                Set<Pair<SparqlSubquery, ArrayList<String>>> distinctBindings = suffixToBindings.get(suffix);
                if (distinctBindings == null) {
                    distinctBindings = new LinkedHashSet<>();
                    suffixToBindings.put(suffix, distinctBindings);
                }
                distinctBindings.add(new Pair<>(subquery, row));
            }
        } else {
            System.out.println("Query " + suffix + " has 0 rows");
        }
    }

    // Build the array of output bindings with unique var names
    int numOutputBindings = 0;
    JSONArray outputBindings = new JSONArray();
    ArrayList<ArrayList<String>> outputRows = new ArrayList<ArrayList<String>>();
    ArrayList<String> outputColumnNames = new ArrayList<String>();
    for (Map<String, Set<Pair<SparqlSubquery, ArrayList<String>>>> suffixToBindings : fusedColsToBindings.values()) {
        // Collect each subquery's set of distinct bindings for the same fusedCols
        List<Set<Pair<SparqlSubquery, ArrayList<String>>>> fusedColsBindings = new ArrayList<>();
        for (SparqlSubquery subquery : subqueries) {
            String suffix = subquery.getResultsColumnNameSuffix();
            Set<Pair<SparqlSubquery, ArrayList<String>>> distinctBindings = suffixToBindings.get(suffix);
            if (fusedColsBindings != null && distinctBindings != null) {
                fusedColsBindings.add(distinctBindings);
            } else if (!isSubqueryOptional) {
                // Don't output any bindings for this particular fusedCols
                fusedColsBindings = null;
            }
        }
        if (fusedColsBindings != null) {
            // Iterate over every possible list that can be formed by choosing one element from each of the above sets in order
            Set<List<Pair<SparqlSubquery, ArrayList<String>>>> allFusedColsBindings = Sets.cartesianProduct(fusedColsBindings);
            for (List<Pair<SparqlSubquery, ArrayList<String>>> fusedColsBinding : allFusedColsBindings) {
                // Create the new output binding
                ArrayList<String> outputRow = new ArrayList<String>();
                String addedCols = ",";
                for (Pair<SparqlSubquery, ArrayList<String>> rowPair : fusedColsBinding) {
                    // Add each var to the output binding, uniquified by the subquery's suffix when needed
                    String suffix = rowPair.getFirst().getResultsColumnNameSuffix();
                    ArrayList<String> columnsInResponse = rowPair.getFirst().getColumnNamesInResponse();
                    for (String column : columnsToReturn) {
                        if (columnsInResponse.contains(column)) {
                            if (addedCols.indexOf("," + column + ",") >= 0)
                                continue;
                            String var = rowPair.getSecond()
                                    .get(rowPair.getFirst().getResponseTable().getColumnIndex(column));
                            if (multiSet.count(column) == 1) {
                                outputRow.add(var);
                                addedCols += column + ",";
                                if (numOutputBindings == 0)
                                    outputColumnNames.add(column);
                            } else if (columnsToFuseOn.contains(column)) {
                                outputRow.add(var);
                                addedCols += column + ",";
                                if (numOutputBindings == 0)
                                    outputColumnNames.add(column);
                            } else {
                                outputRow.add(var);
                                addedCols += column + suffix + ",";
                                if (numOutputBindings == 0)
                                    outputColumnNames.add(column + suffix);
                            }
                        }
                    }
                }
                outputRows.add(outputRow);
                numOutputBindings++;
            }
        }
    }

    String[] outputColumnNamesArray = outputColumnNames.toArray(new String[0]);
    String[] outputColumnTypes = new String[outputColumnNamesArray.length];
    for (int i = 0; i < outputColumnTypes.length; i++)
        outputColumnTypes[i] = "String";
    Table outputTable = new Table(outputColumnNamesArray, outputColumnTypes, outputRows);
    System.out.println("Fused response has " + numOutputBindings + " results");
    gResultTable = new TableResultSet(true);
    gResultTable.addResults(outputTable);
    return gResultTable.toJson();
}
From source file:org.sosy_lab.cpachecker.util.templates.TemplatePrecision.java
/**
 * Generate all linear expressions of size up to {@code maxExpressionSize}
 * with coefficients in {@code allowedCoefficients},
 * over the variables returned by {@link #getVarsForNode(CFANode)}.
 */
private Set<Template> generateTemplates(final CFANode node) {
    Set<ASimpleDeclaration> varsForNode = getVarsForNode(node);
    Set<CIdExpression> vars = varsForNode.stream().filter(this::shouldProcessVariable)
            .map(d -> new CIdExpression(FileLocation.DUMMY, (CSimpleDeclaration) d))
            .collect(Collectors.toSet());
    int maxLength = Math.min(maxExpressionSize, vars.size());
    allowedCoefficients = allowedCoefficients.stream().filter(x -> !x.equals(Rational.ZERO))
            .collect(Collectors.toSet());

    // Copy the {@code vars} multiple times for the cartesian product.
    List<Set<CIdExpression>> lists = Collections.nCopies(maxLength, vars);

    // All lists of size {@code maxExpressionSize}.
    Set<List<CIdExpression>> product = Sets.cartesianProduct(lists);

    // Eliminate duplicates, and ensure that all combinations are unique.
    // As a by-product, produces the expressions of all sizes less than
    // {@code maxExpressionSize} as well.
    Set<Set<CIdExpression>> combinations = product.stream().map(HashSet<CIdExpression>::new)
            .collect(Collectors.toSet());

    Set<Template> returned = new HashSet<>();
    for (Set<CIdExpression> variables : combinations) {
        // For every variable: instantiate with every coefficient.
        List<List<LinearExpression<CIdExpression>>> out = variables.stream()
                .map(x -> allowedCoefficients.stream()
                        .map(coeff -> LinearExpression.monomial(x, coeff))
                        .collect(Collectors.toList()))
                .collect(Collectors.toList());

        // Convert to a list of all possible linear expressions.
        List<LinearExpression<CIdExpression>> linearExpressions = Lists.cartesianProduct(out).stream()
                .map(list -> list.stream().reduce(LinearExpression.empty(), LinearExpression::add))
                .collect(Collectors.toList());

        Set<Template> generated = filterToSameType(filterRedundantExpressions(linearExpressions)).stream()
                .filter(t -> !t.isEmpty()).map(Template::of).collect(Collectors.toSet());
        returned.addAll(generated);
    }
    if (generateDifferences) {
        returned.addAll(generateDifferenceTemplates(vars));
    }
    return returned;
}
From source file:org.eclipse.gemoc.execution.engine.coordinator.commons.HeterogeneousEngine.java
/**
 * TODO: this only works for two coordinated engines... not sure how to generalize that
 *
 * @return
 * @throws SimulationException
 */
public ArrayList<HeterogeneousLogicalStep> computeHeterogeneousLogicalStep() throws SimulationException {
    ArrayList<HeterogeneousLogicalStep> res = new ArrayList<HeterogeneousLogicalStep>();

    ArrayList<Set<ExtendedLogicalStep>> allLogicalSteps = new ArrayList<Set<ExtendedLogicalStep>>(
            _coordinatedEngines.size());
    for (int i = 0; i < _coordinatedEngines.size(); i++) {
        IConcurrentExecutionEngine engine = _coordinatedEngines.get(i);
        List<ExtendedLogicalStep> possibleSteps = extendLogicalSteps(engine.getPossibleLogicalSteps(), i);
        Set<ExtendedLogicalStep> engineLogicalSteps = new HashSet<ExtendedLogicalStep>(possibleSteps);
        allLogicalSteps.add(engineLogicalSteps);
    }

    Set<List<ExtendedLogicalStep>> cartesianProductOfLogicalSteps = Sets.cartesianProduct(allLogicalSteps);
    for (List<ExtendedLogicalStep> steps : cartesianProductOfLogicalSteps) {
        for (int i = 0; i < steps.size(); i++) {
            ExtendedLogicalStep stepToAdd = steps.get(i);
            addConstraintsFromOneStepOfOneEngine(stepToAdd.solverIndex, stepToAdd);
        }
        if (_coordinationSolver.getSolverWrapper().getSolver().hasSolution()) {
            HeterogeneousLogicalStep theStepsUsed = new HeterogeneousLogicalStep();
            theStepsUsed.logicalSteps.addAll(steps);
            res.add(theStepsUsed);
        }
        // else: incompatible combination of logical steps; nothing added
        _coordinationSolver.revertForceClockEffect();
    }
    return res;
}
From source file:org.tensorics.core.tensor.Positions.java
private static Iterable<Position> cartesianProduct(List<Set<?>> coordinateSets) {
    Set<List<Object>> cartesianProduct = Sets.cartesianProduct(ImmutableList.copyOf(coordinateSets));
    return cartesianProduct.stream().map(l -> Position.of(new HashSet<>(l))).collect(toSet());
}
From source file:org.apache.flink.compiler.dag.SingleInputNode.java
protected void instantiateCandidate(OperatorDescriptorSingle dps, Channel in,
        List<Set<? extends NamedChannel>> broadcastPlanChannels, List<PlanNode> target,
        CostEstimator estimator, RequestedGlobalProperties globPropsReq,
        RequestedLocalProperties locPropsReq) {
    final PlanNode inputSource = in.getSource();

    for (List<NamedChannel> broadcastChannelsCombination : Sets.cartesianProduct(broadcastPlanChannels)) {
        boolean validCombination = true;
        boolean requiresPipelinebreaker = false;

        // check whether the broadcast inputs use the same plan candidate at the branching point
        for (int i = 0; i < broadcastChannelsCombination.size(); i++) {
            NamedChannel nc = broadcastChannelsCombination.get(i);
            PlanNode bcSource = nc.getSource();

            // check branch compatibility against input
            if (!areBranchCompatible(bcSource, inputSource)) {
                validCombination = false;
                break;
            }

            // check branch compatibility against all other broadcast variables
            for (int k = 0; k < i; k++) {
                PlanNode otherBcSource = broadcastChannelsCombination.get(k).getSource();
                if (!areBranchCompatible(bcSource, otherBcSource)) {
                    validCombination = false;
                    break;
                }
            }

            // check if there is a common predecessor and whether there is a dam on the way to all common predecessors
            if (this.hereJoinedBranches != null) {
                for (OptimizerNode brancher : this.hereJoinedBranches) {
                    PlanNode candAtBrancher = in.getSource().getCandidateAtBranchPoint(brancher);

                    if (candAtBrancher == null) {
                        // closed branch between two broadcast variables
                        continue;
                    }

                    SourceAndDamReport res = in.getSource().hasDamOnPathDownTo(candAtBrancher);
                    if (res == NOT_FOUND) {
                        throw new CompilerException("Bug: Tracing dams for deadlock detection is broken.");
                    } else if (res == FOUND_SOURCE) {
                        requiresPipelinebreaker = true;
                        break;
                    } else if (res == FOUND_SOURCE_AND_DAM) {
                        // good
                    } else {
                        throw new CompilerException();
                    }
                }
            }
        }

        if (!validCombination) {
            continue;
        }

        if (requiresPipelinebreaker) {
            in.setTempMode(in.getTempMode().makePipelineBreaker());
        }

        final SingleInputPlanNode node = dps.instantiate(in, this);
        node.setBroadcastInputs(broadcastChannelsCombination);

        // compute how the strategy affects the properties
        GlobalProperties gProps = in.getGlobalProperties().clone();
        LocalProperties lProps = in.getLocalProperties().clone();
        gProps = dps.computeGlobalProperties(gProps);
        lProps = dps.computeLocalProperties(lProps);

        // filter by the user code field copies
        gProps = gProps.filterByNodesConstantSet(this, 0);
        lProps = lProps.filterByNodesConstantSet(this, 0);

        // apply
        node.initProperties(gProps, lProps);
        node.updatePropertiesWithUniqueSets(getUniqueFields());
        target.add(node);
    }
}
From source file:eu.stratosphere.compiler.dag.SingleInputNode.java
protected void instantiateCandidate(OperatorDescriptorSingle dps, Channel in,
        List<Set<? extends NamedChannel>> broadcastPlanChannels, List<PlanNode> target,
        CostEstimator estimator, RequestedGlobalProperties globPropsReq,
        RequestedLocalProperties locPropsReq) {
    final PlanNode inputSource = in.getSource();

    for (List<NamedChannel> broadcastChannelsCombination : Sets.cartesianProduct(broadcastPlanChannels)) {
        boolean validCombination = true;

        // check whether the broadcast inputs use the same plan candidate at the branching point
        for (int i = 0; i < broadcastChannelsCombination.size(); i++) {
            NamedChannel nc = broadcastChannelsCombination.get(i);
            PlanNode bcSource = nc.getSource();

            // check branch compatibility against input
            if (!areBranchCompatible(bcSource, inputSource)) {
                validCombination = false;
                break;
            }

            // check branch compatibility against all other broadcast variables
            for (int k = 0; k < i; k++) {
                PlanNode otherBcSource = broadcastChannelsCombination.get(k).getSource();
                if (!areBranchCompatible(bcSource, otherBcSource)) {
                    validCombination = false;
                    break;
                }
            }
        }

        if (!validCombination) {
            continue;
        }

        final SingleInputPlanNode node = dps.instantiate(in, this);
        node.setBroadcastInputs(broadcastChannelsCombination);

        // compute how the strategy affects the properties
        GlobalProperties gProps = in.getGlobalProperties().clone();
        LocalProperties lProps = in.getLocalProperties().clone();
        gProps = dps.computeGlobalProperties(gProps);
        lProps = dps.computeLocalProperties(lProps);

        // filter by the user code field copies
        gProps = gProps.filterByNodesConstantSet(this, 0);
        lProps = lProps.filterByNodesConstantSet(this, 0);

        // apply
        node.initProperties(gProps, lProps);
        node.updatePropertiesWithUniqueSets(getUniqueFields());
        target.add(node);
    }
}
From source file:com.google.devtools.build.lib.packages.ImplicitOutputsFunction.java
/**
 * Substitutes attribute-placeholders in a template string, producing all possible combinations.
 *
 * @param template the template string, may contain named placeholders for rule attributes, like
 *     <code>%{name}</code> or <code>%{deps}</code>
 * @param rule the rule whose attributes the placeholders correspond to
 * @param placeholdersInTemplate if specified, will contain all placeholders found in the
 *     template; may contain duplicates
 * @return all possible combinations of the attributes referenced by the placeholders,
 *     substituted into the template; empty if any of the placeholders expands to no values
 */
public static ImmutableList<String> substitutePlaceholderIntoTemplate(String template, AttributeMap rule,
        AttributeValueGetter attributeGetter, @Nullable List<String> placeholdersInTemplate) {
    List<String> placeholders = (placeholdersInTemplate == null) ? Lists.<String>newArrayList()
            : placeholdersInTemplate;
    String formatStr = createPlaceholderSubstitutionFormatString(template, placeholders);
    if (placeholders.isEmpty()) {
        return ImmutableList.of(template);
    }

    List<Set<String>> values = Lists.newArrayListWithCapacity(placeholders.size());
    for (String placeholder : placeholders) {
        Set<String> attrValues = attributeGetter.get(rule, placeholder);
        if (attrValues.isEmpty()) {
            return ImmutableList.<String>of();
        }
        values.add(attrValues);
    }

    ImmutableList.Builder<String> out = new ImmutableList.Builder<>();
    for (List<String> combination : Sets.cartesianProduct(values)) {
        out.add(String.format(formatStr, combination.toArray()));
    }
    return out.build();
}
From source file:org.apache.flink.optimizer.dag.SingleInputNode.java
protected void instantiateCandidate(OperatorDescriptorSingle dps, Channel in,
        List<Set<? extends NamedChannel>> broadcastPlanChannels, List<PlanNode> target,
        CostEstimator estimator, RequestedGlobalProperties globPropsReq,
        RequestedLocalProperties locPropsReq) {
    final PlanNode inputSource = in.getSource();

    for (List<NamedChannel> broadcastChannelsCombination : Sets.cartesianProduct(broadcastPlanChannels)) {
        boolean validCombination = true;
        boolean requiresPipelinebreaker = false;

        // check whether the broadcast inputs use the same plan candidate at the branching point
        for (int i = 0; i < broadcastChannelsCombination.size(); i++) {
            NamedChannel nc = broadcastChannelsCombination.get(i);
            PlanNode bcSource = nc.getSource();

            // check branch compatibility against input
            if (!areBranchCompatible(bcSource, inputSource)) {
                validCombination = false;
                break;
            }

            // check branch compatibility against all other broadcast variables
            for (int k = 0; k < i; k++) {
                PlanNode otherBcSource = broadcastChannelsCombination.get(k).getSource();
                if (!areBranchCompatible(bcSource, otherBcSource)) {
                    validCombination = false;
                    break;
                }
            }

            // check if there is a common predecessor and whether there is a dam on the way to all common predecessors
            if (in.isOnDynamicPath() && this.hereJoinedBranches != null) {
                for (OptimizerNode brancher : this.hereJoinedBranches) {
                    PlanNode candAtBrancher = in.getSource().getCandidateAtBranchPoint(brancher);

                    if (candAtBrancher == null) {
                        // closed branch between two broadcast variables
                        continue;
                    }

                    SourceAndDamReport res = in.getSource().hasDamOnPathDownTo(candAtBrancher);
                    if (res == NOT_FOUND) {
                        throw new CompilerException("Bug: Tracing dams for deadlock detection is broken.");
                    } else if (res == FOUND_SOURCE) {
                        requiresPipelinebreaker = true;
                        break;
                    } else if (res == FOUND_SOURCE_AND_DAM) {
                        // good
                    } else {
                        throw new CompilerException();
                    }
                }
            }
        }

        if (!validCombination) {
            continue;
        }

        if (requiresPipelinebreaker) {
            in.setTempMode(in.getTempMode().makePipelineBreaker());
        }

        final SingleInputPlanNode node = dps.instantiate(in, this);
        node.setBroadcastInputs(broadcastChannelsCombination);

        // compute how the strategy affects the properties
        GlobalProperties gProps = in.getGlobalProperties().clone();
        LocalProperties lProps = in.getLocalProperties().clone();
        gProps = dps.computeGlobalProperties(gProps);
        lProps = dps.computeLocalProperties(lProps);

        // filter by the user code field copies
        gProps = gProps.filterBySemanticProperties(getSemanticPropertiesForGlobalPropertyFiltering(), 0);
        lProps = lProps.filterBySemanticProperties(getSemanticPropertiesForLocalPropertyFiltering(), 0);

        // apply
        node.initProperties(gProps, lProps);
        node.updatePropertiesWithUniqueSets(getUniqueFields());
        target.add(node);
    }
}