List of usage examples for com.google.common.collect.Sets.cartesianProduct
public static <B> Set<List<B>> cartesianProduct(Set<? extends B>... sets)
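Before the harvested project examples below, a minimal self-contained sketch of how the method behaves; the class and variable names here are illustrative only and do not come from any of the listed projects. Each List in the returned Set picks one element from each input set, in the order the sets were passed in.

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import java.util.List;
import java.util.Set;

public class CartesianProductDemo {
    public static void main(String[] args) {
        Set<String> sizes = ImmutableSet.of("S", "M");
        Set<String> colors = ImmutableSet.of("red", "blue");

        // Every combination of one size and one color, as ordered lists:
        // [S, red], [S, blue], [M, red], [M, blue]
        Set<List<String>> combos = Sets.cartesianProduct(sizes, colors);
        combos.forEach(System.out::println);
    }
}

The product is a lazily computed view, and if any input set is empty the result is the empty set, which several of the examples below check for explicitly.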
From source file:io.prestosql.plugin.cassandra.CassandraPartitionManager.java
private List<CassandraPartition> getCassandraPartitions(CassandraTable table, TupleDomain<ColumnHandle> tupleDomain) {
    if (tupleDomain.isNone()) {
        return ImmutableList.of();
    }

    List<Set<Object>> partitionKeysList = getPartitionKeysList(table, tupleDomain);
    Set<List<Object>> filterList = Sets.cartesianProduct(partitionKeysList);

    // an empty filter list means all partitions
    if (filterList.isEmpty()) {
        return cassandraSession.getPartitions(table, ImmutableList.of());
    }

    return cassandraSession.getPartitions(table, partitionKeysList);
}
From source file:org.prebake.core.GlobRelation.java
public @Nullable ImmutableList<Solution> allPossibleSolutions() {
    ImmutableList<String> keys;
    ImmutableList<Set<String>> valueSets;
    {
        ImmutableList.Builder<String> kb = ImmutableList.builder();
        ImmutableList.Builder<Set<String>> vb = ImmutableList.builder();
        for (Param p : parameters.values()) {
            if (p.allowedValues == null) {
                return null;
            }
            kb.add(p.name);
            vb.add(p.allowedValues);
        }
        keys = kb.build();
        valueSets = vb.build();
    }
    Map<String, String> bindings = Maps.newLinkedHashMap();
    ImmutableList.Builder<Solution> solutions = ImmutableList.builder();
    int n = keys.size();
    for (List<String> values : Sets.cartesianProduct(valueSets)) {
        for (int i = 0; i < n; ++i) {
            bindings.put(keys.get(i), values.get(i));
        }
        // one solution per combination, once all keys are bound
        solutions.add(withParameterValues(bindings));
    }
    return solutions.build();
}
From source file:edu.cmu.lti.oaqa.ecd.phase.BasePhaseLoader.java
@SuppressWarnings("unchecked") private List<AnalysisEngineDescription> createInnerPipeline(List<Map<String, Object>> pipeline) throws ResourceInitializationException, IOException { List<AnalysisEngineDescription> options = Lists.newArrayList(); List<Set<AnalysisEngineDescription>> sets = Lists.newArrayList(); for (Map<String, Object> map : pipeline) { try {/* www. jav a2 s .co m*/ Map.Entry<String, Object> me = getFirst(map); String type = me.getKey(); Object o = me.getValue(); if (o instanceof String) { String component = ((String) o).trim(); ResourceHandle handle = ResourceHandle.newHandle(type, component); Set<AnalysisEngineDescription> local = Sets.newLinkedHashSet(doLoadOptions(handle)); sets.add(local); } else if (o instanceof Iterable) { Iterable<Object> components = (Iterable<Object>) o; Set<AnalysisEngineDescription> local = Sets.newLinkedHashSet(); for (Object o2 : components) { if (o2 instanceof Map) { Map<String, Object> component = (Map<String, Object>) o2; List<AnalysisEngineDescription> aes = new ArrayList<AnalysisEngineDescription>(); loadOption(component, aes); local.addAll(aes); } else { throw new IllegalArgumentException( "Illegal experiment descriptor, all options must be specified as a pair 'key: value'"); } } sets.add(local); } else { throw new IllegalArgumentException( "Illegal experiment descriptor, must contain either an iterable or a string"); } } catch (Exception e) { System.err.printf("Unable to load option %s caused by:\n", map); Throwable cause = e.getCause(); if (cause != null) { cause.printStackTrace(); } else { e.printStackTrace(); } } } // AED equality is based on equality of the MetaDataObject attributes Set<List<AnalysisEngineDescription>> product = Sets.cartesianProduct(sets); for (List<AnalysisEngineDescription> local : product) { AggregateBuilder builder = new AggregateBuilder(); List<String> names = Lists.newArrayList(); for (AnalysisEngineDescription aeDesc : local) { builder.add(aeDesc); names.add(aeDesc.getAnalysisEngineMetaData().getName()); } String aeName = Joiner.on(";").join(names); AnalysisEngineDescription aee = builder.createAggregateDescription(); aee.getAnalysisEngineMetaData().setName(String.format("pipeline:(%s)", aeName)); options.add(aee); } return options; }
From source file:com.github.benmanes.caffeine.cache.LocalCacheFactoryGenerator.java
private Set<List<Object>> combinations() {
    Set<Boolean> options = ImmutableSet.of(true, false);
    List<Set<Boolean>> sets = new ArrayList<>();
    for (int i = 0; i < featureByIndex.length; i++) {
        sets.add(options);
    }
    return Sets.cartesianProduct(sets);
}
From source file:fr.tpt.aadl.launch.AADLInspectorSchedulingAnalysis.java
public List<EGNode> performAnalysis(ComponentInstance cpu, RamsesConfiguration config,
        AnalysisErrorReporterManager errorReporter, final IProgressMonitor monitor) throws AnalysisException {
    this._config = config;
    this._monitor = monitor;

    /* XXX: test with several CPUs, CPU per CPU.
     * Modify the transformation to copy only the selected CPU instances into a subcomponent;
     * propagate to properties. Use EMFUtils.allInsta... to filter out.
     * Propagate to connections (bus, etc...).
     * Extract one configuration per CPU.
     * The model to exhibit is the merge of the selected EGNode lists.
     */
    EGAnalyzer ega = new EGAnalyzer(_instantiator, _predefinedResourcesManager);
    Map<ComponentInstance, List<EGNode>> wcetMap = ega.extractWCETModelsMap(cpu);
    List<Set<EGNode>> listForProduct = new ArrayList<Set<EGNode>>();
    for (ComponentInstance thread : wcetMap.keySet()) {
        Set<EGNode> tmp = new LinkedHashSet<EGNode>(wcetMap.get(thread));
        List<EGNode> toRemove = new ArrayList<EGNode>();
        for (EGNode t : tmp)
            if (!t.hasCapacity())
                toRemove.add(t);
        tmp.removeAll(toRemove);
        listForProduct.add(tmp);
    }
    Set<List<EGNode>> res = Sets.cartesianProduct(listForProduct);
    size = res.size();
    String message = "INFO: " + size + " executions of AADLInspector schedulability "
            + "analysis will be executed.";
    _logger.trace(message);
    if (size == 0) {
        message = "At least one task has no worst case execution time defined in the "
                + "analysis model";
        _logger.error(message);
        ServiceProvider.SYS_ERR_REP.error(message, false);
        return null;
    }

    // Launch the analysis for each configuration.
    // If one configuration is not schedulable, exhibit that one;
    // otherwise exhibit the one that maximizes CPU usage.
    int iter = 0;
    final Thread[] aadlInspectorThreads = new Thread[size];
    final AADLInspectorSchedulingAnalysis app = this;
    for (List<EGNode> egNodeList : res) {
        String outputPath;
        if (config.getAadlInspectorOutputDir() != null)
            outputPath = config.getAadlInspectorOutputDir().getAbsolutePath();
        else
            outputPath = config.getRamsesOutputDir().getAbsolutePath();
        File tmpDir = new File(outputPath + "/wcet_" + iter);
        if (!tmpDir.exists())
            tmpDir.mkdir();
        // Execute the analysis in several threads
        aadlInspectorThreads[iter] = new AADLInspectorAnalysisThread(app, egNodeList, tmpDir, cpu,
                "wcet_" + iter, "automatic");
        iter++;
    }
    Command cmd = new Command() {
        int status = Command.UNSET;

        @Override
        public int run() throws Exception {
            for (Thread t : aadlInspectorThreads) {
                t.start();
            }
            // Wait until all the threads end.
            synchronized (app) {
                app.wait();
            }
            status = Command.OK;
            return Command.OK;
        }

        @Override
        public boolean isCanceled() {
            return monitor.isCanceled();
        }

        @Override
        public String getLabel() {
            return null;
        }

        @Override
        public int getStatus() {
            return status;
        }

        @Override
        public Process getProcess() {
            // TODO Auto-generated method stub
            return null;
        }
    };
    int exitStatus;
    WaitMonitor wm = new WaitMonitor(cmd);
    wm.start();
    try {
        exitStatus = wm.waitAndCheck(500);
    } catch (Exception e) {
        killThreads(aadlInspectorThreads);
        String msg = "AADL inspector monitoring has been interrupted";
        _logger.fatal(msg, e);
        throw new RuntimeException(msg, e);
    }
    switch (exitStatus) {
    case Command.FATAL: {
        Exception e = wm.getCaughtException();
        String msg = "AADL Inspector has failed";
        _logger.fatal(msg, e);
        killThreads(aadlInspectorThreads);
        throw new RuntimeException(msg, e);
    }
    case Command.ERROR: {
        // TODO
    }
    case Command.CANCEL: {
        killThreads(aadlInspectorThreads);
        String msg = "AADL Inspector has been canceled";
        _logger.trace(msg);
        throw new OperationCanceledException(msg);
    }
    case Command.OK: {
        String msg = "AADL Inspector process done";
        _logger.trace(msg);
        break;
    }
    default: {
        String errMsg = "AADL Inspector error: unknown exit code";
        _logger.error(errMsg);
        ServiceProvider.SYS_ERR_REP.error(errMsg, true);
    }
    }
    int i = 0;
    String outputModelId = "";
    List<EGNode> resultingEGNodeList = new ArrayList<EGNode>();
    for (ComponentInstance ci : responseTimeResultList.keySet()) {
        List<EGNode> tmp = null;
        for (ResponseTimeResult ufr : responseTimeResultList.get(ci)) {
            double maxResponseTime = 0.0;
            boolean schedulable = true;
            double responseTimeSum = 0;
            if (ufr.isResult() == false) {
                tmp = new ArrayList<EGNode>();
                tmp.addAll(this.analysisResult.get(ufr));
                outputModelId = "wcet_" + (i + 1);
                schedulable = false;
                break;
            }
            for (TaskResponseTimeResult rt : ufr.getResponseTimes().values())
                responseTimeSum += rt.worst;
            if (responseTimeSum >= maxResponseTime && schedulable) {
                tmp = new ArrayList<EGNode>();
                tmp.addAll(this.analysisResult.get(ufr));
                maxResponseTime = responseTimeSum;
                outputModelId = "wcet_" + (i + 1);
            }
            i++;
        }
        if (tmp != null)
            resultingEGNodeList.addAll(tmp);
        else {
            String errMsg = "AADL Inspector error: results show a schedulable"
                    + " task set, tasks have execution time, but the sum of tasks response time is null";
            _logger.error(errMsg);
            ServiceProvider.SYS_ERR_REP.error(errMsg, true);
        }
    }
    return resultingEGNodeList;
}
From source file:org.apache.calcite.plan.RexImplicationChecker.java
/** Returns whether the predicate {@code first} (not a conjunction)
 * implies {@code second}. */
private boolean implies2(RexNode first, RexNode second) {
    if (second.isAlwaysFalse()) {
        // f cannot imply s
        return false;
    }
    // E.g. "x is null" implies "x is null".
    if (RexUtil.eq(first, second)) {
        return true;
    }
    // Several things imply "IS NOT NULL"
    switch (second.getKind()) {
    case IS_NOT_NULL:
        // Suppose we know that first is strong in second; that is,
        // if second is null, then first will be null.
        // Then, first being not null implies that second is not null.
        //
        // For example, first is "x > y", second is "x".
        // If we know that "x > y" is not null, we know that "x" is not null.
        final RexNode operand = ((RexCall) second).getOperands().get(0);
        final Strong strong = new Strong() {
            @Override
            public boolean isNull(RexNode node) {
                return RexUtil.eq(node, operand) || super.isNull(node);
            }
        };
        if (strong.isNull(first)) {
            return true;
        }
    }
    final InputUsageFinder firstUsageFinder = new InputUsageFinder();
    final InputUsageFinder secondUsageFinder = new InputUsageFinder();
    RexUtil.apply(firstUsageFinder, ImmutableList.<RexNode>of(), first);
    RexUtil.apply(secondUsageFinder, ImmutableList.<RexNode>of(), second);
    // Check support
    if (!checkSupport(firstUsageFinder, secondUsageFinder)) {
        LOGGER.warn("Support for checking {} => {} is not there", first, second);
        return false;
    }
    ImmutableList.Builder<Set<Pair<RexInputRef, RexNode>>> usagesBuilder = ImmutableList.builder();
    for (Map.Entry<RexInputRef, InputRefUsage<SqlOperator, RexNode>> entry
            : firstUsageFinder.usageMap.entrySet()) {
        ImmutableSet.Builder<Pair<RexInputRef, RexNode>> usageBuilder = ImmutableSet.builder();
        if (entry.getValue().usageList.size() > 0) {
            for (final Pair<SqlOperator, RexNode> pair : entry.getValue().usageList) {
                usageBuilder.add(Pair.of(entry.getKey(), pair.getValue()));
            }
            usagesBuilder.add(usageBuilder.build());
        }
    }
    final Set<List<Pair<RexInputRef, RexNode>>> usages = Sets.cartesianProduct(usagesBuilder.build());
    for (List<Pair<RexInputRef, RexNode>> usageList : usages) {
        // Get the literals from the first conjunction and execute the second conjunction
        // using them.
        //
        // E.g., for
        //   x > 30 ⇒ x > 10,
        // we replace x by 30 in the second expression and execute it, i.e.,
        //   30 > 10.
        //
        // If that is true then we infer the implication.
        final DataContext dataValues = VisitorDataContext.of(rowType, usageList);
        if (!isSatisfiable(second, dataValues)) {
            return false;
        }
    }
    return true;
}
From source file:org.eclipse.incquery.runtime.matchers.psystem.rewriters.PQueryFlattener.java
/**
 * Creates the flattened bodies based on the caller body and the called (and already flattened) disjunctions.
 *
 * @param pBody the body to flatten
 * @param flattenedBodies the sets of flattened bodies of the called disjunctions
 * @param flattenedCalls the positive pattern calls that were flattened
 * @return the set of merged, conjuncted bodies
 */
private Set<PBody> createSetOfFlatPBodies(PBody pBody, List<Set<PBody>> flattenedBodies,
        List<PositivePatternCall> flattenedCalls) {
    PQuery pQuery = pBody.getPattern();
    // The members of this set are lists containing bodies in conjunction.
    // Ordering is not important within the list; only the cartesian product function requires a list.
    Set<List<PBody>> conjunctBodyLists = Sets.cartesianProduct(flattenedBodies);
    // The result set containing the merged conjuncted bodies
    Set<PBody> conjunctedBodies = Sets.<PBody>newHashSet();
    for (List<PBody> bodyList : conjunctBodyLists) {
        PBodyCopier copier = createBodyCopier(pQuery, flattenedCalls, bodyList);
        int i = 0;
        HierarchicalName hierarchicalNamingTool = new HierarchicalName();
        for (PBody calledBody : bodyList) {
            // Merge each called body
            hierarchicalNamingTool.setCallCount(i++);
            copier.mergeBody(calledBody, hierarchicalNamingTool, new ExportedParameterFilter());
        }
        // Merge the caller's constraints into the conjunct body
        copier.mergeBody(pBody);
        PBody copiedBody = copier.getCopiedBody();
        copiedBody.setStatus(PQueryStatus.OK);
        conjunctedBodies.add(copiedBody);
    }
    return conjunctedBodies;
}
From source file:edu.cmu.lti.oaqa.ecd.phase.BasePhaseLoader.java
private Set<List<Object>> doCartesianProduct(AnyObject crossParams) {
    List<Set<Object>> sets = Lists.newArrayList();  // input parameter value sets
    List<String> names = Lists.newArrayList();      // parameter names
    for (AnyTuple tuple : crossParams.getTuples()) {
        Set<Object> params = Sets.newHashSet();
        String key = tuple.getKey();
        names.add(key);
        @SuppressWarnings("unchecked")
        Iterable<Object> values = (Iterable<Object>) tuple.getObject();
        for (Object value : values) {
            params.add(value);
        }
        sets.add(params);
    }
    Set<List<Object>> product = Sets.cartesianProduct(sets);
    return product;
}
From source file:seaclouds.planner.Optimizer.java
/**
 * Shows how to interact with the parser to generate an Abstract Deployment Plan from the AAM
 * and the result of the matchmaking. This method implements a full (cartesian) search and uses
 * the private methods shown above.
 *
 * @param aam the Abstract Application Model
 * @param matches the matchmaking result: a map from AAM module name to the list of cloud
 *        offerings that match that module
 * @return the Abstract Deployment Plan
 */
public List<IToscaEnvironment> optimize(IToscaEnvironment aam, Map<String, List<INodeType>> matches) {
    List<String> labels = new ArrayList<>();
    List<Set<INodeType>> values = new ArrayList<>();
    for (Map.Entry<String, List<INodeType>> matchable : matches.entrySet()) {
        labels.add(matchable.getKey());
        values.add(new HashSet<>(matchable.getValue()));
    }

    Set<List<INodeType>> allSolutions = Sets.cartesianProduct(values);
    PriorityQueue<Solution> solutions = new PriorityQueue<>();
    for (List<INodeType> solution : allSolutions) {
        Iterator<INodeType> it1 = solution.iterator();
        Iterator<String> it2 = labels.iterator();
        HashMap<String, INodeType> s = new HashMap<>();
        while (it1.hasNext() && it2.hasNext())
            s.put(it2.next(), it1.next());
        solutions.offer(new Solution(aam, s));
        while (solutions.size() > wantedSolutions)
            solutions.poll();
    }

    List<IToscaEnvironment> ret = new ArrayList<>();
    for (Solution solution : solutions) {
        ret.add(createADP(aam, solution.value));
    }
    return ret;
}
From source file:seaclouds.utils.toscamodel.examples.OptimizerExample.java
/**
 * Shows how to interact with the parser to generate an Abstract Deployment Plan from the AAM
 * and the result of the matchmaking. This method implements a full (cartesian) search and uses
 * the private methods shown above.
 *
 * @param aam the Abstract Application Model
 * @param matches the matchmaking result: a map from AAM module name to the list of cloud
 *        offerings that match that module
 * @return the Abstract Deployment Plan
 */
public List<IToscaEnvironment> optimizeFullSearchCartesian(IToscaEnvironment aam,
        Map<String, List<INodeType>> matches) {
    List<String> labels = new ArrayList<>();
    List<Set<INodeType>> values = new ArrayList<>();
    for (Map.Entry<String, List<INodeType>> matchable : matches.entrySet()) {
        labels.add(matchable.getKey());
        values.add(new HashSet<>(matchable.getValue()));
    }

    Set<List<INodeType>> allSolutions = Sets.cartesianProduct(values);
    PriorityQueue<Solution> solutions = new PriorityQueue<>();
    for (List<INodeType> solution : allSolutions) {
        Iterator<INodeType> it1 = solution.iterator();
        Iterator<String> it2 = labels.iterator();
        HashMap<String, INodeType> s = new HashMap<>();
        while (it1.hasNext() && it2.hasNext())
            s.put(it2.next(), it1.next());
        solutions.offer(new Solution(aam, s));
        while (solutions.size() > wantedSolutions)
            solutions.poll();
    }

    List<IToscaEnvironment> ret = new ArrayList<>();
    for (Solution solution : solutions) {
        ret.add(createADP(aam, solution.value));
    }
    return ret;
}