List of usage examples for com.google.common.collect Multimap asMap
Map<K, Collection<V>> asMap();
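Before the project examples below, here is a minimal, self-contained sketch of the idiom they all share: asMap() exposes the multimap as a Map<K, Collection<V>>, so each key can be processed together with all of its values. (The class name AsMapDemo and the sample data are illustrative, not taken from any of the sources below.)

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import java.util.Collection;
import java.util.Map;

public class AsMapDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> multimap = ArrayListMultimap.create();
        multimap.put("even", 2);
        multimap.put("even", 4);
        multimap.put("odd", 1);
        // asMap() views the multimap as a Map from each key to the
        // collection of all values stored under that key.
        for (Map.Entry<String, Collection<Integer>> entry : multimap.asMap().entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}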
From source file:org.jetbrains.kotlin.resolve.DeclarationsChecker.java
private void checkSupertypesForConsistency(@NotNull ClassifierDescriptor classifierDescriptor,
        @NotNull PsiElement sourceElement) {
    Multimap<TypeConstructor, TypeProjection> multimap = SubstitutionUtils
            .buildDeepSubstitutionMultimap(classifierDescriptor.getDefaultType());
    for (Map.Entry<TypeConstructor, Collection<TypeProjection>> entry : multimap.asMap().entrySet()) {
        Collection<TypeProjection> projections = entry.getValue();
        if (projections.size() > 1) {
            TypeConstructor typeConstructor = entry.getKey();
            DeclarationDescriptor declarationDescriptor = typeConstructor.getDeclarationDescriptor();
            assert declarationDescriptor instanceof TypeParameterDescriptor : declarationDescriptor;
            TypeParameterDescriptor typeParameterDescriptor = (TypeParameterDescriptor) declarationDescriptor;

            // Immediate arguments of supertypes cannot be projected
            Set<KotlinType> conflictingTypes = Sets.newLinkedHashSet();
            for (TypeProjection projection : projections) {
                conflictingTypes.add(projection.getType());
            }
            removeDuplicateTypes(conflictingTypes);
            if (conflictingTypes.size() > 1) {
                DeclarationDescriptor containingDeclaration = typeParameterDescriptor.getContainingDeclaration();
                assert containingDeclaration instanceof ClassDescriptor : containingDeclaration;
                if (sourceElement instanceof KtClassOrObject) {
                    KtDelegationSpecifierList delegationSpecifierList =
                            ((KtClassOrObject) sourceElement).getDelegationSpecifierList();
                    assert delegationSpecifierList != null;
                    // trace.getErrorHandler().genericError(delegationSpecifierList.getNode(),
                    //         "Type parameter " + typeParameterDescriptor.getName() + " of "
                    //         + containingDeclaration.getName() + " has inconsistent values: " + conflictingTypes);
                    trace.report(INCONSISTENT_TYPE_PARAMETER_VALUES.on(delegationSpecifierList,
                            typeParameterDescriptor, (ClassDescriptor) containingDeclaration, conflictingTypes));
                } else if (sourceElement instanceof KtTypeParameter) {
                    trace.report(INCONSISTENT_TYPE_PARAMETER_BOUNDS.on((KtTypeParameter) sourceElement,
                            typeParameterDescriptor, (ClassDescriptor) containingDeclaration, conflictingTypes));
                }
            }
        }
    }
}
From source file:net.minecrell.dandelion.ui.MainController.java
private void openFile(Path openedPath) throws IOException {
    closeFile();

    // Create Fernflower context
    this.decompiler = new DandelionDecompiler(openedPath);

    Multimap<ImmutableList<String>, PackageElement> elements =
            TreeMultimap.create(ListComparator.get(), Ordering.natural());

    this.decompiler.getClasses().forEach(path -> {
        if (path.lastIndexOf('$') >= 0) {
            return;
        }
        PackageElement element = PackageElement.fromPath(PackageElement.Type.CLASS, path);
        System.out.println("Found class: " + element.getPath());
        elements.put(element.getPackage(), element);
    });

    for (String path : this.decompiler.getResources()) {
        if (path.endsWith(CLASS_EXTENSION)) {
            continue;
        }
        PackageElement element = PackageElement.fromPath(PackageElement.Type.RESOURCE, path);
        System.out.println("Found resource: " + element.getPath());
        elements.put(element.getPackage(), element);
    }

    elements.asMap().forEach((pack, packageElements) -> {
        TreeItem<PackageElement> root = new TreeItem<>(new PackageElement(PackageElement.Type.PACKAGE, pack));
        for (PackageElement element : packageElements) {
            root.getChildren().add(new TreeItem<>(element));
        }
        this.packageRoot.getChildren().add(root);
    });
}
From source file:com.facebook.swift.codec.metadata.ThriftStructMetadataBuilder.java
private void normalizeThriftFields(ThriftCatalog catalog) {
    // assign all fields an id (if possible)
    Set<String> fieldsWithConflictingIds = inferThriftFieldIds();

    // group fields by id
    Multimap<Optional<Short>, FieldMetadata> fieldsById = Multimaps.index(fields, getThriftFieldId());
    for (Entry<Optional<Short>, Collection<FieldMetadata>> entry : fieldsById.asMap().entrySet()) {
        Collection<FieldMetadata> fields = entry.getValue();

        // fields must have an id
        if (!entry.getKey().isPresent()) {
            for (String fieldName : newTreeSet(transform(fields, getOrExtractThriftFieldName()))) {
                // only report errors for fields that don't have conflicting ids
                if (!fieldsWithConflictingIds.contains(fieldName)) {
                    metadataErrors.addError("ThriftStruct %s fields %s do not have an id", structName,
                            newTreeSet(transform(fields, getOrExtractThriftFieldName())));
                }
            }
            continue;
        }
        short fieldId = entry.getKey().get();

        // assure all fields for this ID have the same name
        String fieldName = extractFieldName(fieldId, fields);
        for (FieldMetadata field : fields) {
            field.setName(fieldName);
        }

        // verify fields have a supported java type and all fields
        // for this ID have the same thrift type
        verifyFieldType(fieldId, fieldName, fields, catalog);
    }
}
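The example above pairs Multimaps.index with asMap(): index a flat list by a derived key, then walk each key's group of elements. A minimal sketch of that grouping idiom, with illustrative names and data (GroupByLengthDemo is not part of the Swift codebase):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import java.util.Collection;
import java.util.List;
import java.util.Map;

public class GroupByLengthDemo {
    public static void main(String[] args) {
        List<String> words = ImmutableList.of("to", "be", "or", "not", "bee");
        // Multimaps.index groups elements by a derived key; order within
        // each group follows the input order.
        Multimap<Integer, String> byLength = Multimaps.index(words, String::length);
        for (Map.Entry<Integer, Collection<String>> entry : byLength.asMap().entrySet()) {
            System.out.println("length " + entry.getKey() + ": " + entry.getValue());
        }
    }
}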
From source file:eu.itesla_project.iidm.network.impl.NetworkImpl.java
@Override
public void merge(Network other) {
    NetworkImpl otherNetwork = (NetworkImpl) other;

    // this check must not be done on the number of states but on the size
    // of the internal state array, because the network can have only
    // one state but an internal array with a size greater than one and
    // some re-usable states
    if (stateManager.getStateArraySize() != 1 || otherNetwork.stateManager.getStateArraySize() != 1) {
        throw new RuntimeException("Merging of multi-states network is not supported");
    }

    long start = System.currentTimeMillis();

    // check mergeability
    Multimap<Class<? extends Identifiable>, String> intersection = objectStore
            .intersection(otherNetwork.objectStore);
    for (Map.Entry<Class<? extends Identifiable>, Collection<String>> entry : intersection.asMap().entrySet()) {
        Class<? extends Identifiable> clazz = entry.getKey();
        if (clazz == DanglingLineImpl.class) {
            // fine for dangling lines
            continue;
        }
        Collection<String> objs = entry.getValue();
        if (objs.size() > 0) {
            throw new RuntimeException("The following object(s) of type " + clazz.getSimpleName()
                    + " exist(s) in both networks: " + objs);
        }
    }

    class LineMerge {
        String id;
        String voltageLevel1;
        String voltageLevel2;
        String xnode;
        String bus1;
        String bus2;
        String connectableBus1;
        String connectableBus2;
        Integer node1;
        Integer node2;

        class HalfLineMerge {
            String id;
            String name;
            float r;
            float x;
            float g1;
            float g2;
            float b1;
            float b2;
            float xnodeP;
            float xnodeQ;
        }

        final HalfLineMerge half1 = new HalfLineMerge();
        final HalfLineMerge half2 = new HalfLineMerge();

        CurrentLimits limits1;
        CurrentLimits limits2;
        float p1;
        float q1;
        float p2;
        float q2;
        Country country1;
        Country country2;
    }

    // try to find dangling line couples
    Map<String, DanglingLine> dl1byXnodeCode = new HashMap<>();
    for (DanglingLine dl1 : getDanglingLines()) {
        if (dl1.getUcteXnodeCode() != null) {
            dl1byXnodeCode.put(dl1.getUcteXnodeCode(), dl1);
        }
    }
    List<LineMerge> lines = new ArrayList<>();
    for (DanglingLine dl2 : Lists.newArrayList(other.getDanglingLines())) {
        DanglingLine dl1 = getDanglingLine(dl2.getId());
        if (dl1 == null) {
            // mapping by ucte xnode code
            if (dl2.getUcteXnodeCode() != null) {
                dl1 = dl1byXnodeCode.get(dl2.getUcteXnodeCode());
            }
        } else {
            // mapping by id
            if (dl1.getUcteXnodeCode() != null && dl2.getUcteXnodeCode() != null
                    && !dl1.getUcteXnodeCode().equals(dl2.getUcteXnodeCode())) {
                throw new RuntimeException("Dangling line couple " + dl1.getId()
                        + " have inconsistent Xnodes (" + dl1.getUcteXnodeCode()
                        + "!=" + dl2.getUcteXnodeCode() + ")");
            }
        }
        if (dl1 != null) {
            LineMerge l = new LineMerge();
            l.id = dl1.getId().compareTo(dl2.getId()) < 0
                    ? dl1.getId() + " + " + dl2.getId()
                    : dl2.getId() + " + " + dl1.getId();
            Terminal t1 = dl1.getTerminal();
            Terminal t2 = dl2.getTerminal();
            VoltageLevel vl1 = t1.getVoltageLevel();
            VoltageLevel vl2 = t2.getVoltageLevel();
            l.voltageLevel1 = vl1.getId();
            l.voltageLevel2 = vl2.getId();
            l.xnode = dl1.getUcteXnodeCode();
            l.half1.id = dl1.getId();
            l.half1.name = dl1.getName();
            l.half1.r = dl1.getR();
            l.half1.x = dl1.getX();
            l.half1.g1 = dl1.getG();
            l.half1.g2 = 0;
            l.half1.b1 = dl1.getB();
            l.half1.b2 = 0;
            l.half1.xnodeP = dl1.getP0();
            l.half1.xnodeQ = dl1.getQ0();
            l.half2.id = dl2.getId();
            l.half2.name = dl2.getName();
            l.half2.r = dl2.getR();
            l.half2.x = dl2.getX();
            l.half2.g1 = dl2.getG();
            l.half2.g2 = 0;
            l.half2.b1 = dl2.getB();
            l.half2.b2 = 0;
            l.half2.xnodeP = dl2.getP0();
            l.half2.xnodeQ = dl2.getQ0();
            l.limits1 = dl1.getCurrentLimits();
            l.limits2 = dl2.getCurrentLimits();
            if (t1.getVoltageLevel().getTopologyKind() == TopologyKind.BUS_BREAKER) {
                Bus b1 = t1.getBusBreakerView().getBus();
                if (b1 != null) {
                    l.bus1 = b1.getId();
                }
                l.connectableBus1 = t1.getBusBreakerView().getConnectableBus().getId();
            } else {
                l.node1 = t1.getNodeBreakerView().getNode();
            }
            if (t2.getVoltageLevel().getTopologyKind() == TopologyKind.BUS_BREAKER) {
                Bus b2 = t2.getBusBreakerView().getBus();
                if (b2 != null) {
                    l.bus2 = b2.getId();
                }
                l.connectableBus2 = t2.getBusBreakerView().getConnectableBus().getId();
            } else {
                l.node2 = t2.getNodeBreakerView().getNode();
            }
            l.p1 = t1.getP();
            l.q1 = t1.getQ();
            l.p2 = t2.getP();
            l.q2 = t2.getQ();
            l.country1 = vl1.getSubstation().getCountry();
            l.country2 = vl2.getSubstation().getCountry();
            lines.add(l);

            // remove the 2 dangling lines
            dl1.remove();
            dl2.remove();
        }
    }

    // do not forget to remove the other network from its store!!!
    otherNetwork.objectStore.remove(otherNetwork);

    // merge the stores
    objectStore.merge(otherNetwork.objectStore);

    // fix network back reference of the other network objects
    otherNetwork.ref.setRef(ref);

    Multimap<Boundary, LineMerge> mergedLineByBoundary = HashMultimap.create();
    for (LineMerge lm : lines) {
        LOGGER.debug("Replacing dangling line couple '{}' (xnode={}, country1={}, country2={}) by a line",
                lm.id, lm.xnode, lm.country1, lm.country2);
        TieLineAdderImpl la = newTieLine().setId(lm.id)
                .setVoltageLevel1(lm.voltageLevel1)
                .setVoltageLevel2(lm.voltageLevel2)
                .line1()
                .setId(lm.half1.id).setName(lm.half1.name)
                .setR(lm.half1.r).setX(lm.half1.x)
                .setG1(lm.half1.g1).setG2(lm.half1.g2)
                .setB1(lm.half1.b1).setB2(lm.half1.b2)
                .setXnodeP(lm.half1.xnodeP).setXnodeQ(lm.half1.xnodeQ)
                .line2()
                .setId(lm.half2.id).setName(lm.half2.name)
                .setR(lm.half2.r).setX(lm.half2.x)
                .setG1(lm.half2.g1).setG2(lm.half2.g2)
                .setB1(lm.half2.b1).setB2(lm.half2.b2)
                .setXnodeP(lm.half2.xnodeP).setXnodeQ(lm.half2.xnodeQ)
                .setUcteXnodeCode(lm.xnode);
        if (lm.bus1 != null) {
            la.setBus1(lm.bus1);
        }
        la.setConnectableBus1(lm.connectableBus1);
        if (lm.bus2 != null) {
            la.setBus2(lm.bus2);
        }
        la.setConnectableBus2(lm.connectableBus2);
        if (lm.node1 != null) {
            la.setNode1(lm.node1);
        }
        if (lm.node2 != null) {
            la.setNode2(lm.node2);
        }
        TieLineImpl l = la.add();
        l.setCurrentLimits(Side.ONE, (CurrentLimitsImpl) lm.limits1);
        l.setCurrentLimits(Side.TWO, (CurrentLimitsImpl) lm.limits2);
        l.getTerminal1().setP(lm.p1).setQ(lm.q1);
        l.getTerminal2().setP(lm.p2).setQ(lm.q2);
        mergedLineByBoundary.put(new Boundary(lm.country1, lm.country2), lm);
    }
    if (lines.size() > 0) {
        LOGGER.info("{} dangling line couples have been replaced by a line: {}", lines.size(),
                mergedLineByBoundary.asMap().entrySet().stream()
                        .map(e -> e.getKey() + ": " + e.getValue().size())
                        .collect(Collectors.toList()));
    }

    // update the source format
    if (!sourceFormat.equals(otherNetwork.sourceFormat)) {
        sourceFormat = "hybrid";
    }

    // change the network id
    setId(getId() + " + " + otherNetwork.getId());

    LOGGER.info("Merging of {} done in {} ms", id, (System.currentTimeMillis() - start));
}
From source file:net.shibboleth.idp.saml.attribute.mapping.AbstractSAMLAttributesMapper.java
/**
 * Constructor to create the mapping from an existing resolver. <br/>
 * This code inverts the {@link AttributeEncoder} (internal attribute -> SAML Attributes) into
 * {@link AttributeMapper} (SAML [RequestedAttributes] -> internal [Requested] Attributes) <br/>
 * to generate the {@link AbstractSAMLAttributeMapper} (with no
 * {@link AbstractSAMLAttributeMapper#getAttributeIds()}). These are accumulated into a {@link Multimap}, where the
 * key is the {@link AbstractSAMLAttributeMapper} and the values are the (IdP) attribute names. The collection of
 * {@link AttributeMapper}s can then be extracted from the map, and the appropriate internal names added (these
 * being the values of the {@link Multimap}).
 *
 * @param resolver the resolver
 * @param id the ID
 * @param mapperFactory a factory to generate new mappers of the correct type
 */
public AbstractSAMLAttributesMapper(@Nonnull final AttributeResolver resolver,
        @Nonnull @NotEmpty final String id,
        @Nonnull final Supplier<AbstractSAMLAttributeMapper<InType, OutType>> mapperFactory) {

    setId(id);

    final Multimap<AbstractSAMLAttributeMapper<InType, OutType>, String> theMappers;
    theMappers = HashMultimap.create();
    for (final AttributeDefinition attributeDef : resolver.getAttributeDefinitions().values()) {
        for (final AttributeEncoder encode : attributeDef.getAttributeEncoders()) {
            if (encode instanceof AttributeMapperProcessor) {
                // There is an appropriate reverse mapper
                final AttributeMapperProcessor factory = (AttributeMapperProcessor) encode;
                final AbstractSAMLAttributeMapper<InType, OutType> mapper = mapperFactory.get();
                factory.populateAttributeMapper(mapper);
                theMappers.put(mapper, attributeDef.getId());
            }
        }
    }

    mappers = new ArrayList<>(theMappers.values().size());
    for (final Entry<AbstractSAMLAttributeMapper<InType, OutType>, Collection<String>> entry : theMappers
            .asMap().entrySet()) {
        final AbstractSAMLAttributeMapper<InType, OutType> mapper = entry.getKey();
        mapper.setAttributeIds(new ArrayList<>(entry.getValue()));
        mappers.add(mapper);
    }
}
From source file:net.shibboleth.idp.saml.attribute.mapping.AbstractSAMLAttributeDesignatorsMapper.java
/**
 * Constructor to create the mapping from an existing resolver.
 *
 * <p>This code inverts the {@link AttributeEncoder} (internal attribute -> SAML Attributes) into
 * {@link AttributeMapper} (SAML [AttributeDesignators] -> internal [Requested] Attributes) <br/>
 * to generate the {@link AbstractSAMLAttributeDesignatorMapper} (with no
 * {@link AbstractSAMLAttributeDesignatorMapper#getAttributeIds()}). These are accumulated into a {@link Multimap},
 * where the key is the {@link AbstractSAMLAttributeDesignatorMapper} and the values are the (IdP) attribute names.
 * The collection of {@link AttributeMapper}s can then be extracted from the map, and the appropriate internal
 * names added (these being the values of the {@link Multimap}).</p>
 *
 * @param resolver the resolver
 * @param id the ID
 * @param mapperFactory factory to generate new mappers of the correct type
 */
public AbstractSAMLAttributeDesignatorsMapper(@Nonnull final AttributeResolver resolver,
        @Nonnull @NotEmpty final String id,
        @Nonnull final Supplier<AbstractSAMLAttributeDesignatorMapper<OutType>> mapperFactory) {

    setId(id);
    mappers = Collections.emptyList();

    final Multimap<AbstractSAMLAttributeDesignatorMapper<OutType>, String> theMappers = HashMultimap.create();

    for (final AttributeDefinition attributeDef : resolver.getAttributeDefinitions().values()) {
        for (final AttributeEncoder encoder : attributeDef.getAttributeEncoders()) {
            if (encoder instanceof AttributeDesignatorMapperProcessor) {
                // There is an appropriate reverse mapper.
                final AttributeDesignatorMapperProcessor factory = (AttributeDesignatorMapperProcessor) encoder;
                final AbstractSAMLAttributeDesignatorMapper<OutType> mapper = mapperFactory.get();
                factory.populateAttributeMapper(mapper);
                theMappers.put(mapper, attributeDef.getId());
            }
        }
    }

    mappers = new ArrayList<>(theMappers.values().size());
    for (final Entry<AbstractSAMLAttributeDesignatorMapper<OutType>, Collection<String>> entry : theMappers
            .asMap().entrySet()) {
        final AbstractSAMLAttributeDesignatorMapper<OutType> mapper = entry.getKey();
        mapper.setAttributeIds(new ArrayList<>(entry.getValue()));
        mappers.add(mapper);
    }
}
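Both SAML mapper constructors above follow the same accumulate-then-assign pattern described in their Javadoc: collect attribute ids under each mapper key, then use asMap() to hand every mapper its complete list of ids in one call. A simplified, hypothetical sketch of that pattern (the Mapper class here is a stand-in, not the IdP type):

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

public class InvertMappingDemo {
    // Stand-in for the real mapper type: it just records the ids assigned to it.
    static class Mapper {
        List<String> attributeIds;
        void setAttributeIds(List<String> ids) { this.attributeIds = ids; }
    }

    public static void main(String[] args) {
        Multimap<Mapper, String> theMappers = HashMultimap.create();
        Mapper m = new Mapper();
        theMappers.put(m, "eduPersonAffiliation");
        theMappers.put(m, "eduPersonEntitlement");
        // One setAttributeIds call per mapper, with all of its accumulated ids.
        for (Map.Entry<Mapper, Collection<String>> entry : theMappers.asMap().entrySet()) {
            entry.getKey().setAttributeIds(new ArrayList<>(entry.getValue()));
        }
        System.out.println(m.attributeIds);
    }
}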
From source file:org.apache.calcite.rel.logical.LogicalWindow.java
/**
 * Creates a LogicalWindow by parsing a {@link RexProgram}.
 */
public static RelNode create(RelOptCluster cluster, RelTraitSet traitSet, RelBuilder relBuilder, RelNode child,
        final RexProgram program) {
    final RelDataType outRowType = program.getOutputRowType();

    // Build a list of distinct groups, partitions and aggregate functions.
    final Multimap<WindowKey, RexOver> windowMap = LinkedListMultimap.create();

    final int inputFieldCount = child.getRowType().getFieldCount();

    final Map<RexLiteral, RexInputRef> constantPool = new HashMap<>();
    final List<RexLiteral> constants = new ArrayList<>();

    // Identify constants in the expression tree and replace them with
    // references to newly generated constant pool.
    RexShuttle replaceConstants = new RexShuttle() {
        @Override
        public RexNode visitLiteral(RexLiteral literal) {
            RexInputRef ref = constantPool.get(literal);
            if (ref != null) {
                return ref;
            }
            constants.add(literal);
            ref = new RexInputRef(constantPool.size() + inputFieldCount, literal.getType());
            constantPool.put(literal, ref);
            return ref;
        }
    };

    // Build a list of groups, partitions, and aggregate functions. Each
    // aggregate function will add its arguments as outputs of the input
    // program.
    final Map<RexOver, RexOver> origToNewOver = new IdentityHashMap<>();
    for (RexNode agg : program.getExprList()) {
        if (agg instanceof RexOver) {
            final RexOver origOver = (RexOver) agg;
            final RexOver newOver = (RexOver) origOver.accept(replaceConstants);
            origToNewOver.put(origOver, newOver);
            addWindows(windowMap, newOver, inputFieldCount);
        }
    }

    final Map<RexOver, Window.RexWinAggCall> aggMap = new HashMap<>();
    List<Group> groups = new ArrayList<>();
    for (Map.Entry<WindowKey, Collection<RexOver>> entry : windowMap.asMap().entrySet()) {
        final WindowKey windowKey = entry.getKey();
        final List<RexWinAggCall> aggCalls = new ArrayList<>();
        for (RexOver over : entry.getValue()) {
            final RexWinAggCall aggCall = new RexWinAggCall(over.getAggOperator(), over.getType(),
                    toInputRefs(over.operands), aggMap.size(), over.isDistinct());
            aggCalls.add(aggCall);
            aggMap.put(over, aggCall);
        }
        RexShuttle toInputRefs = new RexShuttle() {
            @Override
            public RexNode visitLocalRef(RexLocalRef localRef) {
                return new RexInputRef(localRef.getIndex(), localRef.getType());
            }
        };
        groups.add(new Group(windowKey.groupSet, windowKey.isRows, windowKey.lowerBound.accept(toInputRefs),
                windowKey.upperBound.accept(toInputRefs), windowKey.orderKeys, aggCalls));
    }

    // Figure out the type of the inputs to the output program.
    // They are: the inputs to this rel, followed by the outputs of
    // each window.
    final List<Window.RexWinAggCall> flattenedAggCallList = new ArrayList<>();
    final List<Map.Entry<String, RelDataType>> fieldList =
            new ArrayList<Map.Entry<String, RelDataType>>(child.getRowType().getFieldList());
    final int offset = fieldList.size();

    // Use better field names for agg calls that are projected.
    final Map<Integer, String> fieldNames = new HashMap<>();
    for (Ord<RexLocalRef> ref : Ord.zip(program.getProjectList())) {
        final int index = ref.e.getIndex();
        if (index >= offset) {
            fieldNames.put(index - offset, outRowType.getFieldNames().get(ref.i));
        }
    }

    for (Ord<Group> window : Ord.zip(groups)) {
        for (Ord<RexWinAggCall> over : Ord.zip(window.e.aggCalls)) {
            // Add the k-th over expression of the i-th window to the
            // output of the program.
            String name = fieldNames.get(over.i);
            if (name == null || name.startsWith("$")) {
                name = "w" + window.i + "$o" + over.i;
            }
            fieldList.add(Pair.of(name, over.e.getType()));
            flattenedAggCallList.add(over.e);
        }
    }
    final RelDataType intermediateRowType = cluster.getTypeFactory().createStructType(fieldList);

    // The output program is the windowed agg's program, combined with
    // the output calc (if it exists).
    RexShuttle shuttle = new RexShuttle() {
        public RexNode visitOver(RexOver over) {
            // Look up the aggCall which this expr was translated to.
            final Window.RexWinAggCall aggCall = aggMap.get(origToNewOver.get(over));
            assert aggCall != null;
            assert RelOptUtil.eq("over", over.getType(), "aggCall", aggCall.getType(), Litmus.THROW);

            // Find the index of the aggCall among all partitions of all groups.
            final int aggCallIndex = flattenedAggCallList.indexOf(aggCall);
            assert aggCallIndex >= 0;

            // Replace expression with a reference to the window slot.
            final int index = inputFieldCount + aggCallIndex;
            assert RelOptUtil.eq("over", over.getType(), "intermed",
                    intermediateRowType.getFieldList().get(index).getType(), Litmus.THROW);
            return new RexInputRef(index, over.getType());
        }

        public RexNode visitLocalRef(RexLocalRef localRef) {
            final int index = localRef.getIndex();
            if (index < inputFieldCount) {
                // Reference to input field.
                return localRef;
            }
            return new RexLocalRef(flattenedAggCallList.size() + index, localRef.getType());
        }
    };

    final LogicalWindow window = LogicalWindow.create(traitSet, child, constants, intermediateRowType, groups);

    // The order that the "over" calls occur in the groups and
    // partitions may not match the order in which they occurred in the
    // original expression. Add a project to permute them.
    final List<RexNode> rexNodesWindow = new ArrayList<>();
    for (RexNode rexNode : program.getExprList()) {
        rexNodesWindow.add(rexNode.accept(shuttle));
    }
    final List<RexNode> refToWindow = toInputRefs(rexNodesWindow);

    final List<RexNode> projectList = new ArrayList<>();
    for (RexLocalRef inputRef : program.getProjectList()) {
        final int index = inputRef.getIndex();
        final RexInputRef ref = (RexInputRef) refToWindow.get(index);
        projectList.add(ref);
    }

    return relBuilder.push(window).project(projectList, outRowType.getFieldNames()).build();
}
From source file:ai.grakn.graql.internal.reasoner.atom.binary.Relation.java
/**
 * Infer relation types that this relation atom can potentially have.
 * NB: entity types and role types are treated separately as they behave differently:
 * entity types only play the explicitly defined roles (not the relevant part of the hierarchy of the specified role)
 * @return list of relation types this atom can have, ordered by the number of compatible role types
 */
public List<RelationType> inferPossibleRelationTypes(Answer sub) {
    // look at available role types
    Multimap<RelationType, RoleType> compatibleTypesFromRoles =
            getCompatibleRelationTypesWithRoles(getExplicitRoleTypes(), new RoleTypeConverter());

    // look at entity types
    Map<Var, Type> varTypeMap = getParentQuery().getVarTypeMap();

    // explicit types
    Set<Type> types = getRolePlayers().stream()
            .filter(varTypeMap::containsKey)
            .map(varTypeMap::get)
            .collect(toSet());

    // types deduced from substitution
    inferEntityTypes(sub).forEach(types::add);

    Multimap<RelationType, RoleType> compatibleTypesFromTypes =
            getCompatibleRelationTypesWithRoles(types, new TypeConverterImpl());

    Multimap<RelationType, RoleType> compatibleTypes;
    // intersect relation types from roles and types
    if (compatibleTypesFromRoles.isEmpty()) {
        compatibleTypes = compatibleTypesFromTypes;
    } else if (!compatibleTypesFromTypes.isEmpty()) {
        compatibleTypes = multimapIntersection(compatibleTypesFromTypes, compatibleTypesFromRoles);
    } else {
        compatibleTypes = compatibleTypesFromRoles;
    }

    return compatibleTypes.asMap().entrySet().stream()
            .sorted(Comparator.comparing(e -> -e.getValue().size()))
            .map(Map.Entry::getKey)
            .filter(t -> Sets.intersection(getSuperTypes(t), compatibleTypes.keySet()).isEmpty())
            .collect(Collectors.toList());
}
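The return statement above also shows a handy ranking idiom: sort asMap() entries by the size of each key's value collection. A minimal, self-contained sketch with illustrative data (RankKeysDemo is not part of the Grakn codebase):

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class RankKeysDemo {
    public static void main(String[] args) {
        Multimap<String, String> rolesByType = HashMultimap.create();
        rolesByType.put("marriage", "spouse");
        rolesByType.put("employment", "employer");
        rolesByType.put("employment", "employee");
        // Keys ordered by descending number of associated values.
        List<String> ranked = rolesByType.asMap().entrySet().stream()
                .sorted(Comparator.comparing(e -> -e.getValue().size()))
                .map(Map.Entry::getKey)
                .collect(Collectors.toList());
        System.out.println(ranked); // [employment, marriage]
    }
}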
From source file:org.apache.calcite.materialize.LatticeSuggester.java
/**
 * Adds a query.
 *
 * <p>It may fit within an existing lattice (or lattices). Or it may need a
 * new lattice, or an extension to an existing lattice.
 *
 * @param r Relational expression for a query
 *
 * @return A list of join graphs: usually 1; more if the query contains a
 * cartesian product; zero if the query graph is cyclic
 */
public List<Lattice> addQuery(RelNode r) {
    // Push filters into joins and towards leaves
    final HepPlanner planner = new HepPlanner(PROGRAM, null, true, null, RelOptCostImpl.FACTORY);
    planner.setRoot(r);
    final RelNode r2 = planner.findBestExp();

    final Query q = new Query(space);
    final Frame frame = frame(q, r2);
    if (frame == null) {
        return ImmutableList.of();
    }
    final AttributedDirectedGraph<TableRef, StepRef> g =
            AttributedDirectedGraph.create(new StepRef.Factory());
    final Multimap<Pair<TableRef, TableRef>, IntPair> map = LinkedListMultimap.create();
    for (TableRef tableRef : frame.tableRefs) {
        g.addVertex(tableRef);
    }
    for (Hop hop : frame.hops) {
        map.put(Pair.of(hop.source.t, hop.target.t), IntPair.of(hop.source.c, hop.target.c));
    }
    for (Map.Entry<Pair<TableRef, TableRef>, Collection<IntPair>> e : map.asMap().entrySet()) {
        final TableRef source = e.getKey().left;
        final TableRef target = e.getKey().right;
        final StepRef stepRef = q.stepRef(source, target, ImmutableList.copyOf(e.getValue()));
        g.addVertex(stepRef.source());
        g.addVertex(stepRef.target());
        g.addEdge(stepRef.source(), stepRef.target(), stepRef.step, stepRef.ordinalInQuery);
    }

    // If the join graph is cyclic, we can't use it.
    final Set<TableRef> cycles = new CycleDetector<>(g).findCycles();
    if (!cycles.isEmpty()) {
        return ImmutableList.of();
    }

    // Translate the query graph to mutable nodes
    final Map<TableRef, MutableNode> nodes = new IdentityHashMap<>();
    final Map<List, MutableNode> nodesByParent = new HashMap<>();
    final List<MutableNode> rootNodes = new ArrayList<>();
    for (TableRef tableRef : TopologicalOrderIterator.of(g)) {
        final List<StepRef> edges = g.getInwardEdges(tableRef);
        final MutableNode node;
        switch (edges.size()) {
        case 0:
            node = new MutableNode(tableRef.table);
            rootNodes.add(node);
            break;
        case 1:
            final StepRef edge = edges.get(0);
            final MutableNode parent = nodes.get(edge.source());
            final List key = ImmutableList.of(parent, tableRef.table, edge.step.keys);
            final MutableNode existingNode = nodesByParent.get(key);
            if (existingNode == null) {
                node = new MutableNode(tableRef.table, parent, edge.step);
                nodesByParent.put(key, node);
            } else {
                node = existingNode;
            }
            break;
        default:
            for (StepRef edge2 : edges) {
                final MutableNode parent2 = nodes.get(edge2.source());
                final MutableNode node2 = new MutableNode(tableRef.table, parent2, edge2.step);
                parent2.children.add(node2);
            }
            node = null;
            break;
        }
        nodes.put(tableRef, node);
    }

    // Transcribe the hierarchy of mutable nodes to immutable nodes
    final List<Lattice> lattices = new ArrayList<>();
    for (MutableNode rootNode : rootNodes) {
        if (rootNode.isCyclic()) {
            continue;
        }
        final CalciteSchema rootSchema = CalciteSchema.createRootSchema(false);
        final Lattice.Builder latticeBuilder = new Lattice.Builder(space, rootSchema, rootNode);

        final List<MutableNode> flatNodes = new ArrayList<>();
        rootNode.flatten(flatNodes);

        for (MutableMeasure measure : frame.measures) {
            for (ColRef arg : measure.arguments) {
                if (arg == null) {
                    // Cannot handle expressions, e.g. "sum(x + 1)" yet
                    return ImmutableList.of();
                }
            }
            latticeBuilder.addMeasure(new Lattice.Measure(measure.aggregate, measure.distinct, measure.name,
                    Lists.transform(measure.arguments, colRef -> {
                        if (colRef instanceof BaseColRef) {
                            final BaseColRef baseColRef = (BaseColRef) colRef;
                            final MutableNode node = nodes.get(baseColRef.t);
                            final int table = flatNodes.indexOf(node);
                            return latticeBuilder.column(table, baseColRef.c);
                        } else if (colRef instanceof DerivedColRef) {
                            final DerivedColRef derivedColRef = (DerivedColRef) colRef;
                            final String alias = deriveAlias(measure, derivedColRef);
                            return latticeBuilder.expression(derivedColRef.e, alias,
                                    derivedColRef.tableAliases());
                        } else {
                            throw new AssertionError("expression in measure");
                        }
                    })));
        }
        for (int i = 0; i < frame.columnCount; i++) {
            final ColRef c = frame.column(i);
            if (c instanceof DerivedColRef) {
                final DerivedColRef derivedColRef = (DerivedColRef) c;
                final Lattice.Column expression = latticeBuilder.expression(derivedColRef.e,
                        derivedColRef.alias, derivedColRef.tableAliases());
            }
        }
        final Lattice lattice0 = latticeBuilder.build();
        final Lattice lattice1 = findMatch(lattice0, rootNode);
        lattices.add(lattice1);
    }
    return ImmutableList.copyOf(lattices);
}
From source file:com.google.caliper.ConsoleReport.java
ConsoleReport(Run run, Arguments arguments) {
    this.run = run;
    unitMap.put(MeasurementType.TIME, arguments.getTimeUnit());
    unitMap.put(MeasurementType.INSTANCE, arguments.getInstanceUnit());
    unitMap.put(MeasurementType.MEMORY, arguments.getMemoryUnit());

    if (arguments.getMeasureMemory()) {
        orderedMeasurementTypes = Arrays.asList(MeasurementType.TIME, MeasurementType.INSTANCE,
                MeasurementType.MEMORY);
    } else {
        orderedMeasurementTypes = Arrays.asList(MeasurementType.TIME);
    }

    if (arguments.getPrimaryMeasurementType() != null) {
        this.type = arguments.getPrimaryMeasurementType();
    } else {
        this.type = MeasurementType.TIME;
    }

    double min = Double.POSITIVE_INFINITY;
    double max = 0;

    Multimap<String, String> nameToValues = LinkedHashMultimap.create();
    List<Variable> variablesBuilder = new ArrayList<Variable>();
    for (Entry<Scenario, ScenarioResult> entry : this.run.getMeasurements().entrySet()) {
        Scenario scenario = entry.getKey();
        double d = entry.getValue().getMeasurementSet(type).medianUnits();

        min = Math.min(min, d);
        max = Math.max(max, d);

        for (Entry<String, String> variable : scenario.getVariables().entrySet()) {
            String name = variable.getKey();
            nameToValues.put(name, variable.getValue());
        }
    }

    for (Entry<String, Collection<String>> entry : nameToValues.asMap().entrySet()) {
        Variable variable = new Variable(entry.getKey(), entry.getValue());
        variablesBuilder.add(variable);
    }

    /*
     * Figure out how much influence each variable has on the measured value.
     * We sum the measurements taken with each value of each variable. For
     * variables that have influence on the measurement, the sums will differ
     * by value. If the variable has little influence, the sums will be similar
     * to one another and close to the overall average. We take the standard
     * deviation across each variable's collection of sums. Higher standard
     * deviation implies higher influence on the measured result.
     */
    double sumOfAllMeasurements = 0;
    for (ScenarioResult measurement : this.run.getMeasurements().values()) {
        sumOfAllMeasurements += measurement.getMeasurementSet(type).medianUnits();
    }
    for (Variable variable : variablesBuilder) {
        int numValues = variable.values.size();
        double[] sumForValue = new double[numValues];
        for (Entry<Scenario, ScenarioResult> entry : this.run.getMeasurements().entrySet()) {
            Scenario scenario = entry.getKey();
            sumForValue[variable.index(scenario)] += entry.getValue().getMeasurementSet(type).medianUnits();
        }
        double mean = sumOfAllMeasurements / sumForValue.length;
        double stdDeviationSquared = 0;
        for (double value : sumForValue) {
            double distance = value - mean;
            stdDeviationSquared += distance * distance;
        }
        variable.stdDeviation = Math.sqrt(stdDeviationSquared / numValues);
    }

    this.variables = new StandardDeviationOrdering().reverse().sortedCopy(variablesBuilder);
    this.scenarios = new ByVariablesOrdering().sortedCopy(this.run.getMeasurements().keySet());
    this.maxValue = max;
    this.logMinValue = Math.log(min);
    this.logMaxValue = Math.log(max);

    EnumMap<MeasurementType, Integer> digitsBeforeDecimalMap =
            new EnumMap<MeasurementType, Integer>(MeasurementType.class);
    EnumMap<MeasurementType, Integer> decimalPointMap =
            new EnumMap<MeasurementType, Integer>(MeasurementType.class);
    for (MeasurementType measurementType : orderedMeasurementTypes) {
        double maxForType = 0;
        double minForType = Double.POSITIVE_INFINITY;
        for (Entry<Scenario, ScenarioResult> entry : this.run.getMeasurements().entrySet()) {
            double d = entry.getValue().getMeasurementSet(measurementType).medianUnits();
            minForType = Math.min(minForType, d);
            maxForType = Math.max(maxForType, d);
        }

        unitMap.put(measurementType, getUnit(unitMap.get(measurementType), measurementType, minForType));
        divideByMap.put(measurementType,
                (double) getUnits(measurementType).get(unitMap.get(measurementType)));
        int numDigitsInMin = ceil(Math.log10(minForType));
        decimalDigitsMap.put(measurementType, ceil(
                Math.max(0, ceil(Math.log10(divideByMap.get(measurementType))) + 3 - numDigitsInMin)));
        digitsBeforeDecimalMap.put(measurementType,
                Math.max(1, ceil(Math.log10(maxForType / divideByMap.get(measurementType)))));
        decimalPointMap.put(measurementType, decimalDigitsMap.get(measurementType) > 0 ? 1 : 0);
        measurementColumnLengthMap.put(measurementType, Math.max(
                maxForType > 0
                        ? digitsBeforeDecimalMap.get(measurementType) + decimalPointMap.get(measurementType)
                                + decimalDigitsMap.get(measurementType)
                        : 1,
                unitMap.get(measurementType).trim().length()));
    }

    this.printScore = arguments.printScore();
}