List of usage examples for java.util.Stack.peek()
public synchronized E peek()
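peek() returns the object at the top of the stack without removing it, and throws an EmptyStackException when the stack is empty. Before the real-world examples below, here is a minimal, self-contained sketch of that behavior (the class name StackPeekExample and the pushed values are illustrative only):

import java.util.EmptyStackException;
import java.util.Stack;

public class StackPeekExample {
  public static void main(String[] args) {
    Stack<String> stack = new Stack<>();
    stack.push("first");
    stack.push("second");

    // peek() returns the top element without removing it
    System.out.println(stack.peek());   // prints "second"
    System.out.println(stack.size());   // still 2

    // pop() removes the element that peek() just showed
    System.out.println(stack.pop());    // prints "second"

    stack.clear();
    try {
      stack.peek();                     // peeking an empty stack throws
    } catch (EmptyStackException e) {
      System.out.println("stack is empty");
    }
  }
}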
From source file:org.apache.tajo.plan.rewrite.rules.ProjectionPushDownRule.java
public LogicalNode visitJoin(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, JoinNode node,
    Stack<LogicalNode> stack) throws TajoException {
  Context newContext = new Context(context);

  String joinQualReference = null;
  if (node.hasJoinQual()) {
    for (EvalNode eachQual : AlgebraicUtil.toConjunctiveNormalFormArray(node.getJoinQual())) {
      if (eachQual instanceof BinaryEval) {
        BinaryEval binaryQual = (BinaryEval) eachQual;
        for (int i = 0; i < 2; i++) {
          EvalNode term = binaryQual.getChild(i);
          pushDownIfComplexTermInJoinCondition(newContext, eachQual, term);
        }
      }
    }

    joinQualReference = newContext.addExpr(node.getJoinQual());
    newContext.addNecessaryReferences(node.getJoinQual());
  }

  String[] referenceNames = null;
  if (node.hasTargets()) {
    referenceNames = new String[node.getTargets().size()];
    int i = 0;
    for (Iterator<Target> it = getFilteredTarget(node.getTargets(), context.requiredSet); it.hasNext();) {
      Target target = it.next();
      referenceNames[i++] = newContext.addExpr(target);
    }
  }

  stack.push(node);
  LogicalNode left = visit(newContext, plan, block, node.getLeftChild(), stack);
  LogicalNode right = visit(newContext, plan, block, node.getRightChild(), stack);
  stack.pop();

  Schema merged = SchemaUtil.merge(left.getOutSchema(), right.getOutSchema());
  node.setInSchema(merged);

  if (node.hasJoinQual()) {
    Target target = context.targetListMgr.getTarget(joinQualReference);
    if (newContext.targetListMgr.isEvaluated(joinQualReference)) {
      throw new TajoInternalError(
          "Join condition must be evaluated in the proper Join Node: " + joinQualReference);
    } else {
      node.setJoinQual(target.getEvalTree());
      newContext.targetListMgr.markAsEvaluated(target);
    }
  }

  LinkedHashSet<Target> projectedTargets = Sets.newLinkedHashSet();
  for (Iterator<String> it = getFilteredReferences(context.targetListMgr.getNames(), context.requiredSet);
       it.hasNext();) {
    String referenceName = it.next();
    Target target = context.targetListMgr.getTarget(referenceName);

    if (context.targetListMgr.isEvaluated(referenceName)) {
      Target fieldReference = new Target(new FieldEval(target.getNamedColumn()));
      // here, we assume that every expr is specified in the ON clause,
      // because all filters have been moved to the appropriate logical nodes during the filter push-down phase
      if (LogicalPlanner.checkIfBeEvaluatedAtJoin(block, fieldReference.getEvalTree(), node,
          stack.peek().getType() != NodeType.JOIN)) {
        projectedTargets.add(fieldReference);
      }
    } else if (LogicalPlanner.checkIfBeEvaluatedAtJoin(block, target.getEvalTree(), node,
        stack.peek().getType() != NodeType.JOIN)) {
      projectedTargets.add(target);
      context.targetListMgr.markAsEvaluated(target);
    }
  }

  node.setTargets(new ArrayList<>(projectedTargets));
  LogicalPlanner.verifyProjectedFields(block, node);
  return node;
}
From source file:org.apache.tajo.plan.LogicalPlanner.java
@Override
public LogicalNode visitJoin(PlanContext context, Stack<Expr> stack, Join join) throws TajoException {
  // Phase 1: Init
  LogicalPlan plan = context.plan;
  QueryBlock block = context.queryBlock;

  if (join.hasQual()) {
    ExprNormalizedResult normalizedResult = normalizer.normalize(context, join.getQual(), true);
    block.namedExprsMgr.addExpr(normalizedResult.baseExpr);
    if (normalizedResult.aggExprs.size() > 0 || normalizedResult.scalarExprs.size() > 0) {
      throw makeSyntaxError("Filter condition cannot include aggregation function");
    }
  }

  ////////////////////////////////////////////////////////
  // Visit and Build Child Plan
  ////////////////////////////////////////////////////////
  stack.push(join);
  LogicalNode left = visit(context, stack, join.getLeft());
  LogicalNode right = visit(context, stack, join.getRight());
  stack.pop();
  ////////////////////////////////////////////////////////

  JoinNode joinNode = context.queryBlock.getNodeFromExpr(join);
  joinNode.setJoinType(join.getJoinType());
  joinNode.setLeftChild(left);
  joinNode.setRightChild(right);

  // Set a merged input schema
  Schema merged;
  if (join.isNatural()) {
    merged = getNaturalJoinSchema(left, right);
  } else {
    merged = SchemaUtil.merge(left.getOutSchema(), right.getOutSchema());
  }
  joinNode.setInSchema(merged);

  // Create EvalNode for a search condition.
  EvalNode joinCondition = null;
  if (join.hasQual()) {
    EvalNode evalNode = exprAnnotator.createEvalNode(context, join.getQual(), NameResolvingMode.LEGACY);
    joinCondition = context.evalOptimizer.optimize(context, evalNode);
  }

  // If the query involves a subquery, the stack can be empty.
  // In this case, this join is the top most one within a query block.
  boolean isTopMostJoin = stack.isEmpty() ? true : stack.peek().getType() != OpType.Join;
  List<String> newlyEvaluatedExprs = getNewlyEvaluatedExprsForJoin(context, joinNode, isTopMostJoin);
  List<Target> targets = TUtil.newList(PlannerUtil.schemaToTargets(merged));

  for (String newAddedExpr : newlyEvaluatedExprs) {
    targets.add(block.namedExprsMgr.getTarget(newAddedExpr, true));
  }
  joinNode.setTargets(targets.toArray(new Target[targets.size()]));

  // Determine join conditions
  if (join.isNatural()) {
    // if natural join, it should have the equi-join conditions by common column names
    EvalNode njCond = getNaturalJoinCondition(joinNode);
    joinNode.setJoinQual(njCond);
  } else if (join.hasQual()) {
    // otherwise, the given join conditions are set
    joinNode.setJoinQual(joinCondition);
  }

  return joinNode;
}
From source file:org.apache.tajo.engine.planner.rewrite.FilterPushDownRule.java
@Override
public LogicalNode visitJoin(FilterPushDownContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
    JoinNode joinNode, Stack<LogicalNode> stack) throws PlanningException {
  // here we should stop selection pushdown on the null supplying side(s) of an outer join
  // get the two operands of the join operation as well as the join type
  JoinType joinType = joinNode.getJoinType();
  EvalNode joinQual = joinNode.getJoinQual();
  if (joinQual != null && LogicalPlanner.isOuterJoin(joinType)) {
    BinaryEval binaryEval = (BinaryEval) joinQual;
    // if both are fields
    if (binaryEval.getLeftExpr().getType() == EvalType.FIELD
        && binaryEval.getRightExpr().getType() == EvalType.FIELD) {
      String leftTableName = ((FieldEval) binaryEval.getLeftExpr()).getQualifier();
      String rightTableName = ((FieldEval) binaryEval.getRightExpr()).getQualifier();
      List<String> nullSuppliers = Lists.newArrayList();
      Set<String> leftTableSet = Sets
          .newHashSet(PlannerUtil.getRelationLineageWithinQueryBlock(plan, joinNode.getLeftChild()));
      Set<String> rightTableSet = Sets
          .newHashSet(PlannerUtil.getRelationLineageWithinQueryBlock(plan, joinNode.getRightChild()));

      // some verification
      if (joinType == JoinType.FULL_OUTER) {
        nullSuppliers.add(leftTableName);
        nullSuppliers.add(rightTableName);

        // verify that these null suppliers are indeed in the left and right sets
        if (!rightTableSet.contains(nullSuppliers.get(0)) && !leftTableSet.contains(nullSuppliers.get(0))) {
          throw new InvalidQueryException("Incorrect Logical Query Plan with regard to outer join");
        }
        if (!rightTableSet.contains(nullSuppliers.get(1)) && !leftTableSet.contains(nullSuppliers.get(1))) {
          throw new InvalidQueryException("Incorrect Logical Query Plan with regard to outer join");
        }
      } else if (joinType == JoinType.LEFT_OUTER) {
        nullSuppliers.add(((RelationNode) joinNode.getRightChild()).getCanonicalName());
        // verify that this null supplier is indeed in the right sub-tree
        if (!rightTableSet.contains(nullSuppliers.get(0))) {
          throw new InvalidQueryException("Incorrect Logical Query Plan with regard to outer join");
        }
      } else if (joinType == JoinType.RIGHT_OUTER) {
        if (((RelationNode) joinNode.getRightChild()).getCanonicalName().equals(rightTableName)) {
          nullSuppliers.add(leftTableName);
        } else {
          nullSuppliers.add(rightTableName);
        }
        // verify that this null supplier is indeed in the left sub-tree
        if (!leftTableSet.contains(nullSuppliers.get(0))) {
          throw new InvalidQueryException("Incorrect Logical Query Plan with regard to outer join");
        }
      }
    }
  }

  // get evals from ON clause
  List<EvalNode> onConditions = new ArrayList<EvalNode>();
  if (joinNode.hasJoinQual()) {
    onConditions.addAll(Sets.newHashSet(AlgebraicUtil.toConjunctiveNormalFormArray(joinNode.getJoinQual())));
  }

  boolean isTopMostJoin = stack.peek().getType() != NodeType.JOIN;

  List<EvalNode> outerJoinPredicationEvals = new ArrayList<EvalNode>();
  List<EvalNode> outerJoinFilterEvalsExcludePredication = new ArrayList<EvalNode>();
  if (LogicalPlanner.isOuterJoin(joinNode.getJoinType())) {
    // TAJO-853
    // In the case of the top-most JOIN, all filters except the JOIN condition are not pushed down.
    // Those filters are processed by the SELECTION node.
    Set<String> nullSupplyingTableNameSet;
    if (joinNode.getJoinType() == JoinType.RIGHT_OUTER) {
      nullSupplyingTableNameSet = TUtil.newHashSet(PlannerUtil.getRelationLineage(joinNode.getLeftChild()));
    } else {
      nullSupplyingTableNameSet = TUtil.newHashSet(PlannerUtil.getRelationLineage(joinNode.getRightChild()));
    }

    Set<String> preservedTableNameSet;
    if (joinNode.getJoinType() == JoinType.RIGHT_OUTER) {
      preservedTableNameSet = TUtil.newHashSet(PlannerUtil.getRelationLineage(joinNode.getRightChild()));
    } else {
      preservedTableNameSet = TUtil.newHashSet(PlannerUtil.getRelationLineage(joinNode.getLeftChild()));
    }

    List<EvalNode> removedFromFilter = new ArrayList<EvalNode>();
    for (EvalNode eachEval : context.pushingDownFilters) {
      if (EvalTreeUtil.isJoinQual(block, eachEval, true)) {
        outerJoinPredicationEvals.add(eachEval);
        removedFromFilter.add(eachEval);
      } else {
        Set<Column> columns = EvalTreeUtil.findUniqueColumns(eachEval);
        boolean canPushDown = true;
        for (Column eachColumn : columns) {
          if (nullSupplyingTableNameSet.contains(eachColumn.getQualifier())) {
            canPushDown = false;
            break;
          }
        }
        if (!canPushDown) {
          outerJoinFilterEvalsExcludePredication.add(eachEval);
          removedFromFilter.add(eachEval);
        }
      }
    }

    context.pushingDownFilters.removeAll(removedFromFilter);

    for (EvalNode eachOnEval : onConditions) {
      if (EvalTreeUtil.isJoinQual(eachOnEval, true)) {
        // If join condition, processing in the JoinNode.
        outerJoinPredicationEvals.add(eachOnEval);
      } else {
        // If the eval references a column of a preserved-row table, it is not pushed down
        // but kept as part of the join condition.
        Set<Column> columns = EvalTreeUtil.findUniqueColumns(eachOnEval);
        boolean canPushDown = true;
        for (Column eachColumn : columns) {
          if (preservedTableNameSet.contains(eachColumn.getQualifier())) {
            canPushDown = false;
            break;
          }
        }
        if (canPushDown) {
          context.pushingDownFilters.add(eachOnEval);
        } else {
          outerJoinPredicationEvals.add(eachOnEval);
        }
      }
    }
  } else {
    context.pushingDownFilters.addAll(onConditions);
  }

  LogicalNode left = joinNode.getLeftChild();
  LogicalNode right = joinNode.getRightChild();

  List<EvalNode> notMatched = new ArrayList<EvalNode>();
  // Join's input schema = right child output columns + left child output columns
  Map<EvalNode, EvalNode> transformedMap = findCanPushdownAndTransform(context, block, joinNode, left,
      notMatched, null, true, 0);
  context.setFiltersTobePushed(transformedMap.keySet());
  visit(context, plan, block, left, stack);

  context.setToOrigin(transformedMap);
  context.addFiltersTobePushed(notMatched);

  notMatched.clear();
  transformedMap = findCanPushdownAndTransform(context, block, joinNode, right, notMatched, null, true,
      left.getOutSchema().size());
  context.setFiltersTobePushed(new HashSet<EvalNode>(transformedMap.keySet()));
  visit(context, plan, block, right, stack);

  context.setToOrigin(transformedMap);
  context.addFiltersTobePushed(notMatched);
  notMatched.clear();

  List<EvalNode> matched = Lists.newArrayList();
  if (LogicalPlanner.isOuterJoin(joinNode.getJoinType())) {
    matched.addAll(outerJoinPredicationEvals);
  } else {
    for (EvalNode eval : context.pushingDownFilters) {
      if (LogicalPlanner.checkIfBeEvaluatedAtJoin(block, eval, joinNode, isTopMostJoin)) {
        matched.add(eval);
      }
    }
  }

  EvalNode qual = null;
  if (matched.size() > 1) {
    // merged into one eval tree
    qual = AlgebraicUtil.createSingletonExprFromCNF(matched.toArray(new EvalNode[matched.size()]));
  } else if (matched.size() == 1) {
    // if the number of matched expr is one
    qual = matched.get(0);
  }

  if (qual != null) {
    joinNode.setJoinQual(qual);
    if (joinNode.getJoinType() == JoinType.CROSS) {
      joinNode.setJoinType(JoinType.INNER);
    }
    context.pushingDownFilters.removeAll(matched);
  }

  context.pushingDownFilters.addAll(outerJoinFilterEvalsExcludePredication);
  return joinNode;
}
From source file:org.sakaiproject.message.impl.BaseMessageService.java
/**
 * {@inheritDoc}
 */
public String archive(String siteId, Document doc, Stack stack, String archivePath, List attachments) {
  // prepare the buffer for the results log
  StringBuilder results = new StringBuilder();

  // start with an element with our very own (service) name
  Element element = doc.createElement(serviceName());
  ((Element) stack.peek()).appendChild(element);
  stack.push(element);

  // get the channel associated with this site
  String channelRef = channelReference(siteId, SiteService.MAIN_CONTAINER);

  results.append("archiving " + getLabel() + " channel " + channelRef + ".\n");

  try {
    // do the channel
    MessageChannel channel = getChannel(channelRef);
    Element containerElement = channel.toXml(doc, stack);
    stack.push(containerElement);

    // do the messages in the channel
    Iterator messages = channel.getMessages(null, true).iterator();
    while (messages.hasNext()) {
      Message msg = (Message) messages.next();
      msg.toXml(doc, stack);

      // collect message attachments
      MessageHeader header = msg.getHeader();
      List atts = header.getAttachments();
      for (int i = 0; i < atts.size(); i++) {
        Reference ref = (Reference) atts.get(i);
        // if it's in the attachment area, and not already in the list
        if ((ref.getReference().startsWith("/content/attachment/")) && (!attachments.contains(ref))) {
          attachments.add(ref);
        }
      }
    }

    // archive the synoptic tool options
    archiveSynopticOptions(siteId, doc, element);

    stack.pop();
  } catch (Exception any) {
    M_log.warn("archive: exception archiving messages for service: " + serviceName() + " channel: "
        + channelRef);
  }

  stack.pop();

  return results.toString();
}
From source file:org.sakaiproject.assignment.impl.AssignmentServiceImpl.java
@Override
public String archive(String siteId, Document doc, Stack<Element> stack, String archivePath,
    List<Reference> attachments) {
  String message = "archiving " + getLabel() + " context " + Entity.SEPARATOR + siteId + Entity.SEPARATOR
      + SiteService.MAIN_CONTAINER + ".\n";
  log.debug(message);

  // start with an element with our very own (service) name
  Element element = doc.createElement(AssignmentService.class.getName());
  stack.peek().appendChild(element);
  stack.push(element);

  Collection<Assignment> assignments = getAssignmentsForContext(siteId);
  for (Assignment assignment : assignments) {
    String xml = assignmentRepository.toXML(assignment);
    try {
      InputSource in = new InputSource(new StringReader(xml));
      Document assignmentDocument = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in);
      Element assignmentElement = assignmentDocument.getDocumentElement();
      Node assignmentNode = doc.importNode(assignmentElement, true);
      element.appendChild(assignmentNode);
    } catch (Exception e) {
      log.warn("could not append assignment {} to archive, {}", assignment.getId(), e.getMessage());
    }
  }

  stack.pop();
  return message;
}
From source file:com.flexive.core.storage.genericSQL.GenericTreeStorageSpreaded.java
protected long _reorganizeSpace(Connection con, SequencerEngine seq, FxTreeMode sourceMode, FxTreeMode destMode,
    long nodeId, boolean includeNodeId, BigInteger overrideSpacing, BigInteger overrideLeft,
    FxTreeNodeInfo insertParent, int insertPosition, BigInteger insertSpace, BigInteger insertBoundaries[],
    int depthDelta, Long destinationNode, boolean createMode, boolean createKeepIds,
    boolean disableSpaceOptimization) throws FxTreeException {
  long firstCreatedNodeId = -1;
  FxTreeNodeInfoSpreaded nodeInfo;
  try {
    nodeInfo = (FxTreeNodeInfoSpreaded) getTreeNodeInfo(con, sourceMode, nodeId);
  } catch (Exception e) {
    return -1;
  }

  if (!nodeInfo.isSpaceOptimizable() && !disableSpaceOptimization) {
    // The root node can't be optimized any more ... so all we can do is fail :-/
    // This should never really happen
    if (nodeId == ROOT_NODE) {
      return -1;
    }
    //System.out.println("### UP we go, depthDelta=" + depthDelta);
    return _reorganizeSpace(con, seq, sourceMode, destMode, nodeInfo.getParentId(), includeNodeId,
        overrideSpacing, overrideLeft, insertParent, insertPosition, insertSpace, insertBoundaries,
        depthDelta, destinationNode, createMode, createKeepIds, false);
  }

  BigInteger spacing = nodeInfo.getDefaultSpacing();
  if (overrideSpacing != null && (overrideSpacing.compareTo(spacing) < 0 || overrideLeft != null)) {
    // override spacing unless it is greater OR overrideLeft is specified (in that case we
    // have to use the spacing for valid tree ranges)
    spacing = overrideSpacing;
  } else {
    if (spacing.compareTo(GO_UP) < 0 && !createMode && !disableSpaceOptimization) {
      return _reorganizeSpace(con, seq, sourceMode, destMode, nodeInfo.getParentId(), includeNodeId,
          overrideSpacing, overrideLeft, insertParent, insertPosition, insertSpace, insertBoundaries,
          depthDelta, destinationNode, createMode, createKeepIds, false);
    }
  }

  if (insertBoundaries != null && insertPosition == -1) {
    insertPosition = 0; // insertPosition cannot be negative
  }

  Statement stmt = null;
  PreparedStatement ps = null;
  ResultSet rs;
  BigInteger left = overrideLeft == null ? nodeInfo.getLeft() : overrideLeft;
  BigInteger right = null;
  String includeNode = includeNodeId ? "=" : "";
  long counter = 0;
  long newId = -1;
  try {
    final long start = System.currentTimeMillis();
    String createProps = createMode ? ",PARENT,REF,NAME,TEMPLATE" : "";
    String sql = " SELECT ID," + StorageManager.getIfFunction(
        // compute total child count only when the node has children
        "CHILDCOUNT = 0", "0",
        "(SELECT COUNT(*) FROM " + getTable(sourceMode) + " WHERE LFT > NODE.LFT AND RGT < NODE.RGT)")
        //   3           4             5   6
        + ", CHILDCOUNT, LFT AS LFTORD,RGT,DEPTH" + createProps
        + " FROM (SELECT ID,CHILDCOUNT,LFT,RGT,DEPTH" + createProps + " FROM " + getTable(sourceMode)
        + " WHERE " + "LFT>" + includeNode + nodeInfo.getLeft() + " AND LFT<" + includeNode
        + nodeInfo.getRight() + ") NODE " + "ORDER BY LFTORD ASC";
    stmt = con.createStatement();
    rs = stmt.executeQuery(sql);
    if (createMode) {
      //                                                      1  2      3     4     5   6        7   8
      ps = con.prepareStatement("INSERT INTO " + getTable(destMode)
          + " (ID,PARENT,DEPTH,DIRTY,REF,TEMPLATE,LFT,RGT,"
          //  9          10   11
          + "CHILDCOUNT,NAME,MODIFIED_AT) " + "VALUES (?,?,?,?,?,?,?,?,?,?,?)");
    } else {
      ps = con.prepareStatement("UPDATE " + getTable(sourceMode) + " SET LFT=?,RGT=?,DEPTH=? WHERE ID=?");
    }

    long id;
    int total_childs;
    int direct_childs;
    BigInteger nextLeft;
    int lastDepth = nodeInfo.getDepth() + (includeNodeId ? 0 : 1);
    int depth;
    BigInteger _rgt;
    BigInteger _lft;
    Long ref = null;
    String data = null;
    String name = "";

    Stack<Long> currentParent = null;
    if (createMode) {
      currentParent = new Stack<Long>();
      currentParent.push(destinationNode);
    }

    //System.out.println("Spacing:"+SPACING);
    while (rs.next()) {
      //System.out.println("------------------");
      id = rs.getLong(1);
      total_childs = rs.getInt(2);
      direct_childs = rs.getInt(3);
      _lft = getNodeBounds(rs, 4);
      _rgt = getNodeBounds(rs, 5);
      depth = rs.getInt(6);
      if (createMode) {
        // Reading these properties is slow, only do it when needed
        ref = rs.getLong(8);
        if (rs.wasNull())
          ref = null;
        name = rs.getString(9);
        data = rs.getString(10);
        if (rs.wasNull())
          data = null;
      }
      left = left.add(spacing).add(BigInteger.ONE);

      // Handle depth differences
      if (lastDepth - depth > 0) {
        BigInteger depthDifference = spacing.add(BigInteger.ONE);
        left = left.add(depthDifference.multiply(BigInteger.valueOf(lastDepth - depth)));
      }
      if (createMode) {
        if (lastDepth < depth) {
          currentParent.push(newId);
        } else if (lastDepth > depth) {
          for (int p = 0; p < (lastDepth - depth); p++)
            currentParent.pop();
        }
      }

      right = left.add(spacing).add(BigInteger.ONE);

      // add child space if needed
      if (total_childs > 0) {
        BigInteger childSpace = spacing.multiply(BigInteger.valueOf(total_childs * 2));
        childSpace = childSpace.add(BigInteger.valueOf((total_childs * 2) - 1));
        right = right.add(childSpace);
        nextLeft = left;
      } else {
        nextLeft = right;
      }

      if (insertBoundaries != null) {
        // insert gap at requested position
        // If we're past the gap, keep adding the insert space to left/right because the added
        // space is never "injected" into the loop, i.e. without adding it the left/right boundaries of
        // nodes after the gap would be too far to the left.
        if (_lft.compareTo(insertBoundaries[0]) > 0) {
          left = left.add(insertSpace);
        }
        if (_rgt.compareTo(insertBoundaries[0]) > 0) {
          right = right.add(insertSpace);
        }
      }

      // sanity checks
      if (left.compareTo(right) >= 0) {
        throw new FxTreeException(LOG, "ex.tree.reorganize.failed", counter, left, right,
            "left greater than right");
      }
      if (insertParent != null && right.compareTo((BigInteger) insertParent.getRight()) > 0) {
        throw new FxTreeException(LOG, "ex.tree.reorganize.failed", counter, left, right,
            "wrote past parent node bounds");
      }

      // Update the node
      if (createMode) {
        newId = createKeepIds ? id : seq.getId(destMode.getSequencer());
        if (firstCreatedNodeId == -1)
          firstCreatedNodeId = newId;

        // Create the main entry
        ps.setLong(1, newId);
        ps.setLong(2, currentParent.peek());
        ps.setLong(3, depth + depthDelta);
        ps.setBoolean(4, destMode != FxTreeMode.Live); // only flag non-live trees dirty
        if (ref == null) {
          ps.setNull(5, java.sql.Types.NUMERIC);
        } else {
          ps.setLong(5, ref);
        }
        if (data == null) {
          ps.setNull(6, java.sql.Types.VARCHAR);
        } else {
          ps.setString(6, data);
        }
        //System.out.println("=> id:"+newId+" left:"+left+" right:"+right);
        setNodeBounds(ps, 7, left);
        setNodeBounds(ps, 8, right);
        ps.setInt(9, direct_childs);
        ps.setString(10, name);
        ps.setLong(11, System.currentTimeMillis());
        ps.addBatch();
      } else {
        setNodeBounds(ps, 1, left);
        setNodeBounds(ps, 2, right);
        ps.setInt(3, depth + depthDelta);
        ps.setLong(4, id);
        ps.addBatch();
        // ps.executeBatch();
        // ps.clearBatch();
      }

      // Prepare variables for the next node
      left = nextLeft;
      lastDepth = depth;
      counter++;

      // Execute batch every 10000 items to avoid out of memory
      if (counter % 10000 == 0) {
        ps.executeBatch();
        ps.clearBatch();
      }
    }
    rs.close();
    stmt.close();
    stmt = null;
    ps.executeBatch();

    if (LOG.isDebugEnabled()) {
      final long time = System.currentTimeMillis() - start;
      LOG.debug("Tree reorganization of " + counter + " items completed in " + time + " ms (spaceLen="
          + spacing + ")");
    }
    return firstCreatedNodeId;
  } catch (FxApplicationException e) {
    throw e instanceof FxTreeException ? (FxTreeException) e : new FxTreeException(e);
  } catch (SQLException e) {
    String next = "";
    if (e.getNextException() != null)
      next = " next:" + e.getNextException().getMessage();
    if (StorageManager.isDuplicateKeyViolation(e))
      throw new FxTreeException(LOG, e, "ex.tree.reorganize.duplicateKey");
    throw new FxTreeException(LOG, e, "ex.tree.reorganize.failed", counter, left, right,
        e.getMessage() + next);
  } catch (Exception e) {
    throw new FxTreeException(e);
  } finally {
    try {
      if (stmt != null)
        stmt.close();
    } catch (Throwable t) {
      /*ignore*/
    }
    try {
      if (ps != null)
        ps.close();
    } catch (Throwable t) {
      /*ignore*/
    }
  }
}
From source file:org.jsweet.transpiler.Java2TypeScriptTranslator.java
private void printBlockStatements(List<JCStatement> statements) {
  for (JCStatement statement : statements) {
    if (context.options.isDebugMode()) {
      JCMethodDecl methodDecl = getParent(JCMethodDecl.class);
      if (isDebugMode(methodDecl)) {
        int s = statement.getStartPosition();
        int e = statement.getEndPosition(diagnosticSource.getEndPosTable());
        if (e == -1) {
          e = s;
        }
        printIndent().print("yield { row: ").print("" + diagnosticSource.getLineNumber(s))
            .print(", column: " + diagnosticSource.getColumnNumber(s, false))
            .print(", statement: \"");
        print(StringEscapeUtils.escapeJson(statement.toString())).print("\"");
        final Stack<List<String>> locals = new Stack<>();
        try {
          new TreeScanner() {
            public void scan(JCTree tree) {
              if (tree == statement) {
                throw new RuntimeException();
              }
              boolean contextChange = false;
              if (tree instanceof JCBlock || tree instanceof JCEnhancedForLoop || tree instanceof JCLambda
                  || tree instanceof JCForLoop || tree instanceof JCDoWhileLoop) {
                locals.push(new ArrayList<>());
                contextChange = true;
              }
              if (tree instanceof JCVariableDecl) {
                locals.peek().add(((JCVariableDecl) tree).name.toString());
              }
              super.scan(tree);
              if (contextChange) {
                locals.pop();
              }
            }
          }.scan(methodDecl.body);
        } catch (Exception end) {
          // swallow
        }
        List<String> accessibleLocals = new ArrayList<>();
        for (List<String> l : locals) {
          accessibleLocals.addAll(l);
        }
        if (!accessibleLocals.isEmpty()) {
          print(", locals: ");
          print("{");
          for (String local : accessibleLocals) {
            print("" + local + ": " + local + ", ");
          }
          removeLastChars(2);
          print("}");
        }
        print(" };").println();
      }
    }
    printBlockStatement(statement);
  }
}
From source file:org.lambdamatic.analyzer.ast.LambdaExpressionReader.java
/**
 * Reads the current {@link InsnNode} instruction and returns a {@link Statement} or {@code null}
 * if the instruction is not a full statement (in that case, the instruction is stored in the
 * given Expression {@link Stack}).
 *
 * @param insnCursor the cursor on the instruction to read
 * @param expressionStack the expression stack to put on or pop from.
 * @param localVariables the local variables
 * @return a {@link List} of {@link Statement} or empty list if no {@link Statement} was created
 *         after reading the current instruction.
 * @see <a href="https://en.wikipedia.org/wiki/Java_bytecode_instruction_listings">Java bytecode
 *      instruction listings on Wikipedia</a>
 */
private List<Statement> readInstruction(final InsnCursor insnCursor, final Stack<Expression> expressionStack,
    final List<CapturedArgument> capturedArguments, final LocalVariables localVariables) {
  final List<Statement> statements = new ArrayList<>();
  final AbstractInsnNode insnNode = insnCursor.getCurrent();
  switch (insnNode.getOpcode()) {
  // return a reference from a method
  case Opcodes.ARETURN:
  // return an integer from a method
  case Opcodes.IRETURN:
    statements.add(new ReturnStatement(expressionStack.pop()));
    break;
  // return void from method
  case Opcodes.RETURN:
    // wrap all pending expressions into ExpressionStatements
    while (!expressionStack.isEmpty()) {
      final Expression pendingExpression = expressionStack.pop();
      statements.add(new ExpressionStatement(pendingExpression));
    }
    break;
  // push a null reference onto the stack
  case Opcodes.ACONST_NULL:
    expressionStack.add(new NullLiteral());
    break;
  // load the int value 0 onto the stack
  case Opcodes.ICONST_0:
    // applies for byte, short, int and boolean
    expressionStack.add(new NumberLiteral(0));
    break;
  // load the int value 1 onto the stack
  case Opcodes.ICONST_1:
    // applies for byte, short, int and boolean
    expressionStack.add(new NumberLiteral(1));
    break;
  // load the int value 2 onto the stack
  case Opcodes.ICONST_2:
    expressionStack.add(new NumberLiteral(2));
    break;
  // load the int value 3 onto the stack
  case Opcodes.ICONST_3:
    expressionStack.add(new NumberLiteral(3));
    break;
  // load the int value 4 onto the stack
  case Opcodes.ICONST_4:
    expressionStack.add(new NumberLiteral(4));
    break;
  // load the int value 5 onto the stack
  case Opcodes.ICONST_5:
    expressionStack.add(new NumberLiteral(5));
    break;
  // push the long 0 onto the stack
  case Opcodes.LCONST_0:
    expressionStack.add(new NumberLiteral(0L));
    break;
  // push the long 1 onto the stack
  case Opcodes.LCONST_1:
    expressionStack.add(new NumberLiteral(1L));
    break;
  // push the 0.0f onto the stack
  case Opcodes.FCONST_0:
    expressionStack.add(new NumberLiteral(0f));
    break;
  // push the 1.0f onto the stack
  case Opcodes.FCONST_1:
    expressionStack.add(new NumberLiteral(1f));
    break;
  // push the 2.0f onto the stack
  case Opcodes.FCONST_2:
    expressionStack.add(new NumberLiteral(2f));
    break;
  // push the constant 0.0 onto the stack
  case Opcodes.DCONST_0:
    expressionStack.add(new NumberLiteral(0d));
    break;
  // push the constant 1.0 onto the stack
  case Opcodes.DCONST_1:
    expressionStack.add(new NumberLiteral(1d));
    break;
  // compare two longs values
  case Opcodes.LCMP:
  // compare two doubles
  case Opcodes.DCMPL:
  // compare two doubles
  case Opcodes.DCMPG:
  // compare two floats
  case Opcodes.FCMPL:
  // compare two floats
  case Opcodes.FCMPG:
    statements.addAll(
        readJumpInstruction(insnCursor.next(), expressionStack, capturedArguments, localVariables));
    break;
  // add 2 ints
  case Opcodes.IADD:
    expressionStack.add(readOperation(Operator.ADD, expressionStack));
    break;
  // int subtract
  case Opcodes.ISUB:
    expressionStack.add(readOperation(Operator.SUBTRACT, expressionStack));
    break;
  // multiply 2 integers
  case Opcodes.IMUL:
    expressionStack.add(readOperation(Operator.MULTIPLY, expressionStack));
    break;
  // divide 2 integers
  case Opcodes.IDIV:
    expressionStack.add(readOperation(Operator.DIVIDE, expressionStack));
    break;
  // negate int
  case Opcodes.INEG:
    expressionStack.add(inverseInteger(expressionStack));
    break;
  // discard the top value on the stack
  case Opcodes.POP:
    statements.add(new ExpressionStatement(expressionStack.pop()));
    break;
  // duplicate the value on top of the stack
  case Opcodes.DUP:
    expressionStack.push(expressionStack.peek());
    break;
  // insert a copy of the top value into the stack two values from the top.
  case Opcodes.DUP_X1:
    expressionStack.add(expressionStack.size() - 2, expressionStack.peek());
    break;
  // store into a reference in an array
  case Opcodes.AASTORE:
    readArrayStoreInstruction(insnNode, expressionStack);
    break;
  // converts Float to Double -> ignored.
  case Opcodes.F2D:
    break;
  default:
    throw new AnalyzeException(
        "Bytecode instruction with OpCode '" + insnNode.getOpcode() + "' is not supported.");
  }
  return statements;
}
From source file:org.apache.tajo.engine.planner.LogicalPlanner.java
private LogicalNode insertWindowAggNode(PlanContext context, LogicalNode child, Stack<Expr> stack,
    String[] referenceNames, ExprNormalizer.WindowSpecReferences[] windowSpecReferenceses)
    throws PlanningException {
  LogicalPlan plan = context.plan;
  QueryBlock block = context.queryBlock;

  WindowAggNode windowAggNode = context.plan.createNode(WindowAggNode.class);
  if (child.getType() == NodeType.LIMIT) {
    LimitNode limitNode = (LimitNode) child;
    windowAggNode.setChild(limitNode.getChild());
    windowAggNode.setInSchema(limitNode.getChild().getOutSchema());
    limitNode.setChild(windowAggNode);
  } else if (child.getType() == NodeType.SORT) {
    SortNode sortNode = (SortNode) child;
    windowAggNode.setChild(sortNode.getChild());
    windowAggNode.setInSchema(sortNode.getChild().getOutSchema());
    sortNode.setChild(windowAggNode);
  } else {
    windowAggNode.setChild(child);
    windowAggNode.setInSchema(child.getOutSchema());
  }

  List<String> winFuncRefs = new ArrayList<String>();
  List<WindowFunctionEval> winFuncs = new ArrayList<WindowFunctionEval>();
  List<WindowSpec> rawWindowSpecs = Lists.newArrayList();
  for (Iterator<NamedExpr> it = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); it.hasNext();) {
    NamedExpr rawTarget = it.next();
    try {
      EvalNode evalNode = exprAnnotator.createEvalNode(context, rawTarget.getExpr(),
          NameResolvingMode.SUBEXPRS_AND_RELS);
      if (evalNode.getType() == EvalType.WINDOW_FUNCTION) {
        winFuncRefs.add(rawTarget.getAlias());
        winFuncs.add((WindowFunctionEval) evalNode);
        block.namedExprsMgr.markAsEvaluated(rawTarget.getAlias(), evalNode);

        // TODO - Later, we also consider the possibility that a window function contains only a window name.
        rawWindowSpecs.add(((WindowFunctionExpr) (rawTarget.getExpr())).getWindowSpec());
      }
    } catch (VerifyException ve) {
    }
  }

  // we only consider one window definition.
  if (windowSpecReferenceses[0].hasPartitionKeys()) {
    Column[] partitionKeyColumns = new Column[windowSpecReferenceses[0].getPartitionKeys().length];
    int i = 0;
    for (String partitionKey : windowSpecReferenceses[0].getPartitionKeys()) {
      if (block.namedExprsMgr.isEvaluated(partitionKey)) {
        partitionKeyColumns[i++] = block.namedExprsMgr.getTarget(partitionKey).getNamedColumn();
      } else {
        throw new PlanningException("Each grouping column expression must be a scalar expression.");
      }
    }
    windowAggNode.setPartitionKeys(partitionKeyColumns);
  }

  SortSpec[][] sortGroups = new SortSpec[rawWindowSpecs.size()][];

  for (int winSpecIdx = 0; winSpecIdx < rawWindowSpecs.size(); winSpecIdx++) {
    WindowSpec spec = rawWindowSpecs.get(winSpecIdx);
    if (spec.hasOrderBy()) {
      Sort.SortSpec[] sortSpecs = spec.getSortSpecs();
      int sortNum = sortSpecs.length;
      String[] sortKeyRefNames = windowSpecReferenceses[winSpecIdx].getOrderKeys();
      SortSpec[] annotatedSortSpecs = new SortSpec[sortNum];

      Column column;
      for (int i = 0; i < sortNum; i++) {
        if (block.namedExprsMgr.isEvaluated(sortKeyRefNames[i])) {
          column = block.namedExprsMgr.getTarget(sortKeyRefNames[i]).getNamedColumn();
        } else {
          throw new IllegalStateException("Unexpected State: " + TUtil.arrayToString(sortSpecs));
        }
        annotatedSortSpecs[i] = new SortSpec(column, sortSpecs[i].isAscending(), sortSpecs[i].isNullFirst());
      }
      sortGroups[winSpecIdx] = annotatedSortSpecs;
    } else {
      sortGroups[winSpecIdx] = null;
    }
  }

  for (int i = 0; i < winFuncRefs.size(); i++) {
    WindowFunctionEval winFunc = winFuncs.get(i);
    if (sortGroups[i] != null) {
      winFunc.setSortSpecs(sortGroups[i]);
    }
  }

  Target[] targets = new Target[referenceNames.length];
  List<Integer> windowFuncIndices = Lists.newArrayList();
  Projection projection = (Projection) stack.peek();
  int windowFuncIdx = 0;
  for (NamedExpr expr : projection.getNamedExprs()) {
    if (expr.getExpr().getType() == OpType.WindowFunction) {
      windowFuncIndices.add(windowFuncIdx);
    }
    windowFuncIdx++;
  }
  windowAggNode.setWindowFunctions(winFuncs.toArray(new WindowFunctionEval[winFuncs.size()]));

  int targetIdx = 0;
  for (int i = 0; i < referenceNames.length; i++) {
    if (!windowFuncIndices.contains(i)) {
      if (block.isConstReference(referenceNames[i])) {
        targets[targetIdx++] = new Target(block.getConstByReference(referenceNames[i]), referenceNames[i]);
      } else {
        targets[targetIdx++] = block.namedExprsMgr.getTarget(referenceNames[i]);
      }
    }
  }
  for (int i = 0; i < winFuncRefs.size(); i++) {
    targets[targetIdx++] = block.namedExprsMgr.getTarget(winFuncRefs.get(i));
  }
  windowAggNode.setTargets(targets);
  verifyProjectedFields(block, windowAggNode);

  block.registerNode(windowAggNode);
  postHook(context, stack, null, windowAggNode);

  if (child.getType() == NodeType.LIMIT) {
    LimitNode limitNode = (LimitNode) child;
    limitNode.setInSchema(windowAggNode.getOutSchema());
    limitNode.setOutSchema(windowAggNode.getOutSchema());
    return null;
  } else if (child.getType() == NodeType.SORT) {
    SortNode sortNode = (SortNode) child;
    sortNode.setInSchema(windowAggNode.getOutSchema());
    sortNode.setOutSchema(windowAggNode.getOutSchema());
    return null;
  } else {
    return windowAggNode;
  }
}