List of usage examples for java.util.Deque.push
void push(E e);
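push(e) inserts e at the head of the deque and is equivalent to addFirst(e); paired with pop() and peek() it gives LIFO stack behavior, which is the pattern running through the examples below. A minimal standalone sketch (class and variable names are illustrative):

import java.util.ArrayDeque;
import java.util.Deque;

public class DequePushDemo {
    public static void main(String[] args) {
        Deque<String> stack = new ArrayDeque<>();
        stack.push("first");
        stack.push("second");             // now at the head of the deque
        System.out.println(stack.peek()); // prints "second" (head, not removed)
        System.out.println(stack.pop());  // prints "second" (head, removed)
        System.out.println(stack.pop());  // prints "first"
    }
}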
From source file: org.apache.hadoop.hive.ql.parse.ASTNode.java

private StringBuilder dump(StringBuilder sb) {
    Deque<ASTNode> stack = new ArrayDeque<ASTNode>();
    stack.push(this);
    int tabLength = 0;
    while (!stack.isEmpty()) {
        // peek, don't pop: each node is seen twice, once on the way down
        // (print + push children) and once on the way back up (unwind)
        ASTNode next = stack.peek();
        if (!next.visited) {
            sb.append(StringUtils.repeat(" ", tabLength * 3));
            sb.append(next.toString());
            sb.append("\n");
            if (next.children != null) {
                // push children in reverse so they pop in original order
                for (int i = next.children.size() - 1; i >= 0; i--) {
                    stack.push((ASTNode) next.children.get(i));
                }
            }
            tabLength++;
            next.visited = true;
        } else {
            tabLength--;
            next.visited = false;
            stack.pop();
        }
    }
    return sb;
}
From source file: net.minecraftforge.common.ForgeHooks.java

@Nullable
public static LootTable loadLootTable(Gson gson, ResourceLocation name, String data, boolean custom,
        LootTableManager lootTableManager) {
    Deque<LootTableContext> que = lootContext.get();
    if (que == null) {
        que = Queues.newArrayDeque();
        lootContext.set(que);
    }
    LootTable ret = null;
    try {
        que.push(new LootTableContext(name, custom));
        ret = gson.fromJson(data, LootTable.class);
        que.pop();
    } catch (JsonParseException e) {
        que.pop();
        throw e;
    }
    if (!custom)
        ret = ForgeEventFactory.loadLootTable(name, ret, lootTableManager);
    if (ret != null)
        ret.freeze();
    return ret;
}
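The push/pop pair above maintains a per-thread stack of loot-table contexts so that nested loads can see their ancestors. A minimal sketch of that idiom, with hypothetical names (ContextStackSketch, process) and a try/finally so the pop runs even when the work throws:

import java.util.ArrayDeque;
import java.util.Deque;

public class ContextStackSketch {
    // one stack per thread, so nested calls on the same thread see their ancestors
    private static final ThreadLocal<Deque<String>> CONTEXT =
            ThreadLocal.withInitial(ArrayDeque::new);

    static String currentContext() {
        return CONTEXT.get().peek(); // innermost context, or null at top level
    }

    static void process(String name) {
        Deque<String> stack = CONTEXT.get();
        stack.push(name);            // enter: innermost context goes on top
        try {
            System.out.println("processing " + name + ", enclosing = " + stack);
            if (name.equals("outer")) {
                process("inner");    // re-entrant call sees "outer" beneath it
            }
        } finally {
            stack.pop();             // leave: always unwinds, even on exception
        }
    }

    public static void main(String[] args) {
        process("outer");
    }
}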
From source file: de.tiqsolutions.hdfs.HadoopFileSystemPath.java

Deque<Path> getPathSegments() {
    Deque<Path> paths = new ArrayDeque<>();
    Path root = getRoot();
    Path p = this;
    while (p != null && !p.equals(root)) {
        paths.push(p.getFileName());
        p = p.getParent();
    }
    return paths;
}
From source file: net.sf.jasperreports.engine.json.expression.member.evaluation.ArrayIndexExpressionEvaluator.java

private List<JRJsonNode> goAnywhereDown(JRJsonNode jrJsonNode) {
    List<JRJsonNode> result = new ArrayList<>();
    Deque<JRJsonNode> stack = new ArrayDeque<>();
    JsonNode initialDataNode = jrJsonNode.getDataNode();

    if (log.isDebugEnabled()) {
        log.debug("initial stack population with: " + initialDataNode);
    }

    // populate the stack initially
    stack.push(jrJsonNode);

    while (!stack.isEmpty()) {
        JRJsonNode stackNode = stack.pop();
        JsonNode stackDataNode = stackNode.getDataNode();

        addChildrenToStack(stackNode, stack);

        // process the current stack item
        if (stackDataNode.isArray()) {
            if (log.isDebugEnabled()) {
                log.debug("processing stack element: " + stackDataNode);
            }

            if (expression.getIndex() >= 0 && expression.getIndex() < stackDataNode.size()) {
                JsonNode nodeAtIndex = stackDataNode.get(expression.getIndex());
                JRJsonNode child = stackNode.createChild(nodeAtIndex);
                if (applyFilter(child)) {
                    result.add(child);
                }
            }
        }
    }

    return result;
}
From source file: org.apache.hadoop.hive.ql.parse.ASTNode.java

private String toStringTree(ASTNode rootNode) {
    Deque<ASTNode> stack = new ArrayDeque<ASTNode>();
    stack.push(this);

    while (!stack.isEmpty()) {
        ASTNode next = stack.peek();
        if (!next.visited) {
            if (next.parent != null && next.parent.getChildCount() > 1 && next != next.parent.getChild(0)) {
                rootNode.addtoMemoizedString(" ");
            }
            next.rootNode = rootNode;
            next.startIndx = rootNode.getMemoizedStringLen();
            // Leaf
            if (next.children == null || next.children.size() == 0) {
                String str = next.toString();
                rootNode.addtoMemoizedString(
                        next.getType() != HiveParser.StringLiteral ? str.toLowerCase() : str);
                next.endIndx = rootNode.getMemoizedStringLen();
                stack.pop();
                continue;
            }
            if (!next.isNil()) {
                rootNode.addtoMemoizedString("(");
                String str = next.toString();
                rootNode.addtoMemoizedString(
                        (next.getType() == HiveParser.StringLiteral || null == str) ? str : str.toLowerCase());
                rootNode.addtoMemoizedString(" ");
            }
            if (next.children != null) {
                for (int i = next.children.size() - 1; i >= 0; i--) {
                    stack.push((ASTNode) next.children.get(i));
                }
            }
            next.visited = true;
        } else {
            if (!next.isNil()) {
                rootNode.addtoMemoizedString(")");
            }
            next.endIndx = rootNode.getMemoizedStringLen();
            next.visited = false;
            stack.pop();
        }
    }

    return rootNode.getMemoizedSubString(startIndx, endIndx);
}
From source file: net.sf.jasperreports.engine.json.expression.member.evaluation.ArrayConstructionExpressionEvaluator.java

private List<JRJsonNode> goAnywhereDown(JRJsonNode jrJsonNode) {
    List<JRJsonNode> result = new ArrayList<>();
    Deque<JRJsonNode> stack = new ArrayDeque<>();
    JsonNode initialDataNode = jrJsonNode.getDataNode();

    if (log.isDebugEnabled()) {
        log.debug("initial stack population with: " + initialDataNode);
    }

    // populate the stack initially
    stack.push(jrJsonNode);

    while (!stack.isEmpty()) {
        JRJsonNode stackNode = stack.pop();
        JsonNode stackDataNode = stackNode.getDataNode();

        addChildrenToStack(stackNode, stack);

        // process the current stack item
        if (stackDataNode.isArray()) {
            if (log.isDebugEnabled()) {
                log.debug("processing stack element: " + stackDataNode);
            }

            ArrayNode newNode = getEvaluationContext().getObjectMapper().createArrayNode();

            for (Integer idx : expression.getIndexes()) {
                if (idx >= 0 && idx < stackDataNode.size()) {
                    JRJsonNode nodeAtIndex = stackNode.createChild(stackDataNode.get(idx));
                    if (applyFilter(nodeAtIndex)) {
                        newNode.add(nodeAtIndex.getDataNode());
                    }
                }
            }

            if (newNode.size() > 0) {
                result.add(stackNode.createChild(newNode));
            }
        }
    }

    return result;
}
From source file: net.sf.jasperreports.engine.json.expression.member.evaluation.ArraySliceExpressionEvaluator.java

private List<JRJsonNode> goAnywhereDown(JRJsonNode jrJsonNode) {
    List<JRJsonNode> result = new ArrayList<>();
    Deque<JRJsonNode> stack = new ArrayDeque<>();

    if (log.isDebugEnabled()) {
        log.debug("initial stack population with: " + jrJsonNode.getDataNode());
    }

    // populate the stack initially
    stack.push(jrJsonNode);

    while (!stack.isEmpty()) {
        JRJsonNode stackNode = stack.pop();
        JsonNode stackDataNode = stackNode.getDataNode();

        addChildrenToStack(stackNode, stack);

        // process the current stack item
        if (stackDataNode.isArray()) {
            if (log.isDebugEnabled()) {
                log.debug("processing stack element: " + stackDataNode);
            }

            ArrayNode newNode = getEvaluationContext().getObjectMapper().createArrayNode();

            Integer start = getSliceStart(stackDataNode.size());
            if (start >= stackDataNode.size()) {
                continue;
            }

            Integer end = getSliceEnd(stackDataNode.size());
            if (end < 0) {
                continue;
            }

            for (int i = start; i < end; i++) {
                JRJsonNode nodeAtIndex = stackNode.createChild(stackDataNode.get(i));
                if (applyFilter(nodeAtIndex)) {
                    newNode.add(nodeAtIndex.getDataNode());
                }
            }

            if (newNode.size() > 0) {
                result.add(stackNode.createChild(newNode));
            }
        }
    }

    return result;
}
From source file: org.talend.dataprep.transformation.actions.text.Split.java

@Override
public void compile(ActionContext context) {
    super.compile(context);
    if (context.getActionStatus() == ActionContext.ActionStatus.OK) {
        if (StringUtils.isEmpty(getSeparator(context))) {
            LOGGER.warn("Cannot split on an empty separator");
            context.setActionStatus(ActionContext.ActionStatus.CANCELED);
        }
        // Create split columns
        final RowMetadata rowMetadata = context.getRowMetadata();
        final String columnId = context.getColumnId();
        final ColumnMetadata column = rowMetadata.getById(columnId);
        final Deque<String> lastColumnId = new ArrayDeque<>();
        final Map<String, String> parameters = context.getParameters();
        int limit = Integer.parseInt(parameters.get(LIMIT));
        final List<String> newColumns = new ArrayList<>();
        lastColumnId.push(columnId);
        for (int i = 0; i < limit; i++) {
            final int newColumnIndex = i + 1;
            newColumns.add(context.column(column.getName() + SPLIT_APPENDIX + i, r -> {
                final ColumnMetadata c = ColumnMetadata.Builder //
                        .column() //
                        .type(Type.STRING) //
                        .computedId(StringUtils.EMPTY) //
                        .name(column.getName() + SPLIT_APPENDIX + newColumnIndex) //
                        .build();
                lastColumnId.push(rowMetadata.insertAfter(lastColumnId.pop(), c));
                return c;
            }));
        }
        context.get(NEW_COLUMNS_CONTEXT, p -> newColumns); // Save new column names for apply
    }
}
From source file: org.onehippo.cms7.essentials.dashboard.instruction.FileInstruction.java

/**
 * Recursively creates parent directories in case they don't exist yet.
 *
 * @param destination starting directory
 * @throws IOException
 */
protected void createParentDirectories(final File destination) throws IOException {
    Deque<String> directories = new ArrayDeque<>();
    String parent = destination.getParent();
    while (!new File(parent).exists()) {
        directories.push(parent);
        parent = new File(parent).getParent();
    }
    processDirectories(directories);
    Files.createFile(destination.toPath());
}
From source file: ocr.sapphire.image.EdgeBasedImagePreprocessor.java

private Deque<Point> findConnectedComponent(int[] edgeData, int x, int y) {
    Deque<Point> points = new LinkedList<Point>();
    Deque<Point> queue = new LinkedList<Point>();
    edgeData[x + y * width] = WHITE;
    Point initialPoint = new Point(x, y);
    points.add(initialPoint);
    queue.push(initialPoint); // seed; with a single element, push and addLast are equivalent
    while (!queue.isEmpty()) {
        Point point = queue.removeFirst(); // removeFirst + addLast below = FIFO scan
        for (int k = 0; k < 8; k++) { // 8-connected neighborhood
            int x2 = (int) (point.x + DX[k]);
            int y2 = (int) (point.y + DY[k]);
            if (x2 < 0 || y2 < 0 || x2 >= width || y2 >= height) {
                continue;
            }
            if (edgeData[x2 + y2 * width] == BLACK) {
                edgeData[x2 + y2 * width] = WHITE; // mark visited
                Point point2 = new Point(x2, y2);
                points.add(point2);
                queue.addLast(point2);
            }
        }
    }
    return points;
}
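This last example seeds the deque with push but then drains it with removeFirst and refills it with addLast, so the scan is actually FIFO (breadth-first). Which end elements are added to is all that separates depth-first from breadth-first traversal with a Deque; a small sketch over a hypothetical tree type:

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;

public class TraversalOrderSketch {
    record Node(String name, List<Node> children) {}

    public static void main(String[] args) {
        Node root = new Node("a", Arrays.asList(
                new Node("b", Arrays.asList(new Node("d", List.of()))),
                new Node("c", List.of())));

        // LIFO: push/pop visits depth-first
        Deque<Node> stack = new ArrayDeque<>();
        stack.push(root);
        while (!stack.isEmpty()) {
            Node n = stack.pop();
            System.out.print(n.name() + " ");    // prints: a b d c
            for (int i = n.children().size() - 1; i >= 0; i--) {
                stack.push(n.children().get(i)); // reverse order keeps left-to-right output
            }
        }
        System.out.println();

        // FIFO: addLast/removeFirst visits level by level, breadth-first
        Deque<Node> queue = new ArrayDeque<>();
        queue.addLast(root);
        while (!queue.isEmpty()) {
            Node n = queue.removeFirst();
            System.out.print(n.name() + " ");    // prints: a b c d
            n.children().forEach(queue::addLast);
        }
        System.out.println();
    }
}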