List of usage examples for java.util.Stack.peek()
public synchronized E peek()
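peek() returns the element at the top of the stack without removing it, and throws EmptyStackException when the stack is empty. Before the real-world examples below, here is a minimal, self-contained demonstration (not taken from any of the source files that follow):

import java.util.EmptyStackException;
import java.util.Stack;

public class StackPeekDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();
        stack.push("first");
        stack.push("second");

        // peek() reads the top element but leaves it on the stack
        System.out.println(stack.peek()); // "second"
        System.out.println(stack.size()); // 2

        // pop() removes the top element; peek() now sees the one below it
        stack.pop();
        System.out.println(stack.peek()); // "first"

        // On an empty stack, peek() throws EmptyStackException
        stack.pop();
        try {
            stack.peek();
        } catch (EmptyStackException e) {
            System.out.println("stack is empty");
        }
    }
}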
From source file:fr.paris.lutece.plugins.upload.web.UploadJspBean.java
/**
 * Deletes a directory recursively.
 *
 * @param directory The directory to delete
 */
private static void deleteDirectory(File directory) {
    // We use a Stack (LIFO) to keep track of the directories to delete
    Stack<File> dirsToDelete = new Stack<File>();

    // The stack is initialized with the main directory
    dirsToDelete.push(directory);

    // Loop until all directories have been deleted
    while (!dirsToDelete.empty()) {
        // Look at the directory on top of the stack (don't remove it!)
        File currentDir = (File) dirsToDelete.peek();

        // Are there any subdirectories?
        File[] subDirs = currentDir.listFiles(dirFilter);

        if (subDirs.length > 0) {
            // If so, add them to the stack
            for (int i = 0; i < subDirs.length; i++) {
                dirsToDelete.push(subDirs[i]);
            }
        } else {
            // If not, delete all files in the directory
            File[] files = currentDir.listFiles(fileFilter);

            for (int i = 0; i < files.length; i++) {
                files[i].delete();
            }

            // Then delete the directory
            currentDir.delete();

            // Then remove the directory from the stack
            dirsToDelete.pop();
        }
    }
}
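The method relies on two FileFilter fields, dirFilter and fileFilter, that are not shown in the snippet. A minimal sketch of how they are likely defined in the same class, plus a hypothetical call site (the field names match the snippet above; the exact definitions and the path are assumptions made for illustration):

import java.io.File;
import java.io.FileFilter;

// Assumed definitions: accept only subdirectories / only plain files.
private static final FileFilter dirFilter = new FileFilter() {
    public boolean accept(File f) {
        return f.isDirectory();
    }
};

private static final FileFilter fileFilter = new FileFilter() {
    public boolean accept(File f) {
        return !f.isDirectory();
    }
};

// Hypothetical call site:
// deleteDirectory(new File("/tmp/upload-workdir"));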
From source file:org.apache.hadoop.hbase.filter.ParseFilter.java
/**
 * Pops an argument from the operator stack and the number of arguments required by the operator
 * from the filterStack and evaluates them
 * <p>
 * @param operatorStack the stack containing the operators
 * @param filterStack the stack containing the filters
 * @return the evaluated filter
 */
public static Filter popArguments(Stack<ByteBuffer> operatorStack, Stack<Filter> filterStack) {
    ByteBuffer argumentOnTopOfStack = operatorStack.peek();

    if (argumentOnTopOfStack.equals(ParseConstants.OR_BUFFER)) {
        // The top of the stack is an OR
        try {
            ArrayList<Filter> listOfFilters = new ArrayList<Filter>();
            while (!operatorStack.empty() && operatorStack.peek().equals(ParseConstants.OR_BUFFER)) {
                Filter filter = filterStack.pop();
                listOfFilters.add(0, filter);
                operatorStack.pop();
            }
            Filter filter = filterStack.pop();
            listOfFilters.add(0, filter);
            Filter orFilter = new FilterList(FilterList.Operator.MUST_PASS_ONE, listOfFilters);
            return orFilter;
        } catch (EmptyStackException e) {
            throw new IllegalArgumentException("Incorrect input string - an OR needs two filters");
        }
    } else if (argumentOnTopOfStack.equals(ParseConstants.AND_BUFFER)) {
        // The top of the stack is an AND
        try {
            ArrayList<Filter> listOfFilters = new ArrayList<Filter>();
            while (!operatorStack.empty() && operatorStack.peek().equals(ParseConstants.AND_BUFFER)) {
                Filter filter = filterStack.pop();
                listOfFilters.add(0, filter);
                operatorStack.pop();
            }
            Filter filter = filterStack.pop();
            listOfFilters.add(0, filter);
            Filter andFilter = new FilterList(FilterList.Operator.MUST_PASS_ALL, listOfFilters);
            return andFilter;
        } catch (EmptyStackException e) {
            throw new IllegalArgumentException("Incorrect input string - an AND needs two filters");
        }
    } else if (argumentOnTopOfStack.equals(ParseConstants.SKIP_BUFFER)) {
        // The top of the stack is a SKIP
        try {
            Filter wrappedFilter = filterStack.pop();
            Filter skipFilter = new SkipFilter(wrappedFilter);
            operatorStack.pop();
            return skipFilter;
        } catch (EmptyStackException e) {
            throw new IllegalArgumentException("Incorrect input string - a SKIP wraps a filter");
        }
    } else if (argumentOnTopOfStack.equals(ParseConstants.WHILE_BUFFER)) {
        // The top of the stack is a WHILE
        try {
            Filter wrappedFilter = filterStack.pop();
            Filter whileMatchFilter = new WhileMatchFilter(wrappedFilter);
            operatorStack.pop();
            return whileMatchFilter;
        } catch (EmptyStackException e) {
            throw new IllegalArgumentException("Incorrect input string - a WHILE wraps a filter");
        }
    } else if (argumentOnTopOfStack.equals(ParseConstants.LPAREN_BUFFER)) {
        // The top of the stack is a LPAREN
        try {
            Filter filter = filterStack.pop();
            operatorStack.pop();
            return filter;
        } catch (EmptyStackException e) {
            throw new IllegalArgumentException("Incorrect Filter String");
        }
    } else {
        throw new IllegalArgumentException("Incorrect arguments on operatorStack");
    }
}
From source file:org.runnerup.workout.WorkoutSerializer.java
public static JSONObject createJSON(Workout workout) throws JSONException {
    Stack<jsonstep> stepStack = new Stack<jsonstep>();
    ArrayList<jsonstep> stepList = new ArrayList<jsonstep>();
    int no = 1;
    int group = 1;
    Workout.StepListEntry prev = null;
    for (Workout.StepListEntry e : workout.getStepList()) {
        jsonstep s = new jsonstep();
        s.step = e.step;
        s.order = no++;
        if (e.parent != null) {
            while (e.parent != stepStack.peek().step) {
                stepStack.pop();
                group = stepStack.peek().group;
            }
            s.parentGroup = stepStack.peek().group;
            s.parentStep = (RepeatStep) stepStack.peek().step;
        }
        if (e.step instanceof RepeatStep) {
            group++;
            stepStack.push(s);
        }
        if (e.parent == null && prev != null && prev.parent != null) {
            group++;
        }
        s.group = group;
        stepList.add(s);
        prev = e;
    }

    JSONArray steps = new JSONArray();
    for (jsonstep s : stepList) {
        JSONObject obj = toJSON(s.step);
        obj.put("stepOrder", s.order);
        obj.put("groupId", s.group);
        if (s.parentGroup != null) {
            obj.put("parentGroupId", s.parentGroup.intValue());
        }
        steps.put(obj);
    }

    JSONObject obj = new JSONObject();
    obj.put("workoutSteps", steps);
    JSONObject ret = new JSONObject();
    ret.put("com.garmin.connect.workout.json.UserWorkoutJson", obj);
    return ret;
}
From source file:org.apache.hadoop.util.ConfTest.java
private static List<NodeInfo> parseConf(InputStream in) throws XMLStreamException {
    QName configuration = new QName("configuration");
    QName property = new QName("property");

    List<NodeInfo> nodes = new ArrayList<NodeInfo>();
    Stack<NodeInfo> parsed = new Stack<NodeInfo>();

    XMLInputFactory factory = XMLInputFactory.newInstance();
    XMLEventReader reader = factory.createXMLEventReader(in);

    while (reader.hasNext()) {
        XMLEvent event = reader.nextEvent();
        if (event.isStartElement()) {
            StartElement currentElement = event.asStartElement();
            NodeInfo currentNode = new NodeInfo(currentElement);
            if (parsed.isEmpty()) {
                if (!currentElement.getName().equals(configuration)) {
                    return null;
                }
            } else {
                NodeInfo parentNode = parsed.peek();
                QName parentName = parentNode.getStartElement().getName();
                if (parentName.equals(configuration)
                        && currentNode.getStartElement().getName().equals(property)) {
                    @SuppressWarnings("unchecked")
                    Iterator<Attribute> it = currentElement.getAttributes();
                    while (it.hasNext()) {
                        currentNode.addAttribute(it.next());
                    }
                } else if (parentName.equals(property)) {
                    parentNode.addElement(currentElement);
                }
            }
            parsed.push(currentNode);
        } else if (event.isEndElement()) {
            NodeInfo node = parsed.pop();
            if (parsed.size() == 1) {
                nodes.add(node);
            }
        } else if (event.isCharacters()) {
            if (2 < parsed.size()) {
                NodeInfo parentNode = parsed.pop();
                StartElement parentElement = parentNode.getStartElement();
                NodeInfo grandparentNode = parsed.peek();
                if (grandparentNode.getElement(parentElement) == null) {
                    grandparentNode.setElement(parentElement, event.asCharacters());
                }
                parsed.push(parentNode);
            }
        }
    }

    return nodes;
}
From source file:org.springframework.statemachine.config.AbstractStateMachineFactory.java
private static <S, E> Collection<StateData<S, E>> popSameParents(Stack<StateData<S, E>> stack) {
    Collection<StateData<S, E>> data = new ArrayList<StateData<S, E>>();
    Object parent = null;
    if (!stack.isEmpty()) {
        parent = stack.peek().getParent();
    }
    while (!stack.isEmpty() && ObjectUtils.nullSafeEquals(parent, stack.peek().getParent())) {
        data.add(stack.pop());
    }
    return data;
}
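The same peek-before-pop pattern (read a property of the top element, then keep popping while the top still matches it) can be illustrated with a self-contained sketch using plain strings. The parentOf helper and the "a/x has parent a" convention are assumptions made for this sketch; they are not part of the Spring StateMachine code above:

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Stack;

public class PopSameParentsSketch {
    // Hypothetical parent relation for the sketch: the parent of "a/x" is "a".
    static String parentOf(String path) {
        int idx = path.indexOf('/');
        return idx < 0 ? null : path.substring(0, idx);
    }

    // Same shape as popSameParents() above: peek to learn the parent of the top
    // element, then pop while the top element still has that parent.
    static List<String> popSameParents(Stack<String> stack) {
        List<String> data = new ArrayList<String>();
        String parent = stack.isEmpty() ? null : parentOf(stack.peek());
        while (!stack.isEmpty() && Objects.equals(parent, parentOf(stack.peek()))) {
            data.add(stack.pop());
        }
        return data;
    }

    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();
        stack.push("root/a");
        stack.push("group/x");
        stack.push("group/y");
        System.out.println(popSameParents(stack)); // [group/y, group/x]
        System.out.println(stack);                 // [root/a]
    }
}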
From source file:com.aurel.track.admin.customize.category.filter.tree.design.TreeFilterSaverBL.java
/**
 * Transform a list of QueryExpressions with parentheses into a tree
 * @param expressionList
 * @param operationStack
 */
public static QNode transformExpressionListToTree(List<FieldExpressionInTreeTO> expressionList,
        Stack<QNode> operationStack) throws Exception {
    if (expressionList == null || expressionList.isEmpty()) {
        return null;
    }
    QNode root = new QNode();
    root.setType(QNode.AND);
    operationStack.push(root);
    if (expressionList != null) {
        Iterator<FieldExpressionInTreeTO> iterator = expressionList.iterator();
        boolean first = true;
        while (iterator.hasNext()) {
            FieldExpressionInTreeTO fieldExpressionInTree = iterator.next();
            if (operationStack.isEmpty()) {
                throw new Exception("admin.customize.queryFilter.err.closedGtOpened");
            }
            QNode peekNode = operationStack.peek();
            if (!first) {
                // the first operation (the hidden one) is not significant
                Integer operation = fieldExpressionInTree.getSelectedOperation();
                if (operation != null) {
                    if (peekNode.isTypeAlreadySet()) {
                        if (!equalOperation(peekNode, operation)) {
                            throw new Exception(
                                    "admin.customize.queryFilter.err.differentOperationsInParenthesis");
                        }
                    } else {
                        // at the outermost level the second filter expression sets the operation;
                        // inside internal parentheses the first one does
                        setOperation(peekNode, operation.intValue());
                        peekNode.setTypeAlreadySet(true);
                    }
                }
            } else {
                first = false;
                if (!iterator.hasNext()) {
                    // it could also be AND; it does not matter because it is a single expression
                    peekNode.setType(QNode.OR);
                }
            }
            int leftParenthesis = fieldExpressionInTree.getParenthesisOpen();
            for (int i = 0; i < leftParenthesis; i++) {
                // unknown node type (AND or OR)
                QNode qNode = new QNode();
                peekNode.addChild(qNode);
                operationStack.push(qNode);
                peekNode = operationStack.peek();
            }
            peekNode.addChild(new QNodeExpression(fieldExpressionInTree));
            int rightParenthesis = fieldExpressionInTree.getParenthesisClosed();
            if (rightParenthesis > 0) {
                for (int i = 0; i < rightParenthesis; i++) {
                    if (operationStack.isEmpty()) {
                        throw new Exception("admin.customize.queryFilter.err.closedGtOpened");
                    }
                    operationStack.pop();
                }
            }
        }
        // pop the root
        if (operationStack.isEmpty()) {
            throw new Exception("admin.customize.queryFilter.err.closedGtOpened");
        }
        operationStack.pop();
        if (!operationStack.isEmpty()) {
            throw new Exception("admin.customize.queryFilter.err.closedLtOpened");
        }
    }
    return root;
}
From source file:org.dhatim.thread.StackedThreadLocal.java
public T get() {
    Stack<T> execContextStack = getExecutionContextStack();
    try {
        return execContextStack.peek();
    } catch (EmptyStackException e) {
        if (logger.isDebugEnabled()) {
            logger.debug("No currently stacked '" + resourceName + "' instance on active Thread.", e);
        }
        return null;
    }
}
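This example treats an empty stack as a normal condition and converts EmptyStackException into a null return. An exception-free variant (a sketch only, not part of the original class) checks empty() before calling peek():

public T get() {
    Stack<T> execContextStack = getExecutionContextStack();
    // Avoid the exception entirely: only peek() when the stack is non-empty.
    return execContextStack.empty() ? null : execContextStack.peek();
}

The check-then-peek form is only safe when each thread owns its own stack, which is the case here because the stack is held in a ThreadLocal; for a stack shared across threads, the original catch of EmptyStackException avoids the race between the emptiness check and the peek.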
From source file:org.dhatim.yaml.handler.YamlEventStreamHandler.java
private boolean lastTypeIsArray(Stack<Type> typeStack) {
    return !typeStack.empty() && typeStack.peek() == Type.SEQUENCE;
}
From source file:tajo.engine.planner.LogicalOptimizer.java
private static void pushSelectionRecursive(LogicalNode plan, List<EvalNode> cnf, Stack<LogicalNode> stack) {
    switch (plan.getType()) {
    case SELECTION:
        SelectionNode selNode = (SelectionNode) plan;
        stack.push(selNode);
        pushSelectionRecursive(selNode.getSubNode(), cnf, stack);
        stack.pop();

        // remove the selection operator if there is no search condition
        // after selection push.
        if (cnf.size() == 0) {
            LogicalNode node = stack.peek();
            if (node instanceof UnaryNode) {
                UnaryNode unary = (UnaryNode) node;
                unary.setSubNode(selNode.getSubNode());
            } else {
                throw new InvalidQueryException("Unexpected Logical Query Plan");
            }
        }
        break;

    case JOIN:
        JoinNode join = (JoinNode) plan;
        LogicalNode outer = join.getOuterNode();
        LogicalNode inner = join.getInnerNode();
        pushSelectionRecursive(outer, cnf, stack);
        pushSelectionRecursive(inner, cnf, stack);

        List<EvalNode> matched = Lists.newArrayList();
        for (EvalNode eval : cnf) {
            if (canBeEvaluated(eval, plan)) {
                matched.add(eval);
            }
        }

        EvalNode qual = null;
        if (matched.size() > 1) {
            // merged into one eval tree
            qual = EvalTreeUtil.transformCNF2Singleton(matched.toArray(new EvalNode[matched.size()]));
        } else if (matched.size() == 1) {
            // if the number of matched expr is one
            qual = matched.get(0);
        }

        if (qual != null) {
            JoinNode joinNode = (JoinNode) plan;
            if (joinNode.hasJoinQual()) {
                EvalNode conjQual = EvalTreeUtil.transformCNF2Singleton(joinNode.getJoinQual(), qual);
                joinNode.setJoinQual(conjQual);
            } else {
                joinNode.setJoinQual(qual);
            }
            if (joinNode.getJoinType() == JoinType.CROSS_JOIN) {
                joinNode.setJoinType(JoinType.INNER);
            }
            cnf.removeAll(matched);
        }
        break;

    case SCAN:
        matched = Lists.newArrayList();
        for (EvalNode eval : cnf) {
            if (canBeEvaluated(eval, plan)) {
                matched.add(eval);
            }
        }

        qual = null;
        if (matched.size() > 1) {
            // merged into one eval tree
            qual = EvalTreeUtil.transformCNF2Singleton(matched.toArray(new EvalNode[matched.size()]));
        } else if (matched.size() == 1) {
            // if the number of matched expr is one
            qual = matched.get(0);
        }

        if (qual != null) {
            // if a matched qual exists
            ScanNode scanNode = (ScanNode) plan;
            scanNode.setQual(qual);
        }
        cnf.removeAll(matched);
        break;

    default:
        stack.push(plan);
        if (plan instanceof UnaryNode) {
            UnaryNode unary = (UnaryNode) plan;
            pushSelectionRecursive(unary.getSubNode(), cnf, stack);
        } else if (plan instanceof BinaryNode) {
            BinaryNode binary = (BinaryNode) plan;
            pushSelectionRecursive(binary.getOuterNode(), cnf, stack);
            pushSelectionRecursive(binary.getInnerNode(), cnf, stack);
        }
        stack.pop();
        break;
    }
}
From source file:org.sakaiproject.archive.impl.BasicArchiveService.java
/**
 * Archive the users defined in this site (internal users only).
 * @param site the site.
 * @param doc The document to contain the xml.
 * @param stack The stack of elements, the top of which will be the containing
 *        element of the "site" element.
 */
protected static String archiveUsers(Site site, Document doc, Stack stack) {
    Element element = doc.createElement(UserDirectoryService.APPLICATION_ID);
    ((Element) stack.peek()).appendChild(element);
    stack.push(element);

    try {
        // get the site's user list
        List users = new Vector();
        String realmId = "/site/" + site.getId();
        try {
            AuthzGroup realm = AuthzGroupService.getAuthzGroup(realmId);
            users.addAll(UserDirectoryService.getUsers(realm.getUsers()));
            Collections.sort(users);
            for (int i = 0; i < users.size(); i++) {
                User user = (User) users.get(i);
                user.toXml(doc, stack);
            }
        } catch (GroupNotDefinedException e) {
            // Log.warn("chef", "SiteAction.updateParticipantList IdUnusedException " + realmId);
        } catch (Exception any) {
        }
    } catch (Exception any) {
        // M_log.warn("archve: exception archiving users: "
        //         + site.getId() + ": ", any);
    }

    stack.pop();

    return "archiving the users for Site: " + site.getId() + "\n";
}