List of usage examples for java.util Stack push
public E push(E item)
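push appends the item as the new top element of the stack and returns the item itself. Before the project-specific listings below, here is a minimal self-contained sketch of that contract; the class name and values are hypothetical and not taken from any of the projects listed.

import java.util.Stack;

public class StackPushSketch {
    public static void main(String[] args) {
        Stack<String> pages = new Stack<String>();

        // push returns the pushed element, so it can be logged or chained directly
        String top = pages.push("home");
        pages.push("search");
        pages.push("results");

        System.out.println(top);          // home
        System.out.println(pages.peek()); // results (last pushed element is on top)
        System.out.println(pages.pop());  // results (pop removes it again, LIFO order)
        System.out.println(pages);        // [home, search]
    }
}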
From source file:app.web.SeleniumPage.java
@Override
public Stack<URL> getLocationStack() {
    Stack<URL> locationStack = new Stack<URL>();
    for (Action action : actionStack) {
        if (action.getType() == Action.ActionType.OPEN
                || action.getType() == Action.ActionType.CLICK_NEW_PAGE) {
            locationStack.push(toLocationPath(action.getValue()));
        }
    }
    return locationStack;
}
From source file:ca.weblite.codename1.ios.CodenameOneIOSBuildTask.java
/**
 * Based on https://github.com/shannah/cn1/blob/master/Ports/iOSPort/xmlvm/src/xmlvm/org/xmlvm/proc/out/build/XCodeFile.java
 *
 * @param template
 * @param files
 */
private String injectFilesIntoXcodeProject(String template, File[] files) {
    int nextid = 0;
    Pattern idPattern = Pattern.compile(" (\\d+) ");
    Matcher m = idPattern.matcher(template);
    while (m.find()) {
        int curr = Integer.parseInt(m.group(1));
        if (curr > nextid) {
            nextid = curr;
        }
    }
    nextid++;

    StringBuilder filerefs = new StringBuilder();
    StringBuilder buildrefs = new StringBuilder();
    StringBuilder display = new StringBuilder();
    StringBuilder source = new StringBuilder();
    StringBuilder resource = new StringBuilder();

    for (File f : files) {
        String fname = f.getName();
        if (template.indexOf(" " + fname + " ") >= 0) {
            continue;
        }
        FileResource fres = new FileResource(fname);
        if (f.exists()) {
            filerefs.append("\t\t").append(nextid);
            filerefs.append(" /* ").append(fname).append(" */");
            filerefs.append(" = { isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = ");
            filerefs.append(fres.type).append("; path = \"");
            filerefs.append(fname).append("\"; sourceTree = \"<group>\"; };");
            filerefs.append('\n');

            display.append("\t\t\t\t").append(nextid);
            display.append(" /* ").append(fname).append(" */");
            display.append(",\n");

            if (fres.isBuildable) {
                int fileid = nextid;
                nextid++;
                buildrefs.append("\t\t").append(nextid);
                buildrefs.append(" /* ").append(fname);
                buildrefs.append(" in ").append(fres.isSource ? "Sources" : "Resources");
                buildrefs.append(" */ = {isa = PBXBuildFile; fileRef = ").append(fileid);
                buildrefs.append(" /* ").append(fname);
                buildrefs.append(" */; };\n");
                if (fres.isSource) {
                    source.append("\t\t\t\t").append(nextid);
                    source.append(" /* ").append(fname).append(" */");
                    source.append(",\n");
                }
            }
            nextid++;
        }
    }

    String data = template;
    data = data.replace("/* End PBXFileReference section */",
            filerefs.toString() + "/* End PBXFileReference section */");
    data = data.replace("/* End PBXBuildFile section */",
            buildrefs.toString() + "/* End PBXBuildFile section */");

    // The next two we probably shouldn't do by regex because there is no clear pattern.
    Stack<String> buffer = new Stack<String>();
    Stack<String> backtrackStack = new Stack<String>();
    Scanner scanner = new Scanner(data);
    while (scanner.hasNextLine()) {
        String line = scanner.nextLine();
        if (line.indexOf("/* End PBXSourcesBuildPhase section */") >= 0) {
            // Found the end, let's backtrack
            while (!buffer.isEmpty()) {
                String l = buffer.pop();
                backtrackStack.push(l);
                if (");".equals(l.trim())) {
                    // This is the closing of the sources list
                    // we can insert the sources here
                    buffer.push(source.toString());
                    while (!backtrackStack.isEmpty()) {
                        buffer.push(backtrackStack.pop());
                    }
                    break;
                }
            }
        } else if (line.indexOf("name = Application;") >= 0) {
            while (!buffer.isEmpty()) {
                String l = buffer.pop();
                backtrackStack.push(l);
                if (");".equals(l.trim())) {
                    buffer.push(display.toString());
                    while (!backtrackStack.isEmpty()) {
                        buffer.push(backtrackStack.pop());
                    }
                    break;
                }
            }
        }
        buffer.push(line);
    }

    StringBuilder sb = new StringBuilder();
    String[] lines = buffer.toArray(new String[0]);
    for (String line : lines) {
        sb.append(line).append("\n");
    }
    data = sb.toString();
    return data;
}
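The buffer/backtrackStack pair above rewinds already-seen lines off one stack onto another until a marker line is found, pushes the new text, and then replays the rewound lines so they return in their original order. A stripped-down sketch of that insert-before-marker trick, using hypothetical input lines rather than real Xcode project content:

import java.util.Stack;

public class InsertBeforeMarkerSketch {
    public static void main(String[] args) {
        Stack<String> buffer = new Stack<String>();
        Stack<String> backtrack = new Stack<String>();

        String[] lines = { "files = (", "  A.m,", "  B.m,", ");" };
        for (String line : lines) {
            buffer.push(line);
        }

        // rewind until the closing ");" is found, then insert and replay
        while (!buffer.isEmpty()) {
            String l = buffer.pop();
            backtrack.push(l);
            if (");".equals(l.trim())) {
                buffer.push("  C.m,"); // the new entry lands just before the marker
                while (!backtrack.isEmpty()) {
                    buffer.push(backtrack.pop()); // replay preserves the original order
                }
                break;
            }
        }

        // prints the original lines with "  C.m," inserted just before ");"
        for (String out : buffer) {
            System.out.println(out);
        }
    }
}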
From source file:com.ikanow.aleph2.harvest.logstash.utils.LogstashConfigUtils.java
@SuppressWarnings("deprecation")
public static ObjectNode parseLogstashConfig(String configFile, StringBuffer error) {

    ObjectNode tree = _mapper.createObjectNode();

    // Stage 0: remove escaped "s and 's (for the purpose of the validation):
    // (prevents tricksies with escaped "s and then #s)
    // (http://stackoverflow.com/questions/5082398/regex-to-replace-single-backslashes-excluding-those-followed-by-certain-chars)
    configFile = configFile.replaceAll("(?<!\\\\)(?:((\\\\\\\\)*)\\\\)[\"']", "X");
    //TESTED (by hand - using last 2 fields of success_2_1)

    // Stage 1: remove #s, and anything in quotes (for the purpose of the validation)
    configFile = configFile.replaceAll("(?m)(?:([\"'])(?:(?!\\1).)*\\1)", "VALUE").replaceAll("(?m)(?:#.*$)", "");
    //TESTED (2_1 - including with a # inside the ""s - Event_Date -> Event_#Date)
    //TESTED (2_2 - various combinations of "s nested inside 's) ... yes that is a negative lookahead up there - yikes!

    // Stage 2: get a nested list of objects
    int depth = 0;
    int ifdepth = -1;
    Stack<Integer> ifStack = new Stack<Integer>();
    ObjectNode inputOrFilter = null;
    Matcher m = _navigateLogstash.matcher(configFile);
    // State:
    String currTopLevelBlockName = null;
    String currSecondLevelBlockName = null;
    ObjectNode currSecondLevelBlock = null;
    while (m.find()) {
        boolean simpleField = false;

        //DEBUG
        //System.out.println("--DEPTH="+depth + " GROUP=" + m.group() + " IFS" + Arrays.toString(ifStack.toArray()));
        //System.out.println("STATES: " + currTopLevelBlockName + " AND " + currSecondLevelBlockName);

        if (m.group().equals("}")) {
            if (ifdepth == depth) { // closing an if statement
                ifStack.pop();
                if (ifStack.isEmpty()) {
                    ifdepth = -1;
                } else {
                    ifdepth = ifStack.peek();
                }
            } //TESTED (1_1bc, 2_1)
            else { // closing a processing block
                depth--;
                if (depth < 0) { // {} Mismatch
                    error.append("{} Mismatch (})");
                    return null;
                } //TESTED (1_1abc)
            }
        } else { // new attribute!
            String typeName = m.group(1);
            if (null == typeName) { // it's an if statement or a string value
                typeName = m.group(4);
                if (null != typeName) {
                    simpleField = true;
                }
            } else if (typeName.equalsIgnoreCase("else")) { // It's an if statement..
                typeName = null;
            }
            if (null == typeName) { // if statement after all
                // Just keep track of ifs so we can ignore them
                ifStack.push(depth);
                ifdepth = depth;
                // (don't increment depth)
            } //TESTED (1_1bc, 2_1)
            else { // processing block
                String subTypeName = m.group(3);
                if (null != subTypeName) { // eg codec.multiline
                    typeName = typeName + "." + subTypeName;
                } //TESTED (2_1, 2_3)

                if (depth == 0) { // has to be one of input/output/filter)
                    String topLevelType = typeName.toLowerCase();
                    if (topLevelType.equalsIgnoreCase("input") || topLevelType.equalsIgnoreCase("filter")) {
                        if (tree.has(topLevelType)) {
                            error.append("Multiple input or filter blocks: " + topLevelType);
                            return null;
                        } //TESTED (1_3ab)
                        else {
                            inputOrFilter = _mapper.createObjectNode();
                            tree.put(topLevelType, inputOrFilter);

                            // Store state:
                            currTopLevelBlockName = topLevelType;
                        } //TESTED (*)
                    } else {
                        if (topLevelType.equalsIgnoreCase("output")) {
                            error.append(
                                    "Not allowed output blocks - these are appended automatically by the logstash harvester");
                        } else {
                            error.append("Unrecognized processing block: " + topLevelType);
                        }
                        return null;
                    } //TESTED (1_4a)
                } else if ((depth == 1) && (null != inputOrFilter)) { // processing blocks
                    String subElType = typeName.toLowerCase();

                    // Some validation: can't include a type called "filter" anywhere
                    if ((null != currTopLevelBlockName) && currTopLevelBlockName.equals("input")) {
                        if (subElType.equals("filter") || subElType.endsWith(".filter")) {
                            error.append("Not allowed sub-elements of input called 'filter' (1)");
                            return null;
                        }
                    } //TESTED (1_5b)

                    ArrayNode subElements = (ArrayNode) inputOrFilter.get(subElType);
                    if (null == subElements) {
                        subElements = _mapper.createArrayNode();
                        inputOrFilter.put(subElType, subElements);
                    }
                    ObjectNode newEl = _mapper.createObjectNode();
                    subElements.add(newEl);

                    // Store state:
                    currSecondLevelBlockName = subElType;
                    currSecondLevelBlock = newEl;
                } //TESTED (*)
                else if (depth == 2) { // attributes of processing blocks
                    // we'll just store the field names for these and do any simple validation
                    // that was too complicated for the regexes
                    String subSubElType = typeName.toLowerCase();

                    // Validation:
                    if (null != currTopLevelBlockName) {
                        // 1] sincedb path
                        if (currTopLevelBlockName.equals("input") && (null != currSecondLevelBlockName)) {
                            // (don't care what the second level block name is - no sincedb allowed)
                            if (subSubElType.equalsIgnoreCase("sincedb_path")) {
                                error.append("Not allowed sincedb_path in input.* block");
                                return null;
                            } //TESTED (1_5a)
                            // 2] no sub-(-sub etc)-elements of input called filter
                            if (subSubElType.equals("filter") || subSubElType.endsWith(".filter")) {
                                error.append("Not allowed sub-elements of input called 'filter' (2)");
                                return null;
                            } //TESTED (1_5c)
                        }
                    }
                    // Store in map:
                    if (null != currSecondLevelBlock) {
                        currSecondLevelBlock.put(subSubElType, _mapper.createObjectNode());
                    }
                }
                // (won't go any deeper than this)

                if (!simpleField) {
                    depth++;
                }
            }
        }
    }
    if (0 != depth) {
        error.append("{} Mismatch ({)");
        return null;
    } //TESTED (1_2a)

    return tree;
}
From source file:hudson.gridmaven.MavenModuleSet.java
public void onLoad(ItemGroup<? extends Item> parent, String name) throws IOException {
    modules = Collections.emptyMap(); // needed during load
    super.onLoad(parent, name);

    modules = loadChildren(this, getModulesDir(), new Function1<ModuleName, MavenModule>() {
        public ModuleName call(MavenModule module) {
            return module.getModuleName();
        }
    });

    // update the transient nest level field.
    MavenModule root = getRootModule();
    if (root != null && root.getChildren() != null) {
        List<MavenModule> sortedList = new ArrayList<MavenModule>();
        Stack<MavenModule> q = new Stack<MavenModule>();
        root.nestLevel = 0;
        q.push(root);
        while (!q.isEmpty()) {
            MavenModule p = q.pop();
            sortedList.add(p);
            List<MavenModule> children = p.getChildren();
            if (children != null) {
                for (MavenModule m : children)
                    m.nestLevel = p.nestLevel + 1;
                for (int i = children.size() - 1; i >= 0; i--) // add them in the reverse order
                    q.push(children.get(i));
            }
        }
        this.sortedActiveModules = sortedList;
    } else {
        this.sortedActiveModules = getDisabledModules(false);
    }

    if (reporters == null) {
        reporters = new DescribableList<MavenReporter, Descriptor<MavenReporter>>(this);
    }
    reporters.setOwner(this);
    if (publishers == null) {
        publishers = new DescribableList<Publisher, Descriptor<Publisher>>(this);
    }
    publishers.setOwner(this);
    if (buildWrappers == null) {
        buildWrappers = new DescribableList<BuildWrapper, Descriptor<BuildWrapper>>(this);
    }
    buildWrappers.setOwner(this);
    if (prebuilders == null) {
        prebuilders = new DescribableList<Builder, Descriptor<Builder>>(this);
    }
    prebuilders.setOwner(this);
    if (postbuilders == null) {
        postbuilders = new DescribableList<Builder, Descriptor<Builder>>(this);
    }
    postbuilders.setOwner(this);

    if (perModuleEmail == null) {
        perModuleEmail = Boolean.TRUE;
    }

    if (Boolean.TRUE.equals(usePrivateRepository)) {
        this.localRepository = new PerJobLocalRepositoryLocator();
        usePrivateRepository = null;
    }

    updateTransientActions();
}
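The onLoad method above flattens the module tree depth-first, pushing each node's children in reverse order so that they are popped (and therefore listed) in their original left-to-right order. The same pattern in isolation, with a hypothetical Node type standing in for MavenModule:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Stack;

public class PreorderFlattenSketch {

    // hypothetical tree node, standing in for MavenModule
    static class Node {
        final String name;
        final List<Node> children;
        Node(String name, Node... children) {
            this.name = name;
            this.children = Arrays.asList(children);
        }
    }

    public static void main(String[] args) {
        Node root = new Node("root",
                new Node("a", new Node("a1"), new Node("a2")),
                new Node("b"));

        List<String> sorted = new ArrayList<String>();
        Stack<Node> q = new Stack<Node>();
        q.push(root);
        while (!q.isEmpty()) {
            Node p = q.pop();
            sorted.add(p.name);
            // push children in reverse order so the first child is popped first
            for (int i = p.children.size() - 1; i >= 0; i--) {
                q.push(p.children.get(i));
            }
        }

        System.out.println(sorted); // [root, a, a1, a2, b]
    }
}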
From source file:org.apache.tajo.engine.planner.rewrite.FilterPushDownRule.java
@Override
public LogicalNode visitProjection(FilterPushDownContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
        ProjectionNode projectionNode, Stack<LogicalNode> stack) throws PlanningException {
    LogicalNode childNode = projectionNode.getChild();

    List<EvalNode> notMatched = new ArrayList<EvalNode>();
    // copy -> origin
    BiMap<EvalNode, EvalNode> transformedMap = findCanPushdownAndTransform(context, block, projectionNode,
            childNode, notMatched, null, false, 0);

    context.setFiltersTobePushed(transformedMap.keySet());

    stack.push(projectionNode);
    childNode = visit(context, plan, plan.getBlock(childNode), childNode, stack);
    stack.pop();

    // find not matched after visiting child
    for (EvalNode eval : context.pushingDownFilters) {
        notMatched.add(transformedMap.get(eval));
    }

    EvalNode qual = null;
    if (notMatched.size() > 1) { // merged into one eval tree
        qual = AlgebraicUtil.createSingletonExprFromCNF(notMatched.toArray(new EvalNode[notMatched.size()]));
    } else if (notMatched.size() == 1) { // if the number of matched expr is one
        qual = notMatched.get(0);
    }

    // If there is not matched node add SelectionNode and clear context.pushingDownFilters
    if (qual != null && LogicalPlanner.checkIfBeEvaluatedAtThis(qual, projectionNode)) {
        SelectionNode selectionNode = plan.createNode(SelectionNode.class);
        selectionNode.setInSchema(childNode.getOutSchema());
        selectionNode.setOutSchema(childNode.getOutSchema());
        selectionNode.setQual(qual);
        block.registerNode(selectionNode);

        projectionNode.setChild(selectionNode);
        selectionNode.setChild(childNode);

        // clean all remain filters because all conditions are merged into a qual
        context.pushingDownFilters.clear();
    }

    // if there are remain filters, recover the original names and give back to the upper query block.
    if (context.pushingDownFilters.size() > 0) {
        ImmutableSet<EvalNode> copy = ImmutableSet.copyOf(context.pushingDownFilters);
        context.pushingDownFilters.clear();
        context.pushingDownFilters.addAll(reverseTransform(transformedMap, copy));
    }

    return projectionNode;
}
From source file:org.sakaiproject.poll.service.impl.PollListManagerImpl.java
@SuppressWarnings("unchecked")
public String archive(String siteId, Document doc, Stack stack, String archivePath, List attachments) {
    log.debug("archive: poll " + siteId);
    // prepare the buffer for the results log
    StringBuilder results = new StringBuilder();

    // String assignRef = assignmentReference(siteId, SiteService.MAIN_CONTAINER);
    results.append("archiving " + getLabel() + " context " + Entity.SEPARATOR + siteId + Entity.SEPARATOR
            + SiteService.MAIN_CONTAINER + ".\n");

    // start with an element with our very own (service) name
    Element element = doc.createElement(PollListManager.class.getName());
    ((Element) stack.peek()).appendChild(element);
    stack.push(element);

    List pollsList = findAllPolls(siteId);
    log.debug("got list of " + pollsList.size() + " polls");
    for (int i = 0; pollsList.size() > i; i++) {
        try {
            Poll poll = (Poll) pollsList.get(i);
            log.info("got poll " + poll.getId());

            // archive this assignment
            Element el = poll.toXml(doc, stack);

            // since we aren't archiving votes too, don't worry about archiving the
            // soft-deleted options -- only "visible".
            List options = getVisibleOptionsForPoll(poll.getPollId());
            for (int q = 0; options.size() > q; q++) {
                Option opt = (Option) options.get(q);
                Element el2 = PollUtil.optionToXml(opt, doc, stack);
                el.appendChild(el2);
            }

            element.appendChild(el);
        } catch (Exception e) {
            e.printStackTrace();
        }
    } // while

    stack.pop();

    return results.toString();
}
From source file:org.apache.cocoon.util.log.ExtensiblePatternFormatter.java
/**
 * Extract and build a text run from input string.
 * It does special handling of '\n' and '\t', replacing
 * them with newline and tab.
 *
 * @param stack the stack on which to place runs
 * @param pattern the input string
 * @param index the start of the text run
 * @return the number of characters in run
 */
protected int addTextRun(final Stack stack, final char pattern[], int index) {
    final PatternRun run = new PatternRun();
    final int start = index;
    boolean escapeMode = false;

    if ('%' == pattern[index]) {
        index++;
    }

    final StringBuffer sb = new StringBuffer();
    while (index < pattern.length && pattern[index] != '%') {
        if (escapeMode) {
            if ('n' == pattern[index]) {
                sb.append(SystemUtils.LINE_SEPARATOR);
            } else if ('t' == pattern[index]) {
                sb.append('\t');
            } else {
                sb.append(pattern[index]);
            }
            escapeMode = false;
        } else if ('\\' == pattern[index]) {
            escapeMode = true;
        } else {
            sb.append(pattern[index]);
        }
        index++;
    }

    run.m_data = sb.toString();
    run.m_type = TYPE_TEXT;
    stack.push(run);

    return index - start;
}
From source file:org.apache.flink.cep.nfa.SharedBuffer.java
private void internalRemove(final SharedBufferEntry<K, V> entry) {
    Stack<SharedBufferEntry<K, V>> entriesToRemove = new Stack<>();
    entriesToRemove.add(entry);

    while (!entriesToRemove.isEmpty()) {
        SharedBufferEntry<K, V> currentEntry = entriesToRemove.pop();

        if (currentEntry.getReferenceCounter() == 0) {
            currentEntry.remove();

            for (SharedBufferEdge<K, V> edge : currentEntry.getEdges()) {
                if (edge.getTarget() != null) {
                    edge.getTarget().decreaseReferenceCounter();
                    entriesToRemove.push(edge.getTarget());
                }
            }
        }
    }
}
From source file:net.bulletin.pdi.xero.step.support.XMLChunkerImpl.java
private String pullNextXmlChunkFromTopElementOnStack(XMLChunkerState data) throws KettleException {

    Stack<String> elementStack = data.getElementStack();
    XMLStreamReader xmlStreamReader = data.getXmlStreamReader();
    int elementStackDepthOnEntry = elementStack.size();
    StringWriter stringWriter = new StringWriter();

    try {
        XMLStreamWriter xmlStreamWriter = data.getXmlOutputFactory().createXMLStreamWriter(stringWriter);

        xmlStreamWriter.writeStartDocument(CharEncoding.UTF_8, "1.0");

        // put the current element on because presumably it's the open element for the one
        // that is being looked for.
        XmlReaderToWriter.write(xmlStreamReader, xmlStreamWriter);

        while (xmlStreamReader.hasNext() && elementStack.size() >= elementStackDepthOnEntry) {
            switch (xmlStreamReader.next()) {

            case XMLStreamConstants.END_DOCUMENT:
                break; // handled below explicitly.

            case XMLStreamConstants.END_ELEMENT:
                elementStack.pop();
                XmlReaderToWriter.write(xmlStreamReader, xmlStreamWriter);
                break;

            case XMLStreamConstants.START_ELEMENT:
                elementStack.push(xmlStreamReader.getLocalName());
                XmlReaderToWriter.write(xmlStreamReader, xmlStreamWriter);
                break;

            default:
                XmlReaderToWriter.write(xmlStreamReader, xmlStreamWriter);
                break;
            }
        }

        xmlStreamWriter.writeEndDocument();
        xmlStreamWriter.close();
    } catch (Exception e) {
        throw new KettleException("unable to process a chunk of the xero xml stream", e);
    }

    return stringWriter.toString();
}
From source file:com.udojava.evalex.Expression.java
/**
 * Check that the expression has enough numbers and variables to fit the
 * requirements of the operators and functions, also check
 * for only 1 result stored at the end of the evaluation.
 */
private void validate(List<String> rpn) {
    /*-
     * Thanks to Norman Ramsey:
     * http://stackoverflow.com/questions/789847/postfix-notation-validation
     */
    // each push on to this stack is a new function scope, with the value of each
    // layer on the stack being the count of the number of parameters in that scope
    Stack<Integer> stack = new Stack<>();

    // push the 'global' scope
    stack.push(0);

    for (final String token : rpn) {
        if (operators.containsKey(token)) {
            if (stack.peek() < 2) {
                throw new ExpressionException("Missing parameter(s) for operator " + token);
            }
            // pop the operator's 2 parameters and add the result
            stack.set(stack.size() - 1, stack.peek() - 2 + 1);
        } else if (mainVars.containsKey(token)) {
            stack.set(stack.size() - 1, stack.peek() + 1);
        } else if (functions.containsKey(token.toUpperCase(Locale.ROOT))) {
            LazyFunction f = functions.get(token.toUpperCase(Locale.ROOT));
            int numParams = stack.pop();
            if (!f.numParamsVaries() && numParams != f.getNumParams()) {
                throw new ExpressionException(
                        "Function " + token + " expected " + f.getNumParams() + " parameters, got " + numParams);
            }
            if (stack.size() <= 0) {
                throw new ExpressionException("Too many function calls, maximum scope exceeded");
            }
            // push the result of the function
            stack.set(stack.size() - 1, stack.peek() + 1);
        } else if ("(".equals(token)) {
            stack.push(0);
        } else {
            stack.set(stack.size() - 1, stack.peek() + 1);
        }
    }

    if (stack.size() > 1) {
        throw new ExpressionException("Too many unhandled function parameter lists");
    } else if (stack.peek() > 1) {
        throw new ExpressionException("Too many numbers or variables");
    } else if (stack.peek() < 1) {
        throw new ExpressionException("Empty expression");
    }
}
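The validate method above uses a stack whose elements are counters rather than tokens: each push opens a new scope and the top integer tracks how many values that scope has produced so far. A reduced sketch of the same counting idea, using a toy token set (numbers, the binary +, and parentheses) instead of the full Expression grammar:

import java.util.Arrays;
import java.util.List;
import java.util.Stack;

public class RpnScopeCountSketch {

    /** Returns true if the toy RPN token list leaves exactly one value in the top-level scope. */
    static boolean balanced(List<String> rpn) {
        Stack<Integer> stack = new Stack<Integer>();
        stack.push(0); // the 'global' scope starts with zero values

        for (String token : rpn) {
            if ("+".equals(token)) {
                if (stack.peek() < 2) {
                    return false; // a binary operator needs two values in this scope
                }
                stack.set(stack.size() - 1, stack.peek() - 2 + 1); // consumes 2, produces 1
            } else if ("(".equals(token)) {
                stack.push(0); // open a new scope
            } else if (")".equals(token)) {
                int produced = stack.pop();
                stack.set(stack.size() - 1, stack.peek() + produced); // fold results into the outer scope
            } else {
                stack.set(stack.size() - 1, stack.peek() + 1); // a number adds one value
            }
        }
        return stack.size() == 1 && stack.peek() == 1;
    }

    public static void main(String[] args) {
        System.out.println(balanced(Arrays.asList("1", "2", "+")));      // true
        System.out.println(balanced(Arrays.asList("1", "+")));           // false - missing operand
        System.out.println(balanced(Arrays.asList("1", "2", "3", "+"))); // false - a value is left over
    }
}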