List of usage examples for the java.util.Stack constructor
public Stack()
From source file:app.web.SeleniumPage.java
/**
 * Builds the stack of page locations visited so far, derived from the
 * recorded action stack. Only actions that open a new page (OPEN or
 * CLICK_NEW_PAGE) contribute a location.
 *
 * @return a stack of location URLs, oldest at the bottom
 */
@Override
public Stack<URL> getLocationStack() {
    Stack<URL> locations = new Stack<URL>();
    for (Action action : actionStack) {
        Action.ActionType type = action.getType();
        boolean opensNewPage = type == Action.ActionType.OPEN
                || type == Action.ActionType.CLICK_NEW_PAGE;
        if (opensNewPage) {
            locations.push(toLocationPath(action.getValue()));
        }
    }
    return locations;
}
From source file:com.ctriposs.rest4j.tools.idlcheck.Rest4JResourceModelCompatibilityChecker.java
/**
 * Check backwards compatibility between two idl (.restspec.json) files.
 *
 * @param prevRestspecPath previously existing idl file
 * @param currRestspecPath current idl file
 * @param compatLevel compatibility level which affects the return value
 * @return true if the check result conforms to the compatibility level requirement,
 *         e.g. false if backwards-compatible changes are found but the level is equivalent
 */
public boolean check(String prevRestspecPath, String currRestspecPath, CompatibilityLevel compatLevel) {
    _prevRestspecPath = prevRestspecPath;
    _currRestspecPath = currRestspecPath;

    // Path of the element currently being compared; starts at the resource root.
    Stack<Object> path = new Stack<Object>();
    path.push("");

    ResourceSchema prevRec = null;
    ResourceSchema currRec = null;
    // try-with-resources closes the streams; the original leaked both FileInputStreams.
    try (FileInputStream prevIn = new FileInputStream(prevRestspecPath)) {
        prevRec = _codec.readResourceSchema(prevIn);
    } catch (FileNotFoundException e) {
        // No previous idl file: the resource is new.
        _infoMap.addRestSpecInfo(CompatibilityInfo.Type.RESOURCE_NEW, path, currRestspecPath);
    } catch (IOException e) {
        _infoMap.addRestSpecInfo(CompatibilityInfo.Type.OTHER_ERROR, path, e.getMessage());
    }

    try (FileInputStream currIn = new FileInputStream(currRestspecPath)) {
        currRec = _codec.readResourceSchema(currIn);
    } catch (FileNotFoundException e) {
        // No current idl file: the resource was removed.
        _infoMap.addRestSpecInfo(CompatibilityInfo.Type.RESOURCE_MISSING, path, prevRestspecPath);
    } catch (Exception e) {
        _infoMap.addRestSpecInfo(CompatibilityInfo.Type.OTHER_ERROR, path, e.getMessage());
    }

    if (prevRec == null || currRec == null) {
        // At least one file could not be read; the verdict is based on what was recorded above.
        return _infoMap.isCompatible(compatLevel);
    }

    // Resolve referenced data schemas, from the filesystem when a resolver path was configured.
    final DataSchemaResolver resolver;
    if (_resolverPath == null) {
        resolver = new DefaultDataSchemaResolver();
    } else {
        resolver = new XmlFileDataSchemaResolver(SchemaParserFactory.instance(), _resolverPath);
    }

    ResourceCompatibilityChecker checker = new ResourceCompatibilityChecker(prevRec, resolver, currRec,
            resolver);
    boolean check = checker.check(compatLevel);
    _infoMap.addAll(checker.getInfoMap());
    return check;
}
From source file:com.anite.zebra.core.Engine.java
public void transitionTask(ITaskInstance taskInstance) throws TransitionException { /*/*from ww w. j a va 2s.c o m*/ * we need to LOCK the ProcessInstance from changes by other Engine * instances */ IProcessInstance currentProcess = taskInstance.getProcessInstance(); try { stateFactory.acquireLock(currentProcess, this); } catch (LockException e) { String emsg = "Failed to aquire an exclusive lock on the Process Instance (" + currentProcess + "). Transitioning aborted."; log.error(emsg, e); throw new TransitionException(emsg, e); } Stack taskStack = new Stack(); taskStack.push(taskInstance); while (!taskStack.empty()) { // get the task from the Stack ITaskInstance currentTask = (ITaskInstance) taskStack.pop(); Map createdTasks; try { createdTasks = transitionTaskFromStack(currentTask, currentProcess); } catch (Exception e) { String emsg = "Problem encountered transitioning task from Stack"; log.error(emsg, e); throw new TransitionException(e); } for (Iterator it = createdTasks.values().iterator(); it.hasNext();) { ITaskInstance newTask = (ITaskInstance) it.next(); ITaskDefinition td; try { td = newTask.getTaskDefinition(); } catch (DefinitionNotFoundException e) { String emsg = "FATAL: Failed to access the Task Definition"; log.error(emsg, e); // throwing an exception here will leave the process "locked", but that is a valid situation throw new TransitionException(emsg, e); } if (td.isAuto() || td.isSynchronised()) { /* * is an Auto task, so add to the stack for processing. 
* Also treat * check, * to see if task is present in stack before adding it */ if (!taskStack.contains(newTask)) { if (log.isInfoEnabled()) { log.info("Added task to TaskStack - " + newTask); } taskStack.push(newTask); } else { if (log.isInfoEnabled()) { log.info("transitionTask - task already exists in stack " + newTask); } } } } } try { if (currentProcess.getTaskInstances().size() == 0) { // mark process complete doProcessDestruct(currentProcess); } /* * release lock on process instance */ stateFactory.releaseLock(currentProcess, this); } catch (Exception e) { String emsg = "FATAL: Couldnt release lock on Process Instance (" + currentProcess + ") after transitioning. Process will be left in an usuable state"; log.fatal(emsg, e); throw new TransitionException(emsg, e); } }
From source file:com.webcohesion.ofx4j.io.TestBaseOFXReader.java
/**
 * Tests using SAX to parse a version-2 OFX document.
 */
public void testVersion2() throws Exception {
    BaseOFXReader reader = new BaseOFXReader() {
        protected void parseV1FromFirstElement(Reader reader) throws IOException, OFXParseException {
            // A version-2 document must never be routed to the V1 parser.
            fail();
        }
    };
    final Map<String, String> headerMap = new HashMap<String, String>();
    final Stack<Map<String, Object>> openAggregates = new Stack<Map<String, Object>>();
    TreeMap<String, Object> rootAggregate = new TreeMap<String, Object>();
    openAggregates.push(rootAggregate);
    reader.setContentHandler(new DefaultHandler() {
        @Override
        public void onHeader(String name, String value) {
            LOG.debug(name + ":" + value);
            headerMap.put(name, value);
        }

        @Override
        public void onElement(String name, String value) {
            char[] indent = new char[openAggregates.size() * 2];
            Arrays.fill(indent, ' ');
            LOG.debug(new String(indent) + name + "=" + value);
            openAggregates.peek().put(name, value);
        }

        @Override
        public void startAggregate(String aggregateName) {
            char[] indent = new char[openAggregates.size() * 2];
            Arrays.fill(indent, ' ');
            LOG.debug(new String(indent) + aggregateName + " {");
            TreeMap<String, Object> aggregate = new TreeMap<String, Object>();
            openAggregates.peek().put(aggregateName, aggregate);
            openAggregates.push(aggregate);
        }

        @Override
        public void endAggregate(String aggregateName) {
            openAggregates.pop();
            char[] indent = new char[openAggregates.size() * 2];
            Arrays.fill(indent, ' ');
            LOG.debug(new String(indent) + "}");
        }
    });
    reader.parse(BaseOFXReader.class.getResourceAsStream("example-response.ofx2"));
    // All headers were captured and every aggregate was closed, leaving only the root.
    assertEquals(5, headerMap.size());
    assertEquals(1, openAggregates.size());
    assertSame(rootAggregate, openAggregates.pop());
}
From source file:csns.importer.parser.csula.RosterParserImpl.java
/**
 * Handles the format under Self Service -> Faculty Center -> My Schedule on
 * GET. A sample record is:
 * "Doe,John M 302043188 3.00 Engr, Comp Sci, & Tech CS MS".
 * Again, not all fields may be present.
 *
 * @param text the raw roster text
 * @return the students extracted from the text
 */
private List<ImportedUser> parse2(String text) {
    List<ImportedUser> results = new ArrayList<ImportedUser>();
    Stack<String> pending = new Stack<String>();
    Scanner tokens = new Scanner(text);
    tokens.useDelimiter("\\s+|\\r\\n|\\r|\\n");
    while (tokens.hasNext()) {
        // Collect tokens until a name token appears, then pull any
        // non-degree tokens back off the stack to build the full name.
        String fullName = "";
        do {
            String token = tokens.next();
            if (isName(token)) {
                fullName = token;
                while (!pending.isEmpty() && !isDegree(pending.peek())) {
                    fullName = pending.pop() + " " + fullName;
                }
                break;
            }
            pending.push(token);
        } while (tokens.hasNext());
        // Scan forward for the CIN; tokens before it still belong to the name.
        String cin = "";
        boolean foundCin = false;
        while (tokens.hasNext()) {
            cin = tokens.next();
            if (isCin(cin)) {
                foundCin = true;
                break;
            }
            fullName += " " + cin;
        }
        // Only records with a CIN produce a student entry.
        if (foundCin) {
            ImportedUser student = new ImportedUser();
            student.setCin(cin);
            student.setName(fullName);
            results.add(student);
        }
    }
    tokens.close();
    return results;
}
From source file:com.espertech.esper.epl.spec.PatternStreamSpecRaw.java
/**
 * Compiles this raw pattern stream spec into a {@link PatternStreamSpecCompiled},
 * assigning stable tag numbers to filter "event as" names and optionally
 * wrapping the evaluation tree with audit nodes.
 *
 * @param context statement context supplying annotations and services
 * @param eventTypeReferences receives the names of event types referenced
 * @param isInsertInto whether compiled as part of an insert-into
 * @param assignedTypeNumberStack subexpression type numbers assigned so far
 * @param tags tagged-event collection; a fresh one is created when null
 * @param priorAllTags tag names already ordered by an enclosing compile, or null
 * @return the compiled pattern stream specification
 * @throws ExprValidationException if expression validation fails during compilation
 */
private PatternStreamSpecCompiled compileInternal(StatementContext context, Set<String> eventTypeReferences,
        boolean isInsertInto, Collection<Integer> assignedTypeNumberStack, MatchEventSpec tags,
        Set<String> priorAllTags) throws ExprValidationException {
    if (tags == null) {
        tags = new MatchEventSpec();
    }
    Deque<Integer> subexpressionIdStack = new ArrayDeque<Integer>(assignedTypeNumberStack);
    ExprEvaluatorContext evaluatorContextStmt = new ExprEvaluatorContextStatement(context);
    Stack<EvalFactoryNode> nodeStack = new Stack<EvalFactoryNode>();

    // determine ordered tags
    Set<EvalFactoryNode> filterFactoryNodes = EvalNodeUtil.recursiveGetChildNodes(evalFactoryNode,
            FilterForFilterFactoryNodes.INSTANCE);
    LinkedHashSet<String> allTagNamesOrdered = new LinkedHashSet<String>();
    if (priorAllTags != null) {
        allTagNamesOrdered.addAll(priorAllTags);
    }
    // Assign each filter's "event as" name a tag number: new names get the next
    // ordinal, previously-seen names reuse their existing number.
    for (EvalFactoryNode filterNode : filterFactoryNodes) {
        EvalFilterFactoryNode factory = (EvalFilterFactoryNode) filterNode;
        int tagNumber;
        if (factory.getEventAsName() != null) {
            if (!allTagNamesOrdered.contains(factory.getEventAsName())) {
                allTagNamesOrdered.add(factory.getEventAsName());
                tagNumber = allTagNamesOrdered.size() - 1;
            } else {
                tagNumber = findTagNumber(factory.getEventAsName(), allTagNamesOrdered);
            }
            factory.setEventAsTagNumber(tagNumber);
        }
    }

    recursiveCompile(evalFactoryNode, context, evaluatorContextStmt, eventTypeReferences, isInsertInto, tags,
            subexpressionIdStack, nodeStack, allTagNamesOrdered);

    Audit auditPattern = AuditEnum.PATTERN.getAudit(context.getAnnotations());
    Audit auditPatternInstance = AuditEnum.PATTERNINSTANCES.getAudit(context.getAnnotations());
    EvalFactoryNode compiledEvalFactoryNode = evalFactoryNode;
    // Wrap the evaluation tree with audit nodes when pattern auditing is enabled
    // via annotations.
    if (auditPattern != null || auditPatternInstance != null) {
        EvalAuditInstanceCount instanceCount = new EvalAuditInstanceCount();
        compiledEvalFactoryNode = recursiveAddAuditNode(null, auditPattern != null, auditPatternInstance != null,
                evalFactoryNode, evalNodeExpressions, instanceCount);
    }

    return new PatternStreamSpecCompiled(compiledEvalFactoryNode, tags.getTaggedEventTypes(),
            tags.getArrayEventTypes(), allTagNamesOrdered, this.getViewSpecs(), this.getOptionalStreamName(),
            this.getOptions());
}
From source file:de.tudarmstadt.ukp.dkpro.wsd.graphconnectivity.algorithm.GraphConnectivityWSD.java
/**
 * Disambiguates the given subjects of disambiguation (sods) by building a
 * disambiguation graph: sense vertices for every sod, connected by paths
 * found via depth-first search through the sense inventory graph.
 *
 * @param sods the word/POS pairs to disambiguate
 * @return for each sod, a map of sense IDs to scores
 * @throws SenseInventoryException on sense inventory access errors
 */
@Override
public Map<Pair<String, POS>, Map<String, Double>> getDisambiguation(Collection<Pair<String, POS>> sods)
        throws SenseInventoryException {
    Graph<String, UnorderedPair<String>> siGraph = inventory.getUndirectedGraph();
    Graph<String, UnorderedPair<String>> dGraph = new UndirectedSparseGraph<String, UnorderedPair<String>>();
    // sodCount doubles as the color index for visualization.
    int sodCount = 0;
    if (graphVisualizer != null) {
        graphVisualizer.initializeColorMap(sods.size());
        graphVisualizer.setVertexToolTipTransformer(new VertexToolTipTransformer());
    }
    // Find the senses for the lemmas and add them to a graph
    for (Pair<String, POS> wsdItem : sods) {
        List<String> senses = inventory.getSenses(wsdItem.getFirst(), wsdItem.getSecond());
        if (senses.isEmpty()) {
            // Deliberately tolerant: log and continue rather than abort the
            // whole disambiguation for one unknown item.
            logger.warn("unknown subject of disambiguation " + wsdItem);
            // throw new SenseInventoryException(
            // "unknown subject of disambiguation " + wsdItem);
        }
        for (String sense : senses) {
            if (graphVisualizer != null) {
                graphVisualizer.setColor(sense, sodCount);
            }
            dGraph.addVertex(sense);
        }
        sodCount++;
    }
    logger.debug(dGraph.toString());
    if (graphVisualizer != null) {
        graphVisualizer.initialize(dGraph);
    }
    // For each synset v in s, perform a depth-first search on siGraph.
    // Every time we encounter a synset v' from s along a path of length
    // l, we add to dGraph all intermediate nodes and edges on the path
    // from v to v'.
    // Run DFS on each synset in s
    Collection<String> s = new HashSet<String>(dGraph.getVertices());
    for (String v : s) {
        logger.debug("Beginning DFS from " + v);
        // t is the set of target synsets: every start vertex except v itself.
        Collection<String> t = new HashSet<String>(s);
        t.remove(v);
        Stack<String> synsetPath = new Stack<String>();
        synsetPath.push(v);
        dfs(v, t, siGraph, dGraph, synsetPath, new Stack<UnorderedPair<String>>(), searchDepth);
    }
    logger.debug(dGraph.toString());
    // Find the best synsets for each word in the sentence
    final Map<Pair<String, POS>, Map<String, Double>> solutions = getDisambiguation(sods, dGraph);
    // Repaint the frame to show the disambiguated senses
    if (graphVisualizer != null) {
        graphVisualizer.refresh();
    }
    return solutions;
}
From source file:org.matsim.analysis.IterationStopWatch.java
/**
 * Sets the current iteration, so that the times measured using
 * {@link #beginOperation(String)}, {@link #endOperation(String)} and
 * {@link #timestamp(String)} are assigned to the correct iteration for
 * the analysis.
 *
 * @param iteration the iteration to begin
 */
public void beginIteration(final int iteration) {
    this.iteration = Integer.valueOf(iteration);
    boolean unseenIteration = this.iterations.get(this.iteration) == null;
    if (unseenIteration) {
        // First time this iteration is seen: reset positions and register
        // fresh per-iteration bookkeeping structures.
        this.nextIdentifierPosition = 0;
        this.nextOperationPosition = 0;
        this.currentIterationValues = new HashMap<String, Long>();
        this.currentMeasuredOperations = new Stack<String>();
        this.currentIterationChildren = new HashMap<String, List<String>>();
        this.iterations.put(this.iteration, this.currentIterationValues);
        this.children.put(this.iteration, this.currentIterationChildren);
    }
    this.beginOperation(AbstractController.OPERATION_ITERATION);
}
From source file:com.flexive.shared.search.query.SqlQueryBuilder.java
/**
 * Instantiates an empty query builder for the specified result location and view type.
 *
 * @param location the result location
 * @param viewType the view type
 */
public SqlQueryBuilder(ResultLocation location, ResultViewType viewType) {
    // Delegate to the full constructor with empty collections, an empty
    // filter/operator state, and default flags (true, false).
    this(new ArrayList<String>(), new ArrayList<String>(), new StringBuilder(), new ArrayList<String>(),
            new HashSet<Table>(), new Stack<Operator>(), new Stack<Integer>(), true, false, location, viewType);
}