List of usage examples for java.util Stack push
public E push(E item)
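java.util.Stack.push places the item on top of the stack and returns that same item. Before the real-world examples below, a minimal, self-contained sketch (not taken from any of the source files listed) showing push together with pop and peek:

import java.util.Stack;

public class StackPushExample {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();
        stack.push("first");
        // push returns the item it just pushed
        String top = stack.push("second");
        System.out.println(top);          // second
        System.out.println(stack.pop());  // second (last in, first out)
        System.out.println(stack.peek()); // first
    }
}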
From source file:com.consol.citrus.mail.server.MailServer.java
private void fillStack(org.springframework.integration.Message<?> message,
        Stack<org.springframework.integration.Message<?>> responseStack) {
    if (message != null) {
        responseStack.push(message);
    }
}
From source file:com.joliciel.jochre.graphics.features.InnerEmptyChupchikLowerLeftFeature.java
@Override
public FeatureResult<Boolean> checkInternal(ShapeWrapper shapeWrapper, RuntimeEnvironment env) {
    Shape shape = shapeWrapper.getShape();
    BitSet bitset = shape.getBlackAndWhiteBitSet(shape.getJochreImage().getBlackThreshold());

    boolean[][] grid = new boolean[shape.getWidth()][shape.getHeight()];
    for (int i = 0; i < shape.getWidth(); i++) {
        for (int j = 0; j < shape.getHeight(); j++) {
            if (bitset.get(j * shape.getWidth() + i))
                grid[i][j] = true;
        }
    }

    int startX = shape.getWidth() / 2;
    int startY = shape.getHeight() / 2;
    while (startY < shape.getHeight() && grid[startX][startY]) {
        startY += 1;
    }

    WritableImageGrid mirror = this.graphicsService.getEmptyMirror(shape);

    boolean foundChupchikOrOpening = false;
    if (startY < shape.getHeight()) {
        Stack<HorizontalLineSegment> whiteLineStack = new Stack<HorizontalLineSegment>();
        Set<HorizontalLineSegment> whiteLineSet = new TreeSet<HorizontalLineSegment>();
        HorizontalLineSegment startLine = new HorizontalLineSegment(startX, startY);
        whiteLineStack.push(startLine);
        while (!whiteLineStack.empty()) {
            HorizontalLineSegment line = whiteLineStack.pop();
            if (line.y == shape.getHeight() - 1) {
                // found opening to the outside world
                if (LOG.isTraceEnabled())
                    LOG.trace("Reached edge: found opening");
                foundChupchikOrOpening = true;
                break;
            }
            if (mirror.getPixel(line.xLeft, line.y) == 1)
                continue;

            // extend this line to the right and left
            for (int i = line.xLeft; i >= 0; i--) {
                if (!grid[i][line.y])
                    line.xLeft = i;
                else
                    break;
            }
            for (int i = line.xRight; i <= startX; i++) {
                if (!grid[i][line.y])
                    line.xRight = i;
                else
                    break;
            }
            if (LOG.isTraceEnabled())
                LOG.trace(line.toString());
            whiteLineSet.add(line);
            for (int i = line.xLeft; i <= line.xRight; i++) {
                mirror.setPixel(i, line.y, 1);
            }

            // find lines below and to the left
            if (line.y < shape.getHeight() - 1) {
                boolean inLine = false;
                int row = line.y + 1;
                int xLeft = 0;
                for (int i = line.xLeft; i <= line.xRight; i++) {
                    if (!inLine && !grid[i][row]) {
                        inLine = true;
                        xLeft = i;
                    } else if (inLine && grid[i][row]) {
                        HorizontalLineSegment newLine = new HorizontalLineSegment(xLeft, row);
                        newLine.xRight = i - 1;
                        whiteLineStack.push(newLine);
                        inLine = false;
                    }
                }
                if (inLine) {
                    HorizontalLineSegment newLine = new HorizontalLineSegment(xLeft, row);
                    newLine.xRight = line.xRight;
                    whiteLineStack.push(newLine);
                }
            }
        }

        if (!foundChupchikOrOpening) {
            // if (LOG.isDebugEnabled()) {
            //     LOG.trace("List of lines");
            //     for (HorizontalLineSegment line : whiteLineSet) {
            //         LOG.trace(line.toString());
            //     }
            // }
            Iterator<HorizontalLineSegment> iLines = whiteLineSet.iterator();
            HorizontalLineSegment bottomLeftLine = iLines.next();
            double threshold = shape.getWidth() / 8;
            if (LOG.isTraceEnabled())
                LOG.trace("Length threshold: " + threshold);

            HorizontalLineSegment nextLine = null;
            List<HorizontalLineSegment> firstFewLines = new ArrayList<HorizontalLineSegment>();
            firstFewLines.add(bottomLeftLine);
            HorizontalLineSegment currentLine = bottomLeftLine;
            while (iLines.hasNext() && firstFewLines.size() < 3) {
                nextLine = iLines.next();
                if (nextLine.y != currentLine.y) {
                    firstFewLines.add(nextLine);
                    currentLine = nextLine;
                }
            }

            boolean mightHaveChupchik = true;
            HorizontalLineSegment prevLine = null;
            for (HorizontalLineSegment line : firstFewLines) {
                if (LOG.isTraceEnabled())
                    LOG.trace("Next line left, " + bottomLeftLine.xLeft + ", length: " + bottomLeftLine.length()
                            + ", threshold: " + threshold);
                if (line.length() > threshold) {
                    mightHaveChupchik = false;
                    break;
                }
                if (prevLine != null) {
                    if (line.xLeft + 2 < prevLine.xLeft) {
                        mightHaveChupchik = false;
                        break;
                    }
                    if (line.length() + 1 < prevLine.length()) {
                        mightHaveChupchik = false;
                        break;
                    }
                }
                prevLine = line;
                threshold = threshold * 1.2;
            }
            if (mightHaveChupchik)
                foundChupchikOrOpening = true;
        }
    }
    FeatureResult<Boolean> outcome = this.generateResult(foundChupchikOrOpening);
    return outcome;
}
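The example above drives an iterative flood fill with an explicit Stack: each popped white line segment is extended and may push further white segments from the row below, and the loop stops when the fill reaches the shape's bottom edge. A much-reduced sketch of the same push-driven work-list pattern, using a plain boolean grid and int[] coordinates instead of the HorizontalLineSegment type above (the grid and method names here are illustrative, not part of the Jochre API):

import java.util.Stack;

public class FloodFillSketch {
    // Counts the cells reachable from (startX, startY) over open cells (false = open).
    static int floodFillCount(boolean[][] blocked, int startX, int startY) {
        int width = blocked.length, height = blocked[0].length;
        boolean[][] visited = new boolean[width][height];
        Stack<int[]> stack = new Stack<int[]>();
        stack.push(new int[] { startX, startY });
        int count = 0;
        while (!stack.empty()) {
            int[] cell = stack.pop();
            int x = cell[0], y = cell[1];
            if (x < 0 || y < 0 || x >= width || y >= height || blocked[x][y] || visited[x][y])
                continue;
            visited[x][y] = true;
            count++;
            // push the four neighbours; out-of-range or blocked cells are filtered when popped
            stack.push(new int[] { x + 1, y });
            stack.push(new int[] { x - 1, y });
            stack.push(new int[] { x, y + 1 });
            stack.push(new int[] { x, y - 1 });
        }
        return count;
    }
}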
From source file:org.apache.hadoop.tools.DistCpV1.java
/**
 * Delete the dst files/dirs which do not exist in src
 *
 * @return total count of files and directories deleted from destination
 * @throws IOException
 */
static private long deleteNonexisting(FileSystem dstfs, FileStatus dstroot, Path dstsorted, FileSystem jobfs,
        Path jobdir, JobConf jobconf, Configuration conf) throws IOException {
    if (dstroot.isFile()) {
        throw new IOException("dst must be a directory when option " + Options.DELETE.cmd
                + " is set, but dst (= " + dstroot.getPath() + ") is not a directory.");
    }

    //write dst lsr results
    final Path dstlsr = new Path(jobdir, "_distcp_dst_lsr");
    try (final SequenceFile.Writer writer = SequenceFile.createWriter(jobconf, Writer.file(dstlsr),
            Writer.keyClass(Text.class), Writer.valueClass(NullWritable.class),
            Writer.compression(SequenceFile.CompressionType.NONE))) {
        //do lsr to get all file statuses in dstroot
        final Stack<FileStatus> lsrstack = new Stack<FileStatus>();
        for (lsrstack.push(dstroot); !lsrstack.isEmpty();) {
            final FileStatus status = lsrstack.pop();
            if (status.isDirectory()) {
                for (FileStatus child : dstfs.listStatus(status.getPath())) {
                    String relative = makeRelative(dstroot.getPath(), child.getPath());
                    writer.append(new Text(relative), NullWritable.get());
                    lsrstack.push(child);
                }
            }
        }
    }

    //sort lsr results
    final Path sortedlsr = new Path(jobdir, "_distcp_dst_lsr_sorted");
    SequenceFile.Sorter sorter = new SequenceFile.Sorter(jobfs, new Text.Comparator(), Text.class,
            NullWritable.class, jobconf);
    sorter.sort(dstlsr, sortedlsr);

    //compare lsr list and dst list
    long deletedPathsCount = 0;
    try (SequenceFile.Reader lsrin = new SequenceFile.Reader(jobconf, Reader.file(sortedlsr));
            SequenceFile.Reader dstin = new SequenceFile.Reader(jobconf, Reader.file(dstsorted))) {
        //compare sorted lsr list and sorted dst list
        final Text lsrpath = new Text();
        final Text dstpath = new Text();
        final Text dstfrom = new Text();
        final Trash trash = new Trash(dstfs, conf);
        Path lastpath = null;

        boolean hasnext = dstin.next(dstpath, dstfrom);
        while (lsrin.next(lsrpath, NullWritable.get())) {
            int dst_cmp_lsr = dstpath.compareTo(lsrpath);
            while (hasnext && dst_cmp_lsr < 0) {
                hasnext = dstin.next(dstpath, dstfrom);
                dst_cmp_lsr = dstpath.compareTo(lsrpath);
            }

            if (dst_cmp_lsr == 0) {
                //lsrpath exists in dst, skip it
                hasnext = dstin.next(dstpath, dstfrom);
            } else {
                //lsrpath does not exist, delete it
                final Path rmpath = new Path(dstroot.getPath(), lsrpath.toString());
                ++deletedPathsCount;
                if ((lastpath == null || !isAncestorPath(lastpath, rmpath))) {
                    if (!(trash.moveToTrash(rmpath) || dstfs.delete(rmpath, true))) {
                        throw new IOException("Failed to delete " + rmpath);
                    }
                    lastpath = rmpath;
                }
            }
        }
    }
    return deletedPathsCount;
}
From source file:org.openmrs.module.shr.cdahandler.api.impl.CdaImportServiceImpl.java
/**
 * Import the parsed clinical document
 * Auto generated method comment
 *
 * @param clinicalDocument
 * @return
 * @throws DocumentImportException
 */
@Override
public Visit importDocument(ClinicalDocument clinicalDocument) throws DocumentImportException {
    if (this.m_processor == null)
        this.m_processor = CdaImporter.getInstance();

    // TODO: Store incoming to a temporary table for CDAs (like the HL7 queue)
    Visit retVal = this.m_processor.processCdaDocument(clinicalDocument);

    // Notify of successful import
    if (retVal != null) {
        Stack<CdaImportSubscriber> toBeNotified = new Stack<CdaImportSubscriber>();

        // The generic ones for all
        Set<CdaImportSubscriber> candidates = this.m_subscribers.get("*");
        if (candidates != null)
            for (CdaImportSubscriber subscriber : candidates)
                toBeNotified.push(subscriber);

        // Notify the default always
        for (II templateId : clinicalDocument.getTemplateId()) {
            candidates = this.m_subscribers.get(templateId.getRoot());
            if (candidates == null)
                continue; // no candidates

            for (CdaImportSubscriber subscriber : candidates)
                if (!toBeNotified.contains(subscriber))
                    toBeNotified.push(subscriber);
        }

        // Notify the found subscribers
        while (!toBeNotified.isEmpty())
            toBeNotified.pop().onDocumentImported(clinicalDocument, retVal);
    }
    return retVal;
}
From source file:com.marklogic.dom.NodeImpl.java
protected NodeList getElementsByTagNameNSOrNodeName(String namespaceURI, String name, final boolean nodeName) {
    final String tagname = name;
    final String ns = namespaceURI;
    final Node thisNode = this;

    return new NodeList() {
        protected ArrayList<Node> elementList = new ArrayList<Node>();
        protected boolean done = false;

        protected void init() {
            if (done)
                return;
            Stack<Node> childrenStack = new Stack<Node>();
            childrenStack.push(thisNode);
            boolean root = true;
            while (!childrenStack.isEmpty()) {
                Node curr = childrenStack.pop();
                NodeList children = curr.getChildNodes();
                for (int childi = children.getLength() - 1; childi >= 0; childi--)
                    if (children.item(childi).getNodeType() == Node.ELEMENT_NODE)
                        childrenStack.push(children.item(childi));
                if (root) {
                    root = false;
                    continue;
                }
                if (nodeName) {
                    if (curr.getNodeName().equals(tagname) || tagname.equals("*"))
                        elementList.add(curr);
                } else {
                    // do nothing if only one of the two is null
                    if ("*".equals(ns) && "*".equals(tagname)) {
                        elementList.add(curr);
                        continue;
                    }
                    if (ns != null) {
                        if ((ns.equals("*") || ns.equals(curr.getNamespaceURI()))
                                && (tagname.equals("*") || tagname.equals(curr.getLocalName())))
                            elementList.add(curr);
                    } else if (tagname.equals("*") || tagname.equals(curr.getLocalName()))
                        elementList.add(curr);
                }
            }
            done = true;
        }

        public int getLength() {
            init();
            return elementList.size();
        }

        public Node item(int index) {
            init();
            return (index < getLength()) ? elementList.get(index) : null;
        }
    };
}
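Note the ordering trick in init() above: child nodes are pushed in reverse index order, so the LIFO stack pops them left to right and the walk visits elements in document (pre-order) order without recursion. A stripped-down sketch of that idea with a hypothetical TreeNode type (not part of the com.marklogic.dom API):

import java.util.ArrayList;
import java.util.List;
import java.util.Stack;

public class PreOrderWalk {
    // Hypothetical minimal tree node used only for this sketch.
    static class TreeNode {
        final String name;
        final List<TreeNode> children = new ArrayList<TreeNode>();
        TreeNode(String name) { this.name = name; }
    }

    // Iterative pre-order traversal: push children right-to-left so the
    // leftmost child is popped, and therefore visited, first.
    static List<String> preOrder(TreeNode root) {
        List<String> visited = new ArrayList<String>();
        Stack<TreeNode> stack = new Stack<TreeNode>();
        stack.push(root);
        while (!stack.isEmpty()) {
            TreeNode curr = stack.pop();
            visited.add(curr.name);
            for (int i = curr.children.size() - 1; i >= 0; i--) {
                stack.push(curr.children.get(i));
            }
        }
        return visited;
    }
}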
From source file:com.spidertracks.datanucleus.query.runtime.EqualityOperand.java
@Override
public Operand optimizeDescriminator(Bytes descriminatorColumnValue, List<Bytes> possibleValues) {
    // the equality node is always a leaf, so we don't need to recurse
    if (possibleValues.size() == 1) {
        IndexExpression leaf = new IndexExpression();
        leaf.setColumn_name(descriminatorColumnValue.getBytes());
        leaf.setValue(possibleValues.get(0).getBytes());
        leaf.setOp(IndexOperator.EQ);

        // discriminator fields are always indexed.
        addExpression(leaf, true);
        return this;
    }

    Stack<EqualityOperand> eqOps = new Stack<EqualityOperand>();
    Stack<OrOperand> orOps = new Stack<OrOperand>();

    for (Bytes value : possibleValues) {
        if (orOps.size() == 2) {
            OrOperand orOp = new OrOperand();
            orOp.setLeft(orOps.pop());
            orOp.setRight(orOps.pop());
            orOps.push(orOp);
        }

        if (eqOps.size() == 2) {
            OrOperand orOp = new OrOperand();
            orOp.setLeft(eqOps.pop());
            orOp.setRight(eqOps.pop());
            orOps.push(orOp);
        }

        EqualityOperand subClass = new EqualityOperand(clause.getCount());

        // add the existing clause
        subClass.addAll(this.getIndexClause().getExpressions(), this.isIndexed());

        IndexExpression expression = new IndexExpression();
        expression.setColumn_name(descriminatorColumnValue.getBytes());
        expression.setValue(value.getBytes());
        expression.setOp(IndexOperator.EQ);

        // now add the discriminator, discriminator is always indexed.
        subClass.addExpression(expression, true);

        // push onto the stack
        eqOps.push(subClass);
    }

    // only rewritten without needing to OR to other clauses, short circuit
    while (eqOps.size() > 0) {
        OrOperand orOp = new OrOperand();
        if (eqOps.size() % 2 == 0) {
            orOp.setLeft(eqOps.pop());
            orOp.setRight(eqOps.pop());
        } else {
            orOp.setLeft(eqOps.pop());
            orOp.setRight(orOps.pop());
        }
        orOps.push(orOp);
    }

    while (orOps.size() > 1) {
        OrOperand orOp = new OrOperand();
        orOp.setLeft(orOps.pop());
        orOp.setRight(orOps.pop());
        orOps.push(orOp);
    }

    // check if there's anything left in the eqOps.
    return orOps.pop();
}
From source file:com.qq.tars.maven.gensrc.TarsBuildMojo.java
private void collect(final DependencyNode root, final Set<Artifact> artifacts) {
    Stack<DependencyNode> stack = new Stack<DependencyNode>();
    stack.push(root);
    while (!stack.isEmpty()) {
        DependencyNode node = stack.pop();
        if (node.getState() == DependencyNode.INCLUDED) {
            final Artifact artifact = node.getArtifact();
            if (includedScope(artifact.getScope())) {
                getLog().info("Adding Artefact: " + artifact.toString());
                artifacts.add(artifact);

                // check children
                if (!node.getChildren().isEmpty()) {
                    stack.addAll(node.getChildren());
                }
            }
        }
    }
}
From source file:app.web.Application2ITCase.java
public Stack<URL> toLocationStack(Stack<Action> actionStack) {
    Stack<URL> locationStack = new Stack<URL>();
    for (Action action : actionStack) {
        if (action.getType() == Action.ActionType.OPEN
                || action.getType() == Action.ActionType.CLICK_NEW_PAGE) {
            locationStack.push(toLocationPath(action.getValue()));
        }
    }
    return locationStack;
}
From source file:com.modelsolv.kaboom.serializer.HalSerializerImpl.java
private boolean buildObjectRepresentation(Representation rep, Object obj, CanonicalObjectReader reader,
        ResourceDataModel model, Stack<Object> objectStack) {
    if ((maxObjectCount >= 1) && (++objectCount >= maxObjectCount)) {
        throw new RuntimeException(
                "Object graph has exceeded the maximum object count. Suspected cycle in resource embedding.");
    }
    if (objectStack.contains(obj)) {
        // object is already rendered; rendering again would create an
        // embedding cycle.
        return false;
    }
    objectStack.push(obj);
    Iterable<RDMProperty> props = model.getIncludedProperties();
    for (RDMProperty prop : props) {
        if (!(prop instanceof RDMReferenceProperty)) {
            // primitive field, just render it.
            rep.withProperty(prop.getName(), reader.getPropertyValue(obj, prop));
        } else {
            // it's a reference, we have to see how to treat it.
            if (prop instanceof ReferenceLink) {
                // render link
                Object targetObj = reader.getPropertyValue(obj, (ReferenceLink) prop);
                if (targetObj instanceof Iterable<?>) {
                    Iterable<Object> targets = (Iterable<Object>) targetObj;
                    for (Object target : targets) {
                        buildLink(rep, target, reader, (ReferenceLink) prop);
                    }
                } else {
                    buildLink(rep, targetObj, reader, (ReferenceLink) prop);
                }
            } else {
                // render an embedded object
                ReferenceEmbed refEmbed = (ReferenceEmbed) prop;
                ResourceDataModel embeddedModel = refEmbed.getEmbeddedModel();
                Object targetObject = reader.getPropertyValue(obj, refEmbed);
                if (targetObject instanceof Iterable<?>) {
                    Iterable<?> targetCollection = (Iterable<?>) targetObject;
                    for (Object targetElement : targetCollection) {
                        Representation embeddedRep = createNewRepresentation(targetElement, reader, embeddedModel);
                        if (buildObjectRepresentation(embeddedRep, targetElement, reader,
                                refEmbed.getEmbeddedModel(), objectStack)) {
                            rep.withRepresentation(refEmbed.getName(), embeddedRep);
                        }
                    }
                } else {
                    Representation embeddedRep = createNewRepresentation(targetObject, reader, embeddedModel);
                    if (buildObjectRepresentation(embeddedRep, targetObject, reader,
                            refEmbed.getEmbeddedModel(), objectStack)) {
                        rep.withRepresentation(refEmbed.getName(), embeddedRep);
                    }
                }
            }
        }
    }
    return true;
}
From source file:com.joliciel.csvLearner.features.NormalisationLimitReader.java
public Map<String, Float> read() {
    Map<String, Float> featureToMaxMap = new TreeMap<String, Float>();
    try {
        if (inputStream != null) {
            this.readCSVFile(inputStream, featureToMaxMap);
        } else if (file.isDirectory()) {
            Stack<File> directoryStack = new Stack<File>();
            directoryStack.add(file);
            while (!directoryStack.isEmpty()) {
                File directory = directoryStack.pop();
                LOG.debug("Scanning directory: " + directory.getName());
                File[] files = directory.listFiles();
                if (files == null) {
                    continue;
                }
                for (File oneFile : files) {
                    if (oneFile.isDirectory()) {
                        directoryStack.push(oneFile);
                    } else if (oneFile.getName().endsWith(".nrm_limits.csv")) {
                        LOG.debug("Scanning limits file : " + oneFile.getName());
                        this.readCSVFile(new FileInputStream(oneFile), featureToMaxMap);
                    } else {
                        LOG.trace("Ignoring : " + oneFile.getName());
                    }
                }
            }
        } else {
            LOG.debug("Scanning limits file : " + file.getName());
            this.readCSVFile(new FileInputStream(file), featureToMaxMap);
        }
    } catch (FileNotFoundException e) {
        throw new RuntimeException(e);
    }
    return featureToMaxMap;
}