List of usage examples for the java.util.Stack no-argument constructor
public Stack()
From source file:graphene.util.fs.FileUtils.java
public static void deleteRecursively(final File directory) throws IOException { final Stack<File> stack = new Stack<>(); final List<File> temp = new LinkedList<>(); stack.push(directory.getAbsoluteFile()); while (!stack.isEmpty()) { final File top = stack.pop(); File[] files = top.listFiles(); if (files != null) { for (final File child : files) { if (child.isFile()) { if (!deleteFile(child)) { throw new IOException("Failed to delete " + child.getCanonicalPath()); }//from w ww. j a v a2 s . co m } else { temp.add(child); } } } files = top.listFiles(); if ((files == null) || (files.length == 0)) { if (!deleteFile(top)) { throw new IOException("Failed to delete " + top.getCanonicalPath()); } } else { stack.push(top); for (final File f : temp) { stack.push(f); } } temp.clear(); } }
From source file:org.gephi.statistics.plugin.GraphDistance.java
/**
 * Computes eccentricity, closeness centrality and betweenness centrality for every
 * node of the graph and writes the results into node-table columns.
 *
 * <p>The structure matches Brandes' betweenness algorithm: one BFS per source node
 * counting shortest paths ({@code theta}) and recording predecessor lists
 * ({@code P}), followed by a dependency-accumulation pass over the reverse BFS
 * finishing order (the stack {@code S}). Diameter, radius and average distance are
 * accumulated along the way.
 *
 * <p>The graph is read-locked for the whole computation; on cancellation the lock
 * is released via {@code readUnlockAll()} and the method returns early.
 */
public void execute(HierarchicalGraph hgraph, AttributeModel attributeModel) {
    isCanceled = false;

    // Make sure the three result columns exist on the node table.
    AttributeTable nodeTable = attributeModel.getNodeTable();
    AttributeColumn eccentricityCol = nodeTable.getColumn(ECCENTRICITY);
    AttributeColumn closenessCol = nodeTable.getColumn(CLOSENESS);
    AttributeColumn betweenessCol = nodeTable.getColumn(BETWEENNESS);
    if (eccentricityCol == null) {
        eccentricityCol = nodeTable.addColumn(ECCENTRICITY, "Eccentricity", AttributeType.DOUBLE,
                AttributeOrigin.COMPUTED, new Double(0));
    }
    if (closenessCol == null) {
        closenessCol = nodeTable.addColumn(CLOSENESS, "Closeness Centrality", AttributeType.DOUBLE,
                AttributeOrigin.COMPUTED, new Double(0));
    }
    if (betweenessCol == null) {
        betweenessCol = nodeTable.addColumn(BETWEENNESS, "Betweenness Centrality", AttributeType.DOUBLE,
                AttributeOrigin.COMPUTED, new Double(0));
    }

    hgraph.readLock();

    // Reset per-run accumulators.
    N = hgraph.getNodeCount();
    betweenness = new double[N];
    eccentricity = new double[N];
    closeness = new double[N];
    diameter = 0;
    avgDist = 0;
    shortestPaths = 0;
    radius = Integer.MAX_VALUE;

    // Map each node to a dense array index.
    HashMap<Node, Integer> indicies = new HashMap<Node, Integer>();
    int index = 0;
    for (Node s : hgraph.getNodes()) {
        indicies.put(s, index);
        index++;
    }

    Progress.start(progress, hgraph.getNodeCount());
    int count = 0;
    // One BFS per source node s.
    for (Node s : hgraph.getNodes()) {
        Stack<Node> S = new Stack<Node>();          // nodes in order of BFS completion
        LinkedList<Node>[] P = new LinkedList[N];   // shortest-path predecessor lists
        double[] theta = new double[N];             // number of shortest paths from s
        int[] d = new int[N];                       // BFS distance from s (-1 = unvisited)
        for (int j = 0; j < N; j++) {
            P[j] = new LinkedList<Node>();
            theta[j] = 0;
            d[j] = -1;
        }
        int s_index = indicies.get(s);
        theta[s_index] = 1;
        d[s_index] = 0;
        LinkedList<Node> Q = new LinkedList<Node>();
        Q.addLast(s);
        while (!Q.isEmpty()) {
            Node v = Q.removeFirst();
            S.push(v);
            int v_index = indicies.get(v);
            // Out-edges only when directed; all edges otherwise (meta edges included).
            EdgeIterable edgeIter = null;
            if (isDirected) {
                edgeIter = ((HierarchicalDirectedGraph) hgraph).getOutEdgesAndMetaOutEdges(v);
            } else {
                edgeIter = hgraph.getEdgesAndMetaEdges(v);
            }
            for (Edge edge : edgeIter) {
                Node reachable = hgraph.getOpposite(v, edge);
                int r_index = indicies.get(reachable);
                if (d[r_index] < 0) {
                    // First time seen: enqueue and set distance.
                    Q.addLast(reachable);
                    d[r_index] = d[v_index] + 1;
                }
                if (d[r_index] == (d[v_index] + 1)) {
                    // v lies on a shortest path to reachable: accumulate path count.
                    theta[r_index] = theta[r_index] + theta[v_index];
                    P[r_index].addLast(v);
                }
            }
        }
        // Aggregate distances reached from s into the global statistics.
        double reachable = 0;
        for (int i = 0; i < N; i++) {
            if (d[i] > 0) {
                avgDist += d[i];
                eccentricity[s_index] = (int) Math.max(eccentricity[s_index], d[i]);
                closeness[s_index] += d[i];
                diameter = Math.max(diameter, d[i]);
                reachable++;
            }
        }
        radius = (int) Math.min(eccentricity[s_index], radius);
        if (reachable != 0) {
            closeness[s_index] /= reachable;
        }
        shortestPaths += reachable;

        // Dependency accumulation (Brandes): walk nodes in reverse BFS order.
        double[] delta = new double[N];
        while (!S.empty()) {
            Node w = S.pop();
            int w_index = indicies.get(w);
            ListIterator<Node> iter1 = P[w_index].listIterator();
            while (iter1.hasNext()) {
                Node u = iter1.next();
                int u_index = indicies.get(u);
                delta[u_index] += (theta[u_index] / theta[w_index]) * (1 + delta[w_index]);
            }
            if (w != s) {
                betweenness[w_index] += delta[w_index];
            }
        }
        count++;
        if (isCanceled) {
            // Release the read lock acquired above before bailing out.
            hgraph.readUnlockAll();
            return;
        }
        Progress.progress(progress, count);
    }

    // shortestPaths is the total number of (source, reachable-target) pairs.
    avgDist /= shortestPaths;

    // Write results back into the node attribute rows.
    for (Node s : hgraph.getNodes()) {
        AttributeRow row = (AttributeRow) s.getNodeData().getAttributes();
        int s_index = indicies.get(s);
        if (!isDirected) {
            // Undirected: each path was counted from both endpoints.
            betweenness[s_index] /= 2;
        }
        if (isNormalized) {
            closeness[s_index] = (closeness[s_index] == 0) ? 0 : 1.0 / closeness[s_index];
            betweenness[s_index] /= isDirected ? (N - 1) * (N - 2) : (N - 1) * (N - 2) / 2;
        }
        row.setValue(eccentricityCol, eccentricity[s_index]);
        row.setValue(closenessCol, closeness[s_index]);
        row.setValue(betweenessCol, betweenness[s_index]);
    }
    hgraph.readUnlock();
}
From source file:autohit.creator.compiler.SimCompiler.java
/** * Compile the xml tree into an VMExecutable object. * //ww w .j av a 2s. c o m * We will create a new log for each run, so that we can uniquely * identify them. * * @param xd A parsed XML document. * @return a reference to the target object, in this case it will be a VMExecutableWrapper, or null if it failed. * @see autohit.vm.VMExecutableWrapper */ public Object build(Document xd) { int idx; NodeList rootChildren; Element itemTree = null; Element codeTree = null; int numNodes; Node scratchNode; String scratchString; // Any exception or verification check aborts the compile try { // Ok, build our working Sim object ob = new VMExecutableWrapper(); ob.create(); // Create our symbol table and fixup stack symboltable = new HashMap(); fixupstack = new Stack(); // set defaults attributes ob.exec.major = 0; // set any default attributes ob.exec.type = MY_TYPE_OF_EXEC; ob.exec.minor = 0; ob.exec.output = null; // assume there is nothign to return // Get the root element and normalize Element root = (Element) xd.getDocumentElement(); root.normalize(); // Peal out the <info> and <code> sub-trees rootChildren = (NodeList) root.getChildNodes(); numNodes = rootChildren.getLength(); while (numNodes > 0) { scratchNode = rootChildren.item(numNodes - 1); if (scratchNode instanceof Element) { scratchString = scratchNode.getNodeName(); if (scratchString.charAt(0) == 'i') { itemTree = (Element) scratchNode; } else if (scratchString.charAt(0) == 'c') { codeTree = (Element) scratchNode; } } numNodes--; } if (itemTree == null) { runtimeError("Missing infomation <info> block."); } if (codeTree == null) { runtimeError("Missing infomation <code> block."); throw new Exception(); } // Deal with the <info> tree NodeList itemTreeChildren = itemTree.getChildNodes(); for (idx = 0; idx < itemTreeChildren.getLength(); idx++) { scratchNode = itemTreeChildren.item(idx); // pull only Elements if (scratchNode instanceof Element) { processItem((Element) scratchNode); } } // Deal with the <code> tree 
// Basicall, I'm gonna go wtih recursion. I don't think it should // get very deep. try { processCode(codeTree); // Put a NOP on the end of the executable ob.emit(new VMINop()); ob.clean(); // fixup goto symbols ListIterator li = fixupstack.listIterator(); VMIGoto jcandidate; NOPair nocandidate; Integer currentgoto; while (li.hasNext()) { nocandidate = (NOPair) li.next(); if (symboltable.containsKey(nocandidate.n)) { jcandidate = (VMIGoto) nocandidate.o; currentgoto = (Integer) symboltable.get(nocandidate.n); jcandidate.t = currentgoto.intValue(); runtimeDebug("Fixup GOTO for label=" + nocandidate.n + " target=" + jcandidate.t); } else { runtimeError("Broken GOTO. No label for " + nocandidate.n + "."); } } } catch (Exception e) { // an otherwise uncaught exception. A runaway compiler... runtimeError("FATAL ERROR. Runaway compilation errors. Stopping compile."); ob = null; } } catch (Exception e) { myLog.error("CRITICAL ERROR encountered. Stopping compile of " + localname + ". " + e.toString(), AutohitErrorCodes.CODE_COMPILE_ERROR); myLog.error(e.toString()); ob = null; // leave the objectCode as null; } // ditch data as it falls out of scope symboltable = null; fixupstack = null; // clean up logs int err = numberErrors(); runtimeLog.error("Total errors for " + localname + " : " + err); runtimeLog.warning("Total errors for " + localname + " : " + numberWarnings()); if (err > 0) { runtimeLog.info("COMPILE FAILED " + localname + " DUE TO ERRORS."); ob = null; } return ob; }
From source file:ee.sk.digidoc.factory.SAXDigiDocFactory.java
/** * Creates new SAXDigiDocFactory//from w w w. j a v a2 s . c om * and initializes the variables */ public SAXDigiDocFactory() { m_tags = new Stack(); m_doc = null; m_sig = null; m_sbCollectSignature = null; m_xmlnsAttr = null; m_nsAsicPref = null; m_sbCollectItem = null; m_digest = null; m_altDigest = null; m_bCollectDigest = false; m_dfCacheOutStream = null; m_tempDir = null; m_errs = null; m_elemRoot = null; m_elemCurrent = null; m_logger = Logger.getLogger(SAXDigiDocFactory.class); nMaxBdocFilCached = ConfigManager.instance().getLongProperty("DIGIDOC_MAX_DATAFILE_CACHED", Long.MAX_VALUE); }
From source file:edu.emory.cci.aiw.cvrg.eureka.etl.ksb.PropositionDefinitionFinder.java
private void readParentsForSearchResult(PropositionDefinition pf, LinkedHashSet<String> nodesToLoad) throws PropositionFinderException { try {//from w ww .j a v a 2s .c o m Queue<PropositionDefinition> toProcessQueue = new LinkedList<>(); Stack<String> processedStack = new Stack<>(); toProcessQueue.add(pf); while (!toProcessQueue.isEmpty()) { PropositionDefinition currentPropDef = toProcessQueue.remove(); List<PropositionDefinition> parents; synchronized (parentsCache) { parents = parentsCache.get(currentPropDef.getId()); if (parents == null) { parents = knowledgeSource.readParents(currentPropDef); parentsCache.put(currentPropDef.getId(), parents); } } for (PropositionDefinition parent : parents) { toProcessQueue.add(parent); processedStack.add(parent.getId()); } } getNodesToLoad(processedStack, nodesToLoad); } catch (KnowledgeSourceReadException e) { throw new PropositionFinderException(e); } }
From source file:hudson.model.Items.java
/** * Computes the canonical full name of a relative path in an {@link ItemGroup} context, handling relative * positions ".." and "." as absolute path starting with "/". The resulting name is the item fullName from Jenkins * root./*from ww w . j a va2 s. co m*/ */ public static String getCanonicalName(ItemGroup context, String path) { String[] c = context.getFullName().split("/"); String[] p = path.split("/"); Stack<String> name = new Stack<String>(); for (int i = 0; i < c.length; i++) { if (i == 0 && c[i].equals("")) continue; name.push(c[i]); } for (int i = 0; i < p.length; i++) { if (i == 0 && p[i].equals("")) { // Absolute path starting with a "/" name.clear(); continue; } if (p[i].equals("..")) { if (name.size() == 0) { throw new IllegalArgumentException(String .format("Illegal relative path '%s' within context '%s'", path, context.getFullName())); } name.pop(); continue; } if (p[i].equals(".")) { continue; } name.push(p[i]); } return StringUtils.join(name, '/'); }
From source file:com.linkedin.restli.tools.snapshot.check.RestLiSnapshotCompatibilityChecker.java
private CompatibilityInfoMap checkCompatibility(String prevRestModelPath, String currRestModelPath, CompatibilityLevel compatLevel, boolean isAgainstRestSpec) { final CompatibilityInfoMap infoMap = _infoMap; if (compatLevel == CompatibilityLevel.OFF) { // skip check entirely. return infoMap; }/*w w w .j av a2 s . co m*/ final Stack<Object> path = new Stack<Object>(); path.push(""); FileInputStream prevSnapshotFile = null; FileInputStream currSnapshotFile = null; try { prevSnapshotFile = new FileInputStream(prevRestModelPath); } catch (FileNotFoundException e) { infoMap.addRestSpecInfo(CompatibilityInfo.Type.RESOURCE_NEW, path, currRestModelPath); } try { currSnapshotFile = new FileInputStream(currRestModelPath); } catch (FileNotFoundException e) { infoMap.addRestSpecInfo(CompatibilityInfo.Type.RESOURCE_MISSING, path, prevRestModelPath); } if (prevSnapshotFile == null || currSnapshotFile == null) { return infoMap; } AbstractSnapshot prevSnapshot = null; AbstractSnapshot currSnapshot = null; try { if (isAgainstRestSpec) { prevSnapshot = new RestSpec(prevSnapshotFile); } else { prevSnapshot = new Snapshot(prevSnapshotFile); } currSnapshot = new Snapshot(currSnapshotFile); } catch (IOException e) { infoMap.addRestSpecInfo(CompatibilityInfo.Type.OTHER_ERROR, path, e.getMessage()); } if (prevSnapshot == null || currSnapshot == null) { return infoMap; } final DataSchemaResolver currResolver = createResolverFromSnapshot(currSnapshot, _resolverPath); final DataSchemaResolver prevResolver; if (isAgainstRestSpec) { prevResolver = currResolver; } else { prevResolver = createResolverFromSnapshot(prevSnapshot, _resolverPath); } final ResourceCompatibilityChecker checker = new ResourceCompatibilityChecker( prevSnapshot.getResourceSchema(), prevResolver, currSnapshot.getResourceSchema(), currResolver); checker.check(compatLevel); infoMap.addAll(checker.getInfoMap()); return infoMap; }
From source file:forge.limited.SealedCardPoolGenerator.java
/**
 * <p>
 * Constructor for SealedDeck. Builds the sealed card pool according to the chosen
 * pool type: generic boosters, a (fantasy) block's booster mix, or a custom cube
 * file from disk. Any user cancellation in a dialog aborts construction early.
 * </p>
 *
 * @param poolType
 *            the {@code LimitedPoolType} selecting how the pool is assembled.
 */
private SealedCardPoolGenerator(final LimitedPoolType poolType) {
    switch (poolType) {
    case Full:
        // Choose number of boosters; bail out if the user cancels.
        if (!chooseNumberOfBoosters(new UnOpenedProduct(SealedProduct.Template.genericBooster))) {
            return;
        }
        landSetCode = CardEdition.Predicates.getRandomSetWithAllBasicLands(FModel.getMagicDb().getEditions())
                .getCode();
        break;

    case Block:
    case FantasyBlock:
        List<CardBlock> blocks = new ArrayList<CardBlock>();
        Iterable<CardBlock> src = poolType == LimitedPoolType.Block ? FModel.getBlocks()
                : FModel.getFantasyBlocks();
        for (CardBlock b : src) {
            blocks.add(b);
        }

        final CardBlock block = SGuiChoose.oneOrNone("Choose Block", blocks);
        if (block == null) {
            return;
        }

        final int nPacks = block.getCntBoostersSealed();
        // Candidate set codes for this block: editions first, then meta-sets.
        final Stack<String> sets = new Stack<String>();
        for (CardEdition edition : block.getSets()) {
            sets.add(edition.getCode());
        }
        for (String ms : block.getMetaSetNames()) {
            sets.push(ms);
        }

        if (sets.size() > 1) {
            // More than one set: let the user pick a pack combination.
            final List<String> setCombos = getSetCombos(sets, nPacks);
            if (setCombos == null || setCombos.isEmpty()) {
                throw new RuntimeException(
                        "Unsupported amount of packs (" + nPacks + ") in a Sealed Deck block!");
            }
            final String p = setCombos.size() > 1
                    ? SGuiChoose.oneOrNone("Choose packs to play with", setCombos)
                    : setCombos.get(0);
            if (p == null) {
                return;
            }
            // Each comma-separated entry looks like "[count] SETCODE"; a missing
            // count means one booster of that set.
            for (String pz : TextUtil.split(p, ',')) {
                String[] pps = TextUtil.splitWithParenthesis(pz.trim(), ' ');
                String setCode = pps[pps.length - 1];
                int nBoosters = pps.length > 1 ? Integer.parseInt(pps[0]) : 1;
                while (nBoosters-- > 0) {
                    this.product.add(block.getBooster(setCode));
                }
            }
        } else {
            // Single set: nPacks boosters of the same product.
            IUnOpenedProduct prod = block.getBooster(sets.get(0));
            for (int i = 0; i < nPacks; i++) {
                this.product.add(prod);
            }
        }
        landSetCode = block.getLandSet().getCode();
        break;

    case Custom:
        String[] dList;
        final List<CustomLimited> customs = new ArrayList<CustomLimited>();

        // Get list of custom draft files.
        final File dFolder = new File(ForgeConstants.SEALED_DIR);
        if (!dFolder.exists()) {
            throw new RuntimeException(
                    "GenerateSealed : folder not found -- folder is " + dFolder.getAbsolutePath());
        }
        if (!dFolder.isDirectory()) {
            throw new RuntimeException("GenerateSealed : not a folder -- " + dFolder.getAbsolutePath());
        }
        dList = dFolder.list();
        for (final String element : dList) {
            if (element.endsWith(FILE_EXT)) {
                final List<String> dfData = FileUtil.readFile(ForgeConstants.SEALED_DIR + element);
                final CustomLimited cs = CustomLimited.parse(dfData, FModel.getDecks().getCubes());
                if (cs.getSealedProductTemplate().getNumberOfCardsExpected() > 5) {
                    // Do not allow too small cubes to be played as 'stand-alone'!
                    customs.add(cs);
                }
            }
        }

        // Present list to user.
        if (customs.isEmpty()) {
            SOptionPane.showMessageDialog("No custom sealed files found.");
            return;
        }

        final CustomLimited draft = SGuiChoose.oneOrNone("Choose Custom Sealed Pool", customs);
        if (draft == null) {
            return;
        }

        UnOpenedProduct toAdd = new UnOpenedProduct(draft.getSealedProductTemplate(), draft.getCardPool());
        toAdd.setLimitedPool(draft.isSingleton());
        if (!chooseNumberOfBoosters(toAdd)) {
            return;
        }
        landSetCode = draft.getLandSetCode();
        break;
    }
}
From source file:sapience.injectors.stax.inject.StringBasedStaxStreamInjector.java
/** * The actual injection procedure//from w w w. jav a 2 s . c o m * @param in the input stream where the XML is coming from (will be closed in the end) * @param out the output stream where we write the annotated XML into (remains open) * @param refs a list of references * @throws IOException */ public void inject(InputStream in, OutputStream out, List<Reference> refs) throws IOException { StringBuilder pb; String characters = null; NamespaceContext context = null; int marked; current_path = new Stack<String>(); current_path.push("//"); try { XMLEventReader r = inFac.createXMLEventReader(in); XMLEventWriter w = outFac.createXMLEventWriter(out); XMLStreamWriter ws = outFac.createXMLStreamWriter(System.out); while (r.hasNext()) { XMLEvent e = r.nextEvent(); switch (e.getEventType()) { case XMLEvent.START_ELEMENT: StartElement se = (StartElement) e; context = se.getNamespaceContext(); if (elementReferences == null) { // process the namespaces in the references this.prepareReferences(refs, context); } // store location col = e.getLocation().getColumnNumber(); // add to current xpath current_path.add(generator.asXPathString((StartElement) e)); //XPathHelper.addCurrentElementToStack(current_path, se); // check if the current xpath is in our list of attribute references if (attributeReferences.size() > 0) { for (int i = 0; i < refs.size(); i++) { Stack<String> stack = attributeReferences.get(refs.get(i)); if (matcher.matches(current_path, stack, true, context)) { // yes, let's inject the reference (only valid for attributes here) this.handleAttribute(w, refs.get(i)); attributeReferences.remove(refs.get(i)); refs.remove(i); } } } w.add(e); break; case XMLEvent.END_ELEMENT: // before removing from stack, we check if the current path with added characters is a match (which means we have to add a new element now) if (characters != null) this.current_path.push(characters); if (elementReferences.size() > 0) { for (int i = 0; i < refs.size(); i++) { Stack<String> stack = 
elementReferences.get(refs.get(i)); if (matcher.matches(current_path, stack, true, context)) { // yes, let's inject the reference (only valid for attributes here) this.interceptingElement = refs.get(i); elementReferences.remove(refs.get(i)); refs.remove(i); } } } if (characters != null) { // clean up this.current_path.pop(); characters = null; } this.current_path.pop(); w.add(e); // if the intercepting is not null, the preceding element was a match, hence we inject some xml before writing a new element if (this.interceptingElement != null) { w.add(eventFac.createSpace("\n")); writeElementIntoStream(w, interceptingElement); } break; case XMLEvent.CHARACTERS: characters = generator.asXPathString((Characters) e); w.add(e); break; default: w.add(e); break; } } } catch (XPathExpressionException e) { if (logger.isLoggable(Level.SEVERE)) { logger.log(Level.SEVERE, "Not a valid XPath", e); } throw new IOException(e); } catch (XMLStreamException e) { if (logger.isLoggable(Level.SEVERE)) { logger.log(Level.SEVERE, "Failed to inject. Reason: " + e.getLocalizedMessage(), e); } throw new IOException(e); } finally { in.close(); } }
From source file:com.espertech.esper.view.ViewSupport.java
/**
 * Find the descendent view in the view tree under the parent view returning the list of view nodes
 * between the parent view and the descendent view. Returns null if the descendent view is not found.
 * Returns an empty list if the descendent view is a child view of the parent view.
 * @param parentView is the view to start searching under
 * @param descendentView is the view to find
 * @return list of Viewable nodes between parent and descendent view.
 */
public static List<View> findDescendent(Viewable parentView, Viewable descendentView) {
    Stack<View> pathBetween = new Stack<View>();
    for (View child : parentView.getViews()) {
        // Direct child: the path between parent and descendent is empty.
        if (child == descendentView) {
            return pathBetween;
        }
        // Otherwise search this child's subtree; the recursion fills in the path.
        if (findDescendentRecusive(child, descendentView, pathBetween)) {
            return pathBetween;
        }
    }
    // Not found anywhere under the parent.
    return null;
}