List of usage examples for the java.util.Stack constructor
public Stack()
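Every example below obtains its stack from this no-argument constructor, which creates an empty last-in, first-out (LIFO) stack backed by a Vector. A minimal sketch of the same pattern before the real-world excerpts (the class name and element values here are purely illustrative):

import java.util.Stack;

public class StackConstructorDemo {
    public static void main(String[] args) {
        // public Stack() creates an empty LIFO stack.
        Stack<String> stack = new Stack<String>();
        stack.push("first");
        stack.push("second");
        System.out.println(stack.peek());    // prints "second" (top element, not removed)
        System.out.println(stack.pop());     // prints "second" (top element, removed)
        System.out.println(stack.isEmpty()); // prints "false" ("first" is still on the stack)
    }
}

The excerpts that follow use the same constructor for depth-first traversals, worklists, and scope tracking.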
From source file:gdt.jgui.entity.JEntityDigestDisplay.java
private String[] listFacetOpenItems() {
    try {
        Entigrator entigrator = console.getEntigrator(entihome$);
        Sack entity = entigrator.getEntityAtKey(entityKey$);
        String[] sa = entigrator.ent_listComponentsCascade(entity);
        Core[] ca = entity.elementGet("jfacet");
        Stack<String> s = new Stack<String>();
        if (ca != null)
            for (Core c : ca)
                Support.addItem(c.value, s);
        if (sa != null) {
            Sack component;
            for (String aSa : sa) {
                component = entigrator.getEntityAtKey(aSa);
                if (component == null)
                    continue;
                ca = component.elementGet("jfacet");
                if (ca != null)
                    for (Core c : ca)
                        Support.addItem(c.value, s);
            }
        }
        return s.toArray(new String[0]);
    } catch (Exception e) {
        Logger.getLogger(getClass().getName()).severe(e.toString());
    }
    return null;
}
From source file:com.haulmont.cuba.desktop.sys.DesktopWindowManager.java
protected JComponent showNewWindow(Window window, OpenType openType, String caption) {
    window.setHeight("100%");
    window.setWidth("100%");

    TopLevelFrame windowFrame = createTopLevelFrame(caption);
    windowFrame.setName(window.getId());

    Dimension dimension = new Dimension();
    dimension.width = 800;
    if (openType.getWidth() != null) {
        dimension.width = openType.getWidth().intValue();
    }
    dimension.height = 500;
    if (openType.getHeight() != null) {
        dimension.height = openType.getHeight().intValue();
    }

    boolean resizable = true;
    if (openType.getResizable() != null) {
        resizable = openType.getResizable();
    }
    windowFrame.setResizable(resizable);
    windowFrame.setMinimumSize(dimension);
    windowFrame.pack();

    getDialogParams().reset();

    WindowBreadCrumbs breadCrumbs = createBreadCrumbs();
    breadCrumbs.addWindow(window);
    JComponent tabContent = createTabPanel(window, breadCrumbs);

    WindowOpenInfo openInfo = new WindowOpenInfo(window, OpenMode.NEW_WINDOW);
    openInfo.setData(tabContent);
    Map<Window, WindowOpenInfo> openInfos = new HashMap<>();
    if (window instanceof Window.Wrapper) {
        Window wrappedWindow = ((Window.Wrapper) window).getWrappedWindow();
        openInfos.put(wrappedWindow, openInfo);
    } else {
        openInfos.put(window, openInfo);
    }

    windowFrame.getWindowManager().attachTab(breadCrumbs, new Stack<>(), window, getWindowHashCode(window),
            tabContent, openInfos);

    App.getInstance().registerFrame(windowFrame);
    windowFrame.setVisible(true);

    return DesktopComponentsHelper.getComposition(window);
}
From source file:com.gargoylesoftware.htmlunit.activex.javascript.msxml.XMLHTTPRequest.java
/**
 * Sends an HTTP request to the server and receives a response.
 * @param body the body of the message being sent with the request.
 */
@JsxFunction
public void send(final Object body) {
    if (webRequest_ == null) {
        setState(STATE_DONE, Context.getCurrentContext());
        return;
    }
    if (sent_) {
        throw Context.reportRuntimeError("Unspecified error (request already sent).");
    }
    sent_ = true;

    prepareRequest(body);

    // quite strange but IE seems to fire state loading twice
    setState(STATE_OPENED, Context.getCurrentContext());

    final WebClient client = getWindow().getWebWindow().getWebClient();
    final AjaxController ajaxController = client.getAjaxController();
    final HtmlPage page = (HtmlPage) getWindow().getWebWindow().getEnclosedPage();
    final boolean synchron = ajaxController.processSynchron(page, webRequest_, async_);
    if (synchron) {
        doSend(Context.getCurrentContext());
    } else {
        // Create and start a thread in which to execute the request.
        final Scriptable startingScope = getWindow();
        final ContextFactory cf = client.getJavaScriptEngine().getContextFactory();
        final ContextAction action = new ContextAction() {
            @Override
            public Object run(final Context cx) {
                // KEY_STARTING_SCOPE maintains a stack of scopes
                @SuppressWarnings("unchecked")
                Stack<Scriptable> stack = (Stack<Scriptable>) cx
                        .getThreadLocal(JavaScriptEngine.KEY_STARTING_SCOPE);
                if (null == stack) {
                    stack = new Stack<>();
                    cx.putThreadLocal(JavaScriptEngine.KEY_STARTING_SCOPE, stack);
                }
                stack.push(startingScope);
                try {
                    doSend(cx);
                } finally {
                    stack.pop();
                }
                return null;
            }
        };
        final JavaScriptJob job = BackgroundJavaScriptFactory.theFactory()
                .createJavascriptXMLHttpRequestJob(cf, action);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Starting XMLHTTPRequest thread for asynchronous request");
        }
        jobID_ = getWindow().getWebWindow().getJobManager().addJob(job, page);
    }
}
From source file:com.anite.zebra.hivemind.om.state.ZebraProcessInstance.java
/**
 * Looks for the first list of tasks that come from the child(ren) of this
 * process instance. This is used for finding the next screen. We don't do
 * this exhaustively as it could be very large. The first is good enough for
 * determining the next screen.
 */
@Transient
public List<ZebraTaskInstance> getFirstTasksFromAChildProcess() throws NestableException {
    Stack<ZebraProcessInstance> checkList = new Stack<ZebraProcessInstance>();
    checkList.push(this);
    while (!checkList.isEmpty()) {
        try {
            ZebraProcessInstance currentProcess = checkList.pop();
            List childProcesses = currentProcess.getRunningChildProcesses();
            for (Iterator it = childProcesses.iterator(); it.hasNext();) {
                ZebraProcessInstance child = (ZebraProcessInstance) it.next();
                List<ZebraTaskInstance> allTasks = child.getUsersTasks();
                if (!allTasks.isEmpty()) {
                    return allTasks;
                }
                checkList.push(child);
            }
        } catch (Exception e) {
            String emsg = "Failed to retrieve child processes";
            log.error(emsg, e);
            throw new NestableException(emsg, e);
        }
    }
    return new ArrayList<ZebraTaskInstance>();
}
From source file:com.inmobi.conduit.distcp.tools.mapred.TestCopyCommitter.java
private boolean checkDirectoryPermissions(FileSystem fs, String targetBase, FsPermission sourcePerm)
        throws IOException {
    Path base = new Path(targetBase);

    Stack<Path> stack = new Stack<Path>();
    stack.push(base);
    while (!stack.isEmpty()) {
        Path file = stack.pop();
        if (!fs.exists(file))
            continue;
        FileStatus[] fStatus = fs.listStatus(file);
        if (fStatus == null || fStatus.length == 0)
            continue;

        for (FileStatus status : fStatus) {
            if (status.isDir()) {
                stack.push(status.getPath());
                Assert.assertEquals(status.getPermission(), sourcePerm);
            }
        }
    }
    return true;
}
From source file:com.kadwa.hadoop.DistExec.java
/**
 * Initialize ExecFilesMapper specific job-configuration.
 *
 * @param conf    : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args    Arguments
 * @return true if it is necessary to launch a job.
 */
private static boolean setup(Configuration conf, JobConf jobConf, final Arguments args) throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());
    jobConf.set(EXEC_CMD_LABEL, args.execCmd);

    // set boolean values
    jobConf.setBoolean(Options.REDIRECT_ERROR_TO_OUT.propertyname,
            args.flags.contains(Options.REDIRECT_ERROR_TO_OUT));

    final String randomId = getRandomId();
    JobClient jClient = new JobClient(jobConf);
    Path stagingArea;
    try {
        stagingArea = JobSubmissionFiles.getStagingDir(jClient, conf);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
    Path jobDirectory = new Path(stagingArea + NAME + "_" + randomId);
    FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
    FileSystem.mkdirs(FileSystem.get(jobDirectory.toUri(), conf), jobDirectory, mapredSysPerms);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

    FileSystem dstfs = args.dst.getFileSystem(conf);

    // get tokens for all the required FileSystems..
    TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), new Path[] { args.dst }, conf);

    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
        dstIsDir = dstfs.getFileStatus(args.dst).isDir();
    }

    // default logPath
    Path logPath = args.log;
    if (logPath == null) {
        String filename = "_" + NAME + "_logs_" + randomId;
        if (!dstExists || !dstIsDir) {
            Path parent = args.dst.getParent();
            if (!dstfs.exists(parent)) {
                dstfs.mkdirs(parent);
            }
            logPath = new Path(parent, filename);
        } else {
            logPath = new Path(args.dst, filename);
        }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);

    // create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);

    Path srcfilelist = new Path(jobDirectory, "_" + NAME + "_src_files");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    SequenceFile.Writer src_writer = SequenceFile.createWriter(jobfs, jobConf, srcfilelist,
            LongWritable.class, FilePair.class, SequenceFile.CompressionType.NONE);

    Path dstfilelist = new Path(jobDirectory, "_" + NAME + "_dst_files");
    SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobfs, jobConf, dstfilelist, Text.class,
            Text.class, SequenceFile.CompressionType.NONE);

    Path dstdirlist = new Path(jobDirectory, "_" + NAME + "_dst_dirs");
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobfs, jobConf, dstdirlist, Text.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    // handle the case where the destination directory doesn't exist
    // and we've only a single src directory.
    final boolean special = (args.srcs.size() == 1 && !dstExists);
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, byteCount = 0L, cbsyncs = 0L;
    try {
        for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext();) {
            final Path src = srcItr.next();
            FileSystem srcfs = src.getFileSystem(conf);
            FileStatus srcfilestat = srcfs.getFileStatus(src);
            Path root = special && srcfilestat.isDir() ? src : src.getParent();
            if (srcfilestat.isDir()) {
                ++srcCount;
            }

            Stack<FileStatus> pathstack = new Stack<FileStatus>();
            for (pathstack.push(srcfilestat); !pathstack.empty();) {
                FileStatus cur = pathstack.pop();
                FileStatus[] children = srcfs.listStatus(cur.getPath());
                for (int i = 0; i < children.length; i++) {
                    boolean skipfile = false;
                    final FileStatus child = children[i];
                    final String dst = makeRelative(root, child.getPath());

                    ++srcCount;

                    if (child.isDir()) {
                        pathstack.push(child);
                    } else {
                        if (!skipfile) {
                            ++fileCount;
                            byteCount += child.getLen();

                            if (LOG.isTraceEnabled()) {
                                LOG.trace("adding file " + child.getPath());
                            }

                            ++cnsyncf;
                            cbsyncs += child.getLen();
                            if (cnsyncf > SYNC_FILE_MAX || cbsyncs > BYTES_PER_MAP) {
                                src_writer.sync();
                                dst_writer.sync();
                                cnsyncf = 0;
                                cbsyncs = 0L;
                            }
                        }
                    }

                    if (!skipfile) {
                        src_writer.append(new LongWritable(child.isDir() ? 0 : child.getLen()),
                                new FilePair(child, dst));
                    }

                    dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
                }

                if (cur.isDir()) {
                    String dst = makeRelative(root, cur.getPath());
                    dir_writer.append(new Text(dst), new FilePair(cur, dst));
                    if (++dirsyn > SYNC_FILE_MAX) {
                        dirsyn = 0;
                        dir_writer.sync();
                    }
                }
            }
        }
    } finally {
        checkAndClose(src_writer);
        checkAndClose(dst_writer);
        checkAndClose(dir_writer);
    }

    FileStatus dststatus = null;
    try {
        dststatus = dstfs.getFileStatus(args.dst);
    } catch (FileNotFoundException fnfe) {
        LOG.info(args.dst + " does not exist.");
    }

    // create dest path dir if copying > 1 file
    if (dststatus == null) {
        if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
            throw new IOException("Failed to create" + args.dst);
        }
    }

    final Path sorted = new Path(jobDirectory, "_" + NAME + "_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);

    Path tmpDir = new Path(
            (dstExists && !dstIsDir) || (!dstExists && srcCount == 1) ? args.dst.getParent() : args.dst,
            "_" + NAME + "_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
    LOG.info("sourcePathsCount=" + srcCount);
    LOG.info("filesToExecCount=" + fileCount);
    LOG.info("bytesToExecCount=" + StringUtils.humanReadableInt(byteCount));
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
    setMapCount(fileCount, jobConf);
    return fileCount > 0;
}
From source file:com.projity.pm.graphic.model.transform.NodeCacheTransformer.java
private Map<GraphicNode, List<GraphicNode>> extractAssignments(List list) {
    Map<GraphicNode, List<GraphicNode>> map = new HashMap<GraphicNode, List<GraphicNode>>();
    GraphicNode current, last;
    Stack<GraphicNode> path = new Stack<GraphicNode>();
    for (ListIterator i = list.listIterator(); i.hasNext();) {
        current = (GraphicNode) i.next();
        if (current.getLevel() == 1) {
            path.clear();
            path.push(current);
            continue;
        }
        while ((last = path.peek()).getLevel() >= current.getLevel())
            path.pop();
        if (current.isAssignment()) {
            GraphicNode task = path.peek();
            List<GraphicNode> ass = map.get(task);
            if (ass == null) {
                ass = new LinkedList<GraphicNode>();
                map.put(task, ass);
            }
            ass.add(current);
            i.remove();
        }
        path.push(current);
    }
    return map;
}
From source file:fr.paris.lutece.plugins.upload.web.UploadJspBean.java
/**
 * Deletes a directory recursively.
 *
 * @param directory The directory to delete
 */
private static void deleteDirectory(File directory) {
    // We use a Stack (LIFO) to keep track of the directories to delete
    Stack<File> dirsToDelete = new Stack<File>();

    // The stack is initialized with the main directory
    dirsToDelete.push(directory);

    // Loop until all directories have been deleted
    while (!dirsToDelete.empty()) {
        // Look at the directory on top of the stack (don't remove it!)
        File currentDir = (File) dirsToDelete.peek();

        // Are there any subdirectories?
        File[] subDirs = currentDir.listFiles(dirFilter);

        if (subDirs.length > 0) {
            // If so, add them to the stack
            for (int i = 0; i < subDirs.length; i++) {
                dirsToDelete.push(subDirs[i]);
            }
        } else {
            // If not, delete all files in the directory
            File[] files = currentDir.listFiles(fileFilter);

            for (int i = 0; i < files.length; i++) {
                files[i].delete();
            }

            // Then delete the directory
            currentDir.delete();

            // Then remove the directory from the stack
            dirsToDelete.pop();
        }
    }
}
From source file:com.quinsoft.zeidon.zeidonoperations.ZDRVROPR.java
public int ImportCSV_ToZeidonOI(View vTgt, String cpcCSV_FileName) throws IOException {
    Stack<ZNameItem> EntityStack = new Stack<ZNameItem>();    // tag stack (need not be unique)
    Stack<ZNameItem> AttributeStack = new Stack<ZNameItem>(); // tag stack (need not be unique)
    Stack<ZNameItem> ValueStack = new Stack<ZNameItem>();     // tag stack (need not be unique)
    Stack<ZNameItem> ContextStack = new Stack<ZNameItem>();   // tag stack (need not be unique)
    ZNameItem pEntityItem;
    ZNameItem pAttribItem;
    ZNameItem pValueItem;
    ZNameItem pContextItem;
    StringBuilder sbLine = new StringBuilder();
    StringBuilder sbAttributeValue = new StringBuilder();
    String pchNext;
    int hFileCSV;
    int lLineNbr;
    int k;
    int nRC;
    KZOEP1AA m_KZOEP1AA = new KZOEP1AA(vTgt);

    hFileCSV = m_KZOEP1AA.SysOpenFile(vTgt, cpcCSV_FileName, COREFILE_READ);
    if (hFileCSV == -1)
        return (-1);

    nRC = m_KZOEP1AA.SysReadLine(vTgt, sbLine, hFileCSV);
    if (nRC != 1)
        return (-2); // entity.attribute header line not read successfully

    // Even though we don't use the Context here, set up for it.
    fnSetEntityAttribList(EntityStack, AttributeStack, ValueStack, ContextStack, "", sbLine.toString());
    if (EntityStack.isEmpty() || AttributeStack.isEmpty())
        return (-3); // entity.attribute header line not well-formed

    lLineNbr = 1; // account for entity.attribute header line
    nRC = m_KZOEP1AA.SysReadLine(vTgt, sbLine, hFileCSV);
    while (nRC == 1) {
        lLineNbr++;
        if (sbLine != null && sbLine.length() != 0) {
            pchNext = sbLine.toString();
            for (k = 0; k < AttributeStack.size(); k++) {
                pEntityItem = EntityStack.get(k);
                pAttribItem = AttributeStack.get(k);
                pValueItem = ValueStack.get(k);
                pContextItem = ContextStack.get(k);
                if (k == 0)
                    CreateEntity(vTgt, pEntityItem.getName(), zPOS_LAST);
                else if (CheckExistenceOfEntity(vTgt, pEntityItem.getName()) != zCURSOR_SET)
                    CreateEntity(vTgt, pEntityItem.getName(), zPOS_LAST);

                pchNext = fnGetNextAttribute(pchNext, sbAttributeValue, lLineNbr);
                SetAttributeFromString(vTgt, pEntityItem.getName(), pAttribItem.getName(),
                        sbAttributeValue.toString());
                if (pchNext == null || pchNext.isEmpty())
                    break;
            }
        }
        nRC = m_KZOEP1AA.SysReadLine(vTgt, sbLine, hFileCSV);
    }
    m_KZOEP1AA.SysCloseFile(vTgt, hFileCSV, 0);
    return 0;
}
From source file:ca.weblite.xmlvm.XMLVM.java
/**
 * Finds all of the classes that may be dirty due to changes in
 * an initial set of classes.
 * @param changedClasses An initial set of classes that were modified.
 * @param dirtyClasses The full set of classes that may be dirty due to these
 * changes. This will necessarily be a superset of changedClasses.
 * @param depsDir The directory that contains the deps.
 * @throws IOException
 */
public void collectDirtyClasses(Set<String> changedClasses, Set<String> dirtyClasses, File depsDir)
        throws IOException {
    Set<String> processed = new HashSet<String>();
    Stack<String> stack = new Stack<>();
    stack.addAll(changedClasses);
    dirtyClasses.addAll(changedClasses);
    while (!stack.isEmpty()) {
        String cls = stack.pop();
        if (processed.contains(cls)) {
            continue;
        } else {
            processed.add(cls);
        }

        File depsFile = new File(depsDir, cls + ".deps");
        if (depsFile.exists()) {
            List<String> lines = FileUtils.readLines(depsFile);
            for (String line : lines) {
                String[] parts = line.split(" ");
                String clsName = parts[0];
                if (dirtyClasses.contains(clsName)) {
                    // This class is already marked dirty.
                    continue;
                }
                String kind = parts[1];
                switch (kind) {
                case "usage":
                    dirtyClasses.add(clsName);
                    break;
                case "super":
                case "interface":
                    dirtyClasses.add(clsName);
                    stack.push(clsName);
                    break;
                }
            }
        }
    }
}