List of usage examples for java.util.Stack.pop()
public synchronized E pop()
From source file: com.thoughtworks.go.util.FileUtil.java

public static File normalize(final String path) {
    Stack s = new Stack();
    String[] dissect = dissect(path);
    s.push(dissect[0]);

    StringTokenizer tok = new StringTokenizer(dissect[1], File.separator);
    while (tok.hasMoreTokens()) {
        String thisToken = tok.nextToken();
        if (".".equals(thisToken)) {
            continue;
        }
        if ("..".equals(thisToken)) {
            if (s.size() < 2) {
                // Cannot resolve it, so skip it.
                return new File(path);
            }
            s.pop();
        } else { // plain component
            s.push(thisToken);
        }
    }

    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < s.size(); i++) {
        if (i > 1) {
            // not before the filesystem root and not after it, since root
            // already contains one
            sb.append(File.separatorChar);
        }
        sb.append(s.elementAt(i));
    }
    return new File(sb.toString());
}
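A JDK-only sketch of the same idea for quick experimentation (the class name and input path are made up): "." segments are skipped, ".." pops the previously pushed component.

import java.util.Stack;
import java.util.StringTokenizer;

public class NormalizeDemo {
    public static void main(String[] args) {
        // Collapse "." and ".." segments of a POSIX-style path with a Stack.
        String path = "/a/./b/../c";
        Stack<String> s = new Stack<String>();
        StringTokenizer tok = new StringTokenizer(path, "/");
        while (tok.hasMoreTokens()) {
            String part = tok.nextToken();
            if (".".equals(part)) {
                continue;
            }
            if ("..".equals(part)) {
                if (!s.isEmpty()) {
                    s.pop(); // drop the previous component
                }
            } else {
                s.push(part);
            }
        }
        StringBuilder sb = new StringBuilder();
        for (String part : s) { // bottom-to-top order
            sb.append('/').append(part);
        }
        System.out.println(sb); // prints /a/c
    }
}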
From source file: gdt.data.entity.BaseHandler.java

/**
 * List all databases in the parent directory.
 * @param entiroot$ the parent directory.
 * @return The locator string.
 */
public static String[] bases(String entiroot$) {
    try {
        if (entiroot$ == null)
            return null;
        File entiroot = new File(entiroot$);
        File[] dirs = entiroot.listFiles();
        if (dirs == null)
            return null;
        File propertyIndex;
        Stack<String> s = new Stack<String>();
        for (int i = 0; i < dirs.length; i++) {
            if (!dirs[i].isDirectory())
                continue;
            propertyIndex = new File(dirs[i] + "/" + Entigrator.PROPERTY_INDEX);
            if (propertyIndex.exists() && propertyIndex.isFile())
                s.push(dirs[i].getPath());
        }
        int cnt = s.size();
        if (cnt < 1)
            return null;
        String[] sa = new String[cnt];
        for (int i = 0; i < cnt; i++)
            sa[i] = (String) s.pop();
        return sa;
    } catch (Exception e) {
        Logger.getLogger(BaseHandler.class.getName()).severe(e.toString());
        return null;
    }
}
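Hypothetical usage sketch, assuming gdt.data.entity.BaseHandler and its Entigrator dependency are on the classpath; the directory path is invented.

import gdt.data.entity.BaseHandler;

public class BasesDemo {
    public static void main(String[] args) {
        // bases() returns null when no database directories are found.
        String[] locators = BaseHandler.bases("/home/user/entihome");
        if (locators != null) {
            for (String locator : locators) {
                System.out.println(locator);
            }
        }
    }
}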
From source file: com.w20e.socrates.servlet.ValidatorHelper.java

/**
 * Add a single item to the stream or, if it's a group, add its controls.
 *
 * @param rItem
 * @param context
 * @param pContext
 * @param bundle
 */
private static void addItem(Renderable rItem, Stack<Group> parents,
        final Map<String, Map<String, String>> props, final Instance inst, Model model, RenderConfig cfg,
        final UTF8ResourceBundle bundle, final Locale locale) {

    /**
     * If it's a group, just add its controls to the context.
     */
    if (rItem instanceof Group) {
        parents.push((Group) rItem);
        for (Renderable rSubItem : ((Group) rItem).getItems()) {
            addItem(rSubItem, parents, props, inst, model, cfg, bundle, locale);
        }
        parents.pop();
    }

    if (!(rItem instanceof Control)) {
        return;
    }

    Control control = (Control) rItem;
    String bind = control.getBind();
    Node n;
    Map<String, String> localProps = new HashMap<String, String>();

    try {
        n = inst.getNode(bind);
    } catch (InvalidPathExpression e1) {
        return;
    }

    ItemProperties itemProps = model.getItemProperties(bind);
    if (itemProps == null) {
        itemProps = new ItemPropertiesImpl(bind);
    }

    try {
        // Is the item required?
        if (NodeValidator.isRequired(itemProps, inst, model)) {
            localProps.put("required", "true");
        } else {
            localProps.put("required", "false");
        }

        if (NodeValidator.isRelevant(itemProps, inst, model)) {
            localProps.put("relevant", "true");
            for (Group group : parents) {
                LOGGER.fine("Adding relevant to parent " + group.getId());
                Map<String, String> groupProps = new HashMap<String, String>();
                groupProps.put("relevant", "true");
                props.put("group:" + group.getId(), groupProps);
            }
        } else {
            localProps.put("relevant", "false");
            for (Group group : parents) {
                if (!props.containsKey("group:" + group.getId())) {
                    LOGGER.fine("Removing relevant of parent " + group.getId());
                    Map<String, String> groupProps = new HashMap<String, String>();
                    groupProps.put("relevant", "false");
                    props.put("group:" + group.getId(), groupProps);
                }
            }
        }

        if (NodeValidator.isReadOnly(itemProps, inst, model)) {
            localProps.put("readonly", "true");
        } else {
            localProps.put("readonly", "false");
        }

        // New values we might have
        if (itemProps.getCalculate() != null) {
            try {
                Object val = control.getDisplayValue(NodeValidator.getValue(n, itemProps, model, inst),
                        itemProps.getDatatype(), locale);
                localProps.put("value", val.toString());
            } catch (Exception e) {
                LOGGER.severe("Exception in resolve of value: " + e.getMessage());
            }
        }

        // Redo label if necessary
        if (control.getLabel().toString().indexOf("${") != -1) {
            String label = FillProcessor.processFills(control.getLabel().toString(), inst, model, cfg, locale);
            localProps.put("label", label);
        }

        // Redo hint if necessary
        if (control.getHint().toString().indexOf("${") != -1) {
            String hint = FillProcessor.processFills(control.getHint().toString(), inst, model, cfg, locale);
            localProps.put("hint", hint);
        }

        if (n.getValue() != null) {
            try {
                NodeValidator.validate(n, itemProps, inst, model);
                localProps.put("alert", "");
            } catch (Exception cv) {
                LOGGER.finest("Exception during validation" + cv.getMessage());
                LOGGER.finest("Node value: " + n.getValue() + "; type " + itemProps.getDatatype());
                String msg = "";
                if ("".equals(((Control) rItem).getAlert())) {
                    msg = translateError(cv.getMessage(), bundle);
                } else {
                    msg = ((Control) rItem).getAlert().toString();
                }
                localProps.put("alert", msg);
            }
        } else {
            localProps.put("alert", "");
        }
    } catch (Exception e) {
        LOGGER.severe("Couldn't resolve properties:" + e.getMessage());
    }

    props.put(rItem.getId(), localProps);
}
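The parents stack above is pushed before recursing into a group and popped afterwards, so at any point it holds the chain of enclosing groups. A self-contained, JDK-only sketch of that push/recurse/pop pattern with an invented nested structure:

import java.util.Stack;

public class GroupContextDemo {
    // Walk a nested structure, tracking enclosing "groups" with a Stack,
    // using the same push-before / pop-after pattern as addItem above.
    static void walk(Object[] tree, Stack<String> parents) {
        for (Object item : tree) {
            if (item instanceof Object[]) {
                parents.push("group-" + parents.size());
                walk((Object[]) item, parents);
                parents.pop(); // leave the group
            } else {
                System.out.println(parents + " -> " + item);
            }
        }
    }

    public static void main(String[] args) {
        Object[] form = { "name", new Object[] { "street", "city" }, "age" };
        walk(form, new Stack<String>());
    }
}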
From source file: com.netflix.spinnaker.clouddriver.appengine.artifacts.StorageUtils.java

public static void untarStreamToPath(InputStream inputStream, String basePath) throws IOException {
    class DirectoryTimestamp {
        public DirectoryTimestamp(File d, long m) {
            directory = d;
            millis = m;
        }

        public File directory;
        public long millis;
    }

    // Directories come in hierarchical order within the stream, but
    // we need to set their timestamps after their children have been written.
    Stack<DirectoryTimestamp> directoryStack = new Stack<DirectoryTimestamp>();

    File baseDirectory = new File(basePath);
    baseDirectory.mkdir();

    TarArchiveInputStream tarStream = new TarArchiveInputStream(inputStream);
    for (TarArchiveEntry entry = tarStream.getNextTarEntry(); entry != null; entry = tarStream
            .getNextTarEntry()) {
        File target = new File(baseDirectory, entry.getName());
        if (entry.isDirectory()) {
            directoryStack.push(new DirectoryTimestamp(target, entry.getModTime().getTime()));
            continue;
        }
        writeStreamToFile(tarStream, target);
        target.setLastModified(entry.getModTime().getTime());
    }

    while (!directoryStack.empty()) {
        DirectoryTimestamp info = directoryStack.pop();
        info.directory.setLastModified(info.millis);
    }
    tarStream.close();
}
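Hypothetical usage sketch, assuming StorageUtils and Apache Commons Compress are on the classpath; the archive and target paths are made up.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import com.netflix.spinnaker.clouddriver.appengine.artifacts.StorageUtils;

public class UntarDemo {
    public static void main(String[] args) throws IOException {
        // Unpack a local tar archive into a target directory.
        InputStream in = new FileInputStream("/tmp/app.tar");
        try {
            StorageUtils.untarStreamToPath(in, "/tmp/app");
        } finally {
            in.close();
        }
    }
}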
From source file: com.kadwa.hadoop.DistExec.java

/**
 * Initialize ExecFilesMapper specific job-configuration.
 *
 * @param conf    : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args    Arguments
 * @return true if it is necessary to launch a job.
 */
private static boolean setup(Configuration conf, JobConf jobConf, final Arguments args) throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());
    jobConf.set(EXEC_CMD_LABEL, args.execCmd);

    // set boolean values
    jobConf.setBoolean(Options.REDIRECT_ERROR_TO_OUT.propertyname,
            args.flags.contains(Options.REDIRECT_ERROR_TO_OUT));

    final String randomId = getRandomId();
    JobClient jClient = new JobClient(jobConf);
    Path stagingArea;
    try {
        stagingArea = JobSubmissionFiles.getStagingDir(jClient, conf);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }

    Path jobDirectory = new Path(stagingArea + NAME + "_" + randomId);
    FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
    FileSystem.mkdirs(FileSystem.get(jobDirectory.toUri(), conf), jobDirectory, mapredSysPerms);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

    FileSystem dstfs = args.dst.getFileSystem(conf);

    // get tokens for all the required FileSystems..
    TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), new Path[] { args.dst }, conf);

    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
        dstIsDir = dstfs.getFileStatus(args.dst).isDir();
    }

    // default logPath
    Path logPath = args.log;
    if (logPath == null) {
        String filename = "_" + NAME + "_logs_" + randomId;
        if (!dstExists || !dstIsDir) {
            Path parent = args.dst.getParent();
            if (!dstfs.exists(parent)) {
                dstfs.mkdirs(parent);
            }
            logPath = new Path(parent, filename);
        } else {
            logPath = new Path(args.dst, filename);
        }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);

    // create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);

    Path srcfilelist = new Path(jobDirectory, "_" + NAME + "_src_files");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    SequenceFile.Writer src_writer = SequenceFile.createWriter(jobfs, jobConf, srcfilelist, LongWritable.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    Path dstfilelist = new Path(jobDirectory, "_" + NAME + "_dst_files");
    SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobfs, jobConf, dstfilelist, Text.class,
            Text.class, SequenceFile.CompressionType.NONE);

    Path dstdirlist = new Path(jobDirectory, "_" + NAME + "_dst_dirs");
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobfs, jobConf, dstdirlist, Text.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    // handle the case where the destination directory doesn't exist
    // and we've only a single src directory.
    final boolean special = (args.srcs.size() == 1 && !dstExists);
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, byteCount = 0L, cbsyncs = 0L;
    try {
        for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext();) {
            final Path src = srcItr.next();
            FileSystem srcfs = src.getFileSystem(conf);
            FileStatus srcfilestat = srcfs.getFileStatus(src);
            Path root = special && srcfilestat.isDir() ? src : src.getParent();
            if (srcfilestat.isDir()) {
                ++srcCount;
            }

            Stack<FileStatus> pathstack = new Stack<FileStatus>();
            for (pathstack.push(srcfilestat); !pathstack.empty();) {
                FileStatus cur = pathstack.pop();
                FileStatus[] children = srcfs.listStatus(cur.getPath());
                for (int i = 0; i < children.length; i++) {
                    boolean skipfile = false;
                    final FileStatus child = children[i];
                    final String dst = makeRelative(root, child.getPath());

                    ++srcCount;

                    if (child.isDir()) {
                        pathstack.push(child);
                    } else {
                        if (!skipfile) {
                            ++fileCount;
                            byteCount += child.getLen();

                            if (LOG.isTraceEnabled()) {
                                LOG.trace("adding file " + child.getPath());
                            }

                            ++cnsyncf;
                            cbsyncs += child.getLen();
                            if (cnsyncf > SYNC_FILE_MAX || cbsyncs > BYTES_PER_MAP) {
                                src_writer.sync();
                                dst_writer.sync();
                                cnsyncf = 0;
                                cbsyncs = 0L;
                            }
                        }
                    }

                    if (!skipfile) {
                        src_writer.append(new LongWritable(child.isDir() ? 0 : child.getLen()),
                                new FilePair(child, dst));
                    }

                    dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
                }

                if (cur.isDir()) {
                    String dst = makeRelative(root, cur.getPath());
                    dir_writer.append(new Text(dst), new FilePair(cur, dst));
                    if (++dirsyn > SYNC_FILE_MAX) {
                        dirsyn = 0;
                        dir_writer.sync();
                    }
                }
            }
        }
    } finally {
        checkAndClose(src_writer);
        checkAndClose(dst_writer);
        checkAndClose(dir_writer);
    }

    FileStatus dststatus = null;
    try {
        dststatus = dstfs.getFileStatus(args.dst);
    } catch (FileNotFoundException fnfe) {
        LOG.info(args.dst + " does not exist.");
    }

    // create dest path dir if copying > 1 file
    if (dststatus == null) {
        if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
            throw new IOException("Failed to create" + args.dst);
        }
    }

    final Path sorted = new Path(jobDirectory, "_" + NAME + "_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);

    Path tmpDir = new Path(
            (dstExists && !dstIsDir) || (!dstExists && srcCount == 1) ? args.dst.getParent() : args.dst,
            "_" + NAME + "_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());

    LOG.info("sourcePathsCount=" + srcCount);
    LOG.info("filesToExecCount=" + fileCount);
    LOG.info("bytesToExecCount=" + StringUtils.humanReadableInt(byteCount));
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
    setMapCount(fileCount, jobConf);
    return fileCount > 0;
}
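The directory walk inside setup() is non-recursive: it pushes the starting FileStatus, then keeps popping until the stack is empty, pushing child directories as it meets them. A reduced sketch of just that walk (assumes a Hadoop client Configuration on the classpath; the starting path is made up):

import java.io.IOException;
import java.util.Stack;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsWalkDemo {
    // Iterative walk of a file tree with a Stack, as in setup() above.
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path root = new Path("/tmp"); // made-up starting point

        long files = 0, bytes = 0;
        Stack<FileStatus> pathstack = new Stack<FileStatus>();
        pathstack.push(fs.getFileStatus(root));
        while (!pathstack.empty()) {
            FileStatus cur = pathstack.pop();
            for (FileStatus child : fs.listStatus(cur.getPath())) {
                if (child.isDirectory()) {
                    pathstack.push(child); // descend later
                } else {
                    files++;
                    bytes += child.getLen();
                }
            }
        }
        System.out.println(files + " files, " + bytes + " bytes");
    }
}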
From source file: de.codesourcery.jasm16.ast.ASTUtils.java

public static Iterator<ASTNode> createDepthFirst(ASTNode node) {
    final Stack<ASTNode> stack = new Stack<ASTNode>();
    if (node != null) {
        stack.push(node);
    }

    return new Iterator<ASTNode>() {

        @Override
        public boolean hasNext() {
            return !stack.isEmpty();
        }

        @Override
        public ASTNode next() {
            ASTNode n = stack.pop();
            for (ASTNode child : n.getChildren()) {
                stack.push(child);
            }
            return n;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("Not implemented");
        }
    };
}
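A self-contained sketch of the same stack-backed iterator over a throwaway Node type (JDK only). The order matters: each node is popped first and its children are pushed afterwards; children then come back in reverse insertion order, because the last child pushed is popped first.

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Stack;

public class DepthFirstDemo {
    // Minimal stand-in for ASTNode: a label plus children.
    static class Node {
        final String label;
        final List<Node> children;

        Node(String label, Node... children) {
            this.label = label;
            this.children = Arrays.asList(children);
        }
    }

    // Same idea as createDepthFirst: pop a node, push its children.
    static Iterator<Node> depthFirst(final Node root) {
        final Stack<Node> stack = new Stack<Node>();
        if (root != null) {
            stack.push(root);
        }
        return new Iterator<Node>() {
            public boolean hasNext() {
                return !stack.isEmpty();
            }

            public Node next() {
                Node n = stack.pop();
                for (Node child : n.children) {
                    stack.push(child);
                }
                return n;
            }

            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    public static void main(String[] args) {
        Node root = new Node("root", new Node("a", new Node("a1")), new Node("b"));
        for (Iterator<Node> it = depthFirst(root); it.hasNext();) {
            System.out.println(it.next().label); // root, b, a, a1
        }
    }
}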
From source file: fr.paris.lutece.plugins.upload.web.UploadJspBean.java

/**
 * Returns the total size of a directory.
 * @param directory The directory
 * @return The total size
 */
private static long getDirectorySize(File directory) {
    long lResult = 0;

    // We use a Stack (LIFO) to keep track of the unprocessed directories
    Stack<File> dirsToProcess = new Stack<File>();

    // The stack is initialized with the main directory
    dirsToProcess.push(directory);

    // Loop until all directories have been processed
    while (!dirsToProcess.empty()) {
        // Get a new directory from the stack
        File currentDir = dirsToProcess.pop();

        // Don't forget the directory's own size!
        lResult += currentDir.length();

        // Add the local files' size to the global size
        File[] files = currentDir.listFiles(fileFilter);
        for (int i = 0; i < files.length; i++) {
            lResult += files[i].length();
        }

        // Add the sub-directories to the stack
        File[] subDirs = currentDir.listFiles(dirFilter);
        for (int i = 0; i < subDirs.length; i++) {
            dirsToProcess.push(subDirs[i]);
        }
    }

    return lResult;
}
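A self-contained variant for quick testing that drops the bean's fileFilter/dirFilter fields and uses plain listFiles(); the path in main() is made up.

import java.io.File;
import java.util.Stack;

public class DirSizeDemo {
    // Same stack-driven traversal as getDirectorySize above,
    // without the filter fields of the original bean.
    static long sizeOf(File directory) {
        long total = 0;
        Stack<File> dirsToProcess = new Stack<File>();
        dirsToProcess.push(directory);
        while (!dirsToProcess.empty()) {
            File currentDir = dirsToProcess.pop();
            total += currentDir.length();
            File[] entries = currentDir.listFiles();
            if (entries == null) {
                continue; // unreadable directory
            }
            for (File entry : entries) {
                if (entry.isDirectory()) {
                    dirsToProcess.push(entry);
                } else {
                    total += entry.length();
                }
            }
        }
        return total;
    }

    public static void main(String[] args) {
        // Hypothetical path; point it at any local directory.
        System.out.println(sizeOf(new File("/tmp")) + " bytes");
    }
}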
From source file: com.sunyue.util.calculator.core.ExpressionConverter.java

/**
 * Convert an infix expression to a postfix expression.
 *
 * @param exp
 *            expression parsed by ExpressionParser
 * @return postfix expression
 */
public static Object[] convert(Object[] exp) {
    if (ArrayUtils.isEmpty(exp)) {
        throw new CalculationException("Expression can not be empty");
    }
    // remember if brackets are coupled
    int coupled = 0;
    // output postfix expression
    List<Object> out = new ArrayList<Object>();
    Stack<Object> opStack = new Stack<Object>();
    for (int i = 0; i < exp.length; i++) {
        if (exp[i] instanceof Operator) {
            // operator
            Operator op = (Operator) exp[i];
            while (true) {
                if (opStack.isEmpty()) {
                    opStack.push(op);
                    break;
                } else {
                    Object obj = opStack.peek();
                    if (!(obj instanceof Bracket)) {
                        Operator preOp = (Operator) opStack.peek();
                        if (op.getPriority() <= preOp.getPriority()) {
                            // pop and output operator with not lower priority
                            out.add(opStack.pop());
                        } else {
                            // push otherwise
                            opStack.push(op);
                            break;
                        }
                    } else {
                        // push when bracket on top
                        opStack.push(op);
                        break;
                    }
                }
            }
        } else if (Bracket.LEFT_BRACKET.equals(exp[i])) {
            opStack.push(exp[i]);
            coupled++;
        } else if (Bracket.RIGHT_BRACKET.equals(exp[i])) {
            if (coupled <= 0) {
                throw new CalculationException("Brackets are not coupled, missing left bracket (");
            }
            while (true) {
                Object op = opStack.pop();
                if (Bracket.LEFT_BRACKET.equals(op)) {
                    // eliminate coupled brackets
                    break;
                } else {
                    // pop and output until coupled left bracket
                    out.add(op);
                }
            }
            coupled--;
        } else {
            // general numbers
            out.add(exp[i]);
        }
    }
    if (coupled != 0) {
        throw new CalculationException("Brackets are not coupled, missing right bracket )");
    }
    // output rest elements
    while (!opStack.isEmpty()) {
        out.add(opStack.pop());
    }
    return out.toArray();
}
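A reduced, JDK-only sketch of the same shunting-yard idea over string tokens (four left-associative operators, no brackets), showing how 1 + 2 * 3 becomes 1 2 3 * +.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;

public class ShuntingYardDemo {
    public static void main(String[] args) {
        Map<String, Integer> priority = new HashMap<String, Integer>();
        priority.put("+", 1);
        priority.put("-", 1);
        priority.put("*", 2);
        priority.put("/", 2);

        String[] infix = { "1", "+", "2", "*", "3" };
        List<String> out = new ArrayList<String>();
        Stack<String> opStack = new Stack<String>();

        for (String token : infix) {
            if (priority.containsKey(token)) {
                // pop operators of equal or higher priority, then push
                while (!opStack.isEmpty() && priority.get(opStack.peek()) >= priority.get(token)) {
                    out.add(opStack.pop());
                }
                opStack.push(token);
            } else {
                out.add(token); // operand
            }
        }
        while (!opStack.isEmpty()) {
            out.add(opStack.pop());
        }
        System.out.println(out); // [1, 2, 3, *, +]
    }
}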
From source file: com.frostwire.android.gui.util.FileUtils.java

/**
 * Given a folder path it'll return all the files contained within it and its subfolders
 * as a flat set of Files.
 *
 * Non-recursive implementation, up to 20% faster in tests than recursive implementation. :)
 *
 * @author gubatron
 * @param folder
 * @param extensions If you only need certain files filtered by their extensions, use this string array (without the "."), or set to null if you want all files. e.g. ["txt","jpg"] if you only want text files and jpegs.
 *
 * @return The set of files.
 */
public static Collection<File> getAllFolderFiles(File folder, String[] extensions) {
    Set<File> results = new HashSet<File>();
    Stack<File> subFolders = new Stack<File>();
    File currentFolder = folder;
    while (currentFolder != null && currentFolder.isDirectory() && currentFolder.canRead()) {
        File[] fs = null;
        try {
            fs = currentFolder.listFiles();
        } catch (SecurityException e) {
        }

        if (fs != null && fs.length > 0) {
            for (File f : fs) {
                if (!f.isDirectory()) {
                    if (extensions == null || FilenameUtils.isExtension(f.getName(), extensions)) {
                        results.add(f);
                    }
                } else {
                    subFolders.push(f);
                }
            }
        }

        if (!subFolders.isEmpty()) {
            currentFolder = subFolders.pop();
        } else {
            currentFolder = null;
        }
    }

    return results;
}
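Hypothetical usage sketch, assuming com.frostwire.android.gui.util.FileUtils is on the classpath; the folder and extension filter are made up.

import java.io.File;
import java.util.Collection;

import com.frostwire.android.gui.util.FileUtils;

public class ListFilesDemo {
    public static void main(String[] args) {
        // Pass null instead of the array to accept every file.
        File musicDir = new File("/sdcard/Music");
        Collection<File> audio = FileUtils.getAllFolderFiles(musicDir, new String[] { "mp3", "ogg" });
        for (File f : audio) {
            System.out.println(f.getAbsolutePath());
        }
    }
}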
From source file: org.alfresco.web.bean.wcm.AVMUtil.java

/**
 * Creates all directories for a path if they do not already exist.
 */
public static void makeAllDirectories(final String avmDirectoryPath) {
    final AVMService avmService = getAVMService();
    // LOGGER.debug("mkdir -p " + avmDirectoryPath);
    String s = avmDirectoryPath;
    final Stack<String[]> dirNames = new Stack<String[]>();
    while (s != null) {
        try {
            if (avmService.lookup(-1, s) != null) {
                // LOGGER.debug("path " + s + " exists");
                break;
            }
        } catch (AVMNotFoundException avmfe) {
        }
        final String[] sb = AVMNodeConverter.SplitBase(s);
        s = sb[0];
        // LOGGER.debug("pushing " + sb[1]);
        dirNames.push(sb);
    }

    while (!dirNames.isEmpty()) {
        final String[] sb = dirNames.pop();
        // LOGGER.debug("creating " + sb[1] + " in " + sb[0]);
        avmService.createDirectory(sb[0], sb[1]);
    }
}
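A JDK-only sketch of the same pattern against the local file system instead of the AVM repository: push path components until an existing ancestor is found, then pop and create from the shallowest missing directory down. The target path is made up.

import java.io.File;
import java.util.Stack;

public class MkdirsDemo {
    static void makeAll(File dir) {
        Stack<File> toCreate = new Stack<File>();
        File current = dir;
        while (current != null && !current.exists()) {
            toCreate.push(current);
            current = current.getParentFile();
        }
        while (!toCreate.isEmpty()) {
            File next = toCreate.pop(); // shallowest missing directory first
            System.out.println("creating " + next);
            next.mkdir();
        }
    }

    public static void main(String[] args) {
        // Hypothetical target path.
        makeAll(new File("/tmp/a/b/c"));
    }
}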