List of usage examples for java.util.Stack.push
public E push(E item)
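Before the collected real-world examples, a minimal self-contained sketch of the push contract (the class and variable names are illustrative): push appends the item to the top of the stack and returns its argument, and java.util.Stack (a synchronized Vector subclass) serves later pops in LIFO order.

import java.util.Stack;

public class StackPushDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();

        // push returns its argument, so the pushed value can be used inline
        String pushed = stack.push("first");
        stack.push("second");

        System.out.println(pushed);        // first
        System.out.println(stack.peek());  // second (top of stack, not removed)
        System.out.println(stack.pop());   // second (LIFO: last pushed, first popped)
        System.out.println(stack.pop());   // first
        System.out.println(stack.empty()); // true
    }
}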
From source file:com.espertech.esper.epl.join.plan.NStreamOuterQueryPlanBuilder.java
private static QueryPlanNode buildPlanNode(int numStreams, int streamNo, String[] streamNames,
        QueryGraph queryGraph, OuterInnerDirectionalGraph outerInnerGraph, OuterJoinDesc[] outerJoinDescList,
        InnerJoinGraph innerJoinGraph, QueryPlanIndex[] indexSpecs, EventType[] typesPerStream,
        boolean[] ishistorical, DependencyGraph dependencyGraph,
        HistoricalStreamIndexList[] historicalStreamIndexLists, ExprEvaluatorContext exprEvaluatorContext)
        throws ExprValidationException {
    // For each stream build an array of substreams, considering required streams (inner joins) first
    // The order is relevant, therefore preserving order via a LinkedHashMap.
    LinkedHashMap<Integer, int[]> substreamsPerStream = new LinkedHashMap<Integer, int[]>();
    boolean[] requiredPerStream = new boolean[numStreams];

    // Recursively populate the required (outer) and optional (inner) relationships
    // of this stream and the substream
    Set<Integer> completedStreams = new HashSet<Integer>();
    // keep track of tree path as only those stream events are always available to historical streams
    Stack<Integer> streamCallStack = new Stack<Integer>();
    streamCallStack.push(streamNo);

    // For all inner-joins, the algorithm is slightly different
    if (innerJoinGraph.isAllInnerJoin()) {
        Arrays.fill(requiredPerStream, true);
        recursiveBuildInnerJoin(streamNo, streamCallStack, queryGraph, completedStreams, substreamsPerStream,
                dependencyGraph);

        // compute a best chain to see if all streams are handled and add the remaining
        NStreamQueryPlanBuilder.BestChainResult bestChain = NStreamQueryPlanBuilder.computeBestPath(streamNo,
                queryGraph, dependencyGraph);
        addNotYetNavigated(streamNo, numStreams, substreamsPerStream, bestChain);
    } else {
        recursiveBuild(streamNo, streamCallStack, queryGraph, outerInnerGraph, innerJoinGraph,
                completedStreams, substreamsPerStream, requiredPerStream, dependencyGraph);
    }

    // verify the substreamsPerStream, all streams must exist and be linked
    verifyJoinedPerStream(streamNo, substreamsPerStream);

    // build list of instructions for lookup
    List<LookupInstructionPlan> lookupInstructions = buildLookupInstructions(streamNo, substreamsPerStream,
            requiredPerStream, streamNames, queryGraph, indexSpecs, typesPerStream, outerJoinDescList,
            ishistorical, historicalStreamIndexLists, exprEvaluatorContext);

    // build strategy tree for putting the result back together
    BaseAssemblyNode assemblyTopNode = AssemblyStrategyTreeBuilder.build(streamNo, substreamsPerStream,
            requiredPerStream);
    List<BaseAssemblyNode> assemblyInstructions = BaseAssemblyNode.getDescendentNodesBottomUp(assemblyTopNode);

    return new LookupInstructionQueryPlanNode(streamNo, streamNames[streamNo], numStreams, requiredPerStream,
            lookupInstructions, assemblyInstructions);
}
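Note the single push before the recursive descent: per the code's own comment, streamCallStack records the tree path of streams currently being visited, so the recursion in recursiveBuild/recursiveBuildInnerJoin can tell which stream events are available to historical streams at each point.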
From source file:fr.paris.lutece.plugins.upload.web.UploadJspBean.java
/**
 * Returns the total size of a directory.
 * @param directory The directory
 * @return The total size
 */
private static long getDirectorySize(File directory) {
    long lResult = 0;

    // We use a Stack (LIFO) to keep track of the unprocessed directories
    Stack<File> dirsToProcess = new Stack<File>();

    // The stack is initialized with the main directory
    dirsToProcess.push(directory);

    // Loop until all directories have been processed
    while (!dirsToProcess.empty()) {
        // Get a new directory from the stack
        File currentDir = dirsToProcess.pop();

        // Don't forget the directory's own size!
        lResult += currentDir.length();

        // Add the local files' size to the global size
        File[] files = currentDir.listFiles(fileFilter);
        for (int i = 0; i < files.length; i++) {
            lResult += files[i].length();
        }

        // Add the sub-directories to the stack
        File[] subDirs = currentDir.listFiles(dirFilter);
        for (int i = 0; i < subDirs.length; i++) {
            dirsToProcess.push(subDirs[i]);
        }
    }

    return lResult;
}
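The same iterative traversal can be written against java.util.ArrayDeque, which the Stack Javadoc recommends for new code. A minimal sketch, assuming no fileFilter/dirFilter fields (plain listFiles plus isDirectory checks instead) and guarding the null that listFiles can return; the class and method names are illustrative:

import java.io.File;
import java.util.ArrayDeque;
import java.util.Deque;

public class DirectorySize {
    // Iterative directory-size computation; Deque.push/pop give the same LIFO behavior as Stack
    static long sizeOf(File directory) {
        long total = 0;
        Deque<File> dirs = new ArrayDeque<File>();
        dirs.push(directory);
        while (!dirs.isEmpty()) {
            File dir = dirs.pop();
            total += dir.length();
            File[] entries = dir.listFiles();
            if (entries == null) {
                continue; // I/O error or not a directory: skip
            }
            for (File entry : entries) {
                if (entry.isDirectory()) {
                    dirs.push(entry); // pending subfolder, processed in a later iteration
                } else {
                    total += entry.length();
                }
            }
        }
        return total;
    }
}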
From source file:fr.paris.lutece.plugins.upload.web.UploadJspBean.java
/**
 * Deletes a directory recursively.
 *
 * @param directory The directory to delete
 */
private static void deleteDirectory(File directory) {
    // We use a Stack (LIFO) to keep track of the directories to delete
    Stack<File> dirsToDelete = new Stack<File>();

    // The stack is initialized with the main directory
    dirsToDelete.push(directory);

    // Loop until all directories have been deleted
    while (!dirsToDelete.empty()) {
        // Look at the directory on top of the stack (don't remove it!)
        File currentDir = (File) dirsToDelete.peek();

        // Are there any subdirectories?
        File[] subDirs = currentDir.listFiles(dirFilter);

        if (subDirs.length > 0) {
            // If so, add them to the stack
            for (int i = 0; i < subDirs.length; i++) {
                dirsToDelete.push(subDirs[i]);
            }
        } else {
            // If not, delete all files in the directory
            File[] files = currentDir.listFiles(fileFilter);

            for (int i = 0; i < files.length; i++) {
                files[i].delete();
            }

            // Then delete the directory
            currentDir.delete();

            // Then remove the directory from the stack
            dirsToDelete.pop();
        }
    }
}
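Note the use of peek rather than pop at the top of the loop: a directory stays on the stack until all of its subdirectories have been pushed and processed, which yields the post-order (children-first) deletion that removing non-empty directories requires. Only once a directory has no subdirectories left is it emptied, deleted, and finally popped.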
From source file:Main.java
/**
 * Returns the XPath to retrieve targetElement from rootElement. rootElement may be null; in this case
 * the XPath starts with and includes the farthest non-null ancestor of targetElement. If
 * rootElement == targetElement, an empty string is returned.
 * @param includeElementIndex Indicates if the element indices in the form elementName[n] should
 * be included in the XPath.
 * @param namespacesMap Maps namespace ids to namespace URIs.
 */
public static String getXPath(Element rootElement, Element targetElement, boolean includeElementIndex,
        Map<String, String> namespacesMap) {
    Stack<Element> elementPath = new Stack<Element>();

    // since we need the mapping the other way round, we invert the map
    Map<String, String> namespaceUriToIdMap = new HashMap<String, String>();
    for (Entry<String, String> entry : namespacesMap.entrySet()) {
        namespaceUriToIdMap.put(entry.getValue(), entry.getKey());
    }

    // recursively find all ancestors of targetElement (up to, not including, rootElement)
    {
        Element currentElement = targetElement;
        while (currentElement != null && currentElement != rootElement) {
            elementPath.push(currentElement);
            Node parent = currentElement.getParentNode();
            if (parent instanceof Element) {
                currentElement = (Element) currentElement.getParentNode();
            } else {
                currentElement = null;
            }
        }
    }

    // construct XPath
    StringBuilder builder = new StringBuilder();
    while (!elementPath.isEmpty()) {
        Element currentElement = elementPath.pop();
        if (builder.length() > 0) {
            // don't include "/" at the beginning
            builder.append("/");
        }
        if (namespacesMap != null) {
            String namespace = currentElement.getNamespaceURI();
            if (namespace != null) {
                namespace = namespaceUriToIdMap.get(namespace);
                builder.append(namespace);
                builder.append(":");
            }
        }
        builder.append(currentElement.getLocalName());
        if (includeElementIndex) {
            int index = getElementIndex(currentElement);
            builder.append("[");
            builder.append(index);
            builder.append("]");
        }
    }
    return builder.toString();
}
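Here the stack serves as an order reverser: ancestors are pushed while walking up from targetElement toward the root, then popped root-first when the XPath string is assembled, so the path segments come out in document order.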
From source file:org.apache.struts2.components.Include.java
public static String getContextRelativePath(ServletRequest request, String relativePath) {
    String returnValue;

    if (relativePath.startsWith("/")) {
        returnValue = relativePath;
    } else if (!(request instanceof HttpServletRequest)) {
        returnValue = relativePath;
    } else {
        HttpServletRequest hrequest = (HttpServletRequest) request;
        String uri = (String) request.getAttribute("javax.servlet.include.servlet_path");

        if (uri == null) {
            uri = RequestUtils.getServletPath(hrequest);
        }

        returnValue = uri.substring(0, uri.lastIndexOf('/')) + '/' + relativePath;
    }

    // .. is illegal in an absolute path according to the Servlet Spec and will cause
    // known problems on Orion application servers.
    if (returnValue.indexOf("..") != -1) {
        Stack stack = new Stack();
        StringTokenizer pathParts = new StringTokenizer(returnValue.replace('\\', '/'), "/");

        while (pathParts.hasMoreTokens()) {
            String part = pathParts.nextToken();

            if (!part.equals(".")) {
                if (part.equals("..")) {
                    stack.pop();
                } else {
                    stack.push(part);
                }
            }
        }

        StringBuffer flatPathBuffer = new StringBuffer();

        for (int i = 0; i < stack.size(); i++) {
            flatPathBuffer.append("/").append(stack.elementAt(i));
        }

        returnValue = flatPathBuffer.toString();
    }

    return returnValue;
}
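The ".."-collapsing core of this method, extracted into a runnable sketch for illustration (class and method names are hypothetical, the logic mirrors the example): ".." pops the previous segment, "." is dropped, and everything else is pushed. Because Stack extends Vector, elementAt(i) iterates bottom-to-top, rebuilding the path left-to-right.

import java.util.Stack;
import java.util.StringTokenizer;

public class PathFlattenDemo {
    static String flatten(String path) {
        Stack<String> stack = new Stack<String>();
        StringTokenizer pathParts = new StringTokenizer(path.replace('\\', '/'), "/");
        while (pathParts.hasMoreTokens()) {
            String part = pathParts.nextToken();
            if (!part.equals(".")) {
                if (part.equals("..")) {
                    // discard the previous segment; like the original, a leading ".."
                    // with nothing to discard throws EmptyStackException
                    stack.pop();
                } else {
                    stack.push(part); // ordinary segment: keep it
                }
            }
        }
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < stack.size(); i++) {
            sb.append("/").append(stack.elementAt(i));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(flatten("/pages/common/../header.jsp")); // /pages/header.jsp
    }
}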
From source file:com.kadwa.hadoop.DistExec.java
/**
 * Initialize ExecFilesMapper specific job-configuration.
 *
 * @param conf : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args Arguments
 * @return true if it is necessary to launch a job.
 */
private static boolean setup(Configuration conf, JobConf jobConf, final Arguments args) throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());
    jobConf.set(EXEC_CMD_LABEL, args.execCmd);

    // set boolean values
    jobConf.setBoolean(Options.REDIRECT_ERROR_TO_OUT.propertyname,
            args.flags.contains(Options.REDIRECT_ERROR_TO_OUT));

    final String randomId = getRandomId();
    JobClient jClient = new JobClient(jobConf);
    Path stagingArea;
    try {
        stagingArea = JobSubmissionFiles.getStagingDir(jClient, conf);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
    Path jobDirectory = new Path(stagingArea + NAME + "_" + randomId);
    FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
    FileSystem.mkdirs(FileSystem.get(jobDirectory.toUri(), conf), jobDirectory, mapredSysPerms);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

    FileSystem dstfs = args.dst.getFileSystem(conf);

    // get tokens for all the required FileSystems..
    TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), new Path[] { args.dst }, conf);

    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
        dstIsDir = dstfs.getFileStatus(args.dst).isDir();
    }

    // default logPath
    Path logPath = args.log;
    if (logPath == null) {
        String filename = "_" + NAME + "_logs_" + randomId;
        if (!dstExists || !dstIsDir) {
            Path parent = args.dst.getParent();
            if (!dstfs.exists(parent)) {
                dstfs.mkdirs(parent);
            }
            logPath = new Path(parent, filename);
        } else {
            logPath = new Path(args.dst, filename);
        }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);

    // create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);

    Path srcfilelist = new Path(jobDirectory, "_" + NAME + "_src_files");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    SequenceFile.Writer src_writer = SequenceFile.createWriter(jobfs, jobConf, srcfilelist,
            LongWritable.class, FilePair.class, SequenceFile.CompressionType.NONE);

    Path dstfilelist = new Path(jobDirectory, "_" + NAME + "_dst_files");
    SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobfs, jobConf, dstfilelist, Text.class,
            Text.class, SequenceFile.CompressionType.NONE);

    Path dstdirlist = new Path(jobDirectory, "_" + NAME + "_dst_dirs");
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobfs, jobConf, dstdirlist, Text.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    // handle the case where the destination directory doesn't exist
    // and we've only a single src directory.
    final boolean special = (args.srcs.size() == 1 && !dstExists);
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, byteCount = 0L, cbsyncs = 0L;
    try {
        for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext();) {
            final Path src = srcItr.next();
            FileSystem srcfs = src.getFileSystem(conf);
            FileStatus srcfilestat = srcfs.getFileStatus(src);
            Path root = special && srcfilestat.isDir() ? src : src.getParent();
            if (srcfilestat.isDir()) {
                ++srcCount;
            }

            Stack<FileStatus> pathstack = new Stack<FileStatus>();
            for (pathstack.push(srcfilestat); !pathstack.empty();) {
                FileStatus cur = pathstack.pop();
                FileStatus[] children = srcfs.listStatus(cur.getPath());
                for (int i = 0; i < children.length; i++) {
                    boolean skipfile = false;
                    final FileStatus child = children[i];
                    final String dst = makeRelative(root, child.getPath());

                    ++srcCount;

                    if (child.isDir()) {
                        pathstack.push(child);
                    } else {
                        if (!skipfile) {
                            ++fileCount;
                            byteCount += child.getLen();

                            if (LOG.isTraceEnabled()) {
                                LOG.trace("adding file " + child.getPath());
                            }

                            ++cnsyncf;
                            cbsyncs += child.getLen();
                            if (cnsyncf > SYNC_FILE_MAX || cbsyncs > BYTES_PER_MAP) {
                                src_writer.sync();
                                dst_writer.sync();
                                cnsyncf = 0;
                                cbsyncs = 0L;
                            }
                        }
                    }

                    if (!skipfile) {
                        src_writer.append(new LongWritable(child.isDir() ? 0 : child.getLen()),
                                new FilePair(child, dst));
                    }

                    dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
                }

                if (cur.isDir()) {
                    String dst = makeRelative(root, cur.getPath());
                    dir_writer.append(new Text(dst), new FilePair(cur, dst));
                    if (++dirsyn > SYNC_FILE_MAX) {
                        dirsyn = 0;
                        dir_writer.sync();
                    }
                }
            }
        }
    } finally {
        checkAndClose(src_writer);
        checkAndClose(dst_writer);
        checkAndClose(dir_writer);
    }

    FileStatus dststatus = null;
    try {
        dststatus = dstfs.getFileStatus(args.dst);
    } catch (FileNotFoundException fnfe) {
        LOG.info(args.dst + " does not exist.");
    }

    // create dest path dir if copying > 1 file
    if (dststatus == null) {
        if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
            throw new IOException("Failed to create" + args.dst);
        }
    }

    final Path sorted = new Path(jobDirectory, "_" + NAME + "_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);

    Path tmpDir = new Path(
            (dstExists && !dstIsDir) || (!dstExists && srcCount == 1) ? args.dst.getParent() : args.dst,
            "_" + NAME + "_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
    LOG.info("sourcePathsCount=" + srcCount);
    LOG.info("filesToExecCount=" + fileCount);
    LOG.info("bytesToExecCount=" + StringUtils.humanReadableInt(byteCount));
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
    setMapCount(fileCount, jobConf);
    return fileCount > 0;
}
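Worth noting is the loop header for (pathstack.push(srcfilestat); !pathstack.empty();): the seed FileStatus is pushed in the for-initializer, and the walk continues until the stack of pending directories is drained, with each directory child pushed back for a later iteration. This is the same stack-as-worklist idiom as the directory examples above, applied to an HDFS listing instead of java.io.File.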
From source file:com.wavemaker.json.JSONMarshaller.java
private static boolean handleObjectInternal(Object object, Object root, String key, Object value,
        boolean firstProperty, JSONState js, Writer writer, Stack<Object> touchedObjects,
        Stack<String> propertyNames, boolean sort, FieldDefinition fieldDefinition, int arrayLevel,
        TypeState typeState, boolean prettyPrint, int level, Logger logger) throws IOException {
    if (fieldDefinition == null) {
        throw new NullArgumentException("fieldDefinition");
    }

    propertyNames.push(key);
    String propertyName = getPropertyName(propertyNames, js);
    try {
        if (js.getExcludes().contains(propertyName)) {
            return firstProperty;
        }

        if (js.getPropertyFilter() != null) {
            if (js.getPropertyFilter().filter(object, key, value)) {
                return firstProperty;
            }
        }

        // cycle
        if (isCycle(value, touchedObjects, propertyName, js)) {
            if (logger.isInfoEnabled()) {
                logger.info(MessageResource.JSON_CYCLE_FOUND.getMessage(value, js.getCycleHandler()));
            }

            if (js.getCycleHandler().equals(JSONState.CycleHandler.FAIL)) {
                throw new WMRuntimeException(MessageResource.JSON_CYCLE_FOUND, value, js.getCycleHandler());
            } else if (js.getCycleHandler().equals(JSONState.CycleHandler.NULL)) {
                value = null;
            } else if (js.getCycleHandler().equals(JSONState.CycleHandler.NO_PROPERTY)) {
                return firstProperty;
            } else {
                throw new WMRuntimeException(MessageResource.JSON_BAD_CYCLE_HANDLER, js.getCycleHandler());
            }
        }

        if (!firstProperty) {
            writer.write(',');
        }
        if (prettyPrint) {
            writer.write("\n");
            writeIndents(writer, level);
        }

        if (js.isUnquoteKeys()) {
            writer.write(key + ":");
        } else {
            writer.write("\"" + key + "\":");
        }
        if (prettyPrint) {
            writer.write(" ");
        }

        doMarshal(writer, value, root, js, sort, false, touchedObjects, propertyNames, fieldDefinition,
                arrayLevel, typeState, prettyPrint, level, logger);

        if (firstProperty) {
            firstProperty = false;
        }

        return firstProperty;
    } finally {
        propertyNames.pop();
    }
}
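The push/pop pair here is balanced by a try/finally: propertyNames.push(key) is immediately followed by a try block whose finally clause pops, so the stack of property names is restored on every exit path, including the early returns for excluded or filtered properties and any exception thrown during marshalling.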
From source file:org.apache.camel.util.FileUtil.java
/**
 * Compacts a path by stacking it and reducing <tt>..</tt>
 */
public static String compactPath(String path) {
    if (path == null) {
        return null;
    }

    // only normalize path if it contains .. as we want to avoid: path/../sub/../sub2 as this can lead to trouble
    if (path.indexOf("..") == -1) {
        return path;
    }

    // only normalize if contains a path separator
    if (path.indexOf(File.separator) == -1) {
        return path;
    }

    Stack<String> stack = new Stack<String>();

    String separatorRegex = File.separator;
    if (FileUtil.isWindows()) {
        separatorRegex = "\\\\";
    }
    String[] parts = path.split(separatorRegex);
    for (String part : parts) {
        if (part.equals("..") && !stack.isEmpty()) {
            // only pop if there is a previous path
            stack.pop();
        } else {
            stack.push(part);
        }
    }

    // build path based on stack
    StringBuilder sb = new StringBuilder();
    for (Iterator<String> it = stack.iterator(); it.hasNext();) {
        sb.append(it.next());
        if (it.hasNext()) {
            sb.append(File.separator);
        }
    }

    return sb.toString();
}
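Unlike the Struts example above, this version guards the pop with !stack.isEmpty(), so a leading ".." with nothing before it to remove is pushed and preserved in the result instead of triggering an EmptyStackException.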
From source file:com.frostwire.android.gui.util.FileUtils.java
/**
 * Given a folder path it'll return all the files contained within it and its subfolders
 * as a flat set of Files.
 *
 * Non-recursive implementation, up to 20% faster in tests than recursive implementation. :)
 *
 * @author gubatron
 * @param folder
 * @param extensions If you only need certain files filtered by their extensions, use this string array
 *                   (without the "."), or set to null if you want all files. e.g. ["txt","jpg"] if you
 *                   only want text files and jpegs.
 *
 * @return The set of files.
 */
public static Collection<File> getAllFolderFiles(File folder, String[] extensions) {
    Set<File> results = new HashSet<File>();
    Stack<File> subFolders = new Stack<File>();
    File currentFolder = folder;
    while (currentFolder != null && currentFolder.isDirectory() && currentFolder.canRead()) {
        File[] fs = null;
        try {
            fs = currentFolder.listFiles();
        } catch (SecurityException e) {
        }

        if (fs != null && fs.length > 0) {
            for (File f : fs) {
                if (!f.isDirectory()) {
                    if (extensions == null || FilenameUtils.isExtension(f.getName(), extensions)) {
                        results.add(f);
                    }
                } else {
                    subFolders.push(f);
                }
            }
        }

        if (!subFolders.isEmpty()) {
            currentFolder = subFolders.pop();
        } else {
            currentFolder = null;
        }
    }

    return results;
}
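Here the explicit stack replaces the call stack of a recursive traversal: discovered subfolders are pushed as pending work, and the loop pops the next folder until both the current folder and the stack are exhausted. Note also that a SecurityException from listFiles is swallowed, so unreadable folders are silently skipped.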
From source file:org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.java
private static void getComb(int n, int k, Stack<Integer> stack, List<List<Integer>> res) {
    if (stack.size() == k) {
        List<Integer> list = new ArrayList<Integer>(stack);
        res.add(list);
    } else {
        int next = stack.empty() ? 0 : stack.peek() + 1;
        while (next < n) {
            stack.push(next);
            getComb(n, k, stack, res);
            next++;
        }
    }
    if (!stack.empty()) {
        stack.pop();
    }
}
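A hedged usage sketch (the CombDemo driver below is illustrative, not part of the test class): calling getComb with an empty stack enumerates all k-element combinations of {0, ..., n-1} in lexicographic order. The shared stack carries the current partial combination, and each recursive call pops the element its caller pushed before returning, so the stack stays balanced.

import java.util.ArrayList;
import java.util.List;
import java.util.Stack;

public class CombDemo {
    public static void main(String[] args) {
        List<List<Integer>> res = new ArrayList<List<Integer>>();
        getComb(4, 2, new Stack<Integer>(), res);
        System.out.println(res); // [[0, 1], [0, 2], [0, 3], [1, 2], [1, 3], [2, 3]]
    }

    // Copied (lightly condensed) from the example above
    private static void getComb(int n, int k, Stack<Integer> stack, List<List<Integer>> res) {
        if (stack.size() == k) {
            res.add(new ArrayList<Integer>(stack)); // snapshot of the full combination
        } else {
            int next = stack.empty() ? 0 : stack.peek() + 1;
            while (next < n) {
                stack.push(next);
                getComb(n, k, stack, res);
                next++;
            }
        }
        if (!stack.empty()) {
            stack.pop(); // undo the push made by the caller
        }
    }
}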