List of usage examples for java.util Stack pop
public synchronized E pop()
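Before the real-world examples, a minimal sketch of the method's contract may help: pop() removes and returns the top element in LIFO order, and throws EmptyStackException when the stack is empty. All names below are illustrative.

import java.util.EmptyStackException;
import java.util.Stack;

class PopBasics {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<>();
        stack.push("first");
        stack.push("second");

        System.out.println(stack.pop());   // "second": last pushed, first popped
        System.out.println(stack.pop());   // "first"

        try {
            stack.pop();                   // popping an empty stack
        } catch (EmptyStackException e) {
            System.out.println("stack is empty");
        }
    }
}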
From source file:com.scaleunlimited.cascading.DistCp.java
/** Delete the dst files/dirs which do not exist in src */
static private void deleteNonexisting(FileSystem dstfs, FileStatus dstroot, Path dstsorted, FileSystem jobfs,
        Path jobdir, JobConf jobconf, Configuration conf) throws IOException {
    if (!dstroot.isDir()) {
        throw new IOException("dst must be a directory when option " + Options.DELETE.cmd
                + " is set, but dst (= " + dstroot.getPath() + ") is not a directory.");
    }

    // write dst lsr results
    final Path dstlsr = new Path(jobdir, "_distcp_dst_lsr");
    final SequenceFile.Writer writer = SequenceFile.createWriter(jobfs, jobconf, dstlsr, Text.class,
            FileStatus.class, SequenceFile.CompressionType.NONE);
    try {
        // do lsr to get all file statuses in dstroot
        final Stack<FileStatus> lsrstack = new Stack<FileStatus>();
        for (lsrstack.push(dstroot); !lsrstack.isEmpty();) {
            final FileStatus status = lsrstack.pop();
            if (status.isDir()) {
                for (FileStatus child : dstfs.listStatus(status.getPath())) {
                    String relative = makeRelative(dstroot.getPath(), child.getPath());
                    writer.append(new Text(relative), child);
                    lsrstack.push(child);
                }
            }
        }
    } finally {
        checkAndClose(writer);
    }

    // sort lsr results
    final Path sortedlsr = new Path(jobdir, "_distcp_dst_lsr_sorted");
    SequenceFile.Sorter sorter = new SequenceFile.Sorter(jobfs, new Text.Comparator(), Text.class,
            FileStatus.class, jobconf);
    sorter.sort(dstlsr, sortedlsr);

    // compare lsr list and dst list
    SequenceFile.Reader lsrin = null;
    SequenceFile.Reader dstin = null;
    try {
        lsrin = new SequenceFile.Reader(jobfs, sortedlsr, jobconf);
        dstin = new SequenceFile.Reader(jobfs, dstsorted, jobconf);

        // compare sorted lsr list and sorted dst list
        final Text lsrpath = new Text();
        final FileStatus lsrstatus = new FileStatus();
        final Text dstpath = new Text();
        final Text dstfrom = new Text();
        final FsShell shell = new FsShell(conf);
        final String[] shellargs = { "-rmr", null };
        boolean hasnext = dstin.next(dstpath, dstfrom);
        for (; lsrin.next(lsrpath, lsrstatus);) {
            int dst_cmp_lsr = dstpath.compareTo(lsrpath);
            for (; hasnext && dst_cmp_lsr < 0;) {
                hasnext = dstin.next(dstpath, dstfrom);
                dst_cmp_lsr = dstpath.compareTo(lsrpath);
            }

            if (dst_cmp_lsr == 0) {
                // lsrpath exists in dst, skip it
                hasnext = dstin.next(dstpath, dstfrom);
            } else {
                // lsrpath does not exist, delete it
                String s = new Path(dstroot.getPath(), lsrpath.toString()).toString();
                if (shellargs[1] == null || !isAncestorPath(shellargs[1], s)) {
                    shellargs[1] = s;
                    int r = 0;
                    try {
                        r = shell.run(shellargs);
                    } catch (Exception e) {
                        throw new IOException("Exception from shell.", e);
                    }
                    if (r != 0) {
                        throw new IOException("\"" + shellargs[0] + " " + shellargs[1]
                                + "\" returns non-zero value " + r);
                    }
                }
            }
        }
    } finally {
        checkAndClose(lsrin);
        checkAndClose(dstin);
    }
}
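The for (lsrstack.push(dstroot); !lsrstack.isEmpty();) header above folds the initial push into the loop initializer. A minimal sketch of the same iterative depth-first traversal in its more conventional while form; the Node type and children() method are hypothetical placeholders, not part of the DistCp code. The same pattern reappears in the ReadOnlyFileSystemStorage and TestCopyCommitter examples below.

import java.util.List;
import java.util.Stack;

class DepthFirstSketch {
    interface Node {            // hypothetical tree node
        List<Node> children();
    }

    static void visitAll(Node root) {
        Stack<Node> stack = new Stack<>();
        stack.push(root);                    // seed the traversal
        while (!stack.isEmpty()) {
            Node current = stack.pop();      // LIFO: most recently discovered first
            // ... process current here ...
            for (Node child : current.children()) {
                stack.push(child);           // defer children for later visits
            }
        }
    }
}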
From source file:org.apache.sling.testing.tools.sling.SlingClient.java
/** Create path and all its parent folders, using MKCOL */
public void mkdirs(String path) throws IOException {
    // Call mkdir on all parent paths, starting at the topmost one
    final Stack<String> parents = new Stack<String>();
    path = getParentPath(path);
    while (path.length() > 0 && !exists(path)) {
        parents.push(path);
        path = getParentPath(path);
    }
    while (!parents.isEmpty()) {
        mkdir(parents.pop());
    }
}
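The stack here reverses the order in which missing parents are discovered: paths are pushed child-first while walking up the tree, then popped root-first so every mkdir call already has an existing parent. A minimal standalone sketch of that reversal, assuming a simulated store; getParent and the Set are illustrative stand-ins for getParentPath/exists/mkdir.

import java.util.HashSet;
import java.util.Set;
import java.util.Stack;

class TopDownCreateSketch {
    /** Ensures path and all its parents exist in the (simulated) store. */
    static void createWithParents(String path, Set<String> existing) {
        Stack<String> missing = new Stack<>();
        String p = getParent(path);
        while (!p.isEmpty() && !existing.contains(p)) {
            missing.push(p);                 // nearest parent pushed first, topmost ancestor last
            p = getParent(p);
        }
        while (!missing.isEmpty()) {
            existing.add(missing.pop());     // pop topmost ancestor first: parents before children
        }
        existing.add(path);
    }

    static String getParent(String path) {
        int idx = path.lastIndexOf('/');
        return idx <= 0 ? "" : path.substring(0, idx);
    }

    public static void main(String[] args) {
        Set<String> store = new HashSet<>();
        createWithParents("/a/b/c", store);
        System.out.println(store);           // contains /a, /a/b, /a/b/c
    }
}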
From source file:com.espertech.esper.epl.spec.PatternStreamSpecRaw.java
private static void recursiveCompile(EvalFactoryNode evalNode, StatementContext context,
        ExprEvaluatorContext evaluatorContext, Set<String> eventTypeReferences, boolean isInsertInto,
        MatchEventSpec tags, Deque<Integer> subexpressionIdStack, Stack<EvalFactoryNode> parentNodeStack,
        LinkedHashSet<String> allTagNamesOrdered) throws ExprValidationException {
    int counter = 0;
    parentNodeStack.push(evalNode);
    for (EvalFactoryNode child : evalNode.getChildNodes()) {
        subexpressionIdStack.addLast(counter++);
        recursiveCompile(child, context, evaluatorContext, eventTypeReferences, isInsertInto, tags,
                subexpressionIdStack, parentNodeStack, allTagNamesOrdered);
        subexpressionIdStack.removeLast();
    }
    parentNodeStack.pop();

    LinkedHashMap<String, Pair<EventType, String>> newTaggedEventTypes = null;
    LinkedHashMap<String, Pair<EventType, String>> newArrayEventTypes = null;

    if (evalNode instanceof EvalFilterFactoryNode) {
        EvalFilterFactoryNode filterNode = (EvalFilterFactoryNode) evalNode;
        String eventName = filterNode.getRawFilterSpec().getEventTypeName();
        EventType resolvedEventType = FilterStreamSpecRaw.resolveType(context.getEngineURI(), eventName,
                context.getEventAdapterService(), context.getPlugInTypeResolutionURIs());
        EventType finalEventType = resolvedEventType;
        String optionalTag = filterNode.getEventAsName();
        boolean isPropertyEvaluation = false;
        boolean isParentMatchUntil = isParentMatchUntil(evalNode, parentNodeStack);

        // obtain property event type, if final event type is properties
        if (filterNode.getRawFilterSpec().getOptionalPropertyEvalSpec() != null) {
            PropertyEvaluator optionalPropertyEvaluator = PropertyEvaluatorFactory.makeEvaluator(
                    filterNode.getRawFilterSpec().getOptionalPropertyEvalSpec(), resolvedEventType,
                    filterNode.getEventAsName(), context.getEventAdapterService(),
                    context.getMethodResolutionService(), context.getSchedulingService(),
                    context.getVariableService(), context.getEngineURI(), context.getStatementId(),
                    context.getStatementName(), context.getAnnotations(), subexpressionIdStack,
                    context.getConfigSnapshot());
            finalEventType = optionalPropertyEvaluator.getFragmentEventType();
            isPropertyEvaluation = true;
        }

        if (finalEventType instanceof EventTypeSPI) {
            eventTypeReferences.add(((EventTypeSPI) finalEventType).getMetadata().getPrimaryName());
        }

        // If a tag was supplied for the type, the tags must stay with this type, i.e.
        // a=BeanA -> b=BeanA -> a=BeanB is a no
        if (optionalTag != null) {
            Pair<EventType, String> pair = tags.getTaggedEventTypes().get(optionalTag);
            EventType existingType = null;
            if (pair != null) {
                existingType = pair.getFirst();
            }
            if (existingType == null) {
                pair = tags.getArrayEventTypes().get(optionalTag);
                if (pair != null) {
                    throw new ExprValidationException("Tag '" + optionalTag + "' for event '" + eventName
                            + "' used in the repeat-until operator cannot also appear in other filter expressions");
                }
            }
            if ((existingType != null) && (existingType != finalEventType)) {
                throw new ExprValidationException("Tag '" + optionalTag + "' for event '" + eventName
                        + "' has already been declared for events of type "
                        + existingType.getUnderlyingType().getName());
            }
            pair = new Pair<EventType, String>(finalEventType, eventName);

            // add tagged type
            if (isPropertyEvaluation || isParentMatchUntil) {
                newArrayEventTypes = new LinkedHashMap<String, Pair<EventType, String>>();
                newArrayEventTypes.put(optionalTag, pair);
            } else {
                newTaggedEventTypes = new LinkedHashMap<String, Pair<EventType, String>>();
                newTaggedEventTypes.put(optionalTag, pair);
            }
        }

        // For this filter, filter types are all known tags at this time,
        // and additionally stream 0 (self) is our event type.
        // Stream type service allows resolution by property name even if that name appears in other tags,
        // by defaulting to stream zero.
        // Stream zero is always the current event type, all others follow the order of the map (stream 1 to N).
        String selfStreamName = optionalTag;
        if (selfStreamName == null) {
            selfStreamName = "s_" + UuidGenerator.generate();
        }
        LinkedHashMap<String, Pair<EventType, String>> filterTypes = new LinkedHashMap<String, Pair<EventType, String>>();
        Pair<EventType, String> typePair = new Pair<EventType, String>(finalEventType, eventName);
        filterTypes.put(selfStreamName, typePair);
        filterTypes.putAll(tags.getTaggedEventTypes());

        // for the filter, specify all tags used
        LinkedHashMap<String, Pair<EventType, String>> filterTaggedEventTypes = new LinkedHashMap<String, Pair<EventType, String>>(
                tags.getTaggedEventTypes());
        filterTaggedEventTypes.remove(optionalTag);

        // handle array tags (match-until clause)
        LinkedHashMap<String, Pair<EventType, String>> arrayCompositeEventTypes = null;
        if (tags.getArrayEventTypes() != null && !tags.getArrayEventTypes().isEmpty()) {
            arrayCompositeEventTypes = new LinkedHashMap<String, Pair<EventType, String>>();
            String patternSubexEventType = getPatternSubexEventType(context.getStatementId(), "pattern",
                    subexpressionIdStack);
            for (Map.Entry<String, Pair<EventType, String>> entry : tags.getArrayEventTypes().entrySet()) {
                LinkedHashMap<String, Pair<EventType, String>> specificArrayType = new LinkedHashMap<String, Pair<EventType, String>>();
                specificArrayType.put(entry.getKey(), entry.getValue());
                EventType arrayTagCompositeEventType = context.getEventAdapterService()
                        .createSemiAnonymousMapType(patternSubexEventType,
                                Collections.<String, Pair<EventType, String>>emptyMap(), specificArrayType,
                                isInsertInto);
                String tag = entry.getKey();
                if (!filterTypes.containsKey(tag)) {
                    Pair<EventType, String> pair = new Pair<EventType, String>(arrayTagCompositeEventType, tag);
                    filterTypes.put(tag, pair);
                    arrayCompositeEventTypes.put(tag, pair);
                }
            }
        }

        StreamTypeService streamTypeService = new StreamTypeServiceImpl(filterTypes, context.getEngineURI(),
                true, false);
        List<ExprNode> exprNodes = filterNode.getRawFilterSpec().getFilterExpressions();
        FilterSpecCompiled spec = FilterSpecCompiler.makeFilterSpec(resolvedEventType, eventName, exprNodes,
                filterNode.getRawFilterSpec().getOptionalPropertyEvalSpec(), filterTaggedEventTypes,
                arrayCompositeEventTypes, streamTypeService, null, context, subexpressionIdStack);
        filterNode.setFilterSpec(spec);
    } else if (evalNode instanceof EvalObserverFactoryNode) {
        EvalObserverFactoryNode observerNode = (EvalObserverFactoryNode) evalNode;
        try {
            ObserverFactory observerFactory = context.getPatternResolutionService()
                    .create(observerNode.getPatternObserverSpec());
            StreamTypeService streamTypeService = getStreamTypeService(context.getEngineURI(),
                    context.getStatementId(), context.getEventAdapterService(), tags.getTaggedEventTypes(),
                    tags.getArrayEventTypes(), subexpressionIdStack, "observer");
            ExprValidationContext validationContext = new ExprValidationContext(streamTypeService,
                    context.getMethodResolutionService(), null, context.getSchedulingService(),
                    context.getVariableService(), evaluatorContext, context.getEventAdapterService(),
                    context.getStatementName(), context.getStatementId(), context.getAnnotations(),
                    context.getContextDescriptor());
            List<ExprNode> validated = validateExpressions(
                    observerNode.getPatternObserverSpec().getObjectParameters(), validationContext);
            MatchedEventConvertor convertor = new MatchedEventConvertorImpl(tags.getTaggedEventTypes(),
                    tags.getArrayEventTypes(), allTagNamesOrdered, context.getEventAdapterService());
            observerNode.setObserverFactory(observerFactory);
            observerFactory.setObserverParameters(validated, convertor);
        } catch (ObserverParameterException e) {
            throw new ExprValidationException("Invalid parameter for pattern observer: " + e.getMessage(), e);
        } catch (PatternObjectException e) {
            throw new ExprValidationException("Failed to resolve pattern observer: " + e.getMessage(), e);
        }
    } else if (evalNode instanceof EvalGuardFactoryNode) {
        EvalGuardFactoryNode guardNode = (EvalGuardFactoryNode) evalNode;
        try {
            GuardFactory guardFactory = context.getPatternResolutionService()
                    .create(guardNode.getPatternGuardSpec());
            StreamTypeService streamTypeService = getStreamTypeService(context.getEngineURI(),
                    context.getStatementId(), context.getEventAdapterService(), tags.getTaggedEventTypes(),
                    tags.getArrayEventTypes(), subexpressionIdStack, "guard");
            ExprValidationContext validationContext = new ExprValidationContext(streamTypeService,
                    context.getMethodResolutionService(), null, context.getSchedulingService(),
                    context.getVariableService(), evaluatorContext, context.getEventAdapterService(),
                    context.getStatementName(), context.getStatementId(), context.getAnnotations(),
                    context.getContextDescriptor());
            List<ExprNode> validated = validateExpressions(
                    guardNode.getPatternGuardSpec().getObjectParameters(), validationContext);
            MatchedEventConvertor convertor = new MatchedEventConvertorImpl(tags.getTaggedEventTypes(),
                    tags.getArrayEventTypes(), allTagNamesOrdered, context.getEventAdapterService());
            guardNode.setGuardFactory(guardFactory);
            guardFactory.setGuardParameters(validated, convertor);
        } catch (GuardParameterException e) {
            throw new ExprValidationException("Invalid parameter for pattern guard: " + e.getMessage(), e);
        } catch (PatternObjectException e) {
            throw new ExprValidationException("Failed to resolve pattern guard: " + e.getMessage(), e);
        }
    } else if (evalNode instanceof EvalEveryDistinctFactoryNode) {
        EvalEveryDistinctFactoryNode distinctNode = (EvalEveryDistinctFactoryNode) evalNode;
        MatchEventSpec matchEventFromChildNodes = analyzeMatchEvent(distinctNode);
        StreamTypeService streamTypeService = getStreamTypeService(context.getEngineURI(),
                context.getStatementId(), context.getEventAdapterService(),
                matchEventFromChildNodes.getTaggedEventTypes(), matchEventFromChildNodes.getArrayEventTypes(),
                subexpressionIdStack, "every-distinct");
        ExprValidationContext validationContext = new ExprValidationContext(streamTypeService,
                context.getMethodResolutionService(), null, context.getSchedulingService(),
                context.getVariableService(), evaluatorContext, context.getEventAdapterService(),
                context.getStatementName(), context.getStatementId(), context.getAnnotations(),
                context.getContextDescriptor());
        List<ExprNode> validated;
        try {
            validated = validateExpressions(distinctNode.getExpressions(), validationContext);
        } catch (ExprValidationPropertyException ex) {
            throw new ExprValidationPropertyException(ex.getMessage()
                    + ", every-distinct requires that all properties resolve from sub-expressions to the every-distinct",
                    ex.getCause());
        }
        MatchedEventConvertor convertor = new MatchedEventConvertorImpl(
                matchEventFromChildNodes.getTaggedEventTypes(), matchEventFromChildNodes.getArrayEventTypes(),
                allTagNamesOrdered, context.getEventAdapterService());
        distinctNode.setConvertor(convertor);

        // Determine whether some expressions are constants or time period
        List<ExprNode> distinctExpressions = new ArrayList<ExprNode>();
        Long msecToExpire = null;
        for (ExprNode expr : validated) {
            if (expr instanceof ExprTimePeriod) {
                Double secondsExpire = (Double) ((ExprTimePeriod) expr).evaluate(null, true, evaluatorContext);
                if ((secondsExpire != null) && (secondsExpire > 0)) {
                    msecToExpire = Math.round(1000d * secondsExpire);
                }
                log.debug("Setting every-distinct msec-to-expire to " + msecToExpire);
            } else if (expr.isConstantResult()) {
                log.warn("Every-distinct node utilizes an expression returning a constant value, please check expression '"
                        + expr.toExpressionString() + "', not adding expression to distinct-value expression list");
            } else {
                distinctExpressions.add(expr);
            }
        }
        if (distinctExpressions.isEmpty()) {
            throw new ExprValidationException(
                    "Every-distinct node requires one or more distinct-value expressions that each return non-constant result values");
        }
        distinctNode.setDistinctExpressions(distinctExpressions, msecToExpire);
    } else if (evalNode instanceof EvalMatchUntilFactoryNode) {
        EvalMatchUntilFactoryNode matchUntilNode = (EvalMatchUntilFactoryNode) evalNode;

        // compile bounds expressions, if any
        MatchEventSpec untilMatchEventSpec = new MatchEventSpec(tags.getTaggedEventTypes(),
                tags.getArrayEventTypes());
        StreamTypeService streamTypeService = getStreamTypeService(context.getEngineURI(),
                context.getStatementId(), context.getEventAdapterService(),
                untilMatchEventSpec.getTaggedEventTypes(), untilMatchEventSpec.getArrayEventTypes(),
                subexpressionIdStack, "until");
        ExprValidationContext validationContext = new ExprValidationContext(streamTypeService,
                context.getMethodResolutionService(), null, context.getSchedulingService(),
                context.getVariableService(), evaluatorContext, context.getEventAdapterService(),
                context.getStatementName(), context.getStatementId(), context.getAnnotations(),
                context.getContextDescriptor());

        String message = "Match-until bounds value expressions must return a numeric value";
        if (matchUntilNode.getLowerBounds() != null) {
            ExprNode validated = ExprNodeUtility.getValidatedSubtree(matchUntilNode.getLowerBounds(),
                    validationContext);
            matchUntilNode.setLowerBounds(validated);
            if ((validated.getExprEvaluator().getType() == null)
                    || (!JavaClassHelper.isNumeric(validated.getExprEvaluator().getType()))) {
                throw new ExprValidationException(message);
            }
        }
        if (matchUntilNode.getUpperBounds() != null) {
            ExprNode validated = ExprNodeUtility.getValidatedSubtree(matchUntilNode.getUpperBounds(),
                    validationContext);
            matchUntilNode.setUpperBounds(validated);
            if ((validated.getExprEvaluator().getType() == null)
                    || (!JavaClassHelper.isNumeric(validated.getExprEvaluator().getType()))) {
                throw new ExprValidationException(message);
            }
        }

        MatchedEventConvertor convertor = new MatchedEventConvertorImpl(
                untilMatchEventSpec.getTaggedEventTypes(), untilMatchEventSpec.getArrayEventTypes(),
                allTagNamesOrdered, context.getEventAdapterService());
        matchUntilNode.setConvertor(convertor);

        // compile new tag lists
        Set<String> arrayTags = null;
        EvalNodeAnalysisResult matchUntilAnalysisResult = EvalNodeUtil
                .recursiveAnalyzeChildNodes(matchUntilNode.getChildNodes().get(0));
        for (EvalFilterFactoryNode filterNode : matchUntilAnalysisResult.getFilterNodes()) {
            String optionalTag = filterNode.getEventAsName();
            if (optionalTag != null) {
                if (arrayTags == null) {
                    arrayTags = new HashSet<String>();
                }
                arrayTags.add(optionalTag);
            }
        }

        if (arrayTags != null) {
            for (String arrayTag : arrayTags) {
                if (!tags.getArrayEventTypes().containsKey(arrayTag)) {
                    tags.getArrayEventTypes().put(arrayTag, tags.getTaggedEventTypes().get(arrayTag));
                    tags.getTaggedEventTypes().remove(arrayTag);
                }
            }
        }
        matchUntilNode.setTagsArrayedSet(getIndexesForTags(allTagNamesOrdered, arrayTags));
    } else if (evalNode instanceof EvalFollowedByFactoryNode) {
        EvalFollowedByFactoryNode followedByNode = (EvalFollowedByFactoryNode) evalNode;
        StreamTypeService streamTypeService = new StreamTypeServiceImpl(context.getEngineURI(), false);
        ExprValidationContext validationContext = new ExprValidationContext(streamTypeService,
                context.getMethodResolutionService(), null, context.getSchedulingService(),
                context.getVariableService(), evaluatorContext, context.getEventAdapterService(),
                context.getStatementName(), context.getStatementId(), context.getAnnotations(),
                context.getContextDescriptor());
        if (followedByNode.getOptionalMaxExpressions() != null) {
            List<ExprNode> validated = new ArrayList<ExprNode>();
            for (ExprNode maxExpr : followedByNode.getOptionalMaxExpressions()) {
                if (maxExpr == null) {
                    validated.add(null);
                } else {
                    ExprNodeSummaryVisitor visitor = new ExprNodeSummaryVisitor();
                    maxExpr.accept(visitor);
                    if (!visitor.isPlain()) {
                        String errorMessage = "Invalid maximum expression in followed-by, "
                                + visitor.getMessage() + " are not allowed within the expression";
                        log.error(errorMessage);
                        throw new ExprValidationException(errorMessage);
                    }
                    ExprNode validatedExpr = ExprNodeUtility.getValidatedSubtree(maxExpr, validationContext);
                    validated.add(validatedExpr);
                    if ((validatedExpr.getExprEvaluator().getType() == null)
                            || (!JavaClassHelper.isNumeric(validatedExpr.getExprEvaluator().getType()))) {
                        String message = "Invalid maximum expression in followed-by, the expression must return an integer value";
                        throw new ExprValidationException(message);
                    }
                }
            }
            followedByNode.setOptionalMaxExpressions(validated);
        }
    }

    if (newTaggedEventTypes != null) {
        tags.getTaggedEventTypes().putAll(newTaggedEventTypes);
    }
    if (newArrayEventTypes != null) {
        tags.getArrayEventTypes().putAll(newArrayEventTypes);
    }
}
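The parentNodeStack in this example is pushed and popped around the recursive descent, so while a subtree is being compiled the stack holds exactly the chain of ancestors of the current node; that is what lets isParentMatchUntil inspect the context. A minimal self-contained sketch of that idiom with an illustrative Node type (none of these names come from the Esper code):

import java.util.List;
import java.util.Stack;

class AncestorStackSketch {
    static class Node {
        final String name;
        final List<Node> children;
        Node(String name, List<Node> children) { this.name = name; this.children = children; }
    }

    /** Visits the tree; ancestors holds the path from the root down to node during each visit. */
    static void visit(Node node, Stack<Node> ancestors) {
        boolean underRoot = ancestors.stream().anyMatch(n -> n.name.equals("root"));
        System.out.println(node.name + " (under root: " + underRoot + ")");
        ancestors.push(node);                 // node becomes an ancestor for its children
        for (Node child : node.children) {
            visit(child, ancestors);
        }
        ancestors.pop();                      // restore the chain on the way back up
    }

    public static void main(String[] args) {
        Node leaf = new Node("leaf", List.of());
        Node root = new Node("root", List.of(new Node("mid", List.of(leaf))));
        visit(root, new Stack<>());
    }
}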
From source file:org.abstracthorizon.proximity.storage.local.ReadOnlyFileSystemStorage.java
public void recreateMetadata(Map extraProps) throws StorageException {
    // issue #44: do not delete existing metadata; instead, force the properties
    // factory to recreate it, eventually appending the extra properties.
    int processed = 0;
    Stack stack = new Stack();
    List dir = listItems(ItemProperties.PATH_ROOT);
    stack.push(dir);
    while (!stack.isEmpty()) {
        dir = (List) stack.pop();
        for (Iterator i = dir.iterator(); i.hasNext();) {
            ItemProperties ip = (ItemProperties) i.next();
            if (ip.isDirectory()) {
                List subdir = listItems(ip.getPath());
                stack.push(subdir);
            } else {
                logger.debug("**** {}", ip.getPath());
                File target = new File(getStorageBaseDir(), ip.getPath());
                ItemProperties nip = getProxiedItemPropertiesFactory().expandItemProperties(ip.getPath(),
                        target, false);
                if (extraProps != null) {
                    nip.getAllMetadata().putAll(extraProps);
                }
                storeItemProperties(nip);
                processed++;
            }
        }
    }
    logger.info("Recreated metadata on {} items.", Integer.toString(processed));
}
From source file:ch.zhaw.icclab.tnova.expressionsolver.OTFlyEval.java
String partialEvaluation(Stack<String> sTable, Stack<Double> param, double threshold)
        throws EmptyStackException {
    // pop elements till we hit an operator
    String token;
    String result = "";
    LinkedList<Double> paramList = new LinkedList<Double>();
    while (!(token = sTable.pop()).startsWith("fn")) {
        String fnParam = token.split(" ")[1];
        if (fnParam.startsWith("vnf")) {
            paramList.add(param.pop());
        } else {
            Double tempVal = new Double(fnParam);
            paramList.add(tempVal);
        }
        logger.info("Popped a parameter: " + fnParam);
    }
    String operation = token.split(" ")[1];
    logger.info("Function to apply on the popped parameters: " + operation);
    if (operation.equalsIgnoreCase("min")) {
        result = findMin(paramList);
    } else if (operation.equalsIgnoreCase("add")) {
        result = findSum(paramList);
    } else if (operation.equalsIgnoreCase("gt")) {
        result = evalGT(paramList, threshold);
    } else if (operation.equalsIgnoreCase("lt")) {
        result = evalLT(paramList, threshold);
    } else if (operation.equalsIgnoreCase("avg")) {
        result = findAvg(paramList);
    } else if (operation.equalsIgnoreCase("max")) {
        result = findMax(paramList);
    }
    return result;
}
From source file:com.clustercontrol.performance.operator.RevercePorlishNotation.java
/** Evaluate the expression in reverse Polish notation. */
@Override
public double calc(DataTable currentTable, DataTable previousTable, String deviceName)
        throws CollectedDataNotFoundException, InvalidValueException {
    double right = 0D;
    double left = 0D;
    double result = 0D;
    Stack<Double> _stack = new Stack<Double>();
    if (this.expArray.length == 1) {
        result = getVal(expArray[0], currentTable, previousTable, deviceName);
    } else {
        for (int i = 0; i < this.expArray.length; i++) {
            try {
                if (expArray[i] instanceof OPERATOR) {
                    right = _stack.pop();
                    left = _stack.pop();
                    switch ((OPERATOR) expArray[i]) {
                    case ADDITION:
                        result = left + right;
                        break;
                    case SUBTRACTION:
                        result = left - right;
                        break;
                    case MULTIPLICATION:
                        result = left * right;
                        break;
                    case DIVISION:
                        if (right == 0) {
                            log.warn("0-divided, expression=" + expression);
                            // on division by zero, return NaN instead of failing
                            return Double.NaN;
                        }
                        result = left / right;
                        break;
                    }
                    _stack.push(new Double(result));
                } else {
                    _stack.push(getVal(expArray[i], currentTable, previousTable, deviceName));
                }
            } catch (CollectedDataNotFoundException | IllegalStateException | EmptyStackException e) {
                log.warn("calc [" + expression + "], " + e.getClass().getName() + ", " + e.getMessage());
                throw new InvalidValueException(e.getMessage());
            } catch (Exception e) {
                log.warn("calc [" + expression + "], " + e.getClass().getName() + ", " + e.getMessage(), e);
                throw new InvalidValueException(e.getMessage());
            }
        }
        if (_stack.size() > 1) {
            String messages = "expression is invalid, expression-" + expression;
            log.warn("calc : " + messages);
            throw new InvalidValueException(messages);
        }
    }
    return result;
}
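The example above is the classic stack-based evaluation of reverse Polish notation: operands are pushed, and each operator pops its two arguments and pushes the result. A self-contained minimal sketch of the same technique on a plain token array, including the same NaN-on-division-by-zero convention; all names here are illustrative, not from the Hinemos code.

import java.util.Stack;

class RpnSketch {
    /** Evaluates tokens like {"3", "4", "2", "*", "+"} -> 11.0 */
    static double eval(String[] tokens) {
        Stack<Double> stack = new Stack<>();
        for (String t : tokens) {
            switch (t) {
            case "+": case "-": case "*": case "/": {
                double right = stack.pop();   // pop order matters for - and /
                double left = stack.pop();
                switch (t) {
                case "+": stack.push(left + right); break;
                case "-": stack.push(left - right); break;
                case "*": stack.push(left * right); break;
                default:  stack.push(right == 0 ? Double.NaN : left / right); break;
                }
                break;
            }
            default:
                stack.push(Double.parseDouble(t));   // operand
            }
        }
        if (stack.size() != 1) {
            throw new IllegalArgumentException("malformed expression");
        }
        return stack.pop();
    }

    public static void main(String[] args) {
        System.out.println(eval(new String[] { "3", "4", "2", "*", "+" })); // 11.0
    }
}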
From source file:org.apache.hadoop.tools.mapred.TestCopyCommitter.java
private boolean checkDirectoryPermissions(FileSystem fs, String targetBase, FsPermission sourcePerm)
        throws IOException {
    Path base = new Path(targetBase);

    Stack<Path> stack = new Stack<Path>();
    stack.push(base);
    while (!stack.isEmpty()) {
        Path file = stack.pop();
        if (!fs.exists(file))
            continue;
        FileStatus[] fStatus = fs.listStatus(file);
        if (fStatus == null || fStatus.length == 0)
            continue;

        for (FileStatus status : fStatus) {
            if (status.isDirectory()) {
                stack.push(status.getPath());
                Assert.assertEquals(status.getPermission(), sourcePerm);
            }
        }
    }
    return true;
}
From source file:com.galenframework.parser.IndentationStructureParser.java
private void processLine(Stack<IndentationNode> stack, String line, int lineNumber, String source) {
    StructNode newStructNode = new StructNode(line.trim());
    newStructNode.setFileLineNumber(lineNumber);
    newStructNode.setSource(source);

    int calculatedIndentation = calculateIndentation(line, lineNumber);

    while (calculatedIndentation <= stack.peek().indentation && stack.peek().parent != null) {
        stack.pop();
    }

    StructNode parent = stack.peek().structNode;

    if (parent.getChildNodes() != null && parent.getChildNodes().size() > 0
            && calculatedIndentation != stack.peek().childIndentation) {
        throw new SyntaxException(new Line(line, lineNumber), "Inconsistent indentation");
    }

    parent.addChildNode(newStructNode);
    stack.peek().childIndentation = calculatedIndentation;
    stack.push(new IndentationNode(calculatedIndentation, newStructNode, parent));
}
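Popping until the top of the stack is a shallower indentation level is the standard way to find the enclosing parent in an indentation-structured format; the Galen version additionally checks that siblings use a consistent indentation. A minimal sketch of just that core step, independent of the Galen types (the Level class and depth values are illustrative):

import java.util.Stack;

class IndentSketch {
    static class Level {
        final int depth;
        final String name;
        Level(int depth, String name) { this.depth = depth; this.name = name; }
    }

    /** Returns the nearest open ancestor whose indentation is shallower than depth. */
    static Level findParent(Stack<Level> open, int depth) {
        // discard siblings and deeper levels; the root (depth -1) is never popped
        while (open.peek().depth >= depth && open.size() > 1) {
            open.pop();
        }
        return open.peek();
    }

    public static void main(String[] args) {
        Stack<Level> open = new Stack<>();
        open.push(new Level(-1, "root"));
        open.push(new Level(0, "section"));
        open.push(new Level(4, "item"));
        System.out.println(findParent(open, 4).name); // "section": equal depth pops "item"
    }
}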
From source file:org.apache.storm.daemon.logviewer.utils.DirectoryCleaner.java
/**
 * If the total size of files exceeds either the per-worker quota or the global quota,
 * Logviewer deletes the oldest inactive log files in a worker directory or in all worker dirs.
 * We use the parameter forPerDir to switch between the two deletion modes.
 *
 * @param dirs the list of directories to be scanned for deletion
 * @param quota the per-dir quota or the total quota for all directories
 * @param forPerDir if true, deletion happens for a single dir; otherwise, for all directories globally
 * @param activeDirs only for global deletion, we want to skip the active logs in activeDirs
 * @return number of files deleted
 */
public DeletionMeta deleteOldestWhileTooLarge(List<Path> dirs, long quota, boolean forPerDir,
        Set<Path> activeDirs) throws IOException {
    long totalSize = 0;
    for (Path dir : dirs) {
        try (DirectoryStream<Path> stream = getStreamForDirectory(dir)) {
            for (Path path : stream) {
                totalSize += Files.size(path);
            }
        }
    }
    LOG.debug("totalSize: {} quota: {}", totalSize, quota);
    long toDeleteSize = totalSize - quota;
    if (toDeleteSize <= 0) {
        return DeletionMeta.EMPTY;
    }

    int deletedFiles = 0;
    long deletedSize = 0;
    // the oldest pq_size files in this directory will be placed in PQ, with the newest at the root
    PriorityQueue<Pair<Path, FileTime>> pq = new PriorityQueue<>(PQ_SIZE,
            Comparator.comparing((Pair<Path, FileTime> p) -> p.getRight()).reversed());
    int round = 0;
    final Set<Path> excluded = new HashSet<>();
    while (toDeleteSize > 0) {
        LOG.debug("To delete size is {}, start a new round of deletion, round: {}", toDeleteSize, round);
        for (Path dir : dirs) {
            try (DirectoryStream<Path> stream = getStreamForDirectory(dir)) {
                for (Path path : stream) {
                    if (!excluded.contains(path)) {
                        if (isFileEligibleToSkipDelete(forPerDir, activeDirs, dir, path)) {
                            excluded.add(path);
                        } else {
                            Pair<Path, FileTime> p = Pair.of(path, Files.getLastModifiedTime(path));
                            if (pq.size() < PQ_SIZE) {
                                pq.offer(p);
                            } else if (p.getRight().toMillis() < pq.peek().getRight().toMillis()) {
                                pq.poll();
                                pq.offer(p);
                            }
                        }
                    }
                }
            }
        }
        if (!pq.isEmpty()) {
            // need to reverse the order of elements in PQ to delete files from oldest to newest
            Stack<Pair<Path, FileTime>> stack = new Stack<>();
            while (!pq.isEmpty()) {
                stack.push(pq.poll());
            }
            while (!stack.isEmpty() && toDeleteSize > 0) {
                Pair<Path, FileTime> pair = stack.pop();
                Path file = pair.getLeft();
                final String canonicalPath = file.toAbsolutePath().normalize().toString();
                final long fileSize = Files.size(file);
                final long lastModified = pair.getRight().toMillis();
                // Original implementation doesn't actually check if delete succeeded or not.
                try {
                    Utils.forceDelete(file.toString());
                    LOG.info("Delete file: {}, size: {}, lastModified: {}", canonicalPath, fileSize,
                            lastModified);
                    toDeleteSize -= fileSize;
                    deletedSize += fileSize;
                    deletedFiles++;
                } catch (IOException e) {
                    excluded.add(file);
                }
            }
            pq.clear();
            round++;
            if (round >= MAX_ROUNDS) {
                if (forPerDir) {
                    LOG.warn("Reach the MAX_ROUNDS: {} during per-dir deletion, you may have too many files in "
                            + "a single directory : {}, will delete the rest files in next interval.",
                            MAX_ROUNDS, dirs.get(0).toAbsolutePath().normalize());
                } else {
                    LOG.warn("Reach the MAX_ROUNDS: {} during global deletion, you may have too many files, "
                            + "will delete the rest files in next interval.", MAX_ROUNDS);
                }
                break;
            }
        } else {
            LOG.warn("No more files able to delete this round, but {} is over quota by {} MB",
                    forPerDir ? "this directory" : "root directory", toDeleteSize * 1e-6);
        }
    }
    return new DeletionMeta(deletedSize, deletedFiles);
}
From source file:edu.umn.cs.spatialHadoop.nasa.StockQuadTree.java
/**
 * Make a path relative to another path by removing all common ancestors
 * @param parent
 * @param descendant
 * @return
 */
private static Path makeRelative(Path parent, Path descendant) {
    Stack<String> components = new Stack<String>();
    while (descendant.depth() > parent.depth()) {
        components.push(descendant.getName());
        descendant = descendant.getParent();
    }
    if (!descendant.equals(parent))
        throw new RuntimeException("descendant not a child of parent");
    if (components.isEmpty())
        return new Path(".");
    Path relative = new Path(components.pop());
    while (!components.isEmpty())
        relative = new Path(relative, components.pop());
    return relative;
}
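As in several of the examples above, the stack is used purely to reverse the order of path components gathered while walking upward. Note that java.util.Stack is a legacy synchronized class; its own documentation recommends ArrayDeque for new code. A hypothetical sketch of the same reversal over plain slash-separated strings using ArrayDeque (this is illustrative string handling, not the Hadoop Path API):

import java.util.ArrayDeque;
import java.util.Deque;

class RelativePathSketch {
    /** Makes "/a/b/c/d" relative to "/a/b" -> "c/d". */
    static String makeRelative(String parent, String descendant) {
        Deque<String> components = new ArrayDeque<>();
        String current = descendant;
        while (current.length() > parent.length()) {
            int idx = current.lastIndexOf('/');
            components.push(current.substring(idx + 1));  // leaf-most name pushed first
            current = current.substring(0, idx);
        }
        if (!current.equals(parent)) {
            throw new IllegalArgumentException("descendant not a child of parent");
        }
        // the deque now iterates root-most component first
        return components.isEmpty() ? "." : String.join("/", components);
    }

    public static void main(String[] args) {
        System.out.println(makeRelative("/a/b", "/a/b/c/d")); // c/d
    }
}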