List of usage examples for java.util.Stack.pop()
public synchronized E pop()
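pop() removes the object at the top of the stack and returns it, throwing java.util.EmptyStackException when the stack is empty. Before the real-world examples below, a minimal self-contained sketch (class name and values are illustrative):

import java.util.Stack;

public class StackPopDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<>();
        stack.push("first");
        stack.push("second");
        // pop() removes and returns the top element; the last pushed comes off first
        System.out.println(stack.pop()); // prints "second"
        System.out.println(stack.pop()); // prints "first"
        // Popping an empty stack throws EmptyStackException, so guard with isEmpty()
        System.out.println(stack.isEmpty()); // prints "true"
    }
}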
From source file:com.colofabrix.mathparser.operators.IntegralOperator.java
@Override
public Operand executeOperation(Stack<Expression> operands) throws ExpressionException {
    if (operands.size() < this.getCurrentOperands()) {
        throw new ExpressionException("Wrong number of given parameters");
    }

    this.parser = new MathParser(this.getContext());

    // Interval start
    Apfloat lower = Operand.extractNumber(this.parser.executePostfix(operands.pop()));
    // Interval end
    Apfloat upper = Operand.extractNumber(this.parser.executePostfix(operands.pop()));
    // Expression to evaluate
    Expression expression = this.parser.minimise(operands.pop());
    // Integration variable
    Operand variable = (Operand) operands.pop();

    Expression2AMLAdapter function = new Expression2AMLAdapter(this.parser, expression, variable);
    BaseAbstractUnivariateIntegrator integrator = new SimpsonIntegrator();
    function.finalize();

    Apfloat result = null;
    int maxEval = 10000, eval = 10, exp = 5;

    // Start with a low number of evaluations and increase exponentially
    // up to the configured maximum
    while (result == null && eval <= maxEval) {
        try {
            result = new Apfloat(
                    integrator.integrate(eval, function, lower.doubleValue(), upper.doubleValue()));
        } catch (org.apache.commons.math3.exception.TooManyEvaluationsException e) {
            eval *= exp;
        }
    }

    return new Operand(result);
}
From source file:edu.umn.cs.spatialHadoop.nasa.StockQuadTree.java
/**
 * Perform a selection query that retrieves all points in the given range.
 * The range is specified in the two-dimensional array positions.
 * @param in
 * @param query_mbr
 * @return
 * @throws IOException
 */
public static Node aggregateQuery(FSDataInputStream in, Rectangle query_mbr) throws IOException {
    long treeStartPosition = in.getPos();
    Node result = new Node();
    int numOfSelectedRecords = 0;
    int resolution = in.readInt();
    short fillValue = in.readShort();
    int cardinality = in.readInt();
    final Vector<Integer> selectedNodesPos = new Vector<Integer>();
    final Vector<Integer> selectedStarts = new Vector<Integer>();
    final Vector<Integer> selectedEnds = new Vector<Integer>();
    StockQuadTree stockQuadTree = getOrCreateStockQuadTree(resolution);

    // Nodes to be searched. Contains node positions in the array of nodes
    Stack<Integer> nodes_2b_searched = new Stack<Integer>();
    nodes_2b_searched.add(0); // Root node (ID=1)
    Rectangle node_mbr = new Rectangle();

    while (!nodes_2b_searched.isEmpty()) {
        int node_pos = nodes_2b_searched.pop();
        stockQuadTree.getNodeMBR(node_pos, node_mbr);
        if (query_mbr.contains(node_mbr)) {
            // Add this node to the selection list and stop this branch
            selectedNodesPos.add(node_pos);
        } else if (query_mbr.intersects(node_mbr)) {
            int first_child_id = stockQuadTree.nodesID[node_pos] * 4 + 0;
            int first_child_pos = Arrays.binarySearch(stockQuadTree.nodesID, first_child_id);
            if (first_child_pos < 0) {
                // No children. Hit a leaf node
                // Scan and add matching points only
                java.awt.Point record_coords = new Point();
                for (int record_pos = stockQuadTree.nodesStartPosition[node_pos];
                        record_pos < stockQuadTree.nodesEndPosition[node_pos]; record_pos++) {
                    stockQuadTree.getRecordCoords(record_pos, record_coords);
                    if (query_mbr.contains(record_coords)) {
                        // Matched a record
                        if (!selectedEnds.isEmpty() && selectedEnds.lastElement() == record_pos) {
                            // Merge with an adjacent range
                            selectedEnds.set(selectedEnds.size() - 1, record_pos + 1);
                        } else {
                            // Add a new range of unit width
                            selectedStarts.add(record_pos);
                            selectedEnds.add(record_pos + 1);
                        }
                        numOfSelectedRecords++;
                    }
                }
            } else {
                // Non-leaf node. Add all children to the list of nodes to search.
                // Add in reverse order to the stack so that results come in sorted order
                nodes_2b_searched.add(first_child_pos + 3);
                nodes_2b_searched.add(first_child_pos + 2);
                nodes_2b_searched.add(first_child_pos + 1);
                nodes_2b_searched.add(first_child_pos + 0);
            }
        }
    }

    // Result 1: Accumulate all values
    // Sort disk offsets to eliminate backward seeks
    if (!selectedStarts.isEmpty()) {
        LOG.debug("Aggregate query selected " + selectedNodesPos.size() + " nodes and "
                + numOfSelectedRecords + " records");
        final IndexedSortable sortable = new IndexedSortable() {
            @Override
            public int compare(int i, int j) {
                return selectedStarts.get(i) - selectedStarts.get(j);
            }

            @Override
            public void swap(int i, int j) {
                int temp = selectedStarts.get(i);
                selectedStarts.set(i, selectedStarts.get(j));
                selectedStarts.set(j, temp);

                temp = selectedEnds.get(i);
                selectedEnds.set(i, selectedEnds.get(j));
                selectedEnds.set(j, temp);
            }
        };
        new QuickSort().sort(sortable, 0, selectedStarts.size());

        long dataStartPosition = getValuesStartOffset(cardinality);
        Point resultCoords = new Point();
        // Return all values in the selected ranges
        for (int iRange = 0; iRange < selectedStarts.size(); iRange++) {
            int treeStart = selectedStarts.get(iRange);
            int treeEnd = selectedEnds.get(iRange);
            long startPosition = dataStartPosition + selectedStarts.get(iRange) * cardinality * 2;
            in.seek(startPosition);
            for (int treePos = treeStart; treePos < treeEnd; treePos++) {
                // Retrieve the coords for the point at treePos
                stockQuadTree.getRecordCoords(treePos, resultCoords);
                // Read all entries at current position
                for (int iValue = 0; iValue < cardinality; iValue++) {
                    short value = in.readShort();
                    if (value != fillValue)
                        result.accumulate(value);
                }
            }
        }
    }

    // Result 2: Accumulate all nodes
    if (!selectedNodesPos.isEmpty()) {
        long nodesStartPosition = treeStartPosition + getNodesStartOffset(resolution, cardinality);
        // Sort node positions to eliminate backward seeks
        IndexedSortable nodeSortable = new IndexedSortable() {
            @Override
            public int compare(int i, int j) {
                return selectedNodesPos.get(i) - selectedNodesPos.get(j);
            }

            @Override
            public void swap(int i, int j) {
                int temp = selectedNodesPos.get(i);
                selectedNodesPos.set(i, selectedNodesPos.get(j));
                selectedNodesPos.set(j, temp);
            }
        };
        new QuickSort().sort(nodeSortable, 0, selectedNodesPos.size());

        Node selectedNode = new Node();
        for (int node_pos : selectedNodesPos) {
            long nodePosition = nodesStartPosition + node_pos * NodeSize;
            in.seek(nodePosition);
            selectedNode.readFields(in);
            result.accumulate(selectedNode);
        }
    }
    return result;
}
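Note the push order in the non-leaf branch above: children are pushed 3, 2, 1, 0 so that pop() returns them 0, 1, 2, 3 (last in, first out), keeping the traversal output sorted. The same trick in isolation (a minimal sketch):

import java.util.Stack;

public class ReversePushOrder {
    public static void main(String[] args) {
        Stack<Integer> stack = new Stack<>();
        // Push children in reverse so they pop in ascending order
        for (int child = 3; child >= 0; child--) {
            stack.push(child);
        }
        while (!stack.isEmpty()) {
            System.out.print(stack.pop()); // prints 0123
        }
    }
}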
From source file:com.baidu.rigel.biplatform.parser.CompileSection.java
/**
 * Arrange the parsed nodes of a section into a single node: Calculate nodes
 * popped off the stack are chained together through their right child; the
 * first non-Calculate node encountered is returned as-is.
 * arrangeNodes
 * @param nodes
 * @return
 */
private Node arrangeNodes(Stack<Node> nodes) {
    if (!nodes.isEmpty()) {
        Node node = null;
        Node leafNode = null;
        CalculateNode result = null;
        while (!nodes.isEmpty()) {
            node = nodes.pop();
            if (node.getNodeType().equals(NodeType.Calculate)) {
                result = (CalculateNode) node;
                if (leafNode == null) {
                    leafNode = node;
                } else {
                    result.setRight(leafNode);
                    leafNode = result;
                }
            } else {
                return node;
            }
        }
        return result;
    }
    return null;
}
From source file:com.autentia.intra.bean.MenuBean.java
/**
 * Finalize an opened node. If the given node has no children it is removed
 * from the tree.
 *
 * @param path path of current node
 * @return false if the node was removed
 */
private boolean closeNode(Stack<TreeNode> path) {
    boolean closed = true;
    TreeNode node = path.pop();
    if (node.getChildCount() == 0) {
        path.peek().getChildren().remove(node);
        closed = false;
    }
    log.debug("addLeaf - " + (closed ? "CLOSE " : "REMOVE") + ": " + node.getIdentifier());
    return closed;
}
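The example pairs pop() with peek(): once the current node is popped, peek() exposes its parent without removing it. Reduced to the bare stack operations (illustrative values):

import java.util.Stack;

public class PopThenPeek {
    public static void main(String[] args) {
        Stack<String> path = new Stack<>();
        path.push("root");
        path.push("child");
        String node = path.pop();    // removes and returns "child"
        String parent = path.peek(); // returns "root" but leaves it on the stack
        System.out.println(node + " -> parent " + parent);
    }
}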
From source file:com.nextep.designer.sqlclient.ui.helpers.SQLHelper.java
private static DMLParseResult parseSQL(String sql, int start) {
    final ISQLParser parser = GeneratorFactory.getSQLParser(DBGMHelper.getCurrentVendor());
    // Retrieving the corresponding statement start
    IDocument doc = new Document();
    doc.set(sql + " "); //$NON-NLS-1$
    FindReplaceDocumentAdapter finder = new FindReplaceDocumentAdapter(doc);
    try {
        IRegion lastSemicolonRegion = finder.find(start - 1, ";", false, false, false, false); //$NON-NLS-1$
        if (lastSemicolonRegion == null) {
            lastSemicolonRegion = new Region(0, 1);
        }
        IRegion selectRegion = finder.find(lastSemicolonRegion.getOffset(),
                "SELECT|INSERT|UPDATE|DELETE", true, false, false, true); //$NON-NLS-1$
        IRegion endSemicolonRegion = finder.find(start == doc.getLength() ? start - 1 : start,
                ";", true, false, false, false); //$NON-NLS-1$
        if (endSemicolonRegion == null) {
            endSemicolonRegion = new Region(doc.getLength() - 1, 0);
        }
        if (selectRegion == null || lastSemicolonRegion == null || endSemicolonRegion == null) {
            return null;
        }
        // The select must be found after the first semicolon, else it is not the
        // same SQL statement
        if (selectRegion.getOffset() >= lastSemicolonRegion.getOffset()
                && endSemicolonRegion.getOffset() >= selectRegion.getOffset()) {
            DMLScanner scanner = new DMLScanner(parser);
            scanner.setRange(doc, selectRegion.getOffset(),
                    endSemicolonRegion.getOffset() - selectRegion.getOffset());
            IToken token = scanner.nextToken();
            DMLParseResult result = new DMLParseResult();
            Stack<DMLParseResult> stack = new Stack<DMLParseResult>();
            Map<Segment, DMLParseResult> results = new HashMap<Segment, DMLParseResult>();
            while (!token.isEOF()) {
                // Counting parentheses
                if (token == DMLScanner.LEFTPAR_TOKEN) {
                    result.parCount++;
                } else if (token == DMLScanner.RIGHTPAR_TOKEN) {
                    result.parCount--;
                }
                if (token == DMLScanner.SELECT_TOKEN) {
                    // A nested SELECT starts: save the current parse context
                    stack.push(result);
                    result = new DMLParseResult();
                    result.stackStart = scanner.getTokenOffset();
                } else if (token == DMLScanner.RIGHTPAR_TOKEN && result.parCount < 0) {
                    // The nested SELECT ends: store its result and restore the outer context
                    results.put(new Segment(result.stackStart,
                            scanner.getTokenOffset() - result.stackStart), result);
                    result = stack.pop();
                } else if (token == DMLScanner.INSERT_TOKEN) {
                    result.ignoreInto = false;
                } else if (token == DMLScanner.FROM_TOKEN || token == DMLScanner.UPDATE_TOKEN
                        || (token == DMLScanner.INTO_TOKEN && !result.ignoreInto)) {
                    result.ignoreInto = true;
                    // We have a table segment start
                    result.tableSegStart = scanner.getTokenOffset();
                    result.tableStartToken = token;
                } else if (token == DMLScanner.WORD_TOKEN && result.tableSegStart > 0) {
                    // We are in a table segment so we instantiate appropriate table
                    // references and aliases in the parse result
                    if (result.lastAlias == null) {
                        // This is a new table definition, we add it
                        result.lastAlias = new TableAlias(
                                doc.get(scanner.getTokenOffset(), scanner.getTokenLength()).toUpperCase());
                        result.addFromTable(result.lastAlias);
                    } else if (result.lastAlias.getTableAlias() == null) {
                        // This is an alias of a defined table
                        final String alias = doc.get(scanner.getTokenOffset(), scanner.getTokenLength());
                        final List<String> reservedWords = parser.getTypedTokens().get(ISQLParser.DML);
                        if (!reservedWords.contains(alias.toUpperCase())) {
                            result.lastAlias.setAlias(alias);
                        } else {
                            result.lastAlias = null;
                        }
                    }
                } else if (token == DMLScanner.COMMA_TOKEN) {
                    // On a comma, we reset any table reference
                    result.lastAlias = null;
                } else if (token == DMLScanner.DML_TOKEN) {
                    result.lastAlias = null;
                    if (result.tableSegStart != -1) {
                        int tableSegEnd = scanner.getTokenOffset();
                        result.addTableSegment(
                                new Segment(result.tableSegStart, tableSegEnd - result.tableSegStart));
                        result.tableSegStart = -1;
                    }
                } else if (result.tableSegStart != -1
                        && ((result.tableStartToken == DMLScanner.FROM_TOKEN
                                && token == DMLScanner.WHERE_TOKEN)
                            || (result.tableStartToken == DMLScanner.UPDATE_TOKEN
                                && token == DMLScanner.SET_TOKEN)
                            || (result.tableStartToken == DMLScanner.INTO_TOKEN
                                && token == DMLScanner.LEFTPAR_TOKEN))) {
                    // We have matched a table segment end, so we close the segment
                    // and we add it to the parse result's table segments
                    int tableSegEnd = scanner.getTokenOffset();
                    result.addTableSegment(
                            new Segment(result.tableSegStart, tableSegEnd - result.tableSegStart));
                    result.tableSegStart = -1;
                    if (token == DMLScanner.WHERE_TOKEN) {
                        result.whereSegStart = scanner.getTokenOffset() + scanner.getTokenLength();
                    }
                }
                token = scanner.nextToken();
            }
            // If the table segment is still opened, we close it at the end of the SQL statement
            if (result.tableSegStart > -1) {
                int tableSegEnd = endSemicolonRegion.getOffset();
                result.addTableSegment(
                        new Segment(result.tableSegStart, tableSegEnd - result.tableSegStart + 1));
            }
            // Locating the appropriate result
            for (Segment s : results.keySet()) {
                if (s.getOffset() <= start && s.getOffset() + s.getLength() > start) {
                    return results.get(s);
                }
            }
            return result;
        }
    } catch (BadLocationException e) {
        LOGGER.debug("Problems while retrieving SQL statement");
    }
    return null;
}
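The nested-SELECT handling above is a save/restore pattern: the current parse context is pushed when a subquery opens and popped back when it closes. A minimal sketch of the same idea (types and names are illustrative, not from the NeXtep API):

import java.util.Stack;

public class ContextStack {
    public static void main(String[] args) {
        Stack<StringBuilder> saved = new Stack<>();
        StringBuilder context = new StringBuilder("outer query");
        // A subquery opens: save the current context and start a fresh one
        saved.push(context);
        context = new StringBuilder("inner query");
        // The subquery closes: the inner context is done, restore the outer one
        context = saved.pop();
        System.out.println(context); // prints "outer query"
    }
}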
From source file:com.hubspot.utils.circuitbreaker.CircuitBreakerWrapper.java
/**
 * Wraps the supplied object toWrap in a CircuitBreaker conforming to the
 * supplied CircuitBreakerPolicy.
 */
public <T, W extends T> T wrap(W toWrap, Class<T> interfaceToProxy, CircuitBreakerPolicy policy)
        throws CircuitBreakerWrappingException {
    sanityCheck(toWrap, interfaceToProxy, policy);

    // Walk the chain of interfaces implemented by T and check for their blacklisted methods
    Stack<Class<?>> implementedInterfaces = new Stack<Class<?>>();
    implementedInterfaces.addAll(Arrays.asList(interfaceToProxy.getInterfaces()));
    implementedInterfaces.add(interfaceToProxy);

    Map<Method, Class[]> blacklist = new HashMap<Method, Class[]>();
    while (!implementedInterfaces.isEmpty()) {
        Class<?> implementedInterface = implementedInterfaces.pop();
        for (Method m : implementedInterface.getDeclaredMethods()) {
            // Check that the blacklisted method throws CircuitBreakerException
            if (m.isAnnotationPresent(CircuitBreakerExceptionBlacklist.class)) {
                if (!ArrayUtils.contains(m.getExceptionTypes(), CircuitBreakerException.class)) {
                    throw new CircuitBreakerWrappingException(
                            "Wrapped methods must throw CircuitBreakerException");
                }
                CircuitBreakerExceptionBlacklist a = (CircuitBreakerExceptionBlacklist) m
                        .getAnnotation(CircuitBreakerExceptionBlacklist.class);
                blacklist.put(m, a.blacklist());
            }
        }
        implementedInterfaces.addAll(Arrays.asList(implementedInterface.getInterfaces()));
    }

    Class<?>[] interfaces = new Class<?>[] { interfaceToProxy };
    InvocationHandler handler = new CircuitBreakerInvocationHandler(toWrap, blacklist, policy);
    T newProxyInstance = (T) Proxy.newProxyInstance(getClass().getClassLoader(), interfaces, handler);
    return newProxyInstance;
}
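Because Stack extends Vector, the add() and addAll() calls above append at the end of the backing vector, which is the top of the stack, so they behave like bulk push() operations. A quick illustration:

import java.util.Arrays;
import java.util.Stack;

public class AddIsPush {
    public static void main(String[] args) {
        Stack<Integer> stack = new Stack<>();
        stack.add(1);                      // same effect as stack.push(1)
        stack.addAll(Arrays.asList(2, 3)); // appends in order, so 3 ends up on top
        System.out.println(stack.pop());   // prints 3
    }
}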
From source file:org.apache.synapse.core.axis2.TimeoutHandler.java
private void processCallbacks() {

    // Clear the expired statistics
    if (statisticsCleaner == null) {
        StatisticsCollector collector = SynapseConfigUtils.getStatisticsCollector(contextInfo);
        if (collector != null) {
            statisticsCleaner = new StatisticsCleaner(collector);
        }
    }
    if (statisticsCleaner != null) {
        statisticsCleaner.clean();
    }

    // Clear all the expired sessions
    SALSessions.getInstance().clearSessions();

    // Check that the callback store contains at least one entry before proceeding; otherwise
    // getting the time for doing nothing would be an inefficient task.
    // We have to synchronize this on the callbackStore, as iterators of thread-safe collections
    // are not thread safe. callbackStore can be modified concurrently by the
    // SynapseCallbackReceiver.
    synchronized (callbackStore) {
        if (callbackStore.size() > 0) {
            long currentTime = currentTime();
            List toRemove = new ArrayList();

            for (Object key : callbackStore.keySet()) {
                AsyncCallback callback = (AsyncCallback) callbackStore.get(key);
                if (callback == null) {
                    if (log.isDebugEnabled()) {
                        log.debug("There is no callback for key :" + key);
                    }
                    continue;
                }

                if (callback.getTimeOutOn() <= currentTime) {
                    toRemove.add(key);

                    if (callback.getTimeOutAction() == SynapseConstants.DISCARD_AND_FAULT) {
                        // Activate the fault sequence of the current sequence mediator
                        MessageContext msgContext = callback.getSynapseOutMsgCtx();

                        /* Clear the pipe to prevent release of the associated writer buffer
                           to the buffer factory. This is to prevent the same buffer being
                           released to both source and target buffer factories. Otherwise,
                           when a late response arrives, the buffer is released to both
                           factories and makes the system unstable. */
                        ((Axis2MessageContext) msgContext).getAxis2MessageContext()
                                .removeProperty(PassThroughConstants.PASS_THROUGH_PIPE);

                        // Add an error code to the message context, so that error sequences
                        // can identify the cause of error
                        msgContext.setProperty(SynapseConstants.ERROR_CODE,
                                SynapseConstants.HANDLER_TIME_OUT);
                        msgContext.setProperty(SynapseConstants.ERROR_MESSAGE, SEND_TIMEOUT_MESSAGE);

                        SOAPEnvelope soapEnvelope;
                        if (msgContext.isSOAP11()) {
                            soapEnvelope = OMAbstractFactory.getSOAP11Factory().createSOAPEnvelope();
                            soapEnvelope.addChild(OMAbstractFactory.getSOAP11Factory().createSOAPBody());
                        } else {
                            soapEnvelope = OMAbstractFactory.getSOAP12Factory().createSOAPEnvelope();
                            soapEnvelope.addChild(OMAbstractFactory.getSOAP12Factory().createSOAPBody());
                        }
                        try {
                            msgContext.setEnvelope(soapEnvelope);
                        } catch (Throwable ex) {
                            log.error("Exception or Error occurred resetting SOAP Envelope", ex);
                            continue;
                        }

                        Stack<FaultHandler> faultStack = msgContext.getFaultStack();
                        if (!faultStack.isEmpty()) {
                            FaultHandler faultHandler = faultStack.pop();
                            if (faultHandler != null) {
                                try {
                                    faultHandler.handleFault(msgContext);
                                } catch (Throwable ex) {
                                    log.warn("Exception or Error occurred while "
                                            + "executing the fault handler", ex);
                                    continue;
                                }
                            }
                        }
                    }
                }
            }

            for (Object key : toRemove) {
                AsyncCallback callback = (AsyncCallback) callbackStore.get(key);
                if (callback == null) {
                    // We will get here if we get a response from the backend while clearing callbacks
                    continue;
                }
                if (!"true".equals(callback.getSynapseOutMsgCtx().getProperty(SynapseConstants.OUT_ONLY))) {
                    log.warn("Expiring message ID : " + key + "; dropping message after "
                            + "timeout of : " + (callback.getTimeoutDuration() / 1000) + " seconds");
                }
                callbackStore.remove(key);
            }
        }
    }
}
From source file:edu.ucsb.nceas.metacattest.SubTreeTest.java
/**
 * Test the method getSubTreeNodeStack
 */
public void testGetSubTreeNodeStack() {
    Stack nodeStack = null;
    try {
        nodeStack = tree.getSubTreeNodeStack();
    } catch (Exception e) {
        log.debug("Error in SubTreeTest.suite: " + e.getMessage());
    }

    while (nodeStack != null && !nodeStack.empty()) {
        NodeRecord node = (NodeRecord) nodeStack.pop();
        String nodeType = node.getNodeType();
        if (nodeType != null && nodeType.equals("ELEMENT")) {
            log.debug("Element: " + node.getNodeName());
        } else if (nodeType != null && nodeType.equals("ATTRIBUTE")) {
            log.debug("Attribute: " + node.getNodeName() + " = " + node.getNodeData());
        } else {
            log.debug("Text: " + node.getNodeData());
        }
    }
}
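This test uses the raw Stack type and empty(); empty() is the original java.util.Stack method and is equivalent to the isEmpty() inherited from Vector. For example:

import java.util.Stack;

public class EmptyVsIsEmpty {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<>();
        stack.push("x");
        System.out.println(stack.empty());   // false
        System.out.println(stack.isEmpty()); // false
        stack.pop();
        System.out.println(stack.empty());   // true
    }
}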
From source file:com.altoukhov.svsync.fileviews.LocalFileSpace.java
@Override
protected Snapshot scan(List<Pattern> filters) {
    try {
        Map<String, FileSnapshot> files = new LinkedHashMap<>();
        Set<String> dirs = new HashSet<>();
        File root = new File(rootPath);

        if (root.exists()) {
            Stack<File> stack = new Stack<>();
            stack.push(root);
            dirs.add("");

            while (!stack.isEmpty()) {
                File currentFolder = stack.pop();
                for (final File file : currentFolder.listFiles(filter)) {
                    if (file.isFile() && !isExcluded(trimFilePath(file.getAbsolutePath()))
                            && !isFiltered(toRelativePath(file.getAbsolutePath()), filters)) {
                        FileSnapshot fileSnapshot = new FileSnapshot(file.getName(), file.length(),
                                new DateTime(new Date(file.lastModified())),
                                toRelativePath(file.getAbsolutePath()));
                        files.put(fileSnapshot.getRelativePath(), fileSnapshot);
                    } else if (file.isDirectory() && !isExcluded(trimFilePath(file.getAbsolutePath()))
                            && !isFiltered(toRelativePath(file.getAbsolutePath(), true), filters)) {
                        stack.push(file);
                        dirs.add(toRelativePath(file.getAbsolutePath()));
                        System.out.println("Scanning " + file.getAbsolutePath());
                    }
                }
            }
        }
        Snapshot snapshot = new Snapshot(files, dirs);
        return snapshot;
    } catch (SecurityException ex) {
        System.out.println("Failed to scan file space");
        System.out.println(ex.getMessage());
    }
    return null;
}
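The scan above is the classic iterative depth-first traversal: push the root, then repeatedly pop a folder and push its subfolders. Stripped of the snapshot logic, the pattern looks like this (a minimal, dependency-free sketch that prints every file under the directory given as the first argument):

import java.io.File;
import java.util.Stack;

public class IterativeWalk {
    public static void main(String[] args) {
        Stack<File> stack = new Stack<>();
        stack.push(new File(args[0]));
        while (!stack.isEmpty()) {
            File current = stack.pop();
            File[] children = current.listFiles();
            if (children == null) {
                continue; // not a directory, or unreadable
            }
            for (File child : children) {
                if (child.isDirectory()) {
                    stack.push(child); // visit later
                } else {
                    System.out.println(child.getAbsolutePath());
                }
            }
        }
    }
}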
From source file:com.pinterest.hdfsbackup.distcp.DistCp.java
/**
 * Initialize DFSCopyFileMapper specific job-configuration.
 * @param conf : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args Arguments
 */
private static void setup(Configuration conf, JobConf jobConf, final Arguments args) throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());

    // Set boolean values
    final boolean update = args.flags.contains(Options.UPDATE);
    final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE);
    jobConf.setBoolean(Options.UPDATE.propertyname, update);
    jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
    jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname,
            args.flags.contains(Options.IGNORE_READ_FAILURES));
    jobConf.setBoolean(Options.PRESERVE_STATUS.propertyname,
            args.flags.contains(Options.PRESERVE_STATUS));

    final String randomId = getRandomId();
    JobClient jClient = new JobClient(jobConf);
    Path jobDirectory = new Path(jClient.getSystemDir(), NAME + "_" + randomId);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

    FileSystem dstfs = args.dst.getFileSystem(conf);
    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
        dstIsDir = dstfs.getFileStatus(args.dst).isDir();
    }

    // Default logPath
    Path logPath = args.log;
    if (logPath == null) {
        String filename = "_distcp_logs_" + randomId;
        if (!dstExists || !dstIsDir) {
            Path parent = args.dst.getParent();
            if (!dstfs.exists(parent)) {
                dstfs.mkdirs(parent);
            }
            logPath = new Path(parent, filename);
        } else {
            logPath = new Path(args.dst, filename);
        }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);

    // Create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);

    Path srcfilelist = new Path(jobDirectory, "_distcp_src_files");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    SequenceFile.Writer src_writer = SequenceFile.createWriter(jobfs, jobConf, srcfilelist,
            LongWritable.class, FilePair.class, SequenceFile.CompressionType.NONE);

    Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files");
    SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobfs, jobConf, dstfilelist,
            Text.class, Text.class, SequenceFile.CompressionType.NONE);

    Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs");
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobfs, jobConf, dstdirlist,
            Text.class, FilePair.class, SequenceFile.CompressionType.NONE);

    // Handle the case where the destination directory doesn't exist
    // and we've only a single src directory OR we're updating/overwriting
    // the contents of the destination directory.
    final boolean special = (args.srcs.size() == 1 && !dstExists) || update || overwrite;
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, byteCount = 0L, cbsyncs = 0L;
    try {
        for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext();) {
            final Path src = srcItr.next();
            FileSystem srcfs = src.getFileSystem(conf);
            FileStatus srcfilestat = srcfs.getFileStatus(src);
            Path root = special && srcfilestat.isDir() ? src : src.getParent();
            if (srcfilestat.isDir()) {
                ++srcCount;
            }

            Stack<FileStatus> pathstack = new Stack<FileStatus>();
            for (pathstack.push(srcfilestat); !pathstack.empty();) {
                FileStatus cur = pathstack.pop();
                FileStatus[] children = srcfs.listStatus(cur.getPath());
                for (int i = 0; i < children.length; i++) {
                    boolean skipfile = false;
                    final FileStatus child = children[i];
                    final String dst = makeRelative(root, child.getPath());
                    ++srcCount;

                    if (child.isDir()) {
                        pathstack.push(child);
                    } else {
                        // Skip file if the src and the dst files are the same
                        skipfile = update && sameFile(srcfs, child, dstfs, new Path(args.dst, dst));
                        // Skip file if it exceeds the file limit or size limit
                        skipfile |= fileCount == args.filelimit
                                || byteCount + child.getLen() > args.sizelimit;

                        if (!skipfile) {
                            ++fileCount;
                            byteCount += child.getLen();

                            if (LOG.isTraceEnabled()) {
                                LOG.trace("adding file " + child.getPath());
                            }

                            ++cnsyncf;
                            cbsyncs += child.getLen();
                            if (cnsyncf > SYNC_FILE_MAX || cbsyncs > BYTES_PER_MAP) {
                                src_writer.sync();
                                dst_writer.sync();
                                cnsyncf = 0;
                                cbsyncs = 0L;
                            }
                        }
                    }

                    if (!skipfile) {
                        src_writer.append(new LongWritable(child.isDir() ? 0 : child.getLen()),
                                new FilePair(child, dst));
                    }

                    dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
                }

                if (cur.isDir()) {
                    String dst = makeRelative(root, cur.getPath());
                    dir_writer.append(new Text(dst), new FilePair(cur, dst));
                    if (++dirsyn > SYNC_FILE_MAX) {
                        dirsyn = 0;
                        dir_writer.sync();
                    }
                }
            }
        }
    } finally {
        checkAndClose(src_writer);
        checkAndClose(dst_writer);
        checkAndClose(dir_writer);
    }

    FileStatus dststatus = null;
    try {
        dststatus = dstfs.getFileStatus(args.dst);
    } catch (FileNotFoundException fnfe) {
        LOG.info(args.dst + " does not exist.");
    }

    // Create dest path dir if copying > 1 file
    if (dststatus == null) {
        if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
            throw new IOException("Failed to create " + args.dst);
        }
    }

    final Path sorted = new Path(jobDirectory, "_distcp_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);

    if (dststatus != null && args.flags.contains(Options.DELETE)) {
        deleteNonexisting(dstfs, dststatus, sorted, jobfs, jobDirectory, jobConf, conf);
    }

    Path tmpDir = new Path(
            (dstExists && !dstIsDir) || (!dstExists && srcCount == 1) ? args.dst.getParent() : args.dst,
            "_distcp_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
    LOG.info("srcCount=" + srcCount);
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
    setMapCount(byteCount, jobConf);
}