List of usage examples for java.util.Stack.empty()
public boolean empty()

Tests if this stack is empty: returns true if and only if the stack contains no items, false otherwise.
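Before the real-world examples below, here is a minimal self-contained sketch of the usual empty() idiom: push items, then drain the stack with a while (!stack.empty()) loop. The class name and values are hypothetical, chosen only for illustration.

import java.util.Stack;

public class StackEmptyDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();
        stack.push("a");
        stack.push("b");
        stack.push("c");

        // empty() returns true once all items have been popped,
        // so this loop drains the stack in LIFO order: c, b, a
        while (!stack.empty()) {
            System.out.println(stack.pop());
        }
    }
}

Every example that follows uses the same pattern: empty() as the termination test of a loop that pops work items (files, tree nodes, path segments) until none remain.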
From source file:org.rhq.core.pluginapi.content.FileContentDelegate.java
/**
 * Computes SHA256 for exploded content as the aggregate SHA256
 * of all the files in the deployment
 *
 * @param deploymentDirectory deployment directory
 * @return SHA256 of the content
 */
private String computeSHAForExplodedContent(File deploymentDirectory) {
    try {
        if (deploymentDirectory.isDirectory()) {
            MessageDigestGenerator messageDigest = new MessageDigestGenerator(MessageDigestGenerator.SHA_256);

            Stack<File> unvisitedFolders = new Stack<File>();
            unvisitedFolders.add(deploymentDirectory);
            while (!unvisitedFolders.empty()) {
                File[] files = unvisitedFolders.pop().listFiles();
                Arrays.sort(files, new Comparator<File>() {
                    public int compare(File f1, File f2) {
                        try {
                            return f1.getCanonicalPath().compareTo(f2.getCanonicalPath());
                        } catch (IOException e) {
                            // do nothing if the sort fails at this point
                        }
                        return 0;
                    }
                });

                for (File file : files) {
                    if (file.isDirectory()) {
                        unvisitedFolders.add(file);
                    } else {
                        FileInputStream inputStream = null;
                        try {
                            inputStream = new FileInputStream(file);
                            messageDigest.add(inputStream);
                        } finally {
                            if (inputStream != null) {
                                inputStream.close();
                            }
                        }
                    }
                }
            }

            return messageDigest.getDigestString();
        }
    } catch (IOException e) {
        throw new RuntimeException("Error creating artifact for contentFile: " + deploymentDirectory, e);
    }

    return null;
}
From source file:com.aperigeek.dropvault.web.rest.SearchService.java
@GET
@Produces("application/json")
public Response query(@HeaderParam("Authorization") String authorization, @QueryParam("q") String query)
        throws IndexException {
    User user;
    try {
        user = authenticationService.checkAuthentication(authorization);
    } catch (InvalidPasswordException ex) {
        return Response.status(401).header("WWW-Authenticate", "Basic realm=\"Search authentication\"").build();
    } catch (NotAuthorizedException ex) {
        return Response.status(403).build();
    } catch (ProtocolException ex) {
        return Response.status(400).build();
    }

    URI userUri = URI.create(DAV_BASE);

    List<String> uris = new ArrayList<String>();
    List<String> ids = indexService.search(user.getUsername(), user.getPassword(), query);
    for (String id : ids) {
        Resource res = fileService.getResource(id);

        Stack<Resource> path = new Stack<Resource>();
        Resource parent = res;
        while (parent != null) {
            path.push(parent);
            parent = fileService.getParent(parent);
        }

        // Remove the user's root folder, we don't want it in the path
        path.pop();

        UriBuilder builder = UriBuilder.fromUri(userUri);
        while (!path.empty()) {
            Resource e = path.pop();
            builder.path(e.getName());
        }

        uris.add(builder.build().toString());
    }

    JSONArray array = new JSONArray(uris);
    return Response.ok(array.toString()).build();
}
From source file:org.qipki.site.plugin.WebsiteMojo.java
private void doMarkup() throws MojoExecutionException {
    getLog().info(">>> Website Maven Plugin :: doMarkup()");
    try {
        String layout = FileUtils.fileRead(new File(layoutDirectory, "main.layout.html"), "UTF-8");
        MarkdownProcessor md = new MarkdownProcessor();
        FileFilter fileFilter = new MarkdownFileFilter();
        Stack<File> stack = new Stack<File>();
        stack.addAll(Arrays.asList(markupDirectory.listFiles(fileFilter)));
        while (!stack.empty()) {
            File eachFile = stack.pop();

            // Recurse if needed
            if (eachFile.isDirectory()) {
                stack.addAll(Arrays.asList(eachFile.listFiles(fileFilter)));
                getLog().info("- Added descendants of " + eachFile);
                continue;
            }

            // Resolve target path
            File target = resolveTargetPath(eachFile, "html", markupDirectory);

            // Process Markdown
            String input = FileUtils.fileRead(eachFile, "UTF-8");
            String output = md.markdown(input).trim();

            // Apply Theme
            output = applyLayout(layout, output);

            // Save to target path
            FileUtils.fileWrite(target.getAbsolutePath(), "UTF-8", output);
            getLog().info("- Processed " + eachFile + " to " + target);
        }
    } catch (IOException ex) {
        throw new MojoExecutionException("Unable to process markup content: " + ex.getMessage(), ex);
    }
}
From source file:csns.importer.parser.MFTScoreParser.java
public void parse(MFTScoreImporter importer) {
    Department department = importer.getDepartment();
    Date date = importer.getDate();

    Scanner scanner = new Scanner(importer.getText());
    scanner.useDelimiter("\\s+|\\r\\n|\\r|\\n");
    while (scanner.hasNext()) {
        // last name
        String lastName = scanner.next();
        while (!lastName.endsWith(","))
            lastName += " " + scanner.next();
        lastName = lastName.substring(0, lastName.length() - 1);

        // first name
        String firstName = scanner.next();

        // score
        Stack<String> stack = new Stack<String>();
        String s = scanner.next();
        while (!isScore(s)) {
            stack.push(s);
            s = scanner.next();
        }
        int value = Integer.parseInt(s);

        // authorization code
        stack.pop();

        // cin
        String cin = null;
        if (!stack.empty() && isCin(stack.peek()))
            cin = stack.pop();

        // user
        User user = null;
        if (cin != null)
            user = userDao.getUserByCin(cin);
        else {
            List<User> users = userDao.getUsers(lastName, firstName);
            if (users.size() == 1)
                user = users.get(0);
        }

        if (user != null) {
            MFTScore score = mftScoreDao.getScore(department, date, user);
            if (score == null) {
                score = new MFTScore();
                score.setDepartment(department);
                score.setDate(date);
                score.setUser(user);
            } else {
                logger.info(user.getId() + ": " + score.getValue() + " => " + value);
            }
            score.setValue(value);
            importer.getScores().add(score);
        } else {
            User failedUser = new User();
            failedUser.setLastName(lastName);
            failedUser.setFirstName(firstName);
            failedUser.setCin(cin);
            importer.getFailedUsers().add(failedUser);
        }

        logger.debug(lastName + "," + firstName + "," + cin + "," + value);
    }
    scanner.close();
}
From source file:org.rhq.plugins.jbossas5.util.FileContentDelegate.java
/**
 * Computes SHA256 for exploded content as the aggregate SHA256
 * of all the files in the deployment
 *
 * @param deploymentDirectory deployment directory
 * @return SHA256 of the content
 */
@Deprecated
private String computeSHAForExplodedContent(File deploymentDirectory) {
    try {
        if (deploymentDirectory.isDirectory()) {
            MessageDigestGenerator messageDigest = new MessageDigestGenerator(MessageDigestGenerator.SHA_256);

            Stack<File> unvisitedFolders = new Stack<File>();
            unvisitedFolders.add(deploymentDirectory);
            while (!unvisitedFolders.empty()) {
                File[] files = unvisitedFolders.pop().listFiles();
                Arrays.sort(files, new Comparator<File>() {
                    public int compare(File f1, File f2) {
                        try {
                            return f1.getCanonicalPath().compareTo(f2.getCanonicalPath());
                        } catch (IOException e) {
                            // do nothing if the sort fails at this point
                        }
                        return 0;
                    }
                });

                for (File file : files) {
                    if (file.isDirectory()) {
                        unvisitedFolders.add(file);
                    } else {
                        FileInputStream inputStream = null;
                        try {
                            inputStream = new FileInputStream(file);
                            messageDigest.add(inputStream);
                        } finally {
                            if (inputStream != null) {
                                inputStream.close();
                            }
                        }
                    }
                }
            }

            return messageDigest.getDigestString();
        }
    } catch (IOException e) {
        throw new RuntimeException("Error creating artifact for contentFile: " + deploymentDirectory, e);
    }

    return null;
}
From source file:org.apache.hadoop.tools.DistCp.java
/**
 * Initialize DFSCopyFileMapper specific job-configuration.
 * @param conf : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args Arguments
 * @return true if it is necessary to launch a job.
 */
private static boolean setup(Configuration conf, JobConf jobConf, final Arguments args) throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());

    //set boolean values
    final boolean update = args.flags.contains(Options.UPDATE);
    final boolean skipCRCCheck = args.flags.contains(Options.SKIPCRC);
    final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE);
    jobConf.setBoolean(Options.UPDATE.propertyname, update);
    jobConf.setBoolean(Options.SKIPCRC.propertyname, skipCRCCheck);
    jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
    jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname,
            args.flags.contains(Options.IGNORE_READ_FAILURES));
    jobConf.setBoolean(Options.PRESERVE_STATUS.propertyname, args.flags.contains(Options.PRESERVE_STATUS));

    final String randomId = getRandomId();
    JobClient jClient = new JobClient(jobConf);
    Path stagingArea;
    try {
        stagingArea = JobSubmissionFiles.getStagingDir(jClient, conf);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
    Path jobDirectory = new Path(stagingArea + NAME + "_" + randomId);
    FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
    // FileSystem.mkdirs(jClient.getFs(), jobDirectory, mapredSysPerms);
    FileSystem.mkdirs(FileSystem.get(jobDirectory.toUri(), conf), jobDirectory, mapredSysPerms);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

    FileSystem dstfs = args.dst.getFileSystem(conf);

    // get tokens for all the required FileSystems..
    TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), new Path[] { args.dst }, conf);

    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
        dstIsDir = dstfs.getFileStatus(args.dst).isDir();
    }

    // default logPath
    Path logPath = args.log;
    if (logPath == null) {
        String filename = "_distcp_logs_" + randomId;
        if (!dstExists || !dstIsDir) {
            Path parent = args.dst.getParent();
            if (!dstfs.exists(parent)) {
                dstfs.mkdirs(parent);
            }
            logPath = new Path(parent, filename);
        } else {
            logPath = new Path(args.dst, filename);
        }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);

    // create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);

    Path srcfilelist = new Path(jobDirectory, "_distcp_src_files");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    SequenceFile.Writer src_writer = SequenceFile.createWriter(jobfs, jobConf, srcfilelist, LongWritable.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files");
    SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobfs, jobConf, dstfilelist, Text.class,
            Text.class, SequenceFile.CompressionType.NONE);

    Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs");
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobfs, jobConf, dstdirlist, Text.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    // handle the case where the destination directory doesn't exist
    // and we've only a single src directory OR we're updating/overwriting
    // the contents of the destination directory.
    final boolean special = (args.srcs.size() == 1 && !dstExists) || update || overwrite;
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, byteCount = 0L, cbsyncs = 0L;
    try {
        for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext();) {
            final Path src = srcItr.next();
            FileSystem srcfs = src.getFileSystem(conf);
            FileStatus srcfilestat = srcfs.getFileStatus(src);
            Path root = special && srcfilestat.isDir() ? src : src.getParent();
            if (srcfilestat.isDir()) {
                ++srcCount;
            }

            Stack<FileStatus> pathstack = new Stack<FileStatus>();
            for (pathstack.push(srcfilestat); !pathstack.empty();) {
                FileStatus cur = pathstack.pop();
                FileStatus[] children = srcfs.listStatus(cur.getPath());
                for (int i = 0; i < children.length; i++) {
                    boolean skipfile = false;
                    final FileStatus child = children[i];
                    final String dst = makeRelative(root, child.getPath());
                    ++srcCount;

                    if (child.isDir()) {
                        pathstack.push(child);
                    } else {
                        //skip file if the src and the dst files are the same.
                        skipfile = update && sameFile(srcfs, child, dstfs, new Path(args.dst, dst), skipCRCCheck);
                        //skip file if it exceed file limit or size limit
                        skipfile |= fileCount == args.filelimit || byteCount + child.getLen() > args.sizelimit;

                        if (!skipfile) {
                            ++fileCount;
                            byteCount += child.getLen();

                            if (LOG.isTraceEnabled()) {
                                LOG.trace("adding file " + child.getPath());
                            }

                            ++cnsyncf;
                            cbsyncs += child.getLen();
                            if (cnsyncf > SYNC_FILE_MAX || cbsyncs > BYTES_PER_MAP) {
                                src_writer.sync();
                                dst_writer.sync();
                                cnsyncf = 0;
                                cbsyncs = 0L;
                            }
                        }
                    }

                    if (!skipfile) {
                        src_writer.append(new LongWritable(child.isDir() ? 0 : child.getLen()),
                                new FilePair(child, dst));
                    }

                    dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
                }

                if (cur.isDir()) {
                    String dst = makeRelative(root, cur.getPath());
                    dir_writer.append(new Text(dst), new FilePair(cur, dst));
                    if (++dirsyn > SYNC_FILE_MAX) {
                        dirsyn = 0;
                        dir_writer.sync();
                    }
                }
            }
        }
    } finally {
        checkAndClose(src_writer);
        checkAndClose(dst_writer);
        checkAndClose(dir_writer);
    }

    FileStatus dststatus = null;
    try {
        dststatus = dstfs.getFileStatus(args.dst);
    } catch (FileNotFoundException fnfe) {
        LOG.info(args.dst + " does not exist.");
    }

    // create dest path dir if copying > 1 file
    if (dststatus == null) {
        if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
            throw new IOException("Failed to create" + args.dst);
        }
    }

    final Path sorted = new Path(jobDirectory, "_distcp_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);

    if (dststatus != null && args.flags.contains(Options.DELETE)) {
        deleteNonexisting(dstfs, dststatus, sorted, jobfs, jobDirectory, jobConf, conf);
    }

    Path tmpDir = new Path(
            (dstExists && !dstIsDir) || (!dstExists && srcCount == 1) ? args.dst.getParent() : args.dst,
            "_distcp_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
    LOG.info("sourcePathsCount=" + srcCount);
    LOG.info("filesToCopyCount=" + fileCount);
    LOG.info("bytesToCopyCount=" + StringUtils.humanReadableInt(byteCount));
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
    setMapCount(byteCount, jobConf);
    return fileCount > 0;
}
From source file:org.tobarsegais.webapp.ServletContextListenerImpl.java
public void contextInitialized(ServletContextEvent sce) {
    ServletContext application = sce.getServletContext();
    Map<String, String> bundles = new HashMap<String, String>();
    Map<String, Toc> contents = new LinkedHashMap<String, Toc>();
    List<IndexEntry> keywords = new ArrayList<IndexEntry>();
    Directory index = new RAMDirectory();
    Analyzer analyzer = new StandardAnalyzer(LUCENE_VERSON);
    IndexWriterConfig indexWriterConfig = new IndexWriterConfig(LUCENE_VERSON, analyzer);
    IndexWriter indexWriter;
    try {
        indexWriter = new IndexWriter(index, indexWriterConfig);
    } catch (IOException e) {
        application.log("Cannot create search index. Search will be unavailable.", e);
        indexWriter = null;
    }
    for (String path : (Set<String>) application.getResourcePaths(BUNDLE_PATH)) {
        if (path.endsWith(".jar")) {
            String key = path.substring("/WEB-INF/bundles/".length(), path.lastIndexOf(".jar"));
            application.log("Parsing " + path);
            URLConnection connection = null;
            try {
                URL url = new URL("jar:" + application.getResource(path) + "!/");
                connection = url.openConnection();
                if (!(connection instanceof JarURLConnection)) {
                    application.log(path + " is not a jar file, ignoring");
                    continue;
                }
                JarURLConnection jarConnection = (JarURLConnection) connection;
                JarFile jarFile = jarConnection.getJarFile();
                Manifest manifest = jarFile.getManifest();
                if (manifest != null) {
                    String symbolicName = manifest.getMainAttributes().getValue("Bundle-SymbolicName");
                    if (symbolicName != null) {
                        int i = symbolicName.indexOf(';');
                        if (i != -1) {
                            symbolicName = symbolicName.substring(0, i);
                        }
                        bundles.put(symbolicName, key);
                        key = symbolicName;
                    }
                }
                JarEntry pluginEntry = jarFile.getJarEntry("plugin.xml");
                if (pluginEntry == null) {
                    application.log(path + " does not contain a plugin.xml file, ignoring");
                    continue;
                }
                Plugin plugin = Plugin.read(jarFile.getInputStream(pluginEntry));
                Extension tocExtension = plugin.getExtension("org.eclipse.help.toc");
                if (tocExtension == null || tocExtension.getFile("toc") == null) {
                    application.log(path + " does not contain a 'org.eclipse.help.toc' extension, ignoring");
                    continue;
                }
                JarEntry tocEntry = jarFile.getJarEntry(tocExtension.getFile("toc"));
                if (tocEntry == null) {
                    application.log(path + " is missing the referenced toc: " + tocExtension.getFile("toc")
                            + ", ignoring");
                    continue;
                }
                Toc toc;
                try {
                    toc = Toc.read(jarFile.getInputStream(tocEntry));
                } catch (IllegalStateException e) {
                    application.log("Could not parse " + path + " due to " + e.getMessage(), e);
                    continue;
                }
                contents.put(key, toc);
                Extension indexExtension = plugin.getExtension("org.eclipse.help.index");
                if (indexExtension != null && indexExtension.getFile("index") != null) {
                    JarEntry indexEntry = jarFile.getJarEntry(indexExtension.getFile("index"));
                    if (indexEntry != null) {
                        try {
                            keywords.addAll(Index.read(key, jarFile.getInputStream(indexEntry)).getChildren());
                        } catch (IllegalStateException e) {
                            application.log("Could not parse " + path + " due to " + e.getMessage(), e);
                        }
                    } else {
                        application.log(path + " is missing the referenced index: "
                                + indexExtension.getFile("index"));
                    }
                }
                application.log(path + " successfully parsed and added as " + key);
                if (indexWriter != null) {
                    application.log("Indexing content of " + path);
                    Set<String> files = new HashSet<String>();
                    Stack<Iterator<? extends TocEntry>> stack = new Stack<Iterator<? extends TocEntry>>();
                    stack.push(Collections.singleton(toc).iterator());
                    while (!stack.empty()) {
                        Iterator<? extends TocEntry> cur = stack.pop();
                        if (cur.hasNext()) {
                            TocEntry entry = cur.next();
                            stack.push(cur);
                            if (!entry.getChildren().isEmpty()) {
                                stack.push(entry.getChildren().iterator());
                            }
                            String file = entry.getHref();
                            if (file == null) {
                                continue;
                            }
                            int hashIndex = file.indexOf('#');
                            if (hashIndex != -1) {
                                file = file.substring(0, hashIndex);
                            }
                            if (files.contains(file)) {
                                // already indexed
                                // todo work out whether to just pull the section
                                continue;
                            }
                            Document document = new Document();
                            document.add(new Field("title", entry.getLabel(), Field.Store.YES,
                                    Field.Index.ANALYZED));
                            document.add(new Field("href", key + "/" + entry.getHref(), Field.Store.YES,
                                    Field.Index.NO));
                            JarEntry docEntry = jarFile.getJarEntry(file);
                            if (docEntry == null) {
                                // ignore missing file
                                continue;
                            }
                            InputStream inputStream = null;
                            try {
                                inputStream = jarFile.getInputStream(docEntry);
                                org.jsoup.nodes.Document docDoc = Jsoup.parse(IOUtils.toString(inputStream));
                                document.add(new Field("contents", docDoc.body().text(), Field.Store.NO,
                                        Field.Index.ANALYZED));
                                indexWriter.addDocument(document);
                            } finally {
                                IOUtils.closeQuietly(inputStream);
                            }
                        }
                    }
                }
            } catch (XMLStreamException e) {
                application.log("Could not parse " + path + " due to " + e.getMessage(), e);
            } catch (MalformedURLException e) {
                application.log("Could not parse " + path + " due to " + e.getMessage(), e);
            } catch (IOException e) {
                application.log("Could not parse " + path + " due to " + e.getMessage(), e);
            } finally {
                if (connection instanceof HttpURLConnection) {
                    // should never be the case, but we should try to be sure
                    ((HttpURLConnection) connection).disconnect();
                }
            }
        }
    }
    if (indexWriter != null) {
        try {
            indexWriter.close();
        } catch (IOException e) {
            application.log("Cannot create search index. Search will be unavailable.", e);
        }
        application.setAttribute("index", index);
    }
    application.setAttribute("toc", Collections.unmodifiableMap(contents));
    application.setAttribute("keywords", new Index(keywords));
    application.setAttribute("bundles", Collections.unmodifiableMap(bundles));
    application.setAttribute("analyzer", analyzer);
    application.setAttribute("contentsQueryParser", new QueryParser(LUCENE_VERSON, "contents", analyzer));
}
From source file:org.jd.copier.mapred.DistCp.java
/**
 * Initialize DFSCopyFileMapper specific job-configuration.
 * @param conf : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args Arguments
 * @return true if it is necessary to launch a job.
 */
private static boolean setup(Configuration conf, JobConf jobConf, final Arguments args) throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());

    //set boolean values
    final boolean update = args.flags.contains(Options.UPDATE);
    final boolean skipCRCCheck = args.flags.contains(Options.SKIPCRC);
    final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE);
    jobConf.setBoolean(Options.UPDATE.propertyname, update);
    jobConf.setBoolean(Options.SKIPCRC.propertyname, skipCRCCheck);
    jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
    jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname,
            args.flags.contains(Options.IGNORE_READ_FAILURES));
    jobConf.setBoolean(Options.PRESERVE_STATUS.propertyname, args.flags.contains(Options.PRESERVE_STATUS));

    final String randomId = getRandomId();
    JobClient jClient = new JobClient(jobConf);
    Path stagingArea;
    try {
        stagingArea = JobSubmissionFiles.getStagingDir(jClient, conf);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
    Path jobDirectory = new Path(stagingArea + NAME + "_" + randomId);
    FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
    FileSystem.mkdirs(jClient.getFs(), jobDirectory, mapredSysPerms);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

    long maxBytesPerMap = conf.getLong(BYTES_PER_MAP_LABEL, BYTES_PER_MAP);

    FileSystem dstfs = args.dst.getFileSystem(conf);

    // get tokens for all the required FileSystems..
    TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), new Path[] { args.dst }, conf);

    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
        dstIsDir = dstfs.getFileStatus(args.dst).isDir();
    }

    // default logPath
    Path logPath = args.log;
    if (logPath == null) {
        String filename = "_distcp_logs_" + randomId;
        if (!dstExists || !dstIsDir) {
            Path parent = args.dst.getParent();
            if (null == parent) {
                // If dst is '/' on S3, it might not exist yet, but dst.getParent()
                // will return null. In this case, use '/' as its own parent to prevent
                // NPE errors below.
                parent = args.dst;
            }
            if (!dstfs.exists(parent)) {
                dstfs.mkdirs(parent);
            }
            logPath = new Path(parent, filename);
        } else {
            logPath = new Path(args.dst, filename);
        }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);

    // create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);

    Path srcfilelist = new Path(jobDirectory, "_distcp_src_files");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    SequenceFile.Writer src_writer = SequenceFile.createWriter(jobfs, jobConf, srcfilelist, LongWritable.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files");
    SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobfs, jobConf, dstfilelist, Text.class,
            Text.class, SequenceFile.CompressionType.NONE);

    Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs");
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobfs, jobConf, dstdirlist, Text.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    // handle the case where the destination directory doesn't exist
    // and we've only a single src directory OR we're updating/overwriting
    // the contents of the destination directory.
    final boolean special = (args.srcs.size() == 1 && !dstExists) || update || overwrite;
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, byteCount = 0L, cbsyncs = 0L;
    try {
        for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext();) {
            final Path src = srcItr.next();
            FileSystem srcfs = src.getFileSystem(conf);
            FileStatus srcfilestat = srcfs.getFileStatus(src);
            Path root = special && srcfilestat.isDir() ? src : src.getParent();
            if (srcfilestat.isDir()) {
                ++srcCount;
            }

            Stack<FileStatus> pathstack = new Stack<FileStatus>();
            for (pathstack.push(srcfilestat); !pathstack.empty();) {
                FileStatus cur = pathstack.pop();
                FileStatus[] children = srcfs.listStatus(cur.getPath());
                for (int i = 0; i < children.length; i++) {
                    boolean skipfile = false;
                    final FileStatus child = children[i];
                    final String dst = makeRelative(root, child.getPath());
                    ++srcCount;

                    if (child.isDir()) {
                        pathstack.push(child);
                    } else {
                        //skip file if the src and the dst files are the same.
                        skipfile = update && sameFile(srcfs, child, dstfs, new Path(args.dst, dst), skipCRCCheck);
                        //skip file if it exceed file limit or size limit
                        skipfile |= fileCount == args.filelimit || byteCount + child.getLen() > args.sizelimit;

                        if (!skipfile) {
                            ++fileCount;
                            byteCount += child.getLen();

                            if (LOG.isTraceEnabled()) {
                                LOG.trace("adding file " + child.getPath());
                            }

                            ++cnsyncf;
                            cbsyncs += child.getLen();
                            if (cnsyncf > SYNC_FILE_MAX || cbsyncs > maxBytesPerMap) {
                                src_writer.sync();
                                dst_writer.sync();
                                cnsyncf = 0;
                                cbsyncs = 0L;
                            }
                        }
                    }

                    if (!skipfile) {
                        src_writer.append(new LongWritable(child.isDir() ? 0 : child.getLen()),
                                new FilePair(child, dst));
                    }

                    dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
                }

                if (cur.isDir()) {
                    String dst = makeRelative(root, cur.getPath());
                    dir_writer.append(new Text(dst), new FilePair(cur, dst));
                    if (++dirsyn > SYNC_FILE_MAX) {
                        dirsyn = 0;
                        dir_writer.sync();
                    }
                }
            }
        }
    } finally {
        checkAndClose(src_writer);
        checkAndClose(dst_writer);
        checkAndClose(dir_writer);
    }

    FileStatus dststatus = null;
    try {
        dststatus = dstfs.getFileStatus(args.dst);
    } catch (FileNotFoundException fnfe) {
        LOG.info(args.dst + " does not exist.");
    }

    // create dest path dir if copying > 1 file
    if (dststatus == null) {
        if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
            throw new IOException("Failed to create" + args.dst);
        }
    }

    final Path sorted = new Path(jobDirectory, "_distcp_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);

    if (dststatus != null && args.flags.contains(Options.DELETE)) {
        deleteNonexisting(dstfs, dststatus, sorted, jobfs, jobDirectory, jobConf, conf);
    }

    Path tmpDir = new Path(
            (dstExists && !dstIsDir) || (!dstExists && srcCount == 1) ? args.dst.getParent() : args.dst,
            "_distcp_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());

    // Explicitly create the tmpDir to ensure that it can be cleaned
    // up by fullyDelete() later.
    tmpDir.getFileSystem(conf).mkdirs(tmpDir);

    LOG.info("sourcePathsCount=" + srcCount);
    LOG.info("filesToCopyCount=" + fileCount);
    LOG.info("bytesToCopyCount=" + StringUtils.humanReadableInt(byteCount));
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
    setMapCount(byteCount, jobConf);
    return fileCount > 0;
}
From source file:org.apache.hadoop.hdfs.server.namenode.WaitingRoom.java
/**
 * Moves a file/dir to the waiting room
 *
 * @param path Path to file/dir
 * @return false if move failed, true otherwise
 */
public boolean moveToWaitingRoom(Path path) throws IOException {
    // Make path absolute
    if (!path.isAbsolute())
        path = new Path(dfs.getWorkingDirectory(), path);

    // Check if path is valid
    if (!dfs.exists(path))
        throw new FileNotFoundException(path.toString());

    // Check if file already in waiting area
    String qPath = path.makeQualified(dfs).toString();
    if (qPath.startsWith(WR.toString()))
        return false;

    // Check if trying to move waiting room or its parent dir to
    // waiting room
    if (WR.toString().startsWith(qPath)) {
        throw new IOException("Can't delete " + path + " as it contains the waiting room directory.");
    }

    String fName = path.getName();
    Path baseWRPath = getWaitingRoomPath(path.getParent());

    // Make dir(s) for base
    Stack<Path> parentDirs = new Stack<Path>();
    do {
        parentDirs.push(baseWRPath);
        baseWRPath = baseWRPath.getParent();
    } while (baseWRPath != null);

    while (!parentDirs.empty()) {
        baseWRPath = parentDirs.pop();

        // Create new dir with appended .WRx if already exists.
        for (int i = 0; dfs.exists(baseWRPath) && !dfs.getFileStatus(baseWRPath).isDir(); i++) {
            baseWRPath = new Path(baseWRPath.toString() + ".WR" + i);
        }

        if (!dfs.mkdirs(baseWRPath, PERMISSION)) {
            LOG.warn("Couldn't create base dir path in waiting room for " + baseWRPath);
            return false;
        }
    }

    // Rename file/dir to waiting room. Append .WRx if already exists.
    Path myWRPath = new Path(baseWRPath.toString(), fName);
    for (int i = 0; dfs.exists(myWRPath); i++) {
        myWRPath = new Path(myWRPath.toString() + ".WR" + i);
    }
    if (dfs.rename(path, myWRPath))
        return true; // success

    return false;
}
From source file:geva.Mapper.GEGrammar.java
String generateNameFromTree(DerivationTree tree) {
    StringBuilder builder = new StringBuilder();
    Stack<DerivationNode> nodeStack = new Stack<DerivationNode>();
    nodeStack.push((DerivationNode) tree.getRoot());
    while (!nodeStack.empty()) {
        DerivationNode nodes = nodeStack.pop();
        if (nodes != null) {
            if (nodes.getCodonIndex() != -1) {
                builder.append(nodes.getCodonPick());
            }
            if (nodes.size() != 0) {
                builder.append('[');
                nodeStack.push(null);
                for (int i = nodes.size(); i > 0; i--) {
                    nodeStack.push((DerivationNode) nodes.get(i - 1));
                }
            }
        } else {
            builder.append(']');
        }
    }
    return builder.toString();
}