List of usage examples for org.apache.commons.net.ftp FTPFile getName
public String getName()
From source file:org.apache.hadoop.fs.ftp.FTPFileSystem.java
/** * Convenience method, so that we don't open a new connection when using this * method from within another method. Otherwise every API invocation incurs * the overhead of opening/closing a TCP connection. *//*from ww w . ja v a 2 s . com*/ private FileStatus getFileStatus(FTPClient client, Path file) throws IOException { FileStatus fileStat = null; Path workDir = new Path(client.printWorkingDirectory()); Path absolute = makeAbsolute(workDir, file); Path parentPath = absolute.getParent(); if (parentPath == null) { // root dir long length = -1; // Length of root dir on server not known boolean isDir = true; int blockReplication = 1; long blockSize = DEFAULT_BLOCK_SIZE; // Block Size not known. long modTime = -1; // Modification time of root dir not known. Path root = new Path("/"); return new FileStatus(length, isDir, blockReplication, blockSize, modTime, root.makeQualified(this)); } String pathName = parentPath.toUri().getPath(); FTPFile[] ftpFiles = client.listFiles(pathName); if (ftpFiles != null) { for (FTPFile ftpFile : ftpFiles) { if (ftpFile.getName().equals(file.getName())) { // file found in dir fileStat = getFileStatus(ftpFile, parentPath); break; } } if (fileStat == null) { throw new FileNotFoundException("File " + file + " does not exist."); } } else { throw new FileNotFoundException("File " + file + " does not exist."); } return fileStat; }
From source file:org.apache.hadoop.fs.ftp.FTPFileSystem.java
/** * Convert the file information in FTPFile to a {@link FileStatus} object. * * /*from w ww. j av a 2 s .c o m*/ * @param ftpFile * @param parentPath * @return FileStatus */ private FileStatus getFileStatus(FTPFile ftpFile, Path parentPath) { long length = ftpFile.getSize(); boolean isDir = ftpFile.isDirectory(); int blockReplication = 1; // Using default block size since there is no way in FTP client to know of // block sizes on server. The assumption could be less than ideal. long blockSize = DEFAULT_BLOCK_SIZE; long modTime = ftpFile.getTimestamp().getTimeInMillis(); long accessTime = 0; FsPermission permission = getPermissions(ftpFile); String user = ftpFile.getUser(); String group = ftpFile.getGroup(); Path filePath = new Path(parentPath, ftpFile.getName()); return new FileStatus(length, isDir, blockReplication, blockSize, modTime, accessTime, permission, user, group, filePath.makeQualified(this)); }
From source file:org.apache.hive.hplsql.Ftp.java
/**
 * Get the list of files to transfer from the given FTP directory, recursing
 * into subdirectories when {@code subdir} is enabled. Matching files are
 * queued and counted; non-matching or already-transferred files are skipped.
 *
 * @param dir remote directory to list; null/empty means the current working directory
 */
void retrieveFileList(String dir) {
    if (info) {
        if (dir == null || dir.isEmpty()) {
            info(null, " Listing the current working FTP directory");
        } else {
            info(null, " Listing " + dir);
        }
    }
    try {
        FTPFile[] files = ftp.listFiles(dir);
        ArrayList<FTPFile> dirs = new ArrayList<FTPFile>();
        for (FTPFile file : files) {
            String name = file.getName();
            if (file.isFile()) {
                if (filePattern == null || Pattern.matches(filePattern, name)) {
                    name = prependDir(dir, name);
                    // newOnly: skip files that already exist at the target
                    if (!newOnly || !isTargetExists(name)) {
                        fileCnt++;
                        ftpSizeInBytes += file.getSize();
                        filesQueue.add(name);
                        filesMap.put(name, file);
                    }
                }
            } else if (subdir && !name.equals(".") && !name.equals("..")) {
                // Collect subdirectories; recurse after the file pass completes.
                dirCnt++;
                dirs.add(file);
            }
        }
        if (subdir) {
            for (FTPFile d : dirs) {
                retrieveFileList(prependDir(dir, d.getName()));
            }
        }
    } catch (IOException e) {
        exec.signal(e);
    }
}

/**
 * Prefix {@code name} with {@code dir} using "/" as the separator.
 * A null or empty {@code dir} leaves the name unchanged.
 * (Extracted: this join logic was duplicated for files and subdirectories.)
 */
private String prependDir(String dir, String name) {
    if (dir == null || dir.isEmpty()) {
        return name;
    }
    return dir.endsWith("/") ? dir + name : dir + "/" + name;
}
From source file:org.apache.maven.wagon.providers.ftp.FtpWagon.java
/**
 * List the names of the entries under the given destination directory on the
 * FTP server. Directory entries are returned with a trailing "/".
 *
 * @param destinationDirectory remote directory to list
 * @return entry names, directories suffixed with "/"
 * @throws ResourceDoesNotExistException if the listing is null or empty
 * @throws TransferFailedException       on any I/O failure
 */
public List<String> getFileList(String destinationDirectory)
        throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
    final Resource resource = new Resource(destinationDirectory);
    try {
        ftpChangeDirectory(resource);
        final FTPFile[] entries = ftp.listFiles(PathUtils.filename(resource.getName()));
        if (entries == null || entries.length <= 0) {
            throw new ResourceDoesNotExistException("Could not find file: '" + resource + "'");
        }
        final List<String> names = new ArrayList<String>(entries.length);
        for (final FTPFile entry : entries) {
            String name = entry.getName();
            if (entry.isDirectory() && !name.endsWith("/")) {
                name += "/"; // mark directories with a trailing slash
            }
            names.add(name);
        }
        return names;
    } catch (final IOException e) {
        throw new TransferFailedException("Error transferring file via FTP", e);
    }
}
From source file:org.apache.nifi.processors.standard.util.FTPTransfer.java
/**
 * Recursively list files under {@code path}, applying the processor's
 * file/path filter regexes and dotted-file setting, up to {@code maxResults}
 * entries and a hard recursion depth of 100.
 *
 * @param path       remote directory to list; null/blank means "."
 * @param depth      current recursion depth (guards against runaway recursion)
 * @param maxResults remaining number of entries this call may return
 * @return the matching files; directories themselves are never returned
 * @throws IOException if a non-existent directory is listed
 */
private List<FileInfo> getListing(final String path, final int depth, final int maxResults) throws IOException {
    final List<FileInfo> listing = new ArrayList<>();
    if (maxResults < 1) {
        return listing;
    }
    if (depth >= 100) {
        // Hard cap: bail out rather than risk unbounded memory on cyclic/deep trees.
        logger.warn(this + " had to stop recursively searching directories at a recursive depth of " + depth
                + " to avoid memory issues");
        return listing;
    }
    // Re-read filter configuration on every call; properties may use expressions.
    final boolean ignoreDottedFiles = ctx.getProperty(FileTransfer.IGNORE_DOTTED_FILES).asBoolean();
    final boolean recurse = ctx.getProperty(FileTransfer.RECURSIVE_SEARCH).asBoolean();
    final String fileFilterRegex = ctx.getProperty(FileTransfer.FILE_FILTER_REGEX).getValue();
    final Pattern pattern = (fileFilterRegex == null) ? null : Pattern.compile(fileFilterRegex);
    final String pathFilterRegex = ctx.getProperty(FileTransfer.PATH_FILTER_REGEX).getValue();
    // The path filter only applies when recursing.
    final Pattern pathPattern = (!recurse || pathFilterRegex == null) ? null : Pattern.compile(pathFilterRegex);
    final String remotePath = ctx.getProperty(FileTransfer.REMOTE_PATH).evaluateAttributeExpressions()
            .getValue();
    // check if this directory path matches the PATH_FILTER_REGEX
    boolean pathFilterMatches = true;
    if (pathPattern != null) {
        Path reldir = path == null ? Paths.get(".") : Paths.get(path);
        if (remotePath != null) {
            // Match against the path relative to the configured remote root.
            reldir = Paths.get(remotePath).relativize(reldir);
        }
        if (reldir != null && !reldir.toString().isEmpty()) {
            // Normalize to forward slashes so the regex behaves the same on Windows.
            if (!pathPattern.matcher(reldir.toString().replace("\\", "/")).matches()) {
                pathFilterMatches = false;
            }
        }
    }
    final FTPClient client = getClient(null);
    int count = 0;
    final FTPFile[] files;
    if (path == null || path.trim().isEmpty()) {
        files = client.listFiles(".");
    } else {
        files = client.listFiles(path);
    }
    if (files.length == 0 && path != null && !path.trim().isEmpty()) {
        // throw exception if directory doesn't exist: an empty listing is
        // ambiguous, so probe by attempting to cd into the directory.
        final boolean cdSuccessful = setWorkingDirectory(path);
        if (!cdSuccessful) {
            throw new IOException("Cannot list files for non-existent directory " + path);
        }
    }
    for (final FTPFile file : files) {
        final String filename = file.getName();
        if (filename.equals(".") || filename.equals("..")) {
            continue;
        }
        if (ignoreDottedFiles && filename.startsWith(".")) {
            continue;
        }
        final File newFullPath = new File(path, filename);
        final String newFullForwardPath = newFullPath.getPath().replace("\\", "/");
        if (recurse && file.isDirectory()) {
            try {
                // Budget the child call with whatever result capacity remains.
                listing.addAll(getListing(newFullForwardPath, depth + 1, maxResults - count));
            } catch (final IOException e) {
                // Best-effort: a failed subdirectory is logged and skipped,
                // not fatal to the overall listing.
                logger.error(
                        "Unable to get listing from " + newFullForwardPath + "; skipping this subdirectory");
            }
        }
        // if is not a directory and is not a link and it matches
        // FILE_FILTER_REGEX - then let's add it
        if (!file.isDirectory() && !file.isSymbolicLink() && pathFilterMatches) {
            if (pattern == null || pattern.matcher(filename).matches()) {
                listing.add(newFileInfo(file, path));
                count++;
            }
        }
        // NOTE(review): count tracks only files added at THIS level, not entries
        // contributed by recursion — so the overall result can exceed maxResults;
        // confirm whether that is intended.
        if (count >= maxResults) {
            break;
        }
    }
    return listing;
}
From source file:org.apache.nutch.protocol.ftp.FtpResponse.java
private byte[] list2html(List<FTPFile> list, String path, boolean includeDotDot) { // StringBuffer x = new // StringBuffer("<!doctype html public \"-//ietf//dtd html//en\"><html><head>"); StringBuffer x = new StringBuffer("<html><head>"); x.append("<title>Index of " + path + "</title></head>\n"); x.append("<body><h1>Index of " + path + "</h1><pre>\n"); if (includeDotDot) { x.append("<a href='../'>../</a>\t-\t-\t-\n"); }/* ww w .ja va 2 s . c om*/ for (int i = 0; i < list.size(); i++) { FTPFile f = (FTPFile) list.get(i); String name = f.getName(); String time = HttpDateFormat.toString(f.getTimestamp()); if (f.isDirectory()) { // some ftp server LIST "." and "..", we skip them here if (name.equals(".") || name.equals("..")) continue; x.append("<a href='" + name + "/" + "'>" + name + "/</a>\t"); x.append(time + "\t-\n"); } else if (f.isFile()) { x.append("<a href='" + name + "'>" + name + "</a>\t"); x.append(time + "\t" + f.getSize() + "\n"); } else { // ignore isSymbolicLink() // ignore isUnknown() } } x.append("</pre></body></html>\n"); return new String(x).getBytes(); }
From source file:org.apache.oodt.cas.protocol.ftp.CommonsNetFtpProtocol.java
public List<ProtocolFile> ls() throws ProtocolException { try {//from w w w. j ava 2s. com String path = this.pwd().getPath(); FTPFile[] files = ftp.listFiles(); List<ProtocolFile> returnFiles = new LinkedList<ProtocolFile>(); for (FTPFile file : files) { if (file == null) { continue; } returnFiles.add(new ProtocolFile(path + "/" + file.getName(), file.isDirectory())); } return returnFiles; } catch (Exception e) { throw new ProtocolException("Failed to get file list : " + e.getMessage()); } }
From source file:org.apache.oodt.cas.protocol.ftp.CommonsNetFtpProtocol.java
/**
 * {@inheritDoc}
 *
 * Lists the current working directory and returns only the entries accepted
 * by the given filter.
 *
 * @param filter selects which entries to include
 * @return the accepted entries as {@link ProtocolFile}s
 * @throws ProtocolException if the listing cannot be retrieved
 */
public List<ProtocolFile> ls(ProtocolFileFilter filter) throws ProtocolException {
    try {
        FTPFile[] files = ftp.listFiles();
        // pwd() is loop-invariant (and typically a server round-trip): fetch it
        // once, matching the no-arg ls() overload, instead of once per entry.
        String path = this.pwd().getPath();
        List<ProtocolFile> returnFiles = new LinkedList<ProtocolFile>();
        for (FTPFile file : files) {
            if (file == null) {
                continue; // some parsers yield null slots
            }
            ProtocolFile pFile = new ProtocolFile(path + "/" + file.getName(), file.isDirectory());
            if (filter.accept(pFile)) {
                returnFiles.add(pFile);
            }
        }
        return returnFiles;
    } catch (Exception e) {
        // NOTE(review): the original cause is dropped here (message only);
        // preserved as-is since ProtocolException's constructors are not visible.
        throw new ProtocolException("Failed to get file list : " + e.getMessage());
    }
}
From source file:org.apache.sqoop.connector.mainframe.MainframeFTPClientUtils.java
/**
 * List the file-type members of the given mainframe partitioned dataset (PDS).
 *
 * @param pdsName           the PDS whose members are listed
 * @param context           transfer context used to establish the FTP connection
 * @param linkConfiguration connection settings for the mainframe link
 * @return names of the FILE_TYPE entries found; empty if no connection was made
 * @throws IOException if the listing fails
 */
public static List<String> listSequentialDatasets(String pdsName, TransferableContext context,
        LinkConfiguration linkConfiguration) throws IOException {
    final List<String> datasets = new ArrayList<String>();
    FTPClient ftp = null;
    try {
        ftp = getFTPConnection(context, linkConfiguration);
        if (ftp != null) {
            setWorkingDirectory(context, ftp, pdsName);
            for (final FTPFile entry : ftp.listFiles()) {
                if (entry.getType() == FTPFile.FILE_TYPE) {
                    datasets.add(entry.getName());
                }
            }
        }
    } catch (IOException ioe) {
        throw new IOException("Could not list datasets from " + pdsName + ":" + ioe.toString());
    } finally {
        // Always release the connection, even when listing fails.
        if (ftp != null) {
            closeFTPConnection(ftp);
        }
    }
    return datasets;
}
From source file:org.apache.sqoop.util.MainframeFTPClientUtils.java
/**
 * List the datasets reachable under the given mainframe dataset name.
 * Behavior depends on the configured dataset type: for GDG datasets only the
 * last FILE_TYPE entry is returned; for sequential datasets only the exactly
 * matching file name is returned; otherwise all FILE_TYPE members are
 * returned. Tape datasets use a custom FTP list parser.
 *
 * @param pdsName dataset (or PDS folder) name to list
 * @param conf    job configuration carrying dataset type/tape settings
 * @return the matching dataset names
 * @throws IOException if the FTP listing fails
 */
public static List<String> listSequentialDatasets(String pdsName, Configuration conf) throws IOException {
    List<String> datasets = new ArrayList<String>();
    String dsName = pdsName;
    String fileName = "";
    MainframeDatasetPath p = null;
    try {
        p = new MainframeDatasetPath(dsName, conf);
    } catch (Exception e) {
        // Best-effort: a bad path helper leaves p == null and the raw pdsName is used.
        LOG.error(e.getMessage());
        LOG.error("MainframeDatasetPath helper class incorrectly initialised");
        e.printStackTrace();
    }
    String dsType = conf.get(MainframeConfiguration.MAINFRAME_INPUT_DATASET_TYPE);
    boolean isTape = Boolean.parseBoolean(conf.get(MainframeConfiguration.MAINFRAME_INPUT_DATASET_TAPE));
    boolean isSequentialDs = false;
    boolean isGDG = false;
    if (dsType != null && p != null) {
        // Resolve the dataset flavor and split folder/file parts from the helper.
        isSequentialDs = p.getMainframeDatasetType().toString()
                .equals(MainframeConfiguration.MAINFRAME_INPUT_DATASET_TYPE_SEQUENTIAL);
        isGDG = p.getMainframeDatasetType().toString()
                .equals(MainframeConfiguration.MAINFRAME_INPUT_DATASET_TYPE_GDG);
        pdsName = p.getMainframeDatasetFolder();
        fileName = p.getMainframeDatasetFileName();
    }
    FTPClient ftp = null;
    try {
        ftp = getFTPConnection(conf);
        if (ftp != null) {
            // Mainframe FTP requires quoting the dataset name.
            ftp.changeWorkingDirectory("'" + pdsName + "'");
            FTPFile[] ftpFiles = null;
            if (isTape) {
                // Tape listings need the mainframe-specific entry parser.
                FTPListParseEngine parser = ftp.initiateListParsing(
                        MainframeConfiguration.MAINFRAME_FTP_FILE_ENTRY_PARSER_CLASSNAME, "");
                List<FTPFile> listing = new ArrayList<FTPFile>();
                while (parser.hasNext()) {
                    // Page through the listing 25 entries at a time.
                    FTPFile[] files = parser.getNext(25);
                    for (FTPFile file : files) {
                        if (file != null) {
                            listing.add(file);
                            LOG.info(String.format("Name: %s Type: %s", file.getName(), file.getType()));
                        } // skip nulls returned from parser
                    }
                    // NOTE(review): the array rebuild and logging below run once per
                    // page, not once after the loop — looks redundant; confirm intent.
                    ftpFiles = new FTPFile[listing.size()];
                    for (int i = 0; i < listing.size(); i++) {
                        ftpFiles[i] = listing.get(i);
                    }
                    LOG.info("Files returned from mainframe parser:-");
                    for (FTPFile f : ftpFiles) {
                        LOG.info(String.format("Name: %s, Type: %s", f.getName(), f.getType()));
                    }
                }
            } else {
                ftpFiles = ftp.listFiles();
            }
            if (!isGDG) {
                for (FTPFile f : ftpFiles) {
                    LOG.info(String.format("Name: %s Type: %s", f.getName(), f.getType()));
                    if (f.getType() == FTPFile.FILE_TYPE) {
                        // only add datasets if default behaviour of partitioned data sets
                        // or if it is a sequential data set, only add if the file name matches exactly
                        if (!isSequentialDs
                                || isSequentialDs && f.getName().equals(fileName) && !fileName.equals("")) {
                            datasets.add(f.getName());
                        }
                    }
                }
            } else {
                LOG.info("GDG branch. File list:-");
                for (FTPFile f : ftpFiles) {
                    LOG.info(String.format("Name: %s Type: %s", f.getName(), f.getType()));
                }
                if (ftpFiles.length > 0 && ftpFiles[ftpFiles.length - 1].getType() == FTPFile.FILE_TYPE) {
                    // for GDG - add the last file in the collection
                    datasets.add(ftpFiles[ftpFiles.length - 1].getName());
                }
            }
        }
    } catch (IOException ioe) {
        throw new IOException("Could not list datasets from " + pdsName + ":" + ioe.toString());
    } finally {
        if (ftp != null) {
            closeFTPConnection(ftp);
        }
    }
    return datasets;
}