List of usage examples for org.apache.hadoop.fs.FileSystem#isDirectory(Path)
@Deprecated public boolean isDirectory(Path f) throws IOException
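The method is deprecated; the Hadoop javadoc recommends getFileStatus() instead. Below is a minimal sketch of that replacement pattern, shown before the usage examples. The path "/tmp/example" and the default Configuration are illustrative placeholders; note that getFileStatus() throws FileNotFoundException where the deprecated method silently returned false.

import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class IsDirectoryExample {

    // Sketch of the non-deprecated equivalent of FileSystem.isDirectory(Path):
    // getFileStatus() throws FileNotFoundException for a missing path, which
    // the deprecated method turned into a plain "false".
    public static boolean isDirectory(FileSystem fs, Path path) throws IOException {
        try {
            return fs.getFileStatus(path).isDirectory();
        } catch (FileNotFoundException e) {
            return false;
        }
    }

    public static void main(String[] args) throws IOException {
        // Placeholder setup: default configuration and an example path.
        FileSystem fs = FileSystem.get(new Configuration());
        System.out.println(isDirectory(fs, new Path("/tmp/example")));
    }
}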
From source file:org.apache.rya.reasoning.mr.MRReasoningUtils.java
License:Apache License
/**
 * Load serialized schema information from a file.
 */
public static Schema loadSchema(Configuration conf) {
    SchemaWritable schema = new SchemaWritable();
    try {
        FileSystem fs = FileSystem.get(conf);
        Path schemaPath = getSchemaPath(conf);
        if (fs.isDirectory(schemaPath)) {
            for (FileStatus status : fs.listStatus(schemaPath)) {
                schemaPath = status.getPath();
                if (status.isFile() && status.getLen() > 0
                        && !schemaPath.getName().startsWith(DEBUG_OUT)) {
                    break;
                }
            }
        }
        SequenceFile.Reader in = new SequenceFile.Reader(conf, SequenceFile.Reader.file(schemaPath));
        NullWritable key = NullWritable.get();
        in.next(key, schema);
        in.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return schema;
}
From source file:org.apache.rya.reasoning.mr.MRReasoningUtils.java
License:Apache License
/**
 * Delete an HDFS directory if it exists.
 */
static void deleteIfExists(Configuration conf, String rel) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    Path path = getOutputPath(conf, rel);
    if (fs.isDirectory(path) || fs.isFile(path)) {
        fs.delete(path, true);
    }
}
From source file:org.apache.slider.common.tools.SliderUtils.java
License:Apache License
/**
 * Copy a directory to a new FS - both paths must be qualified. If
 * a directory needs to be created, supplied permissions can override
 * the default values. Existing directories are not touched.
 * @param conf configuration
 * @param srcDirPath src dir
 * @param destDirPath dest dir
 * @param permission permission for the dest directory; null means "default"
 * @return # of files copied
 */
public static int copyDirectory(Configuration conf, Path srcDirPath, Path destDirPath,
        FsPermission permission) throws IOException, BadClusterStateException {
    FileSystem srcFS = FileSystem.get(srcDirPath.toUri(), conf);
    FileSystem destFS = FileSystem.get(destDirPath.toUri(), conf);
    // list all paths in the src.
    if (!srcFS.exists(srcDirPath)) {
        throw new FileNotFoundException("Source dir not found " + srcDirPath);
    }
    if (!srcFS.isDirectory(srcDirPath)) {
        throw new FileNotFoundException("Source dir not a directory " + srcDirPath);
    }
    GlobFilter dotFilter = new GlobFilter("[!.]*");
    FileStatus[] entries = srcFS.listStatus(srcDirPath, dotFilter);
    int srcFileCount = entries.length;
    if (srcFileCount == 0) {
        return 0;
    }
    if (permission == null) {
        permission = FsPermission.getDirDefault();
    }
    if (!destFS.exists(destDirPath)) {
        new SliderFileSystem(destFS, conf).createWithPermissions(destDirPath, permission);
    }
    Path[] sourcePaths = new Path[srcFileCount];
    for (int i = 0; i < srcFileCount; i++) {
        FileStatus e = entries[i];
        Path srcFile = e.getPath();
        if (srcFS.isDirectory(srcFile)) {
            String msg = "Configuration dir " + srcDirPath + " contains a directory " + srcFile;
            log.warn(msg);
            throw new IOException(msg);
        }
        log.debug("copying src conf file {}", srcFile);
        sourcePaths[i] = srcFile;
    }
    log.debug("Copying {} files from {} to dest {}", srcFileCount, srcDirPath, destDirPath);
    FileUtil.copy(srcFS, sourcePaths, destFS, destDirPath, false, true, conf);
    return srcFileCount;
}
From source file:org.apache.slider.core.build.InstanceBuilder.java
License:Apache License
public void takeSnapshotOfConfDir(Path appconfdir)
        throws IOException, BadConfigException, BadClusterStateException {
    FileSystem srcFS = FileSystem.get(appconfdir.toUri(), conf);
    if (!srcFS.isDirectory(appconfdir)) {
        throw new BadConfigException("Source Configuration directory is not valid: %s",
                appconfdir.toString());
    }
    // bulk copy
    FsPermission clusterPerms = coreFS.getInstanceDirectoryPermissions();
    // first the original from wherever to the DFS
    SliderUtils.copyDirectory(conf, appconfdir, instancePaths.snapshotConfPath, clusterPerms);
}
From source file:org.apache.slider.server.avro.RoleHistoryWriter.java
License:Apache License
/**
 * Find all history entries in a dir. The dir is created if it does
 * not already exist.
 *
 * The scan uses the match pattern {@link SliderKeys#HISTORY_FILENAME_MATCH_PATTERN}
 * while dropping empty files and directories which match the pattern.
 * The list is then sorted with a comparator that sorts on filename,
 * relying on the filenames of newer files sorting after those of older ones.
 *
 * @param fs filesystem
 * @param dir dir to scan
 * @param includeEmptyFiles should empty files be included in the result?
 * @return a possibly empty list
 * @throws IOException IO problems
 * @throws FileNotFoundException if the target dir is actually a file
 */
public List<Path> findAllHistoryEntries(FileSystem fs, Path dir, boolean includeEmptyFiles)
        throws IOException {
    assert fs != null;
    assert dir != null;
    if (!fs.exists(dir)) {
        fs.mkdirs(dir);
    } else if (!fs.isDirectory(dir)) {
        throw new FileNotFoundException("Not a directory " + dir.toString());
    }
    PathFilter filter = new GlobFilter(SliderKeys.HISTORY_FILENAME_GLOB_PATTERN);
    FileStatus[] stats = fs.listStatus(dir, filter);
    List<Path> paths = new ArrayList<Path>(stats.length);
    for (FileStatus stat : stats) {
        log.debug("Possible entry: {}", stat.toString());
        if (stat.isFile() && (includeEmptyFiles || stat.getLen() > 0)) {
            paths.add(stat.getPath());
        }
    }
    sortHistoryPaths(paths);
    return paths;
}
From source file:org.apache.solr.hadoop.ForkedMapReduceIndexerTool.java
License:Apache License
private static int createTreeMergeInputDirList(Job job, Path outputReduceDir, FileSystem fs,
        Path fullInputList) throws FileNotFoundException, IOException {
    FileStatus[] dirs = listSortedOutputShardDirs(job, outputReduceDir, fs);
    int numFiles = 0;
    FSDataOutputStream out = fs.create(fullInputList);
    try {
        Writer writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));
        for (FileStatus stat : dirs) {
            LOG.debug("Adding path {}", stat.getPath());
            Path dir = new Path(stat.getPath(), "data/index");
            if (!fs.isDirectory(dir)) {
                throw new IllegalStateException("Not a directory: " + dir);
            }
            writer.write(dir.toString() + "\n");
            numFiles++;
        }
        writer.close();
    } finally {
        out.close();
    }
    return numFiles;
}
From source file:org.apache.solr.hadoop.MapReduceIndexerTool.java
License:Apache License
private int createTreeMergeInputDirList(Path outputReduceDir, FileSystem fs, Path fullInputList)
        throws FileNotFoundException, IOException {
    FileStatus[] dirs = listSortedOutputShardDirs(outputReduceDir, fs);
    int numFiles = 0;
    FSDataOutputStream out = fs.create(fullInputList);
    try {
        Writer writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));
        for (FileStatus stat : dirs) {
            LOG.debug("Adding path {}", stat.getPath());
            Path dir = new Path(stat.getPath(), "data/index");
            if (!fs.isDirectory(dir)) {
                throw new IllegalStateException("Not a directory: " + dir);
            }
            writer.write(dir.toString() + "\n");
            numFiles++;
        }
        writer.close();
    } finally {
        out.close();
    }
    return numFiles;
}
From source file:org.apache.sqoop.avro.AvroUtil.java
License:Apache License
/**
 * Get the schema of AVRO files stored in a directory.
 */
public static Schema getAvroSchema(Path path, Configuration conf) throws IOException {
    FileSystem fs = path.getFileSystem(conf);
    Path fileToTest;
    if (fs.isDirectory(path)) {
        FileStatus[] fileStatuses = fs.listStatus(path, new PathFilter() {
            @Override
            public boolean accept(Path p) {
                String name = p.getName();
                return !name.startsWith("_") && !name.startsWith(".");
            }
        });
        if (fileStatuses.length == 0) {
            return null;
        }
        fileToTest = fileStatuses[0].getPath();
    } else {
        fileToTest = path;
    }
    SeekableInput input = new FsInput(fileToTest, conf);
    DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
    FileReader<GenericRecord> fileReader = DataFileReader.openReader(input, reader);
    Schema result = fileReader.getSchema();
    fileReader.close();
    return result;
}
From source file:org.apache.sqoop.connector.hdfs.HdfsToInitializer.java
License:Apache License
/**
 * {@inheritDoc}
 */
@Override
public void initialize(InitializerContext context, LinkConfiguration linkConfig,
        ToJobConfiguration jobConfig) {
    assert jobConfig != null;
    assert linkConfig != null;
    assert jobConfig.toJobConfig != null;
    assert jobConfig.toJobConfig.outputDirectory != null;
    Configuration configuration = HdfsUtils.createConfiguration(linkConfig);
    HdfsUtils.configurationToContext(configuration, context.getContext());
    boolean appendMode = Boolean.TRUE.equals(jobConfig.toJobConfig.appendMode);
    // Verify that the given HDFS directory either doesn't exist or is empty
    try {
        FileSystem fs = FileSystem.get(configuration);
        Path path = new Path(jobConfig.toJobConfig.outputDirectory);
        if (fs.exists(path)) {
            if (fs.isFile(path)) {
                throw new SqoopException(HdfsConnectorError.GENERIC_HDFS_CONNECTOR_0007,
                        "Output directory already exists and is a file");
            }
            if (fs.isDirectory(path) && !appendMode) {
                FileStatus[] fileStatuses = fs.listStatus(path);
                if (fileStatuses.length != 0) {
                    throw new SqoopException(HdfsConnectorError.GENERIC_HDFS_CONNECTOR_0007,
                            "Output directory is not empty");
                }
            }
        }
    } catch (IOException e) {
        throw new SqoopException(HdfsConnectorError.GENERIC_HDFS_CONNECTOR_0007,
                "Unexpected exception", e);
    }
    // Build the working directory
    String workingDirectory = jobConfig.toJobConfig.outputDirectory + "/." + UUID.randomUUID();
    LOG.info("Using working directory: " + workingDirectory);
    context.getContext().setString(HdfsConstants.WORK_DIRECTORY, workingDirectory);
}
From source file:org.apache.storm.verify.VerifyUtils.java
License:Apache License
public static void verifyHdfs(String hdfsUrl, String dir, List<String> expectedLines) throws Exception {
    List<String> lines = new ArrayList<String>();
    FileSystem fileSystem = FileSystem.get(new URI(hdfsUrl), new Configuration());
    Path path = new Path(dir);
    assert fileSystem.exists(path);
    assert fileSystem.isDirectory(path);
    FileStatus[] fileStatuses = fileSystem.listStatus(path);
    assert fileStatuses != null;
    for (FileStatus fileStatus : fileStatuses) {
        Path filePath = fileStatus.getPath();
        InputStreamReader is = new InputStreamReader(fileSystem.open(filePath));
        lines.addAll(IOUtils.readLines(is));
    }
    Collections.sort(lines);
    Collections.sort(expectedLines);
    assert lines.equals(expectedLines) : "expectedLines = " + expectedLines + " actualLines = " + lines;
}