Example usage for org.apache.hadoop.fs FileSystem listStatus

Introduction

This page collects example usages of org.apache.hadoop.fs.FileSystem#listStatus.

Prototype

public FileStatus[] listStatus(Path[] files) throws FileNotFoundException, IOException 

Document

Filter files/directories in the given list of paths using the default path filter.
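
For orientation, here is a minimal, self-contained sketch of the Path[] overload shown in the prototype. The directory names and configuration are hypothetical; the call applies the default path filter and concatenates the listings of all the given paths into one FileStatus array.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ListStatusSketch {
    public static void main(String[] args) throws IOException {
        // Resolve the default FileSystem from the configuration on the classpath.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // List two directories in a single call; the results are concatenated.
        Path[] dirs = { new Path("/user/alice/input"), new Path("/user/alice/logs") };
        FileStatus[] statuses = fs.listStatus(dirs);

        for (FileStatus status : statuses) {
            System.out.println((status.isDirectory() ? "dir  " : "file ") + status.getPath());
        }
    }
}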

Usage

From source file:AggregatedLogsPurger.java

License:Apache License
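
This helper computes the total size of a directory tree: listStatus enumerates the children of each path, directories are descended into recursively, and the lengths of regular files are summed.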

private long getLengthRecursively(FileSystem fs, Path path) throws IOException {
    long size = 0;
    for (FileStatus status : fs.listStatus(path)) {
        if (status.isDirectory()) {
            // Add the subtree's size; discarding the recursive result would
            // silently skip every nested directory.
            size += getLengthRecursively(fs, status.getPath());
        } else {
            size += status.getLen();
        }
    }
    return size;
}

From source file:a.TestConcatExample.java

License:Apache License
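
A MiniDFSCluster test of FileSystem#concat: after concatenating f2 onto f1, listStatus on the parent directory verifies that a single file remains, and the test checks its name, replication, length, and block count.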

@Test
public void concatIsPermissive() throws IOException, URISyntaxException {
    MiniDFSCluster cluster = null;
    final Configuration conf = WebHdfsTestUtil.createConf();
    conf.set("dfs.namenode.fs-limits.min-block-size", "1000"); // Allow tiny blocks for the test
    try {
        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
        cluster.waitActive();
        final FileSystem webHdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME);
        final FileSystem dfs = cluster.getFileSystem();

        final FileSystem fs = dfs; // WebHDFS has a bug in getLocatedBlocks

        Path root = new Path("/dir");
        fs.mkdirs(root);

        short origRep = 3;
        short secondRep = (short) (origRep - 1);
        Path f1 = new Path("/dir/f1");
        long size1 = writeFile(fs, f1, /* blocksize */ 4096, origRep, 5);
        long f1NumBlocks = fs.getFileBlockLocations(f1, 0, size1).length;
        assertEquals(5, f1NumBlocks);

        Path f2 = new Path("/dir/f2");
        long size2 = writeFile(fs, f2, /* blocksize (must be a multiple of 512 for the checksum) */ 4096 - 512, secondRep, 4);
        long f2NumBlocks = fs.getFileBlockLocations(f2, 0, size2).length;
        assertEquals(4, f2NumBlocks);

        fs.concat(f1, new Path[] { f2 });
        FileStatus[] fileStatuses = fs.listStatus(root);

        // Only one file should remain
        assertEquals(1, fileStatuses.length);
        FileStatus fileStatus = fileStatuses[0];

        // And it should be named after the first file
        assertEquals("f1", fileStatus.getPath().getName());

        // The entire file takes the replication of the first argument
        assertEquals(origRep, fileStatus.getReplication());

        // As expected, the new concatenated file's length is the sum of the two previous files
        assertEquals(size1 + size2, fileStatus.getLen());

        // And we should have the same number of blocks
        assertEquals(f1NumBlocks + f2NumBlocks,
                fs.getFileBlockLocations(fileStatus.getPath(), 0, size1 + size2).length);
    } finally {
        if (cluster != null) {
            cluster.shutdown();
        }
    }
}

From source file:alluxio.hadoop.FileSystemAclIntegrationTest.java

License:Apache License
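
A test cleanup helper: listStatus on the root path enumerates every top-level entry so that each one can be deleted recursively.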

public static void cleanup(org.apache.hadoop.fs.FileSystem fs) throws IOException {
    FileStatus[] statuses = fs.listStatus(new Path("/"));
    for (FileStatus f : statuses) {
        fs.delete(f.getPath(), true);
    }
}

From source file:Assignment3_P2_MergeStockAverageCount.StockPriceMergeDriver.java
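
This driver lists the files of a local input directory with listStatus, merges their contents into a single HDFS file, and then runs a MapReduce job over the merged data.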

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();

    // local file system handle
    FileSystem local = FileSystem.getLocal(conf);

    // hdfs file system handle
    FileSystem hdfs = FileSystem.get(conf);

    // local input directory
    Path inputDir = new Path(args[0]);

    // hdfs destination path (the merged file, later used as the job's input)
    Path inputDir1 = new Path(args[1]);

    // local input files in local dir
    FileStatus[] inputFiles = local.listStatus(inputDir);

    // o/p stream
    FSDataOutputStream out = hdfs.create(inputDir1);

    // open each file and extract contents of file
    for (int i = 0; i < inputFiles.length; i++) {
        System.out.println("File name ----------------------------------------------------------------> "
                + inputFiles[i].getPath().getName());
        FSDataInputStream in = local.open(inputFiles[i].getPath());
        byte[] buffer = new byte[256];
        int bytesRead = 0;

        // extract all contents of file
        while ((bytesRead = in.read(buffer)) > 0) {
            out.write(buffer, 0, bytesRead);
        }

        // close input stream
        in.close();
    }

    Job job = Job.getInstance(conf, "Average Stock Price");
    job.setJarByClass(StockPriceMergeDriver.class);
    job.setMapperClass(StockPriceMerge_Mapper.class);
    job.setCombinerClass(StockPriceMerge_Reducer.class);
    job.setReducerClass(StockPriceMerge_Reducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(FloatWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[1])); // above programs output will be input for mapper
    FileOutputFormat.setOutputPath(job, new Path(args[2]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:at.illecker.hadoop.rootbeer.examples.matrixmultiplication.cpu.MatrixMultiplicationCpu.java

License:Apache License
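
Prints job output: listStatus enumerates the output directory, and the contents of every non-empty file ending in .log are copied to standard output.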

static void printOutput(Configuration conf) throws IOException {
    FileSystem fs = OUTPUT_DIR.getFileSystem(conf);
    FileStatus[] files = fs.listStatus(OUTPUT_DIR);
    for (int i = 0; i < files.length; i++) {
        if (files[i].getLen() > 0) {
            System.out.println("File " + files[i].getPath());
            if (files[i].getPath().getName().endsWith(".log")) {
                FSDataInputStream in = fs.open(files[i].getPath());
                IOUtils.copyBytes(in, System.out, conf, false);
                in.close();
            }
        }
    }
    // fs.delete(FileOutputFormat.getOutputPath(job), true);
}

From source file:at.illecker.hadoop.rootbeer.examples.matrixmultiplication.MatrixMultiplicationBenchmark.java

License:Apache License
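
A variant of the previous helper that prints the contents of every non-empty file in the configured output directory, without filtering by extension.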

private void printOutput(Configuration conf) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] files = fs.listStatus(CONF_OUTPUT_DIR);
    for (int i = 0; i < files.length; i++) {
        if (files[i].getLen() > 0) {
            System.out.println("File " + files[i].getPath());
            FSDataInputStream in = fs.open(files[i].getPath());
            IOUtils.copyBytes(in, System.out, conf, false);
            in.close();
        }
    }
    // fs.delete(FileOutputFormat.getOutputPath(job), true);
}

From source file:at.illecker.hama.hybrid.examples.hellohybrid.HelloHybridBSP.java

License:Apache License
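
Prints BSP job output: each non-empty file is first read as a SequenceFile of IntWritable keys and NullWritable values, falling back to a raw byte copy if the file is not a SequenceFile.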

static void printOutput(BSPJob job, Path path) throws IOException {
    FileSystem fs = path.getFileSystem(job.getConfiguration());
    FileStatus[] files = fs.listStatus(path);
    for (int i = 0; i < files.length; i++) {
        if (files[i].getLen() > 0) {
            System.out.println("File " + files[i].getPath());
            SequenceFile.Reader reader = null;
            try {
                reader = new SequenceFile.Reader(fs, files[i].getPath(), job.getConfiguration());

                IntWritable key = new IntWritable();
                NullWritable value = NullWritable.get();
                while (reader.next(key, value)) {
                    System.out.println("key: '" + key.get() + "' value: '" + value + "'\n");
                }
            } catch (IOException e) {
                FSDataInputStream in = fs.open(files[i].getPath());
                IOUtils.copyBytes(in, System.out, job.getConfiguration(), false);
                in.close();
            } finally {
                if (reader != null) {
                    reader.close();
                }
            }
        }
    }
    // fs.delete(FileOutputFormat.getOutputPath(job), true);
}

From source file:at.illecker.hama.hybrid.examples.kmeans.KMeansHybridBSP.java

License:Apache License
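
Filters the output listing by extension before printing: only non-empty files whose names end with the given suffix are passed on to printFile.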

static void printOutput(Configuration conf, FileSystem fs, String extensionFilter, Writable key, Writable value)
        throws IOException {
    FileStatus[] files = fs.listStatus(CONF_OUTPUT_DIR);
    for (int i = 0; i < files.length; i++) {
        if ((files[i].getLen() > 0) && (files[i].getPath().getName().endsWith(extensionFilter))) {
            printFile(conf, fs, files[i].getPath(), key, value);
        }
    }
    // fs.delete(FileOutputFormat.getOutputPath(job), true);
}

From source file:at.illecker.hama.hybrid.examples.matrixmultiplication.MatrixMultiplicationHybridBenchmark.java

License:Apache License
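
As above, but the output directory is held as a String and wrapped in a Path before listing.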

static void printOutput(Configuration conf) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] files = fs.listStatus(new Path(OUTPUT_DIR));
    for (int i = 0; i < files.length; i++) {
        if (files[i].getLen() > 0) {
            System.out.println("File " + files[i].getPath());
            FSDataInputStream in = fs.open(files[i].getPath());
            IOUtils.copyBytes(in, System.out, conf, false);
            in.close();
        }
    }
    // fs.delete(FileOutputFormat.getOutputPath(job), true);
}

From source file:at.illecker.hama.hybrid.examples.matrixmultiplication.MatrixMultiplicationHybridBSP.java

License:Apache License
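
Prints only the .log files found in the output directory, resolving the FileSystem from the output path itself rather than from FileSystem.get.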

static void printOutput(Configuration conf) throws IOException {
    FileSystem fs = OUTPUT_DIR.getFileSystem(conf);
    FileStatus[] files = fs.listStatus(OUTPUT_DIR);
    for (int i = 0; i < files.length; i++) {
        if (files[i].getLen() > 0) {
            if (files[i].getPath().getName().endsWith(".log")) {
                System.out.println("File " + files[i].getPath());
                FSDataInputStream in = fs.open(files[i].getPath());
                IOUtils.copyBytes(in, System.out, conf, false);
                in.close();
            }
        }
    }
    // fs.delete(FileOutputFormat.getOutputPath(job), true);
}