Example usage for org.apache.hadoop.fs FileSystem close

List of usage examples for org.apache.hadoop.fs FileSystem close

Introduction

In this page you can find the example usage for org.apache.hadoop.fs FileSystem close.

Prototype

@Override
public void close() throws IOException 

Source Link

Document

Close this FileSystem instance.

Usage

From source file:org.mrgeo.format.CsvInputFormatTest.java

License:Apache License

@Test
@Category(UnitTest.class)
public void testBadValues() throws Exception {
    // Verifies that CsvRecordReader tolerates malformed rows: bad numeric text
    // is surfaced as the raw string, empty fields come back as "", and columns
    // missing entirely come back as null -- the reader must not throw.
    FileSystem fs = new RawLocalFileSystem();
    try {
        Job j = new Job(new Configuration());
        Configuration c = j.getConfiguration();
        fs.setConf(c);
        Path testFile = new Path(input, "testErrors.csv");
        testFile = fs.makeQualified(testFile);

        FileInputFormat.addInputPath(j, testFile);
        FileSplit split = new FileSplit(testFile, 0, 500, null);
        CsvInputFormat.CsvRecordReader reader = new CsvInputFormat.CsvRecordReader();
        reader.initialize(split, HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));

        // Row 1: the whole line was swallowed into string1, so every other
        // column is absent (null).
        Assert.assertTrue(reader.nextKeyValue());
        Geometry f = reader.getCurrentValue();
        Assert.assertNotNull(f);
        Assert.assertTrue(f.isEmpty());
        Assert.assertEquals("test1,1,1.5,30.0,40.0", f.getAttribute("string1"));
        Assert.assertNull(f.getAttribute("int1"));
        Assert.assertNull(f.getAttribute("double1"));
        Assert.assertNull(f.getAttribute("x"));
        Assert.assertNull(f.getAttribute("y"));
        // Row 2: empty double1, non-numeric x kept as raw text.
        Assert.assertTrue(reader.nextKeyValue());
        f = reader.getCurrentValue();
        Assert.assertNotNull(f);
        Assert.assertTrue(f.isEmpty());
        Assert.assertEquals("test2", f.getAttribute("string1"));
        Assert.assertEquals(2, Integer.parseInt(f.getAttribute("int1")));
        Assert.assertEquals("", f.getAttribute("double1"));
        Assert.assertEquals("30.abc", f.getAttribute("x"));
        Assert.assertEquals(40.0, Double.parseDouble(f.getAttribute("y")), EPSILON);
        // Row 3: non-numeric y kept as raw text.
        Assert.assertTrue(reader.nextKeyValue());
        f = reader.getCurrentValue();
        Assert.assertNotNull(f);
        Assert.assertTrue(f.isEmpty());
        Assert.assertEquals("test3", f.getAttribute("string1"));
        Assert.assertEquals(3, Integer.parseInt(f.getAttribute("int1")));
        Assert.assertEquals(3.5, Double.parseDouble(f.getAttribute("double1")), EPSILON);
        Assert.assertEquals(30.0, Double.parseDouble(f.getAttribute("x")), EPSILON);
        Assert.assertEquals("40.abc", f.getAttribute("y"));
        // Row 4: empty int1, missing y column (null).
        Assert.assertTrue(reader.nextKeyValue());
        f = reader.getCurrentValue();
        Assert.assertNotNull(f);
        Assert.assertTrue(f.isEmpty());
        Assert.assertEquals("test4", f.getAttribute("string1"));
        Assert.assertEquals("", f.getAttribute("int1"));
        Assert.assertEquals(4.5, Double.parseDouble(f.getAttribute("double1")), EPSILON);
        Assert.assertEquals(30.0, Double.parseDouble(f.getAttribute("x")), EPSILON);
        Assert.assertNull(f.getAttribute("y"));
        // Row 5: empty x.
        Assert.assertTrue(reader.nextKeyValue());
        f = reader.getCurrentValue();
        Assert.assertNotNull(f);
        Assert.assertTrue(f.isEmpty());
        Assert.assertEquals("test5", f.getAttribute("string1"));
        Assert.assertEquals(5, Integer.parseInt(f.getAttribute("int1")));
        Assert.assertEquals(5.5, Double.parseDouble(f.getAttribute("double1")), EPSILON);
        Assert.assertEquals("", f.getAttribute("x"));
        Assert.assertEquals(40.0, Double.parseDouble(f.getAttribute("y")), EPSILON);
        // Row 6: every field except string1 is empty.
        Assert.assertTrue(reader.nextKeyValue());
        f = reader.getCurrentValue();
        Assert.assertNotNull(f);
        Assert.assertTrue(f.isEmpty());
        Assert.assertEquals("test6", f.getAttribute("string1"));
        Assert.assertEquals("", f.getAttribute("int1"));
        Assert.assertEquals("", f.getAttribute("double1"));
        Assert.assertEquals("", f.getAttribute("x"));
        Assert.assertEquals("", f.getAttribute("y"));

        // End of the 500-byte split: no further records.
        Assert.assertFalse(reader.nextKeyValue());
    } finally {
        // fs is a locally created RawLocalFileSystem, so closing it here
        // cannot disturb any shared, cached FileSystem instance.
        fs.close();
    }
}

From source file:org.mrgeo.format.CsvOutputFormatTest.java

License:Apache License

@Test
@Category(UnitTest.class)
public void testBasics() throws Exception {
    // Writes two features through CsvRecordWriter, then compares the produced
    // .csv and .csv.columns files against checked-in baseline copies.
    FileSystem fs = new RawLocalFileSystem();
    try {
        String output = TestUtils.composeOutputDir(CsvOutputFormatTest.class);

        Configuration c = new Configuration();
        fs.setConf(c);
        Path testFile = new Path(output, "testBasics.csv");
        testFile = fs.makeQualified(testFile);
        Path columns = new Path(testFile.toString() + ".columns");

        CsvOutputFormat.CsvRecordWriter writer = new CsvOutputFormat.CsvRecordWriter(columns, testFile);
        try {
            WritableGeometry f = GeometryFactory.createEmptyGeometry();

            f.setAttribute("string1", "foo");
            f.setAttribute("int1", "1");
            f.setAttribute("double1", "2.0");
            writer.write(new LongWritable(0), f);

            f.setAttribute("string1", "bar");
            f.setAttribute("int1", "3");
            f.setAttribute("double1", "4.0");
            writer.write(new LongWritable(1), f);
        } finally {
            // Close in finally so the file handles are released (and output
            // flushed) even when a write() throws; the original leaked the
            // writer on that path.
            writer.close(null);
        }

        String input = TestUtils.composeInputDir(CsvOutputFormatTest.class);

        File csvBaselineFile = new File(input, "testBasics.csv");
        File csvOutputFile = new File(output, "testBasics.csv");
        TestUtils.compareTextFiles(csvBaselineFile.getAbsoluteFile(), csvOutputFile.getAbsoluteFile());

        File columnsBaselineFile = new File(input, "testBasics.csv.columns");
        File columnsOutputFile = new File(output, "testBasics.csv.columns");

        TestUtils.compareTextFiles(columnsBaselineFile.getAbsoluteFile(), columnsOutputFile.getAbsoluteFile());
    } finally {
        // Locally created RawLocalFileSystem; safe to close unconditionally.
        fs.close();
    }
}

From source file:org.mrgeo.format.OsmContentHandlerTest.java

License:Apache License

@Ignore
@Test
public void OfftestBenchmark() throws Exception {
    // Rough throughput benchmark for SaxInputFormat over an OSM sample file.
    // @Ignore'd: only run by hand; the record-count assertion keeps it honest.
    FileSystem fs = new RawLocalFileSystem();
    try {
        String input = TestUtils.composeInputDir(OsmContentHandlerTest.class);

        Configuration c = new Configuration();
        c.set("xml.content.handler", OsmContentHandler.class.getCanonicalName());
        c.set("xml.pattern", "node");
        c.set("xml.root.tag", "osm");

        fs.setConf(c);
        Path testFile = new Path(input, "sample.osm");
        testFile = fs.makeQualified(testFile);

        // Overrides the "node" pattern set above; the reader matches "place".
        c.set("xml.pattern", "place");

        FileSplit split = new FileSplit(testFile, 0, 64 * 1048576, null);
        RecordReader<LongWritable, Geometry> reader = new SaxInputFormat<LongWritable, Geometry>()
                .createRecordReader(split, HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));

        int l = 0;
        // System.currentTimeMillis() gives the same wall-clock delta without
        // allocating Date objects.
        long start = System.currentTimeMillis();
        while (reader.nextKeyValue()) {
            l++;
        }
        long elapsed = System.currentTimeMillis() - start;
        log.debug("ms per record: {} record count: {}", (double) elapsed / (double) l, l);
        Assert.assertEquals(1, l);
    } finally {
        // Locally created RawLocalFileSystem; safe to close unconditionally.
        fs.close();
    }
}

From source file:org.mrgeo.format.OsmInputFormatTest.java

License:Apache License

@Test
@Category(UnitTest.class)
public void testBasics() throws Exception {
    // Reads sample.osm with OsmRecordReader matching the "place" pattern and
    // expects exactly 6 records. The l < 10000 bound guards against an
    // infinite loop if the reader never reports end-of-input.
    FileSystem fs = new RawLocalFileSystem();
    try {
        String input = TestUtils.composeInputDir(OsmInputFormatTest.class);

        Configuration c = new Configuration();
        fs.setConf(c);
        Path testFile = new Path(input, "sample.osm");
        testFile = fs.makeQualified(testFile);

        c.set("xml.pattern", "place");

        // Single split spanning the whole file.
        FileSplit split = new FileSplit(testFile, 0, fs.getFileStatus(testFile).getLen(), null);
        OsmInputFormat.OsmRecordReader reader = new OsmInputFormat.OsmRecordReader();
        reader.initialize(split, HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));
        int l = 0;
        while (reader.nextKeyValue() && l < 10000) {
            l++;
        }
        Assert.assertEquals(6, l);
    } finally {
        // Locally created RawLocalFileSystem; safe to close unconditionally.
        fs.close();
    }
}

From source file:org.mrgeo.format.PgQueryInputFormatTest.java

License:Apache License

public RecordReader<LongWritable, Geometry> openReader(Path p) throws IOException {
    // Builds a PgQueryInputFormat record reader over the given path.
    // Returns null when the input format cannot be set up -- typically when
    // the local Postgres instance used by this test is unavailable.
    FileSystem fs = new RawLocalFileSystem();
    try {
        Job j = new Job(new Configuration());
        Configuration c = j.getConfiguration();
        fs.setConf(c);
        Path testFile = fs.makeQualified(p);

        c.set("mapred.input.dir", testFile.toString());
        PgQueryInputFormat format = new PgQueryInputFormat();
        HadoopVectorUtils.setupPgQueryInputFormat(j, "anthro", "anthro4server",
                "jdbc:postgresql://localhost:5432/anthro");
        InputSplit split = null;
        try {
            split = format.getSplits(j).get(0);
            return format.createRecordReader(split,
                    HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));
        } catch (Exception e) {
            // Keep the null-on-failure contract callers rely on, but log the
            // cause instead of swallowing it silently as before.
            e.printStackTrace();
            return null;
        }
    } finally {
        // NOTE(review): fs is closed before the returned reader is consumed.
        // This works for RawLocalFileSystem, but confirm the reader does not
        // hold this FileSystem instance open.
        fs.close();
    }
}

From source file:org.mrgeo.format.ShpInputFormatTest.java

License:Apache License

public RecordReader<LongWritable, Geometry> openReader(Path p) throws IOException, InterruptedException {
    // Creates a ShpInputFormat record reader over the first split of the
    // given shapefile path, using a throwaway local file system.
    Job job = new Job(new Configuration());
    Configuration conf = job.getConfiguration();
    FileSystem localFs = new RawLocalFileSystem();
    try {
        localFs.setConf(conf);
        Path qualified = localFs.makeQualified(p);
        conf.set("mapred.input.dir", qualified.toString());

        ShpInputFormat format = new ShpInputFormat();
        InputSplit firstSplit = format.getSplits(job).get(0);
        TaskAttemptContext context = HadoopUtils.createTaskAttemptContext(conf, new TaskAttemptID());
        return format.createRecordReader(firstSplit, context);
    } finally {
        localFs.close();
    }
}

From source file:org.mrgeo.format.XmlInputFormatTest.java

License:Apache License

@Test
@Category(UnitTest.class)
public void testBasics() throws Exception {
    // Splits testBasics.xml at byte 50 into two FileSplits and verifies that
    // XmlRecordReader finds all 3 "node" records across the split boundary.
    FileSystem fs = new RawLocalFileSystem();
    try {
        String input = TestUtils.composeInputDir(XmlInputFormatTest.class);

        Configuration c = new Configuration();
        fs.setConf(c);
        Path testFile = new Path(input, "testBasics.xml");
        testFile = fs.makeQualified(testFile);

        c.set("xml.pattern", "node");

        // First split: bytes [0, 50).
        FileSplit split = new FileSplit(testFile, 0, 50, null);
        XmlInputFormat.XmlRecordReader reader = new XmlInputFormat.XmlRecordReader();
        reader.initialize(split, HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));
        int l = 0;
        while (reader.nextKeyValue()) {
            System.out.printf("k: %s v: %s\n", reader.getCurrentKey(), reader.getCurrentValue());
            l++;
        }

        // Second split: byte 50 to end of file.
        split = new FileSplit(testFile, 50, fs.getFileStatus(testFile).getLen() - 50, null);
        reader = new XmlInputFormat.XmlRecordReader();
        reader.initialize(split, HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));
        while (reader.nextKeyValue()) {
            System.out.printf("k: %s v: %s\n", reader.getCurrentKey(), reader.getCurrentValue());
            l++;
        }

        // Combined record count over both splits.
        Assert.assertEquals(3, l);
    } finally {
        // Locally created RawLocalFileSystem; safe to close unconditionally.
        fs.close();
    }
}

From source file:org.mrgeo.hdfs.vector.CsvOutputFormatTest.java

License:Apache License

@Test
@Category(UnitTest.class)
public void testBasics() throws Exception {
    // Writes two features through CsvRecordWriter (FeatureIdWritable keys) and
    // compares the produced .csv and .csv.columns files against baselines.
    FileSystem fs = new RawLocalFileSystem();
    try {
        String output = TestUtils.composeOutputDir(CsvOutputFormatTest.class);

        Configuration c = new Configuration();
        fs.setConf(c);
        Path testFile = new Path(output, "testBasics.csv");
        testFile = fs.makeQualified(testFile);
        Path columns = new Path(testFile.toString() + ".columns");

        CsvOutputFormat.CsvRecordWriter writer = new CsvOutputFormat.CsvRecordWriter(columns, testFile);
        try {
            WritableGeometry f = GeometryFactory.createEmptyGeometry();

            f.setAttribute("string1", "foo");
            f.setAttribute("int1", "1");
            f.setAttribute("double1", "2.0");
            writer.write(new FeatureIdWritable(0), f);

            f.setAttribute("string1", "bar");
            f.setAttribute("int1", "3");
            f.setAttribute("double1", "4.0");
            writer.write(new FeatureIdWritable(1), f);
        } finally {
            // Close in finally so the file handles are released (and output
            // flushed) even when a write() throws; the original leaked the
            // writer on that path.
            writer.close(null);
        }

        String input = TestUtils.composeInputDir(CsvOutputFormatTest.class);

        File csvBaselineFile = new File(input, "testBasics.csv");
        File csvOutputFile = new File(output, "testBasics.csv");
        TestUtils.compareTextFiles(csvBaselineFile.getAbsoluteFile(), csvOutputFile.getAbsoluteFile());

        File columnsBaselineFile = new File(input, "testBasics.csv.columns");
        File columnsOutputFile = new File(output, "testBasics.csv.columns");

        TestUtils.compareTextFiles(columnsBaselineFile.getAbsoluteFile(), columnsOutputFile.getAbsoluteFile());
    } finally {
        // Locally created RawLocalFileSystem; safe to close unconditionally.
        fs.close();
    }
}

From source file:org.opencloudengine.garuda.backend.hdfs.HdfsServiceImpl.java

License:Open Source License

@Override
public void downloadFile(String path, HttpServletResponse response) throws Exception {
    // Streams the HDFS file at `path` to the HTTP response as a forced
    // ("attachment") download. Throws via mustExists/notFileException when the
    // path is missing or not a regular file.
    this.mustExists(path);
    FileSystem fs = fileSystemFactory.getFileSystem();
    Path fsPath = new Path(path);

    FileStatus fileStatus = fs.getFileStatus(fsPath);
    if (!fileStatus.isFile()) {
        this.notFileException(fsPath.toString());
    }
    HdfsFileInfo fileInfo = new HdfsFileInfo(fileStatus, fs.getContentSummary(fsPath));

    String filename = fileInfo.getFilename();
    // Headers must be set before any bytes are written to the output stream.
    response.setHeader("Content-Length", "" + fileInfo.getLength());
    response.setHeader("Content-Transfer-Encoding", "binary");
    response.setHeader("Content-Type", "application/force-download");
    response.setHeader("Content-Disposition",
            MessageFormatter
                    .format("attachment; fullyQualifiedPath={}; filename={};",
                            URLEncoder.encode(fileInfo.getFullyQualifiedPath(), "UTF-8"), filename)
                    .getMessage());
    response.setStatus(200);

    try {
        // try-with-resources closes both streams even when the copy fails
        // mid-transfer; the original leaked them on any IOException.
        try (FSDataInputStream in = fs.open(fsPath);
                ServletOutputStream out = response.getOutputStream()) {
            byte[] b = new byte[1024];
            int numBytes;
            // -1 is the documented end-of-stream sentinel for read(byte[]).
            while ((numBytes = in.read(b)) != -1) {
                out.write(b, 0, numBytes);
            }
        }
    } finally {
        // NOTE(review): closing a factory-provided FileSystem is unsafe if the
        // factory returns Hadoop's JVM-wide cached instance -- confirm the
        // factory hands out dedicated instances before relying on this close().
        fs.close();
    }
}

From source file:org.opencloudengine.garuda.backend.hdfs.HdfsServiceImpl.java

License:Open Source License

@Override
public boolean createDirectory(String path, String owner, String group, String permission) throws Exception {
    // Creates an HDFS directory and applies owner/group/permission to it.
    // Returns true even when mkdirs() reports the directory was not created,
    // preserving the original contract callers may depend on.
    this.rootCheck(path);
    this.mustNotExists(path);

    try {
        FileSystem fs = fileSystemFactory.getFileSystem();
        try {
            Path fsPath = new Path(path);

            if (fs.mkdirs(fsPath)) {
                this._setOwner(path, owner, group);
                this._setPermission(path, permission);
            }
            return true;
        } finally {
            // Close on every path; the original only closed fs inside the
            // mkdirs() success branch, leaking it on failure or exception.
            // NOTE(review): confirm the factory does not return Hadoop's
            // cached, shared FileSystem instance before keeping this close().
            fs.close();
        }
    } catch (IOException ex) {
        // Original message was mojibake (" ?  ."); replaced with a meaningful
        // one and the cause is preserved.
        throw new ServiceException("Failed to create HDFS directory: " + path, ex);
    }
}