Example usage for org.apache.hadoop.fs FileSystem setConf

Introduction

On this page you can find example usages of the org.apache.hadoop.fs FileSystem method setConf.

Prototype

@Override
public void setConf(Configuration conf)
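
FileSystem inherits setConf from org.apache.hadoop.conf.Configured, and the examples below share a common pattern: construct a filesystem, attach a Configuration with setConf, then resolve paths with makeQualified. As a minimal, self-contained sketch of that pattern (the class name and path are hypothetical, and it assumes the Hadoop client library is on the classpath):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;

public class SetConfSketch {
    public static void main(String[] args) throws Exception {
        RawLocalFileSystem fs = new RawLocalFileSystem();
        try {
            // Attach a Configuration before using the instance; setConf is
            // inherited from org.apache.hadoop.conf.Configured.
            Configuration conf = new Configuration();
            fs.setConf(conf);

            // makeQualified resolves the path against the filesystem's
            // scheme ("file") and working directory.
            Path p = fs.makeQualified(new Path("example.txt")); // hypothetical path
            System.out.println(p);
        } finally {
            fs.close();
        }
    }
}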

Usage

From source file: org.mrgeo.format.CsvOutputFormatTest.java

License: Apache License

@Test
@Category(UnitTest.class)
public void testBasics() throws Exception {
    // this class and its unit tests are a work in progress.
    FileSystem fs = new RawLocalFileSystem();
    try {
        String output = TestUtils.composeOutputDir(CsvOutputFormatTest.class);

        Configuration c = new Configuration();
        fs.setConf(c);
        Path testFile = new Path(output, "testBasics.csv");
        testFile = fs.makeQualified(testFile);
        Path columns = new Path(testFile.toString() + ".columns");

        CsvOutputFormat.CsvRecordWriter writer = new CsvOutputFormat.CsvRecordWriter(columns, testFile);

        WritableGeometry f = GeometryFactory.createEmptyGeometry();

        f.setAttribute("string1", "foo");
        f.setAttribute("int1", "1");
        f.setAttribute("double1", "2.0");
        writer.write(new LongWritable(0), f);

        f.setAttribute("string1", "bar");
        f.setAttribute("int1", "3");
        f.setAttribute("double1", "4.0");
        writer.write(new LongWritable(1), f);

        writer.close(null);

        String input = TestUtils.composeInputDir(CsvOutputFormatTest.class);

        File csvBaselineFile = new File(input, "testBasics.csv");
        File csvOutputFile = new File(output, "testBasics.csv");
        TestUtils.compareTextFiles(csvBaselineFile.getAbsoluteFile(), csvOutputFile.getAbsoluteFile());

        File columnsBaselineFile = new File(input, "testBasics.csv.columns");
        File columnsOutputFile = new File(output, "testBasics.csv.columns");

        TestUtils.compareTextFiles(columnsBaselineFile.getAbsoluteFile(), columnsOutputFile.getAbsoluteFile());
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    } finally {
        fs.close();
    }
}

From source file: org.mrgeo.format.OsmContentHandlerTest.java

License: Apache License

@Ignore
@Test
public void OfftestBenchmark() throws Exception {
    // @TODO this class and its unit tests are a work in progress.
    FileSystem fs = new RawLocalFileSystem();
    try {
        String input = TestUtils.composeInputDir(OsmContentHandlerTest.class);

        Configuration c = new Configuration();
        c.set("xml.content.handler", OsmContentHandler.class.getCanonicalName());
        c.set("xml.pattern", "node");
        c.set("xml.root.tag", "osm");

        fs.setConf(c);
        Path testFile = new Path(input, "sample.osm");
        testFile = fs.makeQualified(testFile);

        c.set("xml.pattern", "place");

        FileSplit split = new FileSplit(testFile, 0, 64 * 1048576, null);
        RecordReader<LongWritable, Geometry> reader = new SaxInputFormat<LongWritable, Geometry>()
                .createRecordReader(split, HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));

        int l = 0;
        long start = new Date().getTime();
        while (reader.nextKeyValue()) {
            l++;
        }
        long elapsed = new Date().getTime() - start;
        log.debug("ms per record: {} record count: {}", (double) elapsed / (double) l, l);
        Assert.assertEquals(1, l);
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    } finally {
        fs.close();
    }
}

From source file: org.mrgeo.format.OsmContentHandlerTest.java

License: Apache License

@Ignore
@Test
public void OfftestBenchmark2() throws Exception {
    // @TODO this class and its unit tests are a work in progress.
    try {

        Configuration c = new Configuration();
        c.set("xml.content.handler", OsmContentHandler.class.getCanonicalName());
        c.set("xml.pattern", "node");
        c.set("xml.root.tag", "osm");

        FileSystem fs = HadoopFileUtils.getFileSystem();
        fs.setConf(c);
        Path testFile = new Path("/user/jason.surratt/", "georgia.osm");
        testFile = fs.makeQualified(testFile);

        c.set("xml.pattern", "place");

        FileSplit split = new FileSplit(testFile, 0, fs.getFileStatus(testFile).getLen(), null);
        RecordReader<LongWritable, Geometry> reader = new SaxInputFormat<LongWritable, Geometry>()
                .createRecordReader(split, HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));

        FileOutputStream fos = new FileOutputStream("georgia-points.txt");
        BufferedWriter wr = new BufferedWriter(new OutputStreamWriter(fos));

        Formatter formatter = new Formatter(wr, Locale.US);

        int l = 0;
        long start = new Date().getTime();
        while (reader.nextKeyValue() && l < 10000) {
            l++;
            Geometry f = reader.getCurrentValue();
            if (f instanceof Point) {
                Point p = (Point) f;
                formatter.format("%.7f %.7f\n", p.getX(), p.getY());
            }
        }

        formatter.close();

        long elapsed = new Date().getTime() - start;
        log.debug("ms per record: {} record count: {}", (double) elapsed / (double) l, l);
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    }
}

From source file: org.mrgeo.format.OsmInputFormatTest.java

License: Apache License

@Test
@Category(UnitTest.class)
public void testBasics() throws Exception {
    // this class and its unit tests are a work in progress.
    FileSystem fs = new RawLocalFileSystem();
    try {
        String input = TestUtils.composeInputDir(OsmInputFormatTest.class);

        Configuration c = new Configuration();
        fs.setConf(c);
        Path testFile = new Path(input, "sample.osm");
        testFile = fs.makeQualified(testFile);

        c.set("xml.pattern", "place");

        FileSplit split = new FileSplit(testFile, 0, fs.getFileStatus(testFile).getLen(), null);
        OsmInputFormat.OsmRecordReader reader = new OsmInputFormat.OsmRecordReader();
        reader.initialize(split, HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));
        int l = 0;
        while (reader.nextKeyValue() && l < 10000) {
            l++;
        }
        Assert.assertEquals(6, l);
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    } finally {
        fs.close();
    }
}

From source file: org.mrgeo.format.PgQueryInputFormatTest.java

License: Apache License

public RecordReader<LongWritable, Geometry> openReader(Path p) throws IOException {
    FileSystem fs = new RawLocalFileSystem();
    try {
        Job j = new Job(new Configuration());
        Configuration c = j.getConfiguration();
        fs.setConf(c);
        Path testFile = fs.makeQualified(p);

        c.set("mapred.input.dir", testFile.toString());
        PgQueryInputFormat format = new PgQueryInputFormat();
        HadoopVectorUtils.setupPgQueryInputFormat(j, "anthro", "anthro4server",
                "jdbc:postgresql://localhost:5432/anthro");
        InputSplit split = null;
        try {
            split = format.getSplits(j).get(0);
            return format.createRecordReader(split,
                    HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));
        } catch (Exception e) {
            return null;
        }
    } finally {
        fs.close();
    }
}

From source file: org.mrgeo.format.ShpInputFormatTest.java

License: Apache License

public RecordReader<LongWritable, Geometry> openReader(Path p) throws IOException, InterruptedException {
    Job j = new Job(new Configuration());
    Configuration c = j.getConfiguration();
    FileSystem fs = new RawLocalFileSystem();
    try {
        fs.setConf(c);
        Path testFile = fs.makeQualified(p);

        c.set("mapred.input.dir", testFile.toString());
        ShpInputFormat format = new ShpInputFormat();
        InputSplit split = format.getSplits(j).get(0);
        return format.createRecordReader(split, HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));
    } finally {
        fs.close();
    }
}

From source file: org.mrgeo.format.XmlInputFormatTest.java

License: Apache License

@Test
@Category(UnitTest.class)
public void testBasics() throws Exception {
    // this class and its unit tests are a work in progress.
    FileSystem fs = new RawLocalFileSystem();
    try {
        String input = TestUtils.composeInputDir(XmlInputFormatTest.class);

        Configuration c = new Configuration();
        fs.setConf(c);
        Path testFile = new Path(input, "testBasics.xml");
        testFile = fs.makeQualified(testFile);

        c.set("xml.pattern", "node");

        FileSplit split = new FileSplit(testFile, 0, 50, null);
        XmlInputFormat.XmlRecordReader reader = new XmlInputFormat.XmlRecordReader();
        reader.initialize(split, HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));
        int l = 0;
        while (reader.nextKeyValue()) {
            System.out.printf("k: %s v: %s\n", reader.getCurrentKey(), reader.getCurrentValue());
            l++;
        }

        split = new FileSplit(testFile, 50, fs.getFileStatus(testFile).getLen() - 50, null);
        reader = new XmlInputFormat.XmlRecordReader();
        reader.initialize(split, HadoopUtils.createTaskAttemptContext(c, new TaskAttemptID()));
        while (reader.nextKeyValue()) {
            System.out.printf("k: %s v: %s\n", reader.getCurrentKey(), reader.getCurrentValue());
            l++;
        }

        Assert.assertEquals(3, l);
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    } finally {
        fs.close();
    }
}

From source file: org.mrgeo.hdfs.vector.CsvOutputFormatTest.java

License: Apache License

@Test
@Category(UnitTest.class)
public void testBasics() throws Exception {
    // this class and its unit tests are a work in progress.
    FileSystem fs = new RawLocalFileSystem();
    try {
        String output = TestUtils.composeOutputDir(CsvOutputFormatTest.class);

        Configuration c = new Configuration();
        fs.setConf(c);
        Path testFile = new Path(output, "testBasics.csv");
        testFile = fs.makeQualified(testFile);
        Path columns = new Path(testFile.toString() + ".columns");

        CsvOutputFormat.CsvRecordWriter writer = new CsvOutputFormat.CsvRecordWriter(columns, testFile);

        WritableGeometry f = GeometryFactory.createEmptyGeometry();

        f.setAttribute("string1", "foo");
        f.setAttribute("int1", "1");
        f.setAttribute("double1", "2.0");
        writer.write(new FeatureIdWritable(0), f);

        f.setAttribute("string1", "bar");
        f.setAttribute("int1", "3");
        f.setAttribute("double1", "4.0");
        writer.write(new FeatureIdWritable(1), f);

        writer.close(null);

        String input = TestUtils.composeInputDir(CsvOutputFormatTest.class);

        File csvBaselineFile = new File(input, "testBasics.csv");
        File csvOutputFile = new File(output, "testBasics.csv");
        TestUtils.compareTextFiles(csvBaselineFile.getAbsoluteFile(), csvOutputFile.getAbsoluteFile());

        File columnsBaselineFile = new File(input, "testBasics.csv.columns");
        File columnsOutputFile = new File(output, "testBasics.csv.columns");

        TestUtils.compareTextFiles(columnsBaselineFile.getAbsoluteFile(), columnsOutputFile.getAbsoluteFile());
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    } finally {
        fs.close();
    }
}