Example usage for org.apache.hadoop.conf Configuration Configuration

List of usage examples for org.apache.hadoop.conf Configuration Configuration

Introduction

On this page you can find example usage for the org.apache.hadoop.conf Configuration() constructor.

Prototype

public Configuration() 

Source Link

Document

A new configuration.

Usage

From source file: GraphCompare.java

License: Apache License

/**
 * Driver that chains two MapReduce jobs to compare graphs.
 *
 * <p>Job 1 reads the two input graphs (args[0], args[1]) and writes an
 * intermediate result to args[2]; job 2 reads that intermediate output and
 * writes the final comparison to args[3].
 *
 * <p>Fixes: validates the argument count up front, and aborts with a
 * non-zero exit status if the first job fails instead of unconditionally
 * launching the second job on stale or missing intermediate data.
 */
public static void main(String[] args) throws Exception {
    if (args.length != 4) {
        System.err.println("Usage: GraphCompare <input1> <input2> <intermediate_out> <final_out>");
        System.exit(2);
    }

    Configuration conf = new Configuration();

    Job job1 = Job.getInstance(conf, "graph compare 1");
    job1.setJarByClass(GraphCompare.class);
    job1.setMapperClass(Map1.class);
    job1.setReducerClass(Reduce1.class);
    job1.setMapOutputKeyClass(Text.class);
    job1.setMapOutputValueClass(IntWritable.class);
    job1.setOutputKeyClass(IntWritable.class);
    job1.setOutputValueClass(IntWritable.class);
    job1.setNumReduceTasks(30);
    // Cap the split size so the input is divided among many map tasks.
    job1.getConfiguration().set("mapreduce.input.fileinputformat.split.maxsize", "25000");

    FileInputFormat.addInputPath(job1, new Path(args[0]));
    FileInputFormat.addInputPath(job1, new Path(args[1]));
    FileOutputFormat.setOutputPath(job1, new Path(args[2]));

    // Do not run the second job if the first one failed.
    if (!job1.waitForCompletion(true)) {
        System.exit(1);
    }

    Job job2 = Job.getInstance(conf, "graph compare 2");
    job2.setJarByClass(GraphCompare.class);
    job2.setMapperClass(Map2.class);
    job2.setReducerClass(Reduce2.class);
    job2.setMapOutputKeyClass(IntWritable.class);
    job2.setMapOutputValueClass(IntWritable.class);
    job2.setOutputKeyClass(IntWritable.class);
    job2.setOutputValueClass(IntWritable.class);
    job2.setNumReduceTasks(30);

    FileInputFormat.addInputPath(job2, new Path(args[2]));
    FileOutputFormat.setOutputPath(job2, new Path(args[3]));

    System.exit(job2.waitForCompletion(true) ? 0 : 1);
}

From source file: FormatStorageSerDeTest.java

License: Open Source License

/**
 * Round-trips a fully-populated 7-field record through FormatStorageSerDe
 * and compares the result against the expected reference values.
 *
 * <p>Fix: replaces the deprecated boxing constructors ({@code new Byte(...)},
 * {@code new Integer(...)}, {@code new String(...)}) with
 * {@code valueOf(...)} / literals; the compared values are unchanged.
 */
public void testFormatStorageSerDe() throws Throwable {
    try {
        FormatStorageSerDe serDe = new FormatStorageSerDe();
        Configuration conf = new Configuration();
        Properties tbl = createProperties();

        serDe.initialize(conf, tbl);

        // One value per field, positions 0-6:
        // byte, short, int, long, float, double, string.
        Record record = new Record(7);
        record.addValue(new FieldValue((byte) 1, (short) 0));
        record.addValue(new FieldValue((short) 2, (short) 1));
        record.addValue(new FieldValue((int) 3, (short) 2));
        record.addValue(new FieldValue((long) 4, (short) 3));
        record.addValue(new FieldValue((float) 5.5, (short) 4));
        record.addValue(new FieldValue((double) 6.6, (short) 5));
        record.addValue(new FieldValue("hello konten", (short) 6));

        Object[] refer = { Byte.valueOf((byte) 1), Short.valueOf((short) 2), Integer.valueOf(3),
                Long.valueOf(4), Float.valueOf(5.5f), Double.valueOf(6.6), "hello konten" };

        deserializeAndSerialize(serDe, record, refer, 1);

    } catch (Throwable e) {
        e.printStackTrace();
        fail("get exception:" + e.getMessage());
    }
}

From source file: FormatStorageSerDeTest.java

License: Open Source License

/**
 * Round-trips a 7-field record whose long field (position 3) carries a null
 * value, verifying that a null field survives deserialize/serialize.
 *
 * <p>Fix: replaces the deprecated boxing constructors ({@code new Byte(...)},
 * {@code new Integer(...)}, {@code new String(...)}) with
 * {@code valueOf(...)} / literals; the compared values are unchanged.
 */
public void testFormatStorageSerDeNullField() throws Throwable {
    try {
        FormatStorageSerDe serDe = new FormatStorageSerDe();
        Configuration conf = new Configuration();
        Properties tbl = createProperties();

        serDe.initialize(conf, tbl);

        Record record = new Record(7);
        record.addValue(new FieldValue((byte) 1, (short) 0));
        record.addValue(new FieldValue((short) 2, (short) 1));
        record.addValue(new FieldValue((int) 3, (short) 2));
        // Field 3 is declared as a long but given a null value on purpose.
        record.addValue(new FieldValue(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, null, (short) 3));
        record.addValue(new FieldValue((float) 5.5, (short) 4));
        record.addValue(new FieldValue((double) 6.6, (short) 5));
        record.addValue(new FieldValue("hello konten", (short) 6));

        Object[] refer = { Byte.valueOf((byte) 1), Short.valueOf((short) 2), Integer.valueOf(3),
                Long.valueOf(4), Float.valueOf(5.5f), Double.valueOf(6.6), "hello konten" };

        deserializeAndSerialize(serDe, record, refer, 0);

    } catch (Throwable e) {
        e.printStackTrace();
        fail("get exception:" + e.getMessage());
    }
}

From source file: FormatStorageSerDeTest.java

License: Open Source License

/**
 * Deserializes a record carrying only the first 3 of the 7 declared fields,
 * verifying that a short record deserializes without throwing.
 *
 * <p>Fixes: drops the unused {@code refer} array and the unused {@code row}
 * local; only exception-freeness is (and was) checked here.
 */
public void testFormatStorageSerDeLessField() throws Throwable {
    try {
        FormatStorageSerDe serDe = new FormatStorageSerDe();
        Configuration conf = new Configuration();
        Properties tbl = createProperties();

        serDe.initialize(conf, tbl);

        // Only fields 0-2 are populated; fields 3-6 are absent.
        Record record = new Record(3);
        record.addValue(new FieldValue((byte) 1, (short) 0));
        record.addValue(new FieldValue((short) 2, (short) 1));
        record.addValue(new FieldValue((int) 3, (short) 2));

        // NOTE(review): no assertion on the result — this test only verifies
        // that deserialize() completes without throwing.
        serDe.deserialize(record);

    } catch (Throwable e) {
        e.printStackTrace();
        fail("get exception:" + e.getMessage());
    }
}

From source file: TestBAM.java

License: Open Source License

/**
 * Entry point: validates arguments and delegates to the TestBAM tool via
 * ToolRunner, propagating the tool's exit status.
 *
 * <p>Fix: a usage error now goes to stderr and exits with a non-zero status;
 * previously it printed to stdout and exited 0, signalling success.
 */
public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        System.err.printf("Usage: hadoop jar <name.jar> %s <input.bam> <output_directory>\n",
                TestBAM.class.getCanonicalName());
        System.exit(2);
    }

    int res = ToolRunner.run(new Configuration(), new TestBAM(), args);
    System.exit(res);
}

From source file: TestCreateFormatStorageFile.java

License: Open Source License

/**
 * Writes {@code argv[0]} single-long-field records to each of two
 * FormatStorage files under MR_input/, with compression controlled by
 * {@code argv[1]}, and reports the elapsed time and first file's size.
 *
 * <p>Fixes: both files are now closed on the failure path (previously an
 * exception mid-write leaked the open files), and the unboxing
 * {@code Byte.valueOf}/{@code Integer.valueOf} calls are replaced with
 * {@code parseByte}/{@code parseInt}.
 */
public static void main(String[] argv) {
    if (argv.length != 2) {
        System.out.println("TestCreateFormatStorageFile count compress");
        return;
    }

    byte compress = Byte.parseByte(argv[1]);

    FormatDataFile fd = null;
    FormatDataFile fd2 = null;
    try {
        Configuration conf = new Configuration();

        // First file: a single long field at index 0.
        FieldMap fieldMap = new FieldMap();
        fieldMap.addField(new Field(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, (short) 0));

        Head head = new Head();
        head.setCompress(compress);
        head.setFieldMap(fieldMap);

        String fileName = "MR_input/TestCreateFormatStorageFile";

        fd = new FormatDataFile(conf);
        fd.create(fileName, head);

        // Second file: identical schema, different record values.
        FieldMap fieldMap2 = new FieldMap();
        fieldMap2.addField(new Field(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, (short) 0));
        Head head2 = new Head();
        head2.setCompress(compress);
        head2.setFieldMap(fieldMap2);
        String fileName2 = "MR_input/TestCreateFormatStorageFile_2";
        fd2 = new FormatDataFile(conf);
        fd2.create(fileName2, head2);

        long begin = System.currentTimeMillis();
        int count = Integer.parseInt(argv[0]);
        for (int i = 0; i < count; i++) {
            Record record = new Record((short) 1);
            record.addValue(new FieldValue((long) (4 + i), (short) 0));
            fd.addRecord(record);

            Record record2 = new Record((short) 1);
            record2.addValue(new FieldValue((long) (100 + i), (short) 0));
            fd2.addRecord(record2);
        }

        fd.close();
        fd2.close();

        long end = System.currentTimeMillis();
        String string = "write " + count + " record over, delay: " + ((end - begin) / 1000) + " s . file size:"
                + fd.getFileLen() + "\n";
        System.out.println(string);

        // Mark both files closed so the finally block does not close them twice.
        fd = null;
        fd2 = null;
    } catch (Exception e) {
        e.printStackTrace();
        System.out.println("get exception:" + e.getMessage());
    } finally {
        closeQuietly(fd);
        closeQuietly(fd2);
    }
}

/** Best-effort close used only on the failure path; suppresses secondary errors. */
private static void closeQuietly(FormatDataFile file) {
    if (file == null) {
        return;
    }
    try {
        file.close();
    } catch (Exception ignored) {
        // A primary failure is already being reported; do not mask it.
    }
}

From source file: InputDataUsage.java

License: Apache License

/**
 * Loads the vectorized documents from "tfidf-vectors/part-r-00000" and prints
 * the ID, vector, and dimension count of the first document found.
 *
 * @throws IOException if the vector file cannot be read
 */
public static void main(String[] argv) throws IOException {
    Configuration conf = new Configuration();
    Path vectorFile = new Path("tfidf-vectors/part-r-00000");
    HashMap<Text, VectorWritable> documents = InputData.vectorizedTextReader(conf, vectorFile);

    // Only the first entry is printed; iteration order of a HashMap is
    // unspecified, so this shows an arbitrary document.
    java.util.Iterator<java.util.Map.Entry<Text, VectorWritable>> entries = documents.entrySet().iterator();
    if (entries.hasNext()) {
        java.util.Map.Entry<Text, VectorWritable> first = entries.next();
        System.out.println("Document ID: " + first.getKey());
        System.out.println("Vector: " + first.getValue());
        System.out.println("Dimensions: " + first.getValue().get().size());
    }
}

From source file: IndexWords.java

License: Apache License

/**
 * Entry point: pre-processes the raw edge data on the local filesystem,
 * then launches the IndexWords tool and exits with its status code.
 */
public static void main(String[] args) throws Exception {
    // Local file preparation before the MapReduce job starts.
    filterFile("edges.txt", "ms2786edges.txt", "list.txt");
    formatFile("usethese.txt", args[0]);
    cleanFiles("./", args[1]);

    Configuration conf = new Configuration();
    int exitCode = ToolRunner.run(conf, new IndexWords(), args);
    System.exit(exitCode);
}

From source file: PopularURLs.java

License: Open Source License

/**
 * Driver for the "popularurls" MapReduce job: reads text input from args[0]
 * and writes the job output to args[1].
 *
 * <p>Fixes: uses {@code Job.getInstance(...)} instead of the deprecated
 * {@code Job} constructor (consistent with the other drivers in this file),
 * and reflects job success or failure in the process exit code instead of
 * discarding the result of {@code waitForCompletion}.
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "popularurls");
    job.setJarByClass(PopularURLs.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setMapperClass(Map.class);
    job.setReducerClass(Reduce.class);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file: TestIFormatFile.java

License: Open Source License

/**
 * Writes {@code num} generated records to an IFormat data file and prints
 * the elapsed time.
 *
 * @param fileName destination file path
 * @param num      number of records to write
 * @param var      when true, appends a variable-length string field
 *                 (1 to 100 'a' characters, cycling with the record index)
 * @param compress when true, enables compression (style: LZO)
 * @throws IOException if file creation or a record write fails
 *
 * <p>Fixes: the file is now closed in a finally block so a failed write does
 * not leak it, and a single reusable StringBuilder replaces the per-record
 * StringBuffer allocation.
 */
static void writetest(String fileName, int num, boolean var, boolean compress) throws IOException {
    long start = System.currentTimeMillis();
    Configuration conf = new Configuration();
    IFormatDataFile fdf = new IFormatDataFile(conf);

    // Declare one field per primitive type; the string field only when `var`.
    IFieldMap fieldMap = new IFieldMap();
    fieldMap.addFieldType(new IFieldType.IFieldByteType());
    fieldMap.addFieldType(new IFieldType.IFieldShortType());
    fieldMap.addFieldType(new IFieldType.IFieldIntType());
    fieldMap.addFieldType(new IFieldType.IFieldLongType());
    fieldMap.addFieldType(new IFieldType.IFieldFloatType());
    fieldMap.addFieldType(new IFieldType.IFieldDoubleType());
    if (var)
        fieldMap.addFieldType(new IFieldType.IFieldStringType());

    IHead head = new IHead();
    head.setFieldMap(fieldMap);
    head.setPrimaryIndex((short) 2);
    head.setCompress((byte) (compress ? 1 : 0));
    head.setCompressStyle(ConstVar.LZOCompress);

    // NOTE(review): udi is populated but never attached to `head` or `fdf` —
    // confirm whether an attach call (e.g. on the head) was intended.
    IUserDefinedHeadInfo udi = new IUserDefinedHeadInfo();
    udi.addInfo(0, fileName);

    fdf.create(fileName, head);
    try {
        // Reused across iterations instead of allocating a buffer per record.
        StringBuilder sb = new StringBuilder(100);
        for (int i = 0; i < num; i++) {
            IRecord record = new IRecord();
            record.addFieldValue(new IFieldValue((byte) i));
            record.addFieldValue(new IFieldValue((short) (2 * i)));
            record.addFieldValue(new IFieldValue(3 * i));
            record.addFieldValue(new IFieldValue((long) 4 * i));
            record.addFieldValue(new IFieldValue((float) 5 * i));
            record.addFieldValue(new IFieldValue((double) 6 * i, (short) 5));
            if (var) {
                sb.setLength(0);
                for (int j = 0; j < i % 100 + 1; j++) {
                    sb.append('a');
                }
                record.addFieldValue(new IFieldValue(sb.toString()));
            }

            fdf.addRecord(record);
            if (i % 1000000 == 0)
                System.out.println(i); // progress marker every million records
        }
    } finally {
        // Close even if a write failed part-way through.
        fdf.close();
    }
    System.out.println(num + "\trecords-->\twritetime:\t" + (System.currentTimeMillis() - start));
}