Example usage for org.apache.hadoop.conf Configuration Configuration


Introduction

On this page you can find example usage of the no-argument constructor of org.apache.hadoop.conf.Configuration.

Prototype

public Configuration() 


Document

A new configuration. The no-argument constructor loads the default resources core-default.xml and core-site.xml from the classpath.
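
Before the collected examples under Usage below, here is a minimal, self-contained sketch of typical use. The class name ConfigurationExample, the hdfs://localhost:9000 URI, and the property values are illustrative assumptions, not taken from the examples; the property keys are standard Hadoop keys.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class ConfigurationExample {
    public static void main(String[] args) throws IOException {
        // Create a new Configuration; core-default.xml and core-site.xml
        // on the classpath are picked up as default resources.
        Configuration conf = new Configuration();

        // Override a property programmatically (illustrative value).
        conf.set("fs.defaultFS", "hdfs://localhost:9000");

        // Read a property back with a fallback default.
        int bufferSize = conf.getInt("io.file.buffer.size", 4096);
        System.out.println("io.file.buffer.size = " + bufferSize);

        // The Configuration is usually handed to other Hadoop objects,
        // e.g. to obtain a FileSystem for the configured default URI.
        FileSystem fs = FileSystem.get(conf);
        System.out.println("Working directory: " + fs.getWorkingDirectory());
        fs.close();
    }
}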

Usage

From source file:TestPerformance.java

License:Open Source License

private static void genifdf(String fileName, int recordnum, boolean var) throws IOException {
    Configuration conf = new Configuration();
    IFormatDataFile fdf = new IFormatDataFile(conf);
    IHead head = new IHead();

    IFieldMap fieldMap = new IFieldMap();
    fieldMap.addFieldType(new IFieldType.IFieldByteType());
    fieldMap.addFieldType(new IFieldType.IFieldShortType());
    fieldMap.addFieldType(new IFieldType.IFieldIntType());
    fieldMap.addFieldType(new IFieldType.IFieldLongType());
    fieldMap.addFieldType(new IFieldType.IFieldFloatType());
    fieldMap.addFieldType(new IFieldType.IFieldDoubleType());
    if (var)
        fieldMap.addFieldType(new IFieldType.IFieldStringType());

    head.setFieldMap(fieldMap);

    IUserDefinedHeadInfo udi = new IUserDefinedHeadInfo();
    udi.addInfo(0, fileName);

    fdf.create(fileName, head);
    for (int i = 0; i < recordnum; i++) {
        IRecord record = new IRecord();
        // r is assumed to be a class-level java.util.Random field in TestPerformance (not shown in this excerpt).
        record.addFieldValue(new IFieldValue((byte) r.nextInt()));
        record.addFieldValue(new IFieldValue((short) (r.nextInt())));
        record.addFieldValue(new IFieldValue(r.nextInt()));
        record.addFieldValue(new IFieldValue(r.nextLong()));
        record.addFieldValue(new IFieldValue(r.nextFloat()));
        record.addFieldValue(new IFieldValue(r.nextDouble()));
        if (var) {
            StringBuffer sb = new StringBuffer(100);
            for (int j = 0; j < i % 100 + 1; j++) {
                sb.append("a");
            }
            record.addFieldValue(new IFieldValue(sb.toString()));
        }

        fdf.addRecord(record);
        if ((i + 1) % 10000000 == 0)
            System.out.println(i + 1);
    }
    fdf.close();
}

From source file:PT.java

License:Open Source License

public static void testreadcolumnseq(String filename, int num, boolean compress, String mode) throws Exception {

    Path path = new Path(filename);
    ArrayList<Short> vector = new ArrayList<Short>(10);

    if (mode == null || mode.equals("-1")) {
        for (short i = 0; i < 7; i++) {
            vector.add(i);
        }
    } else if (mode.equals("half")) {
        short x = 0;
        vector.add(x);
        x = 1;
        vector.add(x);
        x = 2;
        vector.add(x);
        x = 3;
        vector.add(x);
    } else {
        vector.add(Short.valueOf(mode));
    }

    Configuration conf = new Configuration();
    ColumnStorageClient client = new ColumnStorageClient(path, vector, conf);

    if (compress) {
        for (int i = 0; i < num; i++) {
            client.getNextRecord();
        }

    } else {

        for (int i = 0; i < num; i++) {
            client.getRecordByLine(i);
        }
    }

    client.close();

}

From source file:PT.java

License:Open Source License

public static void testreadcolumnrand(String filename, int num, int size, String mode) throws Exception {
    Path path = new Path(filename);
    ArrayList<Short> vector = new ArrayList<Short>();

    if (mode == null || mode.equals("-1")) {
        for (short i = 0; i < 7; i++) {
            vector.add(i);
        }
    } else if (mode.equals("half")) {
        short x = 0;
        vector.add(x);
        x = 1;
        vector.add(x);
        x = 2;
        vector.add(x);
        x = 3;
        vector.add(x);
    } else {
        vector.add(Short.valueOf(mode));
    }

    Configuration conf = new Configuration();
    ColumnStorageClient client = new ColumnStorageClient(path, vector, conf);
    Random r = new Random();
    for (int i = 0; i < num; i++) {
        client.getRecordByLine(r.nextInt(size));
        if (i % 1000000 == 0) {
        }
    }
    client.close();

}

From source file:PT.java

License:Open Source License

private static FormatDataFile[] createfdfs(String filename, boolean compress) throws Exception {
    // fs is assumed to be a class-level FileSystem handle in PT (not shown in this excerpt).
    if (fs.exists(new Path(filename)))
        fs.delete(new Path(filename), true);
    String fn = filename.endsWith("/") ? filename : (filename + "/");

    String byteFileName = fn + "Column_Byte";
    String shortFileName = fn + "Column_Short";
    String intFileName = fn + "Column_Int";
    String longFileName = fn + "Column_Long";
    String floatFileName = fn + "Column_Float";
    String doubleFileName = fn + "Column_Double";
    String stringFileName = fn + "Column_String";
    FormatDataFile[] fdfs = new FormatDataFile[7];
    Configuration conf = new Configuration();

    FieldMap byteFieldMap = new FieldMap();
    byteFieldMap.addField(new Field(ConstVar.FieldType_Byte, ConstVar.Sizeof_Byte, (short) 0));
    Head byteHead = new Head();
    byteHead.setCompress(compress ? (byte) 1 : (byte) 0);
    byteHead.setCompressStyle(ConstVar.LZOCompress);
    byteHead.setFieldMap(byteFieldMap);
    FormatDataFile byteFD = new FormatDataFile(conf);
    byteFD.create(byteFileName, byteHead);

    FieldMap shortFieldMap = new FieldMap();
    shortFieldMap.addField(new Field(ConstVar.FieldType_Short, ConstVar.Sizeof_Short, (short) 1));
    Head shortHead = new Head();
    shortHead.setCompress(compress ? (byte) 1 : (byte) 0);
    shortHead.setCompressStyle(ConstVar.LZOCompress);
    shortHead.setFieldMap(shortFieldMap);
    FormatDataFile shortFD = new FormatDataFile(conf);
    shortFD.create(shortFileName, shortHead);

    FieldMap intFieldMap = new FieldMap();
    intFieldMap.addField(new Field(ConstVar.FieldType_Int, ConstVar.Sizeof_Int, (short) 2));
    Head intHead = new Head();
    intHead.setCompress(compress ? (byte) 1 : (byte) 0);
    intHead.setCompressStyle(ConstVar.LZOCompress);
    intHead.setFieldMap(intFieldMap);
    FormatDataFile intFD = new FormatDataFile(conf);
    intFD.create(intFileName, intHead);

    FieldMap longFieldMap = new FieldMap();
    longFieldMap.addField(new Field(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, (short) 3));
    Head longHead = new Head();
    longHead.setFieldMap(longFieldMap);
    longHead.setCompress(compress ? (byte) 1 : (byte) 0);
    longHead.setCompressStyle(ConstVar.LZOCompress);
    FormatDataFile longFD = new FormatDataFile(conf);
    longFD.create(longFileName, longHead);

    FieldMap floatFieldMap = new FieldMap();
    floatFieldMap.addField(new Field(ConstVar.FieldType_Float, ConstVar.Sizeof_Float, (short) 4));
    Head floatHead = new Head();
    floatHead.setCompress(compress ? (byte) 1 : (byte) 0);
    floatHead.setCompressStyle(ConstVar.LZOCompress);
    floatHead.setFieldMap(floatFieldMap);
    FormatDataFile floatFD = new FormatDataFile(conf);
    floatFD.create(floatFileName, floatHead);

    FieldMap doubleFieldMap = new FieldMap();
    doubleFieldMap.addField(new Field(ConstVar.FieldType_Double, ConstVar.Sizeof_Double, (short) 5));
    Head doubleHead = new Head();
    doubleHead.setCompress(compress ? (byte) 1 : (byte) 0);
    doubleHead.setCompressStyle(ConstVar.LZOCompress);
    doubleHead.setFieldMap(doubleFieldMap);
    FormatDataFile doubleFD = new FormatDataFile(conf);
    doubleFD.create(doubleFileName, doubleHead);

    FieldMap strFieldMap = new FieldMap();
    strFieldMap.addField(new Field(ConstVar.FieldType_String, 0, (short) 6));
    Head strHead = new Head();
    strHead.setCompress(compress ? (byte) 1 : (byte) 0);
    strHead.setCompressStyle(ConstVar.LZOCompress);
    strHead.setFieldMap(strFieldMap);
    FormatDataFile strFD = new FormatDataFile(conf);
    strFD.create(stringFileName, strHead);

    fdfs[0] = byteFD;
    fdfs[1] = shortFD;
    fdfs[2] = intFD;
    fdfs[3] = longFD;
    fdfs[4] = floatFD;
    fdfs[5] = doubleFD;
    fdfs[6] = strFD;
    return fdfs;
}

From source file:PT.java

License:Open Source License

public static FormatDataFile createfdf(String filename, boolean compress, short keyindex) throws Exception {
    Head head = new Head();
    FieldMap fieldMap = new FieldMap();

    fieldMap.addField(new Field(ConstVar.FieldType_Byte, ConstVar.Sizeof_Byte, (short) 0));
    fieldMap.addField(new Field(ConstVar.FieldType_Short, ConstVar.Sizeof_Short, (short) 1));
    fieldMap.addField(new Field(ConstVar.FieldType_Int, ConstVar.Sizeof_Int, (short) 2));
    fieldMap.addField(new Field(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, (short) 3));
    fieldMap.addField(new Field(ConstVar.FieldType_Float, ConstVar.Sizeof_Float, (short) 4));
    fieldMap.addField(new Field(ConstVar.FieldType_Double, ConstVar.Sizeof_Double, (short) 5));
    fieldMap.addField(new Field(ConstVar.FieldType_String, 0, (short) 6));

    head.setFieldMap(fieldMap);
    head.setPrimaryIndex(keyindex);

    if (compress) {
        head.setCompress((byte) 1);
        head.setCompressStyle(ConstVar.LZOCompress);
    }

    Configuration conf = new Configuration();
    FormatDataFile fd = new FormatDataFile(conf);
    fd.create(filename, head);
    return fd;

}

From source file:removeDup.java

License:Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        System.err.println("Usage: removeDup <in> [<in>...] <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "removeDup");
    job.setJarByClass(removeDup.class);
    job.setMapperClass(Map.class);
    //job.setCombinerClass(Reduce.class);
    job.setReducerClass(Reduce.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    for (int i = 0; i < otherArgs.length - 1; ++i) {
        FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
    }
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:BasicCrawler.java

License:Apache License

private void writeToFile(String url, Set<WebURL> links) {
    Configuration conf = new Configuration();
    FileSystem fs1;
    try {
        //         fs1 = FileSystem.get(
        //               URI.create("src/main/resources/url_map.txt"), conf);
        //         OutputStream file3 = fs1.create(new Path("src/main/resources/url_map.txt"));
        // bw is assumed to be a writer field initialized elsewhere in BasicCrawler (not shown in this excerpt).
        bw.write(url + "," + links.toString() + "\n");
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:ReadAllTest.java

License:Apache License

public static void main(String[] args) throws Exception {
    if (args.length < 2) {
        System.out.println("ReadAllTest: must supply the HDFS uri and file to read");
        System.exit(1);
    }
    String hdfsUri = args[0];
    String fileName = args[1];
    final Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI(hdfsUri), conf);

    byte[] ORIGINAL = new byte[10];
    for (int i = 0; i < ORIGINAL.length; i++) {
        ORIGINAL[i] = (byte) i;
    }
    FSDataOutputStream out = fs.create(new Path(fileName), (short) 1);
    try {
        out.write(ORIGINAL);
    } finally {
        out.close();
    }
    byte[] input = new byte[ORIGINAL.length];
    FSDataInputStream in = fs.open(new Path(fileName));
    try {
        in.readFully(input);
    } finally {
        in.close();
    }
    in = fs.open(new Path(fileName));
    try {
        in.readFully(0, input);
    } finally {
        in.close();
    }
}

From source file:WriteFDFPerformance.java

License:Open Source License

static void writefdf(String filename, int num) throws Exception {
    Head head = new Head();
    FieldMap fieldMap = new FieldMap();

    fieldMap.addField(new Field(ConstVar.FieldType_Byte, ConstVar.Sizeof_Byte, (short) 0));
    fieldMap.addField(new Field(ConstVar.FieldType_Short, ConstVar.Sizeof_Short, (short) 1));
    fieldMap.addField(new Field(ConstVar.FieldType_Int, ConstVar.Sizeof_Int, (short) 2));
    fieldMap.addField(new Field(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, (short) 3));
    fieldMap.addField(new Field(ConstVar.FieldType_Float, ConstVar.Sizeof_Float, (short) 4));
    fieldMap.addField(new Field(ConstVar.FieldType_Double, ConstVar.Sizeof_Double, (short) 5));
    fieldMap.addField(new Field(ConstVar.FieldType_String, 0, (short) 6));

    head.setFieldMap(fieldMap);

    Configuration conf = new Configuration();
    FormatDataFile fd = new FormatDataFile(conf);
    fd.create(filename, head);

    for (int i = 0; i < num; i++) {
        Record record = new Record((short) 7);
        record.addValue(new FieldValue((byte) (1 + i), (short) 0));
        record.addValue(new FieldValue((short) (2 + i), (short) 1));
        record.addValue(new FieldValue((int) (3 + i), (short) 2));
        record.addValue(new FieldValue((long) (4 + i), (short) 3));
        record.addValue(new FieldValue((float) (5.5 + i), (short) 4));
        record.addValue(new FieldValue((double) (6.6 + i), (short) 5));
        // str and r are assumed to be class-level String and java.util.Random fields (not shown in this excerpt).
        record.addValue(new FieldValue(i + " " + str.substring(r.nextInt(str.length() / 2),
                str.length() / 2 - 1 + r.nextInt(str.length() / 2)), (short) 6));
        fd.addRecord(record);
    }

    fd.close();

}

From source file:WriteFDFPerformance.java

License:Open Source License

static void writefdfbyte() throws Exception {
    Head head = new Head();
    FieldMap fieldMap = new FieldMap();

    fieldMap.addField(new Field(ConstVar.FieldType_Byte, ConstVar.Sizeof_Byte, (short) 0));

    head.setFieldMap(fieldMap);

    Configuration conf = new Configuration();
    FormatDataFile fd = new FormatDataFile(conf);
    fd.create("test1/test", head);

    for (int i = -128; i <= 127; i++) {
        Record record = new Record((short) 1);
        record.addValue(new FieldValue((byte) (i), (short) 0));
        System.out.println(i);
        fd.addRecord(record);
    }

    fd.close();

}