Example usage for org.apache.hadoop.conf Configuration Configuration

List of usage examples for org.apache.hadoop.conf Configuration Configuration

Introduction

In this page you can find the example usage for org.apache.hadoop.conf Configuration Configuration.

Prototype

public Configuration() 

Source Link

Document

A new configuration.

Usage

From source file:HBaseAppender.java

License:Apache License

/**
 * Public setter so the table name can be configured in log4j.properties.
 * As a side effect, (re)opens the HBase table connection under the new
 * name; any failure is routed through the log4j error handler so the
 * appender itself never throws.
 *
 * NOTE(review): a previously opened {@code htable} is not closed before
 * being replaced — possible resource leak if this setter runs twice; verify.
 *
 * @param tableName name of the HBase table this appender writes to
 */
public void setTableName(String tableName) {
    this.tableName = tableName;
    // Layer HBase-specific resources (hbase-site.xml, ...) on a fresh Hadoop Configuration.
    Configuration conf = HBaseConfiguration.create(new Configuration());
    try {
        this.htable = new HTable(conf, tableName);
    } catch (IOException e) {
        // Report via the appender's ErrorHandler rather than propagating.
        errorHandler.error("Error opening HBase table", e, ErrorCode.GENERIC_FAILURE);
    }
}

From source file:PT1.java

License:Open Source License

/**
 * Sequentially reads {@code num} records from the column file at
 * {@code filename}.
 *
 * Column selection by {@code mode}: {@code null} or {@code "-1"} selects
 * columns 0..6, {@code "half"} selects columns 0..3, anything else is
 * parsed as a single column index.
 *
 * @param filename path of the column-storage file to read
 * @param num      number of records to fetch
 * @param compress when true, iterate with getNextRecord(); otherwise
 *                 fetch each record by explicit line number
 * @param mode     column-selection mode (see above)
 * @throws Exception propagated from the storage client
 */
static void testreadcolumnseq(String filename, int num, boolean compress, String mode) throws Exception {
    Path path = new Path(filename);
    ArrayList<Short> columnIds = new ArrayList<Short>(10);

    if (mode == null || mode.equals("-1")) {
        // All seven columns.
        for (short id = 0; id < 7; id++) {
            columnIds.add(id);
        }
    } else if (mode.equals("half")) {
        // First four columns only.
        for (short id = 0; id < 4; id++) {
            columnIds.add(id);
        }
    } else {
        // A single caller-specified column index.
        columnIds.add(Short.valueOf(mode));
    }

    ColumnStorageClient client = new ColumnStorageClient(path, columnIds, new Configuration());

    for (int i = 0; i < num; i++) {
        if (compress) {
            client.getNextRecord();
        } else {
            client.getRecordByLine(i);
        }
    }

    client.close();
}

From source file:PT1.java

License:Open Source License

/**
 * Reads {@code num} records from the column file at {@code filename},
 * each at a uniformly random line number in [0, size).
 *
 * Column selection by {@code mode}: {@code null} or {@code "-1"} selects
 * columns 0..6, {@code "half"} selects columns 0..3, anything else is
 * parsed as a single column index.
 *
 * Fix: removed a leftover empty {@code if (i % 1000000 == 0) {}}
 * progress-logging stub — it had no body and no effect.
 *
 * @param filename path of the column-storage file to read
 * @param num      number of random reads to perform
 * @param size     exclusive upper bound for the random line number
 * @param mode     column-selection mode (see above)
 * @throws Exception propagated from the storage client
 */
static void testreadcolumnrand(String filename, int num, int size, String mode) throws Exception {
    Path path = new Path(filename);
    ArrayList<Short> vector = new ArrayList<Short>();

    if (mode == null || mode.equals("-1")) {
        // All seven columns.
        for (short i = 0; i < 7; i++) {
            vector.add(i);
        }
    } else if (mode.equals("half")) {
        // First four columns only.
        for (short i = 0; i < 4; i++) {
            vector.add(i);
        }
    } else {
        // A single caller-specified column index.
        vector.add(Short.valueOf(mode));
    }

    Configuration conf = new Configuration();
    ColumnStorageClient client = new ColumnStorageClient(path, vector, conf);
    Random r = new Random();
    for (int i = 0; i < num; i++) {
        client.getRecordByLine(r.nextInt(size));
    }
    client.close();
}

From source file:PT1.java

License:Open Source License

/**
 * Creates and opens a FormatDataFile named {@code filename} whose schema
 * contains one field of each column type (byte, short, int, long, float,
 * double, string), optionally marked for LZO compression.
 *
 * @param filename path of the data file to create
 * @param compress when true, enable LZO compression in the file head
 * @return the newly created FormatDataFile
 * @throws Exception propagated from file creation
 */
private static FormatDataFile createfdf(String filename, boolean compress) throws Exception {
    // Schema: seven fields, ids 0..6, one per supported column type.
    FieldMap schema = new FieldMap();
    schema.addField(new Field(ConstVar.FieldType_Byte, ConstVar.Sizeof_Byte, (short) 0));
    schema.addField(new Field(ConstVar.FieldType_Short, ConstVar.Sizeof_Short, (short) 1));
    schema.addField(new Field(ConstVar.FieldType_Int, ConstVar.Sizeof_Int, (short) 2));
    schema.addField(new Field(ConstVar.FieldType_Long, ConstVar.Sizeof_Long, (short) 3));
    schema.addField(new Field(ConstVar.FieldType_Float, ConstVar.Sizeof_Float, (short) 4));
    schema.addField(new Field(ConstVar.FieldType_Double, ConstVar.Sizeof_Double, (short) 5));
    // String fields are variable-length, hence size 0.
    schema.addField(new Field(ConstVar.FieldType_String, 0, (short) 6));

    Head head = new Head();
    head.setFieldMap(schema);
    if (compress) {
        head.setCompress((byte) 1);
        head.setCompressStyle(ConstVar.LZOCompress);
    }

    FormatDataFile fd = new FormatDataFile(new Configuration());
    fd.create(filename, head);
    return fd;
}

From source file:BP.java

License:Apache License

/**
 * Entry point: runs the BP tool through Hadoop's ToolRunner (which parses
 * generic options into the Configuration) and exits with its status code.
 *
 * @param args command-line arguments, forwarded to the tool
 * @throws Exception propagated from ToolRunner
 */
public static void main(final String[] args) throws Exception {
    System.exit(ToolRunner.run(new Configuration(), new BP(), args));
}

From source file:StressTest.java

License:Apache License

/**
 * Entry point: configures and submits the tweet-categorization MapReduce
 * job, then exits with the job's completion status.
 *
 * Expects exactly two remaining arguments after generic-option parsing:
 * the input path and the output path.
 *
 * NOTE(review): the jar is located via {@code TweetCategorizer.class}
 * even though this snippet comes from StressTest.java — confirm the two
 * classes ship in the same jar, otherwise this should likely be
 * {@code StressTest.class}.
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // conf.addResource(new Path("../../env_vars"));

    // Strip Hadoop generic options (-D, -files, ...) from args first.
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: TweetCategorizer <in> <out>");
        System.exit(2);
    }

    Job job = new Job(conf, "categorize tweets");
    job.setJarByClass(TweetCategorizer.class);
    job.setMapperClass(TokenizerMapper.class);
    // job.setCombinerClass(IntSumReducer.class);
    // job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    // Block until the job finishes; exit 0 on success, 1 on failure.
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:DisplayFuzzyKMeans.java

License:Apache License

public static void main(String[] args) throws Exception {
    DistanceMeasure measure = new ManhattanDistanceMeasure();

    Path samples = new Path("samples");
    Path output = new Path("output");
    Configuration conf = new Configuration();
    HadoopUtil.delete(conf, output);/*from w w  w  .  j a  v a2s. c  om*/
    HadoopUtil.delete(conf, samples);
    RandomUtils.useTestSeed();
    DisplayClustering.generateSamples();
    writeSampleData(samples);
    boolean runClusterer = true;
    int maxIterations = 10;
    float threshold = 0.001F;
    float m = 1.1F;
    if (runClusterer) {
        runSequentialFuzzyKClusterer(conf, samples, output, measure, maxIterations, m, threshold);
    } else {
        int numClusters = 3;
        runSequentialFuzzyKClassifier(conf, samples, output, measure, numClusters, maxIterations, m, threshold);
    }
    new DisplayFuzzyKMeans();
}

From source file:PerTaskTally.java

License:Apache License

/**
 * Entry point: configures and submits the per-task-tally word-count job,
 * then exits with the job's completion status.
 *
 * Expects exactly two remaining arguments after generic-option parsing:
 * the input path and the output path.
 *
 * Fix: {@code setJarByClass} previously referenced {@code PerMapTally.class}
 * (a copy-paste from a sibling example); the jar should be located from
 * this driver's own class, {@code PerTaskTally.class}, so job submission
 * does not depend on an unrelated class's jar.
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Strip Hadoop generic options (-D, -files, ...) from args first.
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: wordcount <in> <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "word count");
    job.setJarByClass(PerTaskTally.class);
    job.setMapperClass(TokenizerMapper.class);
    // Aniket changes starts
    /* Here the partitioner is being called*/
    job.setPartitionerClass(WordPartitioner.class);
    // Aniket changes ends
    // Part 4 Aniket changes starts
    /* Here I am just disabling the combiner */
    // job.setCombinerClass(IntSumReducer.class);
    // Part 4 Aniket changes ends
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    // Block until the job finishes; exit 0 on success, 1 on failure.
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:WordCount_PerTaskTally.java

License:Apache License

/**
 * Entry point: configures and submits the word-count job with a custom
 * partitioner, five reduce tasks, and no combiner, then exits with the
 * job's completion status.
 *
 * Expects exactly two remaining arguments after generic-option parsing:
 * the input path and the output path.
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // Strip Hadoop generic options (-D, -files, ...) from args first.
    String[] remaining = new GenericOptionsParser(conf, args).getRemainingArgs();

    if (remaining.length != 2) {
        System.err.println("Usage: wordcount <in> <out>");
        System.exit(2);
    }

    Job job = new Job(conf, "word count");
    job.setJarByClass(WordCount_PerTaskTally.class);

    // Mapper tokenizes input; combiner deliberately disabled.
    job.setMapperClass(TokenizerMapper.class);
    // job.setCombinerClass(IntSumReducer.class);

    // Route words to one of five reducers via the custom partitioner.
    job.setPartitionerClass(WordPartitioner.class);
    job.setNumReduceTasks(5);
    job.setReducerClass(IntSumReducer.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(remaining[0]));
    FileOutputFormat.setOutputPath(job, new Path(remaining[1]));

    // Block until the job finishes; exit 0 on success, 1 on failure.
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:SleepJobWithArray.java

License:Apache License

/**
 * Entry point: runs the SleepJobWithArray tool through Hadoop's ToolRunner
 * and exits with its status code.
 *
 * @param args command-line arguments, forwarded to the tool
 * @throws Exception propagated from ToolRunner
 */
public static void main(String[] args) throws Exception {
    System.exit(ToolRunner.run(new Configuration(), new SleepJobWithArray(), args));
}