Example usage for org.apache.hadoop.conf Configuration setClass

List of usage examples for org.apache.hadoop.conf Configuration setClass

Introduction

On this page you can find example usages of org.apache.hadoop.conf Configuration setClass.

Prototype

public void setClass(String name, Class<?> theClass, Class<?> xface) 

Document

Set the value of the name property to the name of theClass, which must implement the given interface xface.
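
As a quick illustration of how a value stored with setClass is typically read back, here is a minimal sketch (not taken from the projects below); the property name "example.codec.class" and the Codec/GzipLikeCodec types are made up for this example:

import org.apache.hadoop.conf.Configuration;

public class SetClassExample {

    // Marker interface used only for this sketch.
    interface Codec {}

    // A hypothetical implementation of the interface.
    static class GzipLikeCodec implements Codec {}

    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Store the implementation class under a property name of our own choosing;
        // setClass checks that GzipLikeCodec actually implements Codec.
        conf.setClass("example.codec.class", GzipLikeCodec.class, Codec.class);

        // Read it back; the second argument is the default used when the property
        // is unset, and the third restricts the result to subtypes of Codec.
        Class<? extends Codec> codecClass =
                conf.getClass("example.codec.class", GzipLikeCodec.class, Codec.class);

        System.out.println(codecClass.getName()); // SetClassExample$GzipLikeCodec
    }
}

The interface argument is what makes setClass safer than a plain conf.set of a class name: the mismatch is caught at configuration time rather than when the class is later instantiated.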

Usage

From source file:org.apache.beam.sdk.io.hadoop.format.HadoopFormatIOReadTest.java

License:Apache License

/**
 * This test validates functionality of {@link
 * HadoopFormatIO.Read#withConfiguration(Configuration) withConfiguration(Configuration)} function
 * when value class is not provided by the user in configuration.
 */
@Test
public void testReadValidationFailsMissingValueClassInConf() {
    Configuration configuration = new Configuration();
    configuration.setClass("mapreduce.job.inputformat.class", EmployeeInputFormat.class, InputFormat.class);
    configuration.setClass("key.class", Text.class, Object.class);
    thrown.expect(IllegalArgumentException.class);
    HadoopFormatIO.<Text, Employee>read().withConfiguration(configuration);
}

From source file:org.apache.beam.sdk.io.hadoop.format.HadoopFormatIOReadTest.java

License:Apache License

@Test
public void testValidateConfigurationWithDBInputFormat() {
    Configuration conf = new Configuration();
    conf.setClass("key.class", LongWritable.class, Object.class);
    conf.setClass("value.class", Text.class, Object.class);
    conf.setClass("mapreduce.job.inputformat.class", DBInputFormat.class, InputFormat.class);

    thrown.expect(IllegalArgumentException.class);
    HadoopFormatIO.<String, String>read().withConfiguration(new SerializableConfiguration(conf).get())
            .withKeyTranslation(myKeyTranslate).withValueTranslation(myValueTranslate);
}

From source file:org.apache.beam.sdk.io.hadoop.format.HadoopFormatIOReadTest.java

License:Apache License

private static SerializableConfiguration loadTestConfiguration(Class<?> inputFormatClassName, Class<?> keyClass,
        Class<?> valueClass) {
    Configuration conf = new Configuration();
    conf.setClass("mapreduce.job.inputformat.class", inputFormatClassName, InputFormat.class);
    conf.setClass("key.class", keyClass, Object.class);
    conf.setClass("value.class", valueClass, Object.class);
    return new SerializableConfiguration(conf);
}

From source file:org.apache.beam.sdk.io.hadoop.format.HadoopFormatIOSequenceFileTest.java

License:Apache License

private static Configuration getConfiguration(Class<?> outputFormatClass, Class<?> keyClass,
        Class<?> valueClass, String path, Integer reducersCount, String jobId) {
    Configuration conf = new Configuration();

    conf.setClass(HadoopFormatIO.OUTPUT_FORMAT_CLASS_ATTR, outputFormatClass, OutputFormat.class);
    conf.setClass(HadoopFormatIO.OUTPUT_KEY_CLASS, keyClass, Object.class);
    conf.setClass(HadoopFormatIO.OUTPUT_VALUE_CLASS, valueClass, Object.class);
    conf.setInt(HadoopFormatIO.NUM_REDUCES, reducersCount);
    conf.set(HadoopFormatIO.OUTPUT_DIR, path);
    conf.set(HadoopFormatIO.JOB_ID, jobId);
    return conf;
}

From source file:org.apache.beam.sdk.io.hadoop.format.HadoopFormatIOTest.java

License:Apache License

private static Configuration loadTestConfiguration(Class<?> outputFormatClassName, Class<?> keyClass,
        Class<?> valueClass) {
    Configuration conf = new Configuration();
    conf.setClass(MRJobConfig.OUTPUT_FORMAT_CLASS_ATTR, outputFormatClassName, OutputFormat.class);
    conf.setClass(MRJobConfig.OUTPUT_KEY_CLASS, keyClass, Object.class);
    conf.setClass(MRJobConfig.OUTPUT_VALUE_CLASS, valueClass, Object.class);
    conf.setInt(MRJobConfig.NUM_REDUCES, REDUCERS_COUNT);
    conf.set(MRJobConfig.ID, String.valueOf(1));
    return conf;
}

From source file:org.apache.beam.sdk.io.hadoop.format.HadoopFormatIOTest.java

License:Apache License

/**
 * This test validates functionality of {@link
 * HadoopFormatIO.Write.Builder#withConfiguration(Configuration) withConfiguration(Configuration)}
 * function when Hadoop OutputFormat class is not provided by the user in configuration.
 */
@Test
public void testWriteValidationFailsMissingOutputFormatInConf() {
    Configuration configuration = new Configuration();
    configuration.setClass(HadoopFormatIO.OUTPUT_KEY_CLASS, Text.class, Object.class);
    configuration.setClass(HadoopFormatIO.OUTPUT_VALUE_CLASS, Employee.class, Object.class);

    HadoopFormatIO.Write<Text, Employee> writeWithWrongConfig = HadoopFormatIO.<Text, Employee>write()
            .withConfiguration(configuration).withPartitioning()
            .withExternalSynchronization(new HDFSSynchronization(getLocksDirPath()));

    p.apply(Create.of(TestEmployeeDataSet.getEmployeeData()))
            .setTypeDescriptor(TypeDescriptors.kvs(new TypeDescriptor<Text>() {
            }, new TypeDescriptor<Employee>() {
            })).apply("Write", writeWithWrongConfig);

    thrown.expect(Pipeline.PipelineExecutionException.class);
    thrown.expectMessage("Configuration must contain \"mapreduce.job.outputformat.class\"");

    p.run().waitUntilFinish();
}

From source file:org.apache.beam.sdk.io.hadoop.format.HadoopFormatIOTest.java

License:Apache License

/**
 * This test validates functionality of {@link
 * HadoopFormatIO.Write.Builder#withConfiguration(Configuration) withConfiguration(Configuration)}
 * function when key class is not provided by the user in configuration.
 */
@Test
public void testWriteValidationFailsMissingKeyClassInConf() {
    Configuration configuration = new Configuration();
    configuration.setClass(HadoopFormatIO.OUTPUT_FORMAT_CLASS_ATTR, TextOutputFormat.class, OutputFormat.class);
    configuration.setClass(HadoopFormatIO.OUTPUT_VALUE_CLASS, Employee.class, Object.class);

    runValidationPipeline(configuration);

    thrown.expect(Pipeline.PipelineExecutionException.class);
    thrown.expectMessage("Configuration must contain \"mapreduce.job.output.key.class\"");

    p.run().waitUntilFinish();
}

From source file:org.apache.beam.sdk.io.hadoop.format.HadoopFormatIOTest.java

License:Apache License

/**
 * This test validates functionality of {@link
 * HadoopFormatIO.Write.Builder#withConfiguration(Configuration) withConfiguration(Configuration)}
 * function when value class is not provided by the user in configuration.
 */
@Test
public void testWriteValidationFailsMissingValueClassInConf() {
    Configuration configuration = new Configuration();
    configuration.setClass(HadoopFormatIO.OUTPUT_FORMAT_CLASS_ATTR, TextOutputFormat.class, OutputFormat.class);
    configuration.setClass(HadoopFormatIO.OUTPUT_KEY_CLASS, Text.class, Object.class);

    runValidationPipeline(configuration);

    thrown.expect(Pipeline.PipelineExecutionException.class);
    thrown.expectMessage("Configuration must contain \"mapreduce.job.output.value.class\"");

    p.run().waitUntilFinish();
}

From source file:org.apache.beam.sdk.io.hadoop.format.HadoopFormatIOTest.java

License:Apache License

/**
 * This test validates functionality of {@link
 * HadoopFormatIO.Write.Builder#withConfiguration(Configuration) withConfiguration(Configuration)}
 * function when job id is not provided by the user in configuration.
 */
@Test
public void testWriteValidationFailsMissingJobIDInConf() {
    Configuration configuration = new Configuration();
    configuration.setClass(HadoopFormatIO.OUTPUT_FORMAT_CLASS_ATTR, TextOutputFormat.class, OutputFormat.class);
    configuration.setClass(HadoopFormatIO.OUTPUT_KEY_CLASS, Text.class, Object.class);
    configuration.setClass(HadoopFormatIO.OUTPUT_VALUE_CLASS, Employee.class, Object.class);
    configuration.set(HadoopFormatIO.OUTPUT_DIR, tmpFolder.getRoot().getAbsolutePath());

    runValidationPipeline(configuration);

    thrown.expect(Pipeline.PipelineExecutionException.class);
    thrown.expectMessage("Configuration must contain \"mapreduce.job.id\"");

    p.run().waitUntilFinish();
}

From source file:org.apache.beam.sdk.io.hadoop.inputformat.HadoopInputFormatIOIT.java

License:Apache License

private static void setupHadoopConfiguration(PostgresIOTestPipelineOptions options) {
    Configuration conf = new Configuration();
    DBConfiguration.configureDB(conf, "org.postgresql.Driver", DatabaseTestHelper.getPostgresDBUrl(options),
            options.getPostgresUsername(), options.getPostgresPassword());
    conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, tableName);
    conf.setStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY, "id", "name");
    conf.set(DBConfiguration.INPUT_ORDER_BY_PROPERTY, "id ASC");
    conf.setClass(DBConfiguration.INPUT_CLASS_PROPERTY, TestRowDBWritable.class, DBWritable.class);

    conf.setClass("key.class", LongWritable.class, Object.class);
    conf.setClass("value.class", TestRowDBWritable.class, Object.class);
    conf.setClass("mapreduce.job.inputformat.class", DBInputFormat.class, InputFormat.class);

    hadoopConfiguration = new SerializableConfiguration(conf);
}