Example usage for org.apache.hadoop.conf Configuration setClass

Introduction

This page lists usage examples for org.apache.hadoop.conf.Configuration.setClass, collected from open source projects.

Prototype

public void setClass(String name, Class<?> theClass, Class<?> xface) 

Document

Set the value of the name property to the name of theClass, which must implement the given interface xface.
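
Before the project examples, a minimal self-contained sketch of how setClass pairs with the corresponding getClass lookup. The property name my.reader.impl and the Reader/TextReader types are hypothetical, invented only for illustration:

import org.apache.hadoop.conf.Configuration;

public class SetClassExample {

    // Hypothetical interface and implementation, used only to illustrate setClass.
    interface Reader {
    }

    static class TextReader implements Reader {
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Stores the class name under the property "my.reader.impl"; setClass
        // throws a RuntimeException if TextReader does not implement Reader.
        conf.setClass("my.reader.impl", TextReader.class, Reader.class);

        // The counterpart lookup resolves the stored name back to a Class,
        // falling back to the supplied default when the property is unset.
        Class<? extends Reader> readerClass = conf.getClass("my.reader.impl", TextReader.class, Reader.class);
        System.out.println(readerClass.getName());
    }
}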

Usage

From source file: org.apache.beam.sdk.io.hadoop.inputformat.integration.tests.HIFIOHBaseIT.java

License: Apache License

private Configuration getHBaseConfiguration() {
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.quorum", options.getServerIp());
    conf.set("hbase.zookeeper.property.clientPort", String.format("%d", options.getServerPort()));
    conf.set("hbase.mapreduce.inputtable", TABLE_NAME);
    conf.setClass(HadoopInputFormatIOConstants.INPUTFORMAT_CLASSNAME,
            org.apache.hadoop.hbase.mapreduce.TableInputFormat.class, Object.class);
    conf.setClass(HadoopInputFormatIOConstants.KEY_CLASS, ImmutableBytesWritable.class, Object.class);
    conf.setClass(HadoopInputFormatIOConstants.VALUE_CLASS, org.apache.hadoop.hbase.client.Result.class,
            Object.class);
    return conf;
}

From source file: org.apache.beam.sdk.io.hadoop.inputformat.integration.tests.HIFIOHDFSIT.java

License: Apache License

private Configuration getHDFSConfiguration() {
    Configuration conf = new Configuration();
    conf.set("mapred.input.dir", StringUtils.escapeString(filePath));
    conf.setClass(HadoopInputFormatIOConstants.INPUTFORMAT_CLASSNAME, TextInputFormat.class, Object.class);
    conf.setClass(HadoopInputFormatIOConstants.KEY_CLASS, LongWritable.class, Object.class);
    conf.setClass(HadoopInputFormatIOConstants.VALUE_CLASS, Text.class, Object.class);
    return conf;
}

From source file: org.apache.beam.sdk.io.hadoop.inputformat.integration.tests.HIFIOWithPostgresIT.java

License: Apache License

private static Configuration getPostgresConfiguration() throws IOException {
    Configuration conf = new Configuration();
    conf.set("mapreduce.jdbc.driver.class", DRIVER_CLASS_PROPERTY);
    conf.set("mapreduce.jdbc.url", urlProperty);
    conf.set("mapreduce.jdbc.username", options.getUserName());
    conf.set("mapreduce.jdbc.password", options.getPassword());
    conf.set("mapreduce.jdbc.input.table.name", INPUT_TABLE_NAME_PROPERTY);
    conf.set("mapreduce.jdbc.input.query", "SELECT * FROM " + INPUT_TABLE_NAME_PROPERTY);
    conf.setClass(HadoopInputFormatIOConstants.INPUTFORMAT_CLASSNAME, DBInputFormat.class, InputFormat.class);
    conf.setClass(HadoopInputFormatIOConstants.KEY_CLASS, LongWritable.class, Object.class);
    conf.setClass(HadoopInputFormatIOConstants.VALUE_CLASS, DBInputWritable.class, Object.class);
    conf.setClass("mapreduce.jdbc.input.class", DBInputWritable.class, Object.class);
    return conf;
}

From source file: org.apache.blur.mapreduce.lib.BlurOutputFormat.java

License: Apache License

public static void setDocumentBufferStrategy(Configuration configuration,
        Class<? extends DocumentBufferStrategy> documentBufferStrategyClass) {
    configuration.setClass(BLUR_OUTPUT_DOCUMENT_BUFFER_STRATEGY, documentBufferStrategyClass,
            DocumentBufferStrategy.class);
}

From source file: org.apache.blur.spark.BlurMRBulkLoadSparkProcessor.java

License: Apache License

@Override
protected Function2<JavaPairRDD<String, RowMutation>, Time, Void> getFunction() {
    return new Function2<JavaPairRDD<String, RowMutation>, Time, Void>() {
        @Override
        public Void call(JavaPairRDD<String, RowMutation> rdd, Time time) throws Exception {

            // Blur Table Details
            Iface client = getBlurClient();
            TableDescriptor tableDescriptor = client.describe(getBlurTableName());
            Configuration conf = new Configuration();
            // Blur-specific configuration
            conf.setClass(MAPREDUCE_PARTITIONER_CLASS, BlurPartitioner.class, Partitioner.class);
            conf.set(MAPRED_OUTPUT_COMMITTER_CLASS, BlurOutputCommitter.class.getName());

            // Partition the RDD to match the Blur table shard count, using a
            // custom partitioner to route each BlurMutate to the correct shard.
            BlurSparkPartitioner blurSparkPartitioner = new BlurSparkPartitioner(
                    tableDescriptor.getShardCount());
            JavaPairRDD<Text, BlurMutate> flatMapToPair = rdd
                    .flatMapToPair(new PairFlatMapFunction<Tuple2<String, RowMutation>, Text, BlurMutate>() {
                        @Override
                        public Iterable<Tuple2<Text, BlurMutate>> call(Tuple2<String, RowMutation> tuple2)
                                throws Exception {
                            RowMutation rowMutation = tuple2._2;
                            final List<BlurMutate> result = new ArrayList<BlurMutate>();
                            List<RecordMutation> recordMutations = rowMutation.getRecordMutations();
                            String rowId = rowMutation.getRowId();
                            for (RecordMutation recordMutation : recordMutations) {
                                Record record = recordMutation.getRecord();
                                String family = record.getFamily();
                                String recordId = record.getRecordId();
                                List<BlurColumn> columns = toColumns(record.getColumns());

                                BlurRecord blurRecord = new BlurRecord();
                                blurRecord.setRowId(rowId);
                                blurRecord.setFamily(family);
                                blurRecord.setRecordId(recordId);
                                blurRecord.setColumns(columns);
                                result.add(new BlurMutate(MUTATE_TYPE.REPLACE, blurRecord));
                            }
                            return new Iterable<Tuple2<Text, BlurMutate>>() {
                                @Override
                                public Iterator<Tuple2<Text, BlurMutate>> iterator() {
                                    final Iterator<BlurMutate> iterator = result.iterator();
                                    return new Iterator<Tuple2<Text, BlurMutate>>() {

                                        @Override
                                        public boolean hasNext() {
                                            return iterator.hasNext();
                                        }

                                        @Override
                                        public Tuple2<Text, BlurMutate> next() {
                                            BlurMutate blurMutate = iterator.next();
                                            return new Tuple2<Text, BlurMutate>(
                                                    new Text(blurMutate.getRecord().getRowId()), blurMutate);
                                        }

                                        @Override
                                        public void remove() {
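                                            // Read-only iterator: removal is intentionally a no-op.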

                                        }
                                    };
                                }
                            };
                        }

                        private List<BlurColumn> toColumns(List<Column> columns) {
                            List<BlurColumn> cols = new ArrayList<BlurColumn>();
                            for (Column column : columns) {
                                cols.add(new BlurColumn(column.getName(), column.getValue()));
                            }
                            return cols;
                        }
                    });

            final JavaPairRDD<Text, BlurMutate> pRdd = flatMapToPair.partitionBy(blurSparkPartitioner)
                    .persist(getStorageLevel());
            Job job = new Job(conf);
            BlurOutputFormat.setupJob(job, tableDescriptor);
            Path path = new Path(getOutputPath());
            FileSystem fileSystem = path.getFileSystem(conf);
            Path qualified = fileSystem.makeQualified(path);
            BlurOutputFormat.setOutputPath(job, qualified);
            setupBlurHadoopConfig(job.getConfiguration());
            // Write the RDD to Blur Table
            if (pRdd.count() > 0) {
                pRdd.saveAsNewAPIHadoopFile(tableDescriptor.getTableUri(), Text.class, BlurMutate.class,
                        BlurOutputFormat.class, job.getConfiguration());
                client.loadData(getBlurTableName(), qualified.toString());
            }
            return null;
        }
    };
}

From source file: org.apache.camel.component.hdfs2.HdfsOsgiHelper.java

License: Apache License

/**
 * By using this constructor it is possible to perform static initialization of {@link FileSystem}.
 */
public HdfsOsgiHelper(Map<String, String> fileSystems) {
    try {
        // get bundle classloader for camel-hdfs2 bundle
        ClassLoader cl = getClass().getClassLoader();
        Configuration conf = new Configuration();
        for (String key : fileSystems.keySet()) {
            URI uri = URI.create(key);
            conf.setClass(String.format("fs.%s.impl", uri.getScheme()), cl.loadClass(fileSystems.get(key)),
                    FileSystem.class);
            FileSystem.get(uri, conf);
        }
    } catch (Exception e) {
        LOG.debug(e.getMessage());
    }
}
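
A possible invocation of this helper, assuming the stock HDFS implementation class; the namenode URI is illustrative:

    Map<String, String> fileSystems = new HashMap<String, String>();
    // Maps the "hdfs" URI scheme to its FileSystem implementation, so that
    // "fs.hdfs.impl" resolves against the camel-hdfs2 bundle classloader.
    fileSystems.put("hdfs://namenode:8020/", "org.apache.hadoop.hdfs.DistributedFileSystem");
    new HdfsOsgiHelper(fileSystems);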

From source file: org.apache.crunch.hadoop.mapreduce.lib.output.CrunchMultipleOutputs.java

License: Apache License

/**
 * Adds a named output for the job.
 * <p/>
 *
 * @param job               job to add the named output
 * @param namedOutput       named output name; it must consist of letters
 *                          and numbers only, and cannot be the word 'part',
 *                          which is reserved for the default output.
 * @param outputFormatClass OutputFormat class.
 * @param keyClass          key class
 * @param valueClass        value class
 */
public static void addNamedOutput(Job job, String namedOutput, Class<? extends OutputFormat> outputFormatClass,
        Class<?> keyClass, Class<?> valueClass) {
    checkNamedOutputName(job, namedOutput, true);
    Configuration conf = job.getConfiguration();
    conf.set(MULTIPLE_OUTPUTS, conf.get(MULTIPLE_OUTPUTS, "") + " " + namedOutput);
    conf.setClass(MO_PREFIX + namedOutput + FORMAT, outputFormatClass, OutputFormat.class);
    conf.setClass(MO_PREFIX + namedOutput + KEY, keyClass, Object.class);
    conf.setClass(MO_PREFIX + namedOutput + VALUE, valueClass, Object.class);
}
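
For reference, a sketch of the matching read side: the classes stored by addNamedOutput can be recovered with Configuration.getClass lookups, assuming the same MO_PREFIX, FORMAT, KEY, and VALUE constants are in scope:

    Class<? extends OutputFormat> formatClass =
            conf.getClass(MO_PREFIX + namedOutput + FORMAT, null, OutputFormat.class);
    Class<?> keyClass = conf.getClass(MO_PREFIX + namedOutput + KEY, Object.class);
    Class<?> valueClass = conf.getClass(MO_PREFIX + namedOutput + VALUE, Object.class);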

From source file: org.apache.crunch.types.avro.AvroMode.java

License: Apache License

public void configureFactory(Configuration conf) {
    if (factory != null) {
        conf.setClass(propName, factory.getClass(), ReaderWriterFactory.class);
    }
}

From source file: org.apache.giraph.graph.TestEdgeListVertex.java

License: Apache License

@Before
public void setUp() {
    try {
        job = new GiraphJob("TestEdgeArrayVertex");
    } catch (IOException e) {
        throw new RuntimeException("setUp: Failed", e);
    }
    job.setVertexClass(IFDLEdgeListVertex.class);
    Configuration conf = job.getConfiguration();
    conf.setClass(GiraphJob.VERTEX_INDEX_CLASS, IntWritable.class, WritableComparable.class);
    conf.setClass(GiraphJob.VERTEX_VALUE_CLASS, FloatWritable.class, Writable.class);
    conf.setClass(GiraphJob.EDGE_VALUE_CLASS, DoubleWritable.class, Writable.class);
    conf.setClass(GiraphJob.MESSAGE_VALUE_CLASS, LongWritable.class, Writable.class);
    vertex = (IFDLEdgeListVertex) BspUtils
            .<IntWritable, FloatWritable, DoubleWritable, LongWritable>createVertex(conf);
}

From source file: org.apache.giraph.TestVertexTypes.java

License: Apache License

@Test
public void testMatchingType() throws SecurityException, NoSuchMethodException, NoSuchFieldException {
    @SuppressWarnings("rawtypes")
    GraphMapper<?, ?, ?, ?> mapper = new GraphMapper();
    Configuration conf = new Configuration();
    conf.setClass(GiraphJob.VERTEX_CLASS, GeneratedVertexMatch.class, BasicVertex.class);
    conf.setClass(GiraphJob.VERTEX_INPUT_FORMAT_CLASS, SimpleSuperstepVertexInputFormat.class,
            VertexInputFormat.class);
    conf.setClass(GiraphJob.VERTEX_COMBINER_CLASS, GeneratedVertexMatchCombiner.class, VertexCombiner.class);
    mapper.determineClassTypes(conf);
}