Example usage for org.apache.hadoop.conf Configuration getClass

List of usage examples for org.apache.hadoop.conf Configuration getClass

Introduction

On this page you can find an example usage for org.apache.hadoop.conf Configuration getClass.

Prototype

public <U> Class<? extends U> getClass(String name, Class<? extends U> defaultValue, Class<U> xface) 

Source Link

Document

Get the value of the name property as a Class implementing the interface specified by xface.

Usage

From source file:org.apache.avro.hadoop.io.AvroSerialization.java

License:Apache License

/**
 * Gets the data model class used for de/serialization.
 *
 * @param conf The configuration.
 * @return The configured {@link GenericData} implementation; defaults to {@link ReflectData}.
 */
public static Class<? extends GenericData> getDataModelClass(Configuration conf) {
    Class<? extends GenericData> modelClass = conf.getClass(CONF_DATA_MODEL, ReflectData.class,
            GenericData.class);
    return modelClass;
}

From source file:org.apache.avro.mapred.AvroJob.java

License:Apache License

/**
 * Return the job's data model implementation class.
 *
 * @param conf The job configuration.
 * @return The configured {@link GenericData} implementation; defaults to {@link ReflectData}.
 */
public static Class<? extends GenericData> getDataModelClass(Configuration conf) {
    // No cast needed: getClass(name, default, xface) already returns
    // Class<? extends GenericData> because GenericData.class is the xface argument.
    return conf.getClass(CONF_DATA_MODEL, ReflectData.class, GenericData.class);
}

From source file:org.apache.beam.sdk.io.hadoop.format.HadoopFormats.java

License:Apache License

/**
 * Creates object from class specified in the configuration under specified {@code
 * configClassKey}./*from   w  w w  . ja  va 2 s  . c om*/
 *
 * @param conf hadoop Configuration where is stored class name of returned object
 * @param configClassKey key for class name
 * @param defaultClass Default class if any result was not found under specified {@code
 *     configClassKey}
 * @param xface interface of given class
 * @return created object
 */
private static <T> T createInstanceFromConfig(Configuration conf, String configClassKey,
        @Nullable Class<? extends T> defaultClass, Class<T> xface) {
    try {
        String className = conf.get(configClassKey);
        Preconditions.checkArgument(className != null || defaultClass != null, String.format(
                "Configuration does not contains any value under %s key. Unable to initialize class instance from configuration. ",
                configClassKey));

        Class<? extends T> requiredClass = conf.getClass(configClassKey, defaultClass, xface);

        return requiredClass.getConstructor().newInstance();
    } catch (InstantiationException | IllegalAccessException | NoSuchMethodException
            | InvocationTargetException e) {
        throw new IllegalArgumentException(String.format(
                "Unable to create instance of object from configuration under key %s.", configClassKey), e);
    }
}

From source file:org.apache.blur.mapreduce.lib.BlurOutputFormat.java

License:Apache License

/**
 * Instantiates the {@link DocumentBufferStrategy} configured under
 * {@code BLUR_OUTPUT_DOCUMENT_BUFFER_STRATEGY}, defaulting to
 * {@link DocumentBufferStrategyFixedSize}. The strategy class must expose a
 * public constructor taking a single {@link Configuration} argument.
 *
 * @param configuration the configuration to read the strategy class from and to
 *     pass to the strategy's constructor
 * @return a new strategy instance
 * @throws RuntimeException if the strategy cannot be constructed reflectively
 */
public static DocumentBufferStrategy getDocumentBufferStrategy(Configuration configuration) {
    Class<? extends DocumentBufferStrategy> clazz = configuration.getClass(BLUR_OUTPUT_DOCUMENT_BUFFER_STRATEGY,
            DocumentBufferStrategyFixedSize.class, DocumentBufferStrategy.class);
    try {
        // Varargs form of getConstructor/newInstance replaces the explicit
        // Class[]/Object[] array wrapping of the original.
        Constructor<? extends DocumentBufferStrategy> constructor = clazz.getConstructor(Configuration.class);
        return constructor.newInstance(configuration);
    } catch (Exception e) {
        // Broad catch kept intentionally: a misconfigured strategy is unrecoverable,
        // and the cause is preserved for diagnosis.
        throw new RuntimeException(e);
    }
}

From source file:org.apache.flink.runtime.fs.hdfs.HadoopFileSystem.java

License:Apache License

/**
 * Looks up the Hadoop {@code FileSystem} implementation registered for the given
 * scheme under the {@code fs.<scheme>.impl} configuration key.
 *
 * @param scheme the file system scheme (e.g. "hdfs")
 * @return the implementation class, or {@code null} if none is configured
 */
@Override
public Class<?> getHadoopWrapperClassNameForFileSystem(String scheme) {
    Configuration hadoopConf = getHadoopConfiguration();
    // NOTE: once Hadoop 1 support is dropped, this raw configuration lookup could be
    // replaced by org.apache.hadoop.fs.FileSystem.getFileSystemClass(scheme, hadoopConf),
    // which only exists in Hadoop 2.
    Class<? extends org.apache.hadoop.fs.FileSystem> clazz = hadoopConf.getClass("fs." + scheme + ".impl", null,
            org.apache.hadoop.fs.FileSystem.class);

    if (clazz != null && LOG.isDebugEnabled()) {
        LOG.debug("Flink supports " + scheme + " with the Hadoop file system wrapper, impl " + clazz);
    }
    return clazz;
}

From source file:org.apache.giraph.graph.BspUtils.java

License:Apache License

/**
 * Get the user's subclassed {@link GraphPartitionerFactory}.
 *
 * @param <I> Vertex id
 * @param <V> Vertex data
 * @param <E> Edge data
 * @param <M> Message data
 * @param conf Configuration to check
 * @return User's graph partitioner factory class; defaults to
 *         {@link HashPartitionerFactory}
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static <I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable> Class<? extends GraphPartitionerFactory<I, V, E, M>> getGraphPartitionerClass(
        Configuration conf) {
    Class<? extends GraphPartitionerFactory> factoryClass = conf.getClass(
            GiraphJob.GRAPH_PARTITIONER_FACTORY_CLASS, HashPartitionerFactory.class,
            GraphPartitionerFactory.class);
    // Unchecked: the raw factory class is re-parameterized with the caller's type arguments.
    return (Class<? extends GraphPartitionerFactory<I, V, E, M>>) factoryClass;
}

From source file:org.apache.giraph.graph.BspUtils.java

License:Apache License

/**
 * Get the user's subclassed {@link VertexInputFormat}.
 *
 * @param <I> Vertex id
 * @param <V> Vertex data
 * @param <E> Edge data
 * @param <M> Message data
 * @param conf Configuration to check
 * @return User's vertex input format class, or {@code null} if not set
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static <I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable> Class<? extends VertexInputFormat<I, V, E, M>> getVertexInputFormatClass(
        Configuration conf) {
    Class<? extends VertexInputFormat> inputFormatClass = conf.getClass(GiraphJob.VERTEX_INPUT_FORMAT_CLASS,
            null, VertexInputFormat.class);
    // Unchecked: the raw class is re-parameterized with the caller's type arguments.
    return (Class<? extends VertexInputFormat<I, V, E, M>>) inputFormatClass;
}

From source file:org.apache.giraph.graph.BspUtils.java

License:Apache License

/**
 * Get the user's subclassed {@link VertexOutputFormat}.
 *
 * @param <I> Vertex id
 * @param <V> Vertex data
 * @param <E> Edge data
 * @param conf Configuration to check
 * @return User's vertex output format class, or {@code null} if not set
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static <I extends WritableComparable, V extends Writable, E extends Writable> Class<? extends VertexOutputFormat<I, V, E>> getVertexOutputFormatClass(
        Configuration conf) {
    Class<? extends VertexOutputFormat> outputFormatClass = conf.getClass(GiraphJob.VERTEX_OUTPUT_FORMAT_CLASS,
            null, VertexOutputFormat.class);
    // Unchecked: the raw class is re-parameterized with the caller's type arguments.
    return (Class<? extends VertexOutputFormat<I, V, E>>) outputFormatClass;
}

From source file:org.apache.giraph.graph.BspUtils.java

License:Apache License

/**
 * Get the user's subclassed {@link AggregatorWriter}.
 *
 * @param conf Configuration to check/*from  w ww  . j  a  va 2s.c om*/
 * @return User's aggregator writer class
 */
public static Class<? extends AggregatorWriter> getAggregatorWriterClass(Configuration conf) {
    return conf.getClass(GiraphJob.AGGREGATOR_WRITER_CLASS, TextAggregatorWriter.class, AggregatorWriter.class);
}

From source file:org.apache.giraph.graph.BspUtils.java

License:Apache License

/**
 * Get the user's subclassed {@link VertexCombiner}.
 *
 * @param <I> Vertex id/*from   w  ww .  java  2 s. co m*/
 * @param <M> Message data
 * @param conf Configuration to check
 * @return User's vertex combiner class
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static <I extends WritableComparable, M extends Writable> Class<? extends VertexCombiner<I, M>> getVertexCombinerClass(
        Configuration conf) {
    return (Class<? extends VertexCombiner<I, M>>) conf.getClass(GiraphJob.VERTEX_COMBINER_CLASS, null,
            VertexCombiner.class);
}