Example usage for org.apache.hadoop.conf Configuration getClass

Introduction

This page collects example usages of org.apache.hadoop.conf.Configuration.getClass, drawn from open-source projects.

Prototype

public <U> Class<? extends U> getClass(String name, Class<? extends U> defaultValue, Class<U> xface) 

Document

Get the value of the name property as a Class implementing the interface specified by xface. If the property is not set, defaultValue is returned. A RuntimeException is thrown if the resolved class does not implement xface.
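
To make the three-argument contract concrete, here is a minimal, self-contained sketch; the property name and the Codec types are illustrative, not part of Hadoop:

import org.apache.hadoop.conf.Configuration;

public class GetClassDemo {
    /** Illustrative interface standing in for xface. */
    public interface Codec {
    }

    public static class DefaultCodec implements Codec {
    }

    public static class FastCodec implements Codec {
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Property unset: the default value is returned.
        Class<? extends Codec> c = conf.getClass("demo.codec.class", DefaultCodec.class, Codec.class);
        System.out.println(c.getSimpleName()); // DefaultCodec

        // Property set to a class implementing Codec: that class is returned.
        conf.setClass("demo.codec.class", FastCodec.class, Codec.class);
        c = conf.getClass("demo.codec.class", DefaultCodec.class, Codec.class);
        System.out.println(c.getSimpleName()); // FastCodec

        // Had the property named a class that does not implement Codec,
        // getClass would have thrown a RuntimeException instead.
    }
}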

Usage

From source file:com.mongodb.hadoop.util.MapredMongoConfigUtil.java

License:Apache License

public static Class<? extends InputFormat> getInputFormat(final Configuration conf) {
    return conf.getClass(JOB_INPUT_FORMAT, null, InputFormat.class);
}

From source file:com.mongodb.hadoop.util.MapredMongoConfigUtil.java

License:Apache License

public static Class<? extends MongoSplitter> getSplitterClass(final Configuration conf) {
    return conf.getClass(MONGO_SPLITTER_CLASS, null, MongoSplitter.class);
}
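
Both getters pass null as the default, so a missing key simply yields null rather than a fallback implementation. A hedged sketch of the round trip, assuming the JOB_INPUT_FORMAT constant is publicly visible on MapredMongoConfigUtil (as the corresponding constant is on MongoConfigUtil):

Configuration conf = new Configuration();

// While the key is absent, the null default comes back:
Class<? extends InputFormat> inputFormat = MapredMongoConfigUtil.getInputFormat(conf); // null

// A job builder registers the class first, e.g. with setClass:
conf.setClass(MapredMongoConfigUtil.JOB_INPUT_FORMAT,
        com.mongodb.hadoop.mapred.MongoInputFormat.class, InputFormat.class);
inputFormat = MapredMongoConfigUtil.getInputFormat(conf); // MongoInputFormat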

From source file:com.moz.fiji.mapreduce.bulkimport.impl.FijiBulkImporters.java

License:Apache License

/**
 * <p>Create an instance of the bulk importer specified by the
 * {@link org.apache.hadoop.conf.Configuration}.</p>
 *
 * The configuration would have stored the bulk importer
 * name only if it was configured by a FijiBulkImportJob, so don't try
 * calling this method with any old Configuration object.
 *
 * @param <K> The map input key for the bulk importer.
 * @param <V> The map input value for the bulk importer.
 * @param conf The job configuration.
 * @return a brand-spankin'-new FijiBulkImporter instance.
 * @throws IOException If the bulk importer cannot be loaded.
 */
@SuppressWarnings("unchecked")
public static <K, V> FijiBulkImporter<K, V> create(Configuration conf) throws IOException {
    final Class<? extends FijiBulkImporter> bulkImporterClass = conf
            .getClass(FijiConfKeys.FIJI_BULK_IMPORTER_CLASS, null, FijiBulkImporter.class);
    if (null == bulkImporterClass) {
        throw new IOException("Unable to load bulk importer class");
    }

    return ReflectionUtils.newInstance(bulkImporterClass, conf);
}
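
Every factory on this page pairs getClass with ReflectionUtils.newInstance, which does more than invoke a no-arg constructor: when the instantiated class implements org.apache.hadoop.conf.Configurable, the supplied Configuration is injected via setConf. A minimal sketch of that contract (ConfigurableDemo and its nested class are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.ReflectionUtils;

public class ConfigurableDemo {
    // Configured implements Configurable, so ReflectionUtils.newInstance
    // calls setConf(conf) right after construction.
    public static class MyImporter extends Configured {
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        MyImporter importer = ReflectionUtils.newInstance(MyImporter.class, conf);
        System.out.println(importer.getConf() == conf); // true
    }
}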

From source file:com.moz.fiji.mapreduce.gather.impl.GatherMapper.java

License:Apache License

/**
 * Initialize the gatherer instance to execute.
 *
 * @param conf the Configuration to use to initialize the gatherer instance.
 * @return a FijiGatherer instance, or null if no gatherer class is configured.
 */
protected FijiGatherer<K, V> createGatherer(Configuration conf) {
    @SuppressWarnings("unchecked")
    Class<? extends FijiGatherer<K, V>> gatherClass = (Class<? extends FijiGatherer<K, V>>) conf
            .getClass(FijiConfKeys.FIJI_GATHERER_CLASS, null, FijiGatherer.class);
    if (null == gatherClass) {
        LOG.error("Null " + FijiConfKeys.FIJI_GATHERER_CLASS + " in createGatherer()?");
        return null;
    }
    return ReflectionUtils.newInstance(gatherClass, conf);
}
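
Note that, unlike FijiBulkImporters.create(...) above, this helper logs the missing key and returns null instead of throwing, so the calling mapper must null-check the gatherer before using it.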

From source file:com.moz.fiji.mapreduce.pivot.impl.FijiPivoters.java

License:Apache License

/**
 * Create an instance of {@link FijiPivoter} as specified from a given
 * {@link org.apache.hadoop.conf.Configuration}.
 *
 * @param conf The job configuration.
 * @return a new {@link FijiPivoter} instance.
 * @throws IOException if the class cannot be loaded.
 */
public static FijiPivoter create(Configuration conf) throws IOException {
    final Class<? extends FijiPivoter> tableMapperClass = conf.getClass(FijiConfKeys.FIJI_PIVOTER_CLASS, null,
            FijiPivoter.class);
    if (null == tableMapperClass) {
        throw new IOException("Unable to load pivoter class");
    }
    return ReflectionUtils.newInstance(tableMapperClass, conf);
}

From source file:com.moz.fiji.mapreduce.produce.impl.FijiProducers.java

License:Apache License

/**
 * Creates an instance of the producer specified by the
 * {@link org.apache.hadoop.conf.Configuration}.
 *
 * <p>The configuration would have stored the producer name only if
 * it was configured by a FijiProduceJobBuilder, so don't try calling this
 * method with any old Configuration object.</p>
 *
 * @param conf The job configuration.
 * @return a brand-spankin'-new FijiProducer instance.
 * @throws IOException If the producer class cannot be loaded from the configuration.
 */
public static FijiProducer create(Configuration conf) throws IOException {
    final Class<? extends FijiProducer> producerClass = conf.getClass(FijiConfKeys.FIJI_PRODUCER_CLASS, null,
            FijiProducer.class);
    if (null == producerClass) {
        throw new IOException("Producer class could not be found in configuration.");
    }
    return ReflectionUtils.newInstance(producerClass, conf);
}

From source file:com.phantom.hadoop.examples.dancing.DistributedPentomino.java

License:Apache License

public int run(String[] args) throws Exception {
    Configuration conf = getConf();
    if (args.length == 0) {
        System.out.println("Usage: pentomino <output> [-depth #] [-height #] [-width #]");
        ToolRunner.printGenericCommandUsage(System.out);
        return 2;
    }
    // check for passed parameters, otherwise use defaults
    int width = conf.getInt(Pentomino.WIDTH, PENT_WIDTH);
    int height = conf.getInt(Pentomino.HEIGHT, PENT_HEIGHT);
    int depth = conf.getInt(Pentomino.DEPTH, PENT_DEPTH);
    for (int i = 0; i < args.length; i++) {
        if (args[i].equalsIgnoreCase("-depth")) {
            depth = Integer.parseInt(args[++i].trim());
        } else if (args[i].equalsIgnoreCase("-height")) {
            height = Integer.parseInt(args[++i].trim());
        } else if (args[i].equalsIgnoreCase("-width")) {
            width = Integer.parseInt(args[++i].trim());
        }
    }
    // Now set the values in conf for the M/R tasks to read; setting them
    // explicitly prevents MAPREDUCE-4678.
    conf.setInt(Pentomino.WIDTH, width);
    conf.setInt(Pentomino.HEIGHT, height);
    conf.setInt(Pentomino.DEPTH, depth);
    Class<? extends Pentomino> pentClass = conf.getClass(Pentomino.CLASS, OneSidedPentomino.class,
            Pentomino.class);
    int numMaps = conf.getInt(MRJobConfig.NUM_MAPS, DEFAULT_MAPS);
    Path output = new Path(args[0]);
    Path input = new Path(output + "_input");
    FileSystem fileSys = FileSystem.get(conf);
    try {
        Job job = new Job(conf);
        FileInputFormat.setInputPaths(job, input);
        FileOutputFormat.setOutputPath(job, output);
        job.setJarByClass(PentMap.class);

        job.setJobName("dancingElephant");
        Pentomino pent = ReflectionUtils.newInstance(pentClass, conf);
        pent.initialize(width, height);
        long inputSize = createInputDirectory(fileSys, input, pent, depth);
        // for forcing the number of maps
        FileInputFormat.setMaxInputSplitSize(job, (inputSize / numMaps));

        // the keys are the prefix strings
        job.setOutputKeyClass(Text.class);
        // the values are puzzle solutions
        job.setOutputValueClass(Text.class);

        job.setMapperClass(PentMap.class);
        job.setReducerClass(Reducer.class);

        job.setNumReduceTasks(1);

        return (job.waitForCompletion(true) ? 0 : 1);
    } finally {
        fileSys.delete(input, true);
    }
}
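
Here getClass supplies a real default, OneSidedPentomino, so the job runs without any explicit configuration, and a different solver can be swapped in purely through the config. A hedged sketch in the context of the imports above, assuming Pentomino.CLASS holds the property name:

Configuration conf = new Configuration();
// Explicitly select a solver; any concrete Pentomino subclass works.
conf.setClass(Pentomino.CLASS, OneSidedPentomino.class, Pentomino.class);
Class<? extends Pentomino> pentClass =
        conf.getClass(Pentomino.CLASS, OneSidedPentomino.class, Pentomino.class);
Pentomino pent = ReflectionUtils.newInstance(pentClass, conf);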

From source file:com.ricemap.spateDB.core.SpatialSite.java

License:Apache License

public static Class<? extends Shape> getShapeClass(Configuration conf) {
    return conf.getClass(ShapeClass, Point3d.class, Shape.class);
}

From source file:com.scaleoutsoftware.soss.hserver.DatasetInputFormat.java

License:Apache License

/**
 * Constructs an instance of the underlying input format to be wrapped by the dataset input format.
 *
 * @param configuration the job configuration
 * @return underlying input format
 */
@SuppressWarnings("unchecked")
private InputFormat<K, V> getUnderlyingInputFormat(Configuration configuration) throws IOException {
    if (underlyingInputFormat == null) {
        Class<? extends InputFormat> underlyingInputFormatClass = configuration
                .getClass(underlyingInputFormatPropertyName, null, InputFormat.class);
        if (underlyingInputFormatClass == null)
            throw new IOException("The underlying input format is not specified.");
        try {
            underlyingInputFormat = underlyingInputFormatClass.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    return underlyingInputFormat;
}
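
Two details are easy to miss here: the result is cached in the underlyingInputFormat field, so the reflective lookup runs at most once per DatasetInputFormat instance; and instantiation uses getDeclaredConstructor().newInstance() rather than ReflectionUtils.newInstance, so the wrapped format receives no Configuration through setConf even if it implements Configurable.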

From source file:com.scaleoutsoftware.soss.hserver.JobScheduler.java

License:Apache License

private org.apache.hadoop.mapreduce.OutputCommitter createOutputCommitter(boolean newApiCommitter, JobID jobId,
        Configuration conf) throws IOException, InterruptedException, ClassNotFoundException {
    org.apache.hadoop.mapreduce.OutputCommitter committer = null;

    LOG.info("OutputCommitter set in config " + conf.get("mapred.output.committer.class"));

    if (newApiCommitter) {
        HadoopVersionSpecificCode hadoopVersionSpecificCode = HadoopVersionSpecificCode
                .getInstance(VersionInfo.getVersion(), conf);
        org.apache.hadoop.mapreduce.TaskAttemptID taskAttemptID = hadoopVersionSpecificCode
                .createTaskAttemptId(jobId, true, 0);
        org.apache.hadoop.mapreduce.TaskAttemptContext taskContext = hadoopVersionSpecificCode
                .createTaskAttemptContext(conf, taskAttemptID);
        OutputFormat outputFormat = ReflectionUtils.newInstance(taskContext.getOutputFormatClass(), conf);
        committer = outputFormat.getOutputCommitter(taskContext);
    } else {
        committer = ReflectionUtils.newInstance(conf.getClass("mapred.output.committer.class",
                FileOutputCommitter.class, org.apache.hadoop.mapred.OutputCommitter.class), conf);
    }
    LOG.info("OutputCommitter is " + committer.getClass().getName());
    return committer;
}
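
Unlike the null-default lookups above, the old-API branch supplies FileOutputCommitter as a real fallback, so an unconfigured job still gets a working committer. The lookup in isolation, using the same key string as the code above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.FileOutputCommitter;
import org.apache.hadoop.mapred.OutputCommitter;

public class CommitterLookupDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Key unset: the FileOutputCommitter default is returned.
        Class<? extends OutputCommitter> committerClass = conf.getClass(
                "mapred.output.committer.class",
                FileOutputCommitter.class, OutputCommitter.class);
        System.out.println(committerClass.getName());
        // org.apache.hadoop.mapred.FileOutputCommitter
    }
}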