Example usage for org.apache.hadoop.conf Configuration getClassByName

Introduction

On this page you can find example usages of org.apache.hadoop.conf.Configuration#getClassByName.

Prototype

public Class<?> getClassByName(String name) throws ClassNotFoundException 

Document

Load a class by name.
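
As a minimal, self-contained sketch (the Text class name is just an illustrative value), the method resolves a class through the Configuration's class loader and pairs naturally with ReflectionUtils.newInstance:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;

public class GetClassByNameSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Resolve a class by name using the Configuration's class loader.
        Class<?> clazz = conf.getClassByName("org.apache.hadoop.io.Text");
        // ReflectionUtils.newInstance also configures the instance if it is Configurable.
        Object instance = ReflectionUtils.newInstance(clazz, conf);
        System.out.println(instance.getClass().getName());
    }
}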

Usage

From source file: org.apache.sqoop.mapreduce.db.SQLServerDBRecordReader.java

License: Apache License

/**
 * Load the SQLFailureHandler configured for use by the record reader.
 */
public void initialize(InputSplit inputSplit, TaskAttemptContext context)
        throws IOException, InterruptedException {
    // Load the configured connection failure handler
    Configuration conf = getConf();
    if (conf == null) {
        LOG.error("Configuration cannot be NULL");
        throw new IOException("Configuration cannot be NULL");
    }

    Class<?> connHandlerClass;
    String className = conf.get(SQLServerDBInputFormat.IMPORT_FAILURE_HANDLER_CLASS);
    try {
        // Resolve the class name set in the configuration
        connHandlerClass = conf.getClassByName(className);
    } catch (ClassNotFoundException ex) {
        LOG.error("Failed to find class: " + className);
        throw new IOException(ex);
    }

    // Verify handler class is a subclass of SQLFailureHandler
    if (!SQLFailureHandler.class.isAssignableFrom(connHandlerClass)) {
        String error = "A subclass of " + SQLFailureHandler.class.getName()
                + " is expected. Actual class set is: " + connHandlerClass.getName();
        LOG.error(error);
        throw new IOException(error);
    }
    LOG.trace("Using connection handler class: " + connHandlerClass);

    // Load the configured connection failure handler
    failureHandler = ReflectionUtils.newInstance((Class<? extends SQLFailureHandler>) connHandlerClass, conf);

    // Initialize the connection handler using the job configuration
    failureHandler.initialize(conf);

    // Get the split-by column
    splitColumn = getDBConf().getInputOrderBy();
    if (splitColumn == null || splitColumn.length() == 0) {
        throw new IOException("Split column must be set");
    }

    // Ensure the split-column is not escaped so that we can use it to search
    // in the record map
    int splitColLen = splitColumn.length();
    if (splitColLen > 2 && splitColumn.charAt(0) == '[' && splitColumn.charAt(splitColLen - 1) == ']') {
        splitColumn = splitColumn.substring(1, splitColLen - 1);
    }
}
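
For this reader to find a handler at all, a class name must be present under SQLServerDBInputFormat.IMPORT_FAILURE_HANDLER_CLASS. A hypothetical wiring (MyFailureHandler is a placeholder for a subclass of SQLFailureHandler) might look like:

// Hypothetical: MyFailureHandler must extend SQLFailureHandler.
conf.set(SQLServerDBInputFormat.IMPORT_FAILURE_HANDLER_CLASS,
        MyFailureHandler.class.getName());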

From source file: org.apache.sqoop.mapreduce.MergeJob.java

License: Apache License

public boolean runMergeJob() throws IOException {
    Configuration conf = options.getConf();
    Job job = createJob(conf);

    String userClassName = options.getClassName();
    if (null == userClassName) {
        // Shouldn't get here.
        throw new IOException("Record class name not specified with " + "--class-name.");
    }

    // Set the external jar to use for the job.
    String existingJar = options.getExistingJarName();
    if (existingJar != null) {
        // User explicitly identified a jar path.
        LOG.debug("Setting job jar to user-specified jar: " + existingJar);
        job.getConfiguration().set("mapred.jar", existingJar);
    } else {
        // Infer it from the location of the specified class, if it's on the
        // classpath.
        try {
            Class<?> userClass = conf.getClassByName(userClassName);
            // getClassByName throws rather than returning null, so test the
            // jar lookup instead: it is null when the class is not in a jar.
            String userJar = Jars.getJarPathForClass(userClass);
            if (userJar != null) {
                LOG.debug("Setting job jar based on user class " + userClassName + ": " + userJar);
                job.getConfiguration().set("mapred.jar", userJar);
            } else {
                LOG.warn("Specified class " + userClassName + " is not in a jar. "
                        + "MapReduce may not find the class.");
            }
        } catch (ClassNotFoundException cnfe) {
            throw new IOException(cnfe);
        }
    }

    try {
        Path oldPath = new Path(options.getMergeOldPath());
        Path newPath = new Path(options.getMergeNewPath());

        Configuration jobConf = job.getConfiguration();
        FileSystem fs = FileSystem.get(jobConf);
        oldPath = oldPath.makeQualified(fs);
        newPath = newPath.makeQualified(fs);

        propagateOptionsToJob(job);

        FileInputFormat.addInputPath(job, oldPath);
        FileInputFormat.addInputPath(job, newPath);

        jobConf.set(MERGE_OLD_PATH_KEY, oldPath.toString());
        jobConf.set(MERGE_NEW_PATH_KEY, newPath.toString());
        jobConf.set(MERGE_KEY_COL_KEY, options.getMergeKeyCol());
        jobConf.set(MERGE_SQOOP_RECORD_KEY, userClassName);

        FileOutputFormat.setOutputPath(job, new Path(options.getTargetDir()));

        if (ExportJobBase.isSequenceFiles(jobConf, newPath)) {
            job.setInputFormatClass(SequenceFileInputFormat.class);
            job.setOutputFormatClass(SequenceFileOutputFormat.class);
            job.setMapperClass(MergeRecordMapper.class);
        } else {
            job.setMapperClass(MergeTextMapper.class);
            job.setOutputFormatClass(RawKeyTextOutputFormat.class);
        }

        jobConf.set("mapred.output.key.class", userClassName);
        job.setOutputValueClass(NullWritable.class);

        job.setReducerClass(MergeReducer.class);

        // Set the intermediate data types.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(MergeRecord.class);

        // Make sure Sqoop and anything else we need is on the classpath.
        cacheJars(job, null);
        setJob(job);
        return this.runJob(job);
    } catch (InterruptedException ie) {
        throw new IOException(ie);
    } catch (ClassNotFoundException cnfe) {
        throw new IOException(cnfe);
    }
}
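
Since getClassByName has already produced a Class object here, a roughly equivalent shortcut (assuming the standard org.apache.hadoop.mapreduce.Job API) would be to let Hadoop infer the jar itself:

// Alternative sketch: Job.setJarByClass also locates the jar containing the class.
job.setJarByClass(userClass);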

From source file: org.apache.tez.engine.common.ConfigUtils.java

License: Apache License

public static Class<? extends CompressionCodec> getIntermediateOutputCompressorClass(Configuration conf,
        Class<DefaultCodec> defaultValue) {
    Class<? extends CompressionCodec> codecClass = defaultValue;
    String name = conf.get(TezJobConfig.TEZ_ENGINE_INTERMEDIATE_OUTPUT_COMPRESS_CODEC);
    if (name != null) {
        try {
            codecClass = conf.getClassByName(name).asSubclass(CompressionCodec.class);
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException("Compression codec " + name + " was not found.", e);
        }
    }
    return codecClass;
}

From source file: org.apache.tez.engine.common.ConfigUtils.java

License: Apache License

public static Class<? extends CompressionCodec> getIntermediateInputCompressorClass(Configuration conf,
        Class<DefaultCodec> defaultValue) {
    Class<? extends CompressionCodec> codecClass = defaultValue;
    String name = conf.get(TezJobConfig.TEZ_ENGINE_INTERMEDIATE_INPUT_COMPRESS_CODEC);
    if (name != null) {
        try {
            codecClass = conf.getClassByName(name).asSubclass(CompressionCodec.class);
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException("Compression codec " + name + " was not found.", e);
        }
    }
    return codecClass;
}

From source file: org.apache.tez.runtime.library.common.ConfigUtils.java

License: Apache License

public static Class<? extends CompressionCodec> getIntermediateOutputCompressorClass(Configuration conf,
        Class<DefaultCodec> defaultValue) {
    Class<? extends CompressionCodec> codecClass = defaultValue;
    String name = conf.get(TezRuntimeConfiguration.TEZ_RUNTIME_COMPRESS_CODEC);
    if (name != null) {
        try {
            codecClass = conf.getClassByName(name).asSubclass(CompressionCodec.class);
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException("Compression codec " + name + " was not found.", e);
        }
    }
    return codecClass;
}

From source file: org.apache.tez.runtime.library.common.ConfigUtils.java

License: Apache License

public static Class<? extends CompressionCodec> getIntermediateInputCompressorClass(Configuration conf,
        Class<DefaultCodec> defaultValue) {
    Class<? extends CompressionCodec> codecClass = defaultValue;
    String name = conf.get(TezRuntimeConfiguration.TEZ_RUNTIME_COMPRESS_CODEC);
    if (name != null) {
        try {
            codecClass = conf.getClassByName(name).asSubclass(CompressionCodec.class);
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException("Compression codec " + name + " was not found.", e);
        }
    }
    return codecClass;
}
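
In all four ConfigUtils variants above the codec arrives as a plain class name in the configuration, so the wiring side is just a string property; for example (SnappyCodec is merely an illustrative choice):

// Illustrative: select Snappy for intermediate data; any CompressionCodec subclass works.
conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_COMPRESS_CODEC,
        "org.apache.hadoop.io.compress.SnappyCodec");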

From source file: org.apache.tez.runtime.library.common.TezRuntimeUtils.java

License: Apache License

@SuppressWarnings("unchecked")
public static Combiner instantiateCombiner(Configuration conf, TaskContext taskContext) throws IOException {
    Class<? extends Combiner> clazz;
    String className = conf.get(TezRuntimeConfiguration.TEZ_RUNTIME_COMBINER_CLASS);
    if (className == null) {
        LOG.info("No combiner specified via " + TezRuntimeConfiguration.TEZ_RUNTIME_COMBINER_CLASS
                + ". Combiner will not be used");
        return null;
    }
    LOG.info("Using Combiner class: " + className);
    try {
        clazz = (Class<? extends Combiner>) conf.getClassByName(className);
    } catch (ClassNotFoundException e) {
        throw new IOException("Unable to load combiner class: " + className);
    }

    Combiner combiner = null;
    try {
        Constructor<? extends Combiner> ctor = clazz.getConstructor(TaskContext.class);
        combiner = ctor.newInstance(taskContext);
    } catch (SecurityException | ReflectiveOperationException | IllegalArgumentException e) {
        throw new IOException(e);
    }
    return combiner;
}
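
The combiner is opted into the same way, by class name. A hypothetical registration (MyCombiner stands in for an implementation of Combiner exposing the TaskContext constructor required above) could be:

// Hypothetical: MyCombiner implements Combiner and has a constructor taking a TaskContext.
conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_COMBINER_CLASS, MyCombiner.class.getName());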

From source file: org.apache.tez.runtime.library.common.TezRuntimeUtils.java

License: Apache License

@SuppressWarnings("unchecked")
public static Partitioner instantiatePartitioner(Configuration conf) throws IOException {
    Class<? extends Partitioner> clazz;
    try {
        clazz = (Class<? extends Partitioner>) conf
                .getClassByName(conf.get(TezRuntimeConfiguration.TEZ_RUNTIME_PARTITIONER_CLASS));
    } catch (ClassNotFoundException e) {
        throw new IOException("Unable to find Partitioner class specified in config : "
                + conf.get(TezRuntimeConfiguration.TEZ_RUNTIME_PARTITIONER_CLASS), e);
    }

    LOG.info("Using partitioner class: " + clazz.getName());

    Partitioner partitioner = null;
    try {
        Constructor<? extends Partitioner> ctorWithConf = clazz.getConstructor(Configuration.class);
        partitioner = ctorWithConf.newInstance(conf);
    } catch (NoSuchMethodException e) {
        try {
            // Fall back to a 0-argument constructor.
            partitioner = clazz.newInstance();
        } catch (InstantiationException | IllegalAccessException e1) {
            throw new IOException(e1);
        }
    } catch (SecurityException | ReflectiveOperationException | IllegalArgumentException e) {
        throw new IOException(e);
    }
    return partitioner;
}
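
Note the constructor fallback: a partitioner may accept a Configuration or default to a no-argument constructor. Registration again goes through a class name (MyPartitioner is a placeholder):

// Hypothetical: MyPartitioner implements Partitioner; either constructor shape works.
conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_PARTITIONER_CLASS, MyPartitioner.class.getName());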

From source file: org.archive.bacon.io.SequenceFileStorage.java

License: Apache License

/**
 * Most of this method is cut/pasted from the Hadoop
 * SequenceFileOutputFormat.  The big difference is that we use the
 * key and value types given to this Pig storage class rather than
 * using the ones set by the job configuration.
 */
public OutputFormat getOutputFormat() throws IOException {
    return new SequenceFileOutputFormat() {
        public RecordWriter getRecordWriter(TaskAttemptContext context)
                throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();

            Class<?> keyClass, valueClass;
            try {
                keyClass = conf.getClassByName(keyType);
                valueClass = conf.getClassByName(valueType);
            } catch (ClassNotFoundException cnfe) {
                throw new IOException(cnfe);
            }

            // Instantiate null objects for the key and value types.
            // See getWritable() for their use.
            try {
                nullKey = (Writable) keyClass.newInstance();
                nullValue = (Writable) valueClass.newInstance();
            } catch (ReflectiveOperationException roe) {
                throw new IOException(roe);
            }

            CompressionCodec codec = null;
            CompressionType compressionType = CompressionType.NONE;
            if (getCompressOutput(context)) {
                // find the kind of compression to do
                compressionType = getOutputCompressionType(context);

                // find the right codec
                Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
                codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
            }
            // get the path of the temporary output file 
            Path file = getDefaultWorkFile(context, "");
            FileSystem fs = file.getFileSystem(conf);
            final SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, file, keyClass, valueClass,
                    compressionType, codec, context);

            return new RecordWriter() {

                public void write(Object key, Object value) throws IOException {

                    out.append(key, value);
                }

                public void close(TaskAttemptContext context) throws IOException {
                    out.close();
                }
            };
        }
    };
}

From source file: org.archive.hadoop.pig.SequenceFileStorage.java

License: Apache License

/**
 * Most of this method is cut/pasted from the Hadoop
 * SequenceFileOutputFormat.  The big difference is that we use the
 * key and value types given to this Pig storage class rather than
 * using the ones set by the job configuration.
 */
public OutputFormat getOutputFormat() throws IOException {
    return new SequenceFileOutputFormat() {
        public RecordWriter getRecordWriter(TaskAttemptContext context)
                throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();

            Class<?> keyClass, valueClass;
            try {
                keyClass = conf.getClassByName(keyType);
                valueClass = conf.getClassByName(valueType);
            } catch (ClassNotFoundException cnfe) {
                throw new IOException(cnfe);
            }

            // Instantiate null objects for the key and value types.
            // See getWritable() for their use.
            try {
                nullKey = (Writable) keyClass.newInstance();
                nullValue = (Writable) valueClass.newInstance();
            } catch (ReflectiveOperationException roe) {
                throw new IOException(roe);
            }

            CompressionCodec codec = null;
            CompressionType compressionType = CompressionType.NONE;
            if (getCompressOutput(context)) {
                // find the kind of compression to do
                compressionType = getOutputCompressionType(context);

                // find the right codec
                Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
                codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
            }
            // get the path of the temporary output file 
            Path file = getDefaultWorkFile(context, "");
            FileSystem fs = file.getFileSystem(conf);
            final SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, file, keyClass, valueClass,
                    compressionType, codec, context);

            return new RecordWriter() {

                public void write(Object key, Object value) throws IOException {

                    out.append(key, value);
                }

                public void close(TaskAttemptContext context) throws IOException {
                    out.close();
                }
            };
        }
    };
}