Example usage for org.apache.hadoop.conf Configuration getClass

List of usage examples for org.apache.hadoop.conf Configuration getClass

Introduction

On this page you can find example usage for org.apache.hadoop.conf Configuration getClass.

Prototype

public <U> Class<? extends U> getClass(String name, Class<? extends U> defaultValue, Class<U> xface) 

Document

Get the value of the name property as a Class implementing the interface specified by xface.
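
The call resolves the class named by the property, falls back to defaultValue when the property is unset, and throws a RuntimeException if the resolved class does not implement xface. A minimal round-trip sketch (the key "example.codec.class" and the codec classes are illustrative, not taken from the examples below):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.GzipCodec;

public class GetClassSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // setClass verifies GzipCodec is assignable to CompressionCodec
        // before storing its name under the (illustrative) key.
        conf.setClass("example.codec.class", GzipCodec.class, CompressionCodec.class);

        // getClass loads the stored class; DefaultCodec would be returned if
        // the key were unset, and a RuntimeException is thrown if the stored
        // class did not implement CompressionCodec.
        Class<? extends CompressionCodec> codecClass = conf.getClass("example.codec.class",
                DefaultCodec.class, CompressionCodec.class);
        System.out.println(codecClass.getName()); // org.apache.hadoop.io.compress.GzipCodec
    }
}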

Usage

From source file:com.google.cloud.bigtable.mapreduce.Import.java

License:Open Source License

/**
 * Create a {@link Filter} to apply to all incoming keys ({@link KeyValue KeyValues}),
 * allowing some to be excluded from the job output.
 * @param conf {@link Configuration} from which to load the filter
 * @return the filter to use for the task, or <tt>null</tt> if no filter should be used
 * @throws IllegalArgumentException if the filter is misconfigured
 */
public static Filter instantiateFilter(Configuration conf) {
    // get the filter, if it was configured    
    Class<? extends Filter> filterClass = conf.getClass(FILTER_CLASS_CONF_KEY, null, Filter.class);
    if (filterClass == null) {
        LOG.debug("No configured filter class, accepting all keyvalues.");
        return null;
    }
    LOG.debug("Attempting to create filter:" + filterClass);
    String[] filterArgs = conf.getStrings(FILTER_ARGS_CONF_KEY);
    ArrayList<byte[]> quotedArgs = toQuotedByteArrays(filterArgs);
    try {
        Method m = filterClass.getMethod("createFilterFromArguments", ArrayList.class);
        return (Filter) m.invoke(null, quotedArgs);
    } catch (IllegalAccessException | SecurityException | NoSuchMethodException
            | IllegalArgumentException | InvocationTargetException e) {
        LOG.error("Couldn't instantiate filter!", e);
        throw new RuntimeException(e);
    }
}
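
For context, a caller wires the filter up through the same two configuration keys before the job runs. A sketch, assuming FILTER_CLASS_CONF_KEY and FILTER_ARGS_CONF_KEY are public constants on Import (as in the HBase original it derives from), with PrefixFilter as one filter that provides the static createFilterFromArguments factory; the prefix argument is illustrative:

import com.google.cloud.bigtable.mapreduce.Import;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PrefixFilter;

public class FilterConfigSketch {
    public static Filter configureAndLoad() {
        Configuration conf = HBaseConfiguration.create();
        // setClass checks that PrefixFilter is assignable to Filter before storing it.
        conf.setClass(Import.FILTER_CLASS_CONF_KEY, PrefixFilter.class, Filter.class);
        // instantiateFilter hands these to the filter's static factory as quoted byte arrays.
        conf.setStrings(Import.FILTER_ARGS_CONF_KEY, "row-prefix");
        // Reads the class back via getClass and reflectively invokes createFilterFromArguments.
        return Import.instantiateFilter(conf);
    }
}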

From source file:com.google.cloud.bigtable.mapreduce.Import.java

License:Open Source License

/**
 * Sets up the actual job.
 * @param conf The current configuration.
 * @param args The command line parameters.
 * @return The newly created job.
 * @throws IOException When setting up the job fails.
 */
public static Job createSubmittableJob(Configuration conf, String[] args) throws IOException {
    TableName tableName = TableName.valueOf(args[0]);
    conf.set(TABLE_NAME, tableName.getNameAsString());
    Path inputDir = new Path(args[1]);
    Job job = Job.getInstance(conf, conf.get(JOB_NAME_CONF_KEY, NAME + "_" + tableName));
    job.setJarByClass(Importer.class);
    FileInputFormat.setInputPaths(job, inputDir);
    job.setInputFormatClass(SequenceFileInputFormat.class);
    String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);

    // make sure we get the filter in the jars
    try {
        Class<? extends Filter> filter = conf.getClass(FILTER_CLASS_CONF_KEY, null, Filter.class);
        if (filter != null) {
            TableMapReduceUtil.addDependencyJars(conf, filter);
        }
    } catch (Exception e) {
        throw new IOException(e);
    }

    if (hfileOutPath != null) {
        job.setMapperClass(KeyValueImporter.class);
        try (Connection conn = ConnectionFactory.createConnection(conf);
                Table table = conn.getTable(tableName);
                RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
            job.setReducerClass(KeyValueSortReducer.class);
            Path outputDir = new Path(hfileOutPath);
            FileOutputFormat.setOutputPath(job, outputDir);
            job.setMapOutputKeyClass(ImmutableBytesWritable.class);
            job.setMapOutputValueClass(KeyValue.class);
            HFileOutputFormat2.configureIncrementalLoad(job, table, regionLocator);
            TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
                    com.google.common.base.Preconditions.class);
        }
    } else {
        // No reducers.  Just write straight to table.  Call initTableReducerJob
        // because it sets up the TableOutputFormat.
        job.setMapperClass(Importer.class);
        TableMapReduceUtil.initTableReducerJob(tableName.getNameAsString(), null, job);
        job.setNumReduceTasks(0);
    }
    return job;
}

From source file:com.hotels.corc.mapred.CorcInputFormat.java

License:Apache License

/**
 * Gets the ConverterFactory from the configuration
 */
static ConverterFactory getConverterFactory(Configuration conf) {
    Class<? extends ConverterFactory> converterFactoryClass = conf.getClass(CONVERTER_FACTORY, null,
            ConverterFactory.class);
    if (converterFactoryClass == null) {
        throw new RuntimeException("ConverterFactory class was not set on the configuration");
    }
    LOG.debug("Got input ConverterFactory class from conf: {}", converterFactoryClass);
    return ReflectionUtils.newInstance(converterFactoryClass, conf);
}
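
The setter side is presumably symmetric; a minimal sketch reusing the CONVERTER_FACTORY key read above (the helper name is hypothetical):

static void setConverterFactory(Configuration conf, Class<? extends ConverterFactory> factoryClass) {
    // setClass validates that factoryClass implements ConverterFactory before
    // storing its name, so the null-default lookup in getConverterFactory can
    // only fail when the key was never set at all.
    conf.setClass(CONVERTER_FACTORY, factoryClass, ConverterFactory.class);
}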

From source file:com.inmobi.conduit.distcp.tools.util.DistCpUtils.java

License:Apache License

/**
 * Returns the class that implements a copy strategy. Looks up the implementation for
 * a particular strategy from distcp-default.xml
 *
 * @param conf - Configuration object
 * @param options - Handle to input options
 * @return Class implementing the strategy specified in options.
 */
public static Class<? extends InputFormat> getStrategy(Configuration conf, DistCpOptions options) {
    String confLabel = "distcp." + options.getCopyStrategy().toLowerCase(Locale.getDefault())
            + ".strategy.impl";
    return conf.getClass(confLabel, UniformSizeInputFormat.class, InputFormat.class);
}
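
To make the key construction concrete: a copy strategy named "dynamic" resolves to the key distcp.dynamic.strategy.impl, and getClass falls back to UniformSizeInputFormat when no binding exists anywhere, including distcp-default.xml. A sketch of binding a strategy by hand (the DynamicInputFormat import location is assumed):

import com.inmobi.conduit.distcp.tools.mapred.lib.DynamicInputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputFormat;

Configuration conf = new Configuration();
// "dynamic" is lower-cased and spliced into the lookup key by getStrategy,
// so this is the binding that call would resolve.
conf.setClass("distcp.dynamic.strategy.impl", DynamicInputFormat.class, InputFormat.class);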

From source file:com.inmobi.grill.driver.hive.HiveDriver.java

License:Apache License

@Override
public void configure(Configuration conf) throws GrillException {
    this.conf = new HiveConf(conf, HiveDriver.class);
    connectionClass = conf.getClass(GRILL_HIVE_CONNECTION_CLASS, EmbeddedThriftConnection.class,
            ThriftConnection.class);
    isEmbedded = (connectionClass.getName().equals(EmbeddedThriftConnection.class.getName()));
}

From source file:com.inmobi.grill.driver.jdbc.JDBCDriver.java

License:Apache License

protected void init(Configuration conf) throws GrillException {
    queryContextMap = new ConcurrentHashMap<QueryHandle, JdbcQueryContext>();
    rewriterCache = new ConcurrentHashMap<Class<? extends QueryRewriter>, QueryRewriter>();
    asyncQueryPool = Executors.newCachedThreadPool(new ThreadFactory() {
        @Override
        public Thread newThread(Runnable runnable) {
            Thread th = new Thread(runnable);
            th.setName("grill-driver-jdbc-" + thid.incrementAndGet());
            return th;
        }
    });

    Class<? extends ConnectionProvider> cpClass = conf.getClass(JDBC_CONNECTION_PROVIDER,
            DataSourceConnectionProvider.class, ConnectionProvider.class);
    try {
        connectionProvider = cpClass.newInstance();
    } catch (Exception e) {
        LOG.error("Error initializing connection provider: " + e.getMessage(), e);
        throw new GrillException(e);
    }
}

From source file:com.inmobi.grill.driver.jdbc.JDBCDriver.java

License:Apache License

protected synchronized QueryRewriter getQueryRewriter(Configuration conf) throws GrillException {
    QueryRewriter rewriter;
    Class<? extends QueryRewriter> queryRewriterClass = conf.getClass(JDBC_QUERY_REWRITER_CLASS,
            DummyQueryRewriter.class, QueryRewriter.class);
    if (rewriterCache.containsKey(queryRewriterClass)) {
        rewriter = rewriterCache.get(queryRewriterClass);
    } else {
        try {
            rewriter = queryRewriterClass.newInstance();
        } catch (Exception e) {
            LOG.error("Unable to create rewriter object", e);
            throw new GrillException(e);
        }
        rewriterCache.put(queryRewriterClass, rewriter);
    }
    return rewriter;
}

From source file:com.knewton.mapreduce.util.SerializationUtils.java

License:Apache License

public static TDeserializer getDeserializerFromConf(Configuration conf) {
    Class<? extends TProtocolFactory> protocolFactoryClass = conf.getClass(
            PropertyConstants.SERIALIZATION_FACTORY_PARAMETER.txt, SERIALIZATION_FACTORY_PARAMETER_DEFAULT,
            TProtocolFactory.class);
    TProtocolFactory protocolFactory = ReflectionUtils.newInstance(protocolFactoryClass, conf);
    return new TDeserializer(protocolFactory);
}
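
Selecting a different protocol goes through the same property; a sketch, assuming the enum's txt field holds the property name as the lookup above implies, with Thrift's TBinaryProtocol.Factory as one valid TProtocolFactory:

import org.apache.hadoop.conf.Configuration;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocolFactory;

Configuration conf = new Configuration();
// Overrides whatever factory class the default constant would supply.
conf.setClass(PropertyConstants.SERIALIZATION_FACTORY_PARAMETER.txt,
        TBinaryProtocol.Factory.class, TProtocolFactory.class);
TDeserializer deserializer = SerializationUtils.getDeserializerFromConf(conf);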

From source file:com.marklogic.mapreduce.DocumentReader.java

License:Apache License

public DocumentReader(Configuration conf) {
    super(conf);
    valueClass = conf.getClass(INPUT_VALUE_CLASS, DatabaseDocument.class, Writable.class);
}

From source file:com.marklogic.mapreduce.KeyValueReader.java

License:Apache License

public KeyValueReader(Configuration conf) {
    super(conf);
    keyClass = conf.getClass(INPUT_KEY_CLASS, Text.class, WritableComparable.class);
    valueClass = conf.getClass(INPUT_VALUE_CLASS, Text.class, Writable.class);
}