Example usage for org.apache.hadoop.conf Configuration setClass

Introduction

This page lists example usages of org.apache.hadoop.conf.Configuration#setClass drawn from open-source projects.

Prototype

public void setClass(String name, Class<?> theClass, Class<?> xface) 

Document

Set the value of the name property to the name of theClass, which must implement the given interface xface.
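
Before the project-specific usages below, here is a minimal, self-contained sketch of the set/get round trip. The property key "example.codec.impl" is made up for illustration; CompressionCodec, GzipCodec, and DefaultCodec are standard Hadoop types. Note that setClass throws a RuntimeException if the class does not implement the given interface.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.GzipCodec;

public class SetClassRoundTrip {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Store the implementation's class name under an illustrative key.
        // This would throw a RuntimeException if GzipCodec did not implement CompressionCodec.
        conf.setClass("example.codec.impl", GzipCodec.class, CompressionCodec.class);
        // Read the class back, falling back to DefaultCodec if the key were unset.
        Class<? extends CompressionCodec> codec = conf.getClass("example.codec.impl", DefaultCodec.class,
                CompressionCodec.class);
        System.out.println(codec.getName()); // prints org.apache.hadoop.io.compress.GzipCodec
    }
}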

Usage

From source file: co.cask.cdap.internal.app.runtime.batch.StreamDecoderDetectionTest.java

License: Apache License

@Test
public void testDecoderDetection() throws IOException {
    Configuration hConf = new Configuration();

    hConf.setClass(Job.MAP_CLASS_ATTR, IdentityMapper.class, Mapper.class);
    StreamInputFormat.inferDecoderClass(hConf,
            MapReduceRuntimeService.getInputValueType(hConf, Void.class, getMapperTypeToken(hConf)));
    Assert.assertSame(IdentityStreamEventDecoder.class, StreamInputFormat.getDecoderClass(hConf));

    hConf.setClass(Job.MAP_CLASS_ATTR, NoTypeMapper.class, Mapper.class);
    StreamInputFormat.inferDecoderClass(hConf,
            MapReduceRuntimeService.getInputValueType(hConf, StreamEvent.class, getMapperTypeToken(hConf)));
    Assert.assertSame(IdentityStreamEventDecoder.class, StreamInputFormat.getDecoderClass(hConf));

    hConf.setClass(Job.MAP_CLASS_ATTR, TextMapper.class, Mapper.class);
    StreamInputFormat.inferDecoderClass(hConf,
            MapReduceRuntimeService.getInputValueType(hConf, Void.class, getMapperTypeToken(hConf)));
    Assert.assertSame(TextStreamEventDecoder.class, StreamInputFormat.getDecoderClass(hConf));

    try {
        hConf.setClass(Job.MAP_CLASS_ATTR, InvalidTypeMapper.class, Mapper.class);
        StreamInputFormat.inferDecoderClass(hConf,
                MapReduceRuntimeService.getInputValueType(hConf, Void.class, getMapperTypeToken(hConf)));
        Assert.fail("Expected Exception");
    } catch (IllegalArgumentException e) {
        // Expected
    }
}

From source file: co.cask.cdap.internal.app.runtime.spark.AbstractSparkContext.java

License: Apache License

/**
 * Sets the input {@link Dataset} with splits in the {@link Configuration}
 *
 * @param datasetName the name of the {@link Dataset} to read from
 * @return updated {@link Configuration}
 * @throws IllegalArgumentException if the {@link Dataset} to read is not {@link BatchReadable}
 */
Configuration setInputDataset(String datasetName) {
    Configuration hConf = new Configuration(getHConf());
    Dataset dataset = basicSparkContext.getDataSet(datasetName);
    List<Split> inputSplits;
    if (dataset instanceof BatchReadable) {
        BatchReadable curDataset = (BatchReadable) dataset;
        inputSplits = curDataset.getSplits();
    } else {
        throw new IllegalArgumentException("Failed to read dataset " + datasetName
                + ". The dataset does not implement" + " BatchReadable");
    }
    hConf.setClass(MRJobConfig.INPUT_FORMAT_CLASS_ATTR, SparkDatasetInputFormat.class, InputFormat.class);
    hConf.set(SparkDatasetInputFormat.HCONF_ATTR_INPUT_DATASET, datasetName);
    hConf.set(SparkContextConfig.HCONF_ATTR_INPUT_SPLIT_CLASS, inputSplits.get(0).getClass().getName());
    hConf.set(SparkContextConfig.HCONF_ATTR_INPUT_SPLITS, new Gson().toJson(inputSplits));
    return hConf;
}

From source file: co.cask.cdap.internal.app.runtime.spark.AbstractSparkContext.java

License: Apache License

/**
 * Sets the output {@link Dataset} with splits in the {@link Configuration}
 *
 * @param datasetName the name of the {@link Dataset} to write to
 * @return updated {@link Configuration}
 */
Configuration setOutputDataset(String datasetName) {
    Configuration hConf = new Configuration(getHConf());
    hConf.set(SparkDatasetOutputFormat.HCONF_ATTR_OUTPUT_DATASET, datasetName);
    hConf.setClass(MRJobConfig.OUTPUT_FORMAT_CLASS_ATTR, SparkDatasetOutputFormat.class, OutputFormat.class);
    return hConf;
}

From source file: com.ailk.oci.ocnosql.tools.load.csvbulkload.CsvBulkImportUtil.java

License: Apache License

/**
 * Configure an {@link ImportPreUpsertKeyValueProcessor} for a CSV bulk import job.
 *
 * @param conf job configuration
 * @param processorClass class to be used for performing pre-upsert processing
 */
public static void configurePreUpsertProcessor(Configuration conf,
        Class<? extends ImportPreUpsertKeyValueProcessor> processorClass) {
    conf.setClass(PhoenixCsvToKeyValueMapper.UPSERT_HOOK_CLASS_CONFKEY, processorClass,
            ImportPreUpsertKeyValueProcessor.class);
}

From source file: com.asakusafw.runtime.directio.hadoop.HadoopDataSourceProfileTest.java

License: Apache License

/**
 * The file system is inconsistent between the production and temporary paths.
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_inconsistent_fs() throws Exception {
    Configuration conf = new Configuration();
    conf.setClass("fs.mock.impl", MockFs.class, FileSystem.class);
    Map<String, String> attributes = new HashMap<>();
    attributes.put(KEY_PATH, folder.getRoot().toURI().toString());
    attributes.put(KEY_TEMP, "mock://" + folder.getRoot().toURI().toString());
    DirectDataSourceProfile profile = new DirectDataSourceProfile("testing", HadoopDataSource.class, "context",
            attributes);
    HadoopDataSourceProfile.convert(profile, conf);
}

From source file: com.asakusafw.runtime.stage.output.StageOutputDriver.java

License: Apache License

private static void addOutput(Job job, String name, Class<?> formatClass, Class<?> keyClass,
        Class<?> valueClass) {
    assert job != null;
    assert name != null;
    assert formatClass != null;
    assert keyClass != null;
    assert valueClass != null;
    if (isValidName(name) == false) {
        throw new IllegalArgumentException(MessageFormat.format("Output name \"{0}\" is not valid", name));
    }
    Configuration conf = job.getConfiguration();
    Set<String> names = new TreeSet<>(conf.getStringCollection(K_NAMES));
    if (names.contains(name)) {
        throw new IllegalArgumentException(
                MessageFormat.format("Output name \"{0}\" is already declared", name));
    }
    names.add(name);
    conf.setStrings(K_NAMES, names.toArray(new String[names.size()]));
    conf.setClass(getPropertyName(K_FORMAT_PREFIX, name), formatClass, OutputFormat.class);
    conf.setClass(getPropertyName(K_KEY_PREFIX, name), keyClass, Object.class);
    conf.setClass(getPropertyName(K_VALUE_PREFIX, name), valueClass, Object.class);
}

From source file: com.bah.culvert.Client.java

License: Apache License

public static void setDatabaseAdapter(Configuration conf, Class<? extends DatabaseAdapter> adapterClass) {
    conf.setClass(DATABASE_ADAPTER_CONF_KEY, adapterClass, DatabaseAdapter.class);
}

From source file: com.bah.culvert.data.index.Index.java

License: Apache License

/**
 * Set the database adapter to use for this index.
 * @param conf The conf to set the database adapter in.
 * @param adapterClass The database adapter class to set.
 */
public static void setDatabaseAdapter(Configuration conf, Class<? extends DatabaseAdapter> adapterClass) {
    conf.setClass(DATABASE_ADAPTER_CONF_KEY, adapterClass, DatabaseAdapter.class);
}

From source file: com.basho.riak.hadoop.config.RiakConfig.java

License: Apache License

/**
 * Set the {@link KeyLister} implementation to use.
 *
 * @param conf
 *            the {@link Configuration} to update
 * @param lister
 *            the {@link KeyLister} to use
 * @return the configuration updated with a serialized version of the lister
 *         provided
 */
public static <T extends KeyLister> Configuration setKeyLister(Configuration conf, T lister)
        throws IOException {
    conf.setClass(KEY_LISTER_CLASS_PROPERTY, lister.getClass(), KeyLister.class);
    conf.setStrings(KEY_LISTER_INIT_STRING_PROPERTY, lister.getInitString());
    return conf;
}

From source file: com.cloudera.crunch.type.avro.Avros.java

License: Open Source License

public static void configureReflectDataFactory(Configuration conf) {
    conf.setClass(REFLECT_DATA_FACTORY_CLASS, REFLECT_DATA_FACTORY.getClass(), ReflectDataFactory.class);
}