Example usage for org.apache.hadoop.conf Configuration set

List of usage examples for org.apache.hadoop.conf Configuration set

Introduction

On this page you can find example usage for org.apache.hadoop.conf Configuration set.

Prototype

public void set(String name, String value) 

Document

Set the value of the name property.
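
A minimal, self-contained sketch of the call (the property name and value here are illustrative, not taken from the examples below):

import org.apache.hadoop.conf.Configuration;

public class ConfigurationSetExample {
    public static void main(String[] args) {
        // Start from an empty Configuration (no default resources loaded)
        Configuration conf = new Configuration(false);

        // Set the value of the "fs.defaultFS" property
        conf.set("fs.defaultFS", "file:///");

        // Read the value back
        System.out.println(conf.get("fs.defaultFS")); // prints "file:///"
    }
}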

Usage

From source file: co.cask.cdap.explore.service.ExploreServiceUtilsTest.java

License: Apache License

@Test
public void hijackConfFileTest() throws Exception {
    Configuration conf = new Configuration(false);
    conf.set("foo", "bar");
    Assert.assertEquals(1, conf.size());

    File tempDir = tmpFolder.newFolder();

    File confFile = tmpFolder.newFile("hive-site.xml");

    try (FileOutputStream os = new FileOutputStream(confFile)) {
        conf.writeXml(os);
    }

    File newConfFile = ExploreServiceUtils.updateConfFileForExplore(confFile, tempDir);

    conf = new Configuration(false);
    conf.addResource(newConfFile.toURI().toURL());

    Assert.assertEquals(3, conf.size());
    Assert.assertEquals("false", conf.get(Job.MAPREDUCE_JOB_USER_CLASSPATH_FIRST));
    Assert.assertEquals("false", conf.get(Job.MAPREDUCE_JOB_CLASSLOADER));
    Assert.assertEquals("bar", conf.get("foo"));

    // check yarn-site changes
    confFile = tmpFolder.newFile("yarn-site.xml");
    conf = new YarnConfiguration();

    try (FileOutputStream os = new FileOutputStream(confFile)) {
        conf.writeXml(os);
    }

    String yarnApplicationClassPath = "$PWD/*," + conf.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            Joiner.on(",").join(YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH));

    newConfFile = ExploreServiceUtils.updateConfFileForExplore(confFile, tempDir);

    conf = new Configuration(false);
    conf.addResource(newConfFile.toURI().toURL());

    Assert.assertEquals(yarnApplicationClassPath, conf.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH));

    // check mapred-site changes
    confFile = tmpFolder.newFile("mapred-site.xml");
    conf = new YarnConfiguration();

    try (FileOutputStream os = new FileOutputStream(confFile)) {
        conf.writeXml(os);
    }

    String mapredApplicationClassPath = "$PWD/*," + conf.get(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH,
            MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH);

    newConfFile = ExploreServiceUtils.updateConfFileForExplore(confFile, tempDir);

    conf = new Configuration(false);
    conf.addResource(newConfFile.toURI().toURL());

    Assert.assertEquals(mapredApplicationClassPath, conf.get(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH));

    // Ensure conf files that are not hive-site.xml/mapred-site.xml/yarn-site.xml are unchanged
    confFile = tmpFolder.newFile("core-site.xml");
    Assert.assertEquals(confFile, ExploreServiceUtils.updateConfFileForExplore(confFile, tempDir));
}
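
The test above relies on a standard Hadoop round trip: serialize a Configuration to XML with writeXml, then load it back as a resource with addResource. A minimal sketch of that pattern on its own, independent of CDAP (the file name "my-conf.xml" is illustrative):

import java.io.File;
import java.io.FileOutputStream;

import org.apache.hadoop.conf.Configuration;

public class ConfRoundTrip {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(false); // no default resources
        conf.set("foo", "bar");

        // Persist every property set so far as a Hadoop XML configuration file
        File confFile = new File("my-conf.xml");
        try (FileOutputStream os = new FileOutputStream(confFile)) {
            conf.writeXml(os);
        }

        // Load the file back into a fresh Configuration
        Configuration reloaded = new Configuration(false);
        reloaded.addResource(confFile.toURI().toURL());
        System.out.println(reloaded.get("foo")); // prints "bar"
    }
}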

From source file: co.cask.cdap.hbase.wd.RowKeyDistributorTestBase.java

License: Apache License

@BeforeClass
public static void beforeClass() throws Exception {
    if (!runBefore) {
        return;
    }

    testingUtility = new HBaseTestingUtility();
    Configuration hConf = testingUtility.getConfiguration();
    hConf.set("yarn.is.minicluster", "true");
    // Tune down the connection thread pool size
    hConf.setInt("hbase.hconnection.threads.core", 5);
    hConf.setInt("hbase.hconnection.threads.max", 10);
    // Tune down handler threads in regionserver
    hConf.setInt("hbase.regionserver.handler.count", 10);

    // Set to random port
    hConf.setInt("hbase.master.port", Networks.getRandomPort());
    hConf.setInt("hbase.master.info.port", Networks.getRandomPort());
    hConf.setInt("hbase.regionserver.port", Networks.getRandomPort());
    hConf.setInt("hbase.regionserver.info.port", Networks.getRandomPort());

    testingUtility.startMiniCluster();
    hTable = testingUtility.createTable(TABLE, CF);
}

From source file: co.cask.cdap.hbase.wd.RowKeyDistributorTestBase.java

License: Apache License

private void testMapReduceInternal(long origKeyPrefix, Scan scan, int numValues, int startWithValue,
        int seekIntervalMinValue, int seekIntervalMaxValue)
        throws IOException, InterruptedException, ClassNotFoundException {
    int valuesCountInSeekInterval = writeTestData(origKeyPrefix, numValues, startWithValue,
            seekIntervalMinValue, seekIntervalMaxValue);

    // Reading data
    Configuration conf = new Configuration(testingUtility.getConfiguration());
    conf.set("fs.defaultFS", "file:///");
    conf.set("fs.default.name", "file:///");
    conf.setInt("mapreduce.local.map.tasks.maximum", 16);
    conf.setInt("mapreduce.local.reduce.tasks.maximum", 16);
    Job job = Job.getInstance(conf, "testMapReduceInternal()-Job");
    TableMapReduceUtil.initTableMapperJob(TABLE_NAME, scan, RowCounterMapper.class,
            ImmutableBytesWritable.class, Result.class, job);

    // Substituting the standard TableInputFormat that was set in TableMapReduceUtil.initTableMapperJob(...)
    job.setInputFormatClass(WdTableInputFormat.class);
    keyDistributor.addInfo(job.getConfiguration());

    job.setOutputFormatClass(NullOutputFormat.class);
    job.setNumReduceTasks(0);

    boolean succeeded = job.waitForCompletion(true);
    Assert.assertTrue(succeeded);

    long mapInputRecords = job.getCounters().findCounter(RowCounterMapper.Counters.ROWS).getValue();
    Assert.assertEquals(valuesCountInSeekInterval, mapInputRecords);

    // Need to kill the job after completion, as otherwise it could leave the MRAppMaster
    // running, not terminated. Not sure what is causing this; it may be a problem in MiniYarnCluster.
    job.killJob();
}

From source file: co.cask.cdap.hive.ConfCodecTest.java

License: Apache License

@Test
public void testCConfCodec() throws Exception {
    // Serialize
    CConfiguration conf = CConfiguration.create();
    conf.set("foo", "bar");

    Configuration hconf = HBaseConfiguration.create();
    hconf.set("hfoo", "hbar");

    Map<String, String> confMap = Maps.newHashMap();
    ConfigurationUtil.set(confMap, Constants.Explore.CCONF_KEY, CConfCodec.INSTANCE, conf);
    ConfigurationUtil.set(confMap, Constants.Explore.HCONF_KEY, HConfCodec.INSTANCE, hconf);

    // Deserialize
    CConfiguration newConf = ConfigurationUtil.get(confMap, Constants.Explore.CCONF_KEY, CConfCodec.INSTANCE);
    Assert.assertEquals("bar", newConf.get("foo"));

    Configuration newHconf = ConfigurationUtil.get(confMap, Constants.Explore.HCONF_KEY, HConfCodec.INSTANCE);
    Assert.assertEquals("hbar", newHconf.get("hfoo"));
}

From source file: co.cask.cdap.internal.app.preview.DefaultPreviewManager.java

License: Apache License

/**
 * Create injector for the given application id.
 */
@VisibleForTesting
Injector createPreviewInjector(ApplicationId applicationId, Set<String> datasetNames) throws IOException {
    CConfiguration previewcConf = CConfiguration.copy(cConf);
    java.nio.file.Path previewDirPath = Paths.get(cConf.get(Constants.CFG_LOCAL_DATA_DIR), "preview")
            .toAbsolutePath();

    Files.createDirectories(previewDirPath);
    java.nio.file.Path previewDir = Files.createDirectories(
            Paths.get(previewDirPath.toAbsolutePath().toString(), applicationId.getApplication()));
    previewcConf.set(Constants.CFG_LOCAL_DATA_DIR, previewDir.toString());
    previewcConf.set(Constants.Dataset.DATA_DIR, previewDir.toString());
    Configuration previewhConf = new Configuration(hConf);
    previewhConf.set(Constants.CFG_LOCAL_DATA_DIR, previewDir.toString());
    previewcConf.setIfUnset(Constants.CFG_DATA_LEVELDB_DIR, previewDir.toString());
    previewcConf.setBoolean(Constants.Explore.EXPLORE_ENABLED, false);

    return Guice.createInjector(new ConfigModule(previewcConf, previewhConf), new IOModule(),
            new AuthenticationContextModules().getMasterModule(), new SecurityModules().getStandaloneModules(),
            new PreviewSecureStoreModule(secureStore), new PreviewDiscoveryRuntimeModule(discoveryService),
            new LocationRuntimeModule().getStandaloneModules(), new ConfigStoreModule().getStandaloneModule(),
            new PreviewRunnerModule(artifactRepository, artifactStore, authorizerInstantiator,
                    authorizationEnforcer, privilegesManager, streamAdmin, streamCoordinatorClient,
                    preferencesStore),
            new ProgramRunnerRuntimeModule().getStandaloneModules(),
            new PreviewDataModules().getDataFabricModule(transactionManager),
            new PreviewDataModules().getDataSetsModule(datasetFramework, datasetNames),
            new DataSetServiceModules().getStandaloneModules(),
            new MetricsClientRuntimeModule().getStandaloneModules(),
            new LoggingModules().getStandaloneModules(), new NamespaceStoreModule().getStandaloneModules(),
            new MessagingClientModule(), new AbstractModule() {
                @Override
                protected void configure() {
                }

                @Provides
                @Named(Constants.Service.MASTER_SERVICES_BIND_ADDRESS)
                @SuppressWarnings("unused")
                public InetAddress providesHostname(CConfiguration cConf) {
                    String address = cConf.get(Constants.Preview.ADDRESS);
                    return Networks.resolve(address, new InetSocketAddress("localhost", 0).getAddress());
                }
            });
}

From source file: co.cask.cdap.internal.app.runtime.batch.dataset.AbstractBatchReadableInputFormat.java

License: Apache License

/**
 * Sets dataset and splits information into the given {@link Configuration}.
 *
 * @param hConf            configuration to modify
 * @param datasetName      name of the dataset
 * @param datasetArguments arguments for the dataset
 * @param splits           list of splits on the dataset
 * @throws IOException
 */
public static void setDatasetSplits(Configuration hConf, String datasetName,
        Map<String, String> datasetArguments, List<Split> splits) throws IOException {
    hConf.set(DATASET_NAME, datasetName);
    hConf.set(DATASET_ARGS, GSON.toJson(datasetArguments, DATASET_ARGS_TYPE));

    // Encode the list of splits as its size followed by that many DataSetInputSplit objects.
    ByteArrayDataOutput dataOutput = ByteStreams.newDataOutput();
    dataOutput.writeInt(splits.size());
    for (Split split : splits) {
        new DataSetInputSplit(split).write(dataOutput);
    }
    hConf.set(SPLITS, Bytes.toStringBinary(dataOutput.toByteArray()));
}
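
Presumably the read side decodes this entry symmetrically: convert the string back to bytes (the inverse of Bytes.toStringBinary), read the leading int as the split count, then deserialize that many DataSetInputSplit objects.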

From source file: co.cask.cdap.internal.app.runtime.batch.dataset.AbstractBatchWritableOutputFormat.java

License: Apache License

/**
 * Sets dataset information into the given {@link Configuration}.
 *
 * @param hConf       configuration to modify
 * @param datasetName name of the dataset
 * @param datasetArgs arguments for the dataset
 */
public static void setDataset(Configuration hConf, String datasetName, Map<String, String> datasetArgs) {
    hConf.set(DATASET_NAME, datasetName);
    hConf.set(DATASET_ARGS, GSON.toJson(datasetArgs, DATASET_ARGS_TYPE));
}

From source file: co.cask.cdap.internal.app.runtime.batch.dataset.input.MultipleInputs.java

License: Apache License

/**
 * Add a {@link Path} with a custom {@link InputFormat} and
 * {@link Mapper} to the list of inputs for the map-reduce job.
 *
 * @param job The {@link Job}
 * @param namedInput name of the input
 * @param inputFormatClass the name of the InputFormat class to be used for this input
 * @param inputConfigs the configurations to be used for this input
 * @param mapperClass {@link Mapper} class to use for this path
 */
@SuppressWarnings("unchecked")
public static void addInput(Job job, String namedInput, String inputFormatClass,
        Map<String, String> inputConfigs, Class<? extends Mapper> mapperClass) {
    Configuration conf = job.getConfiguration();

    Map<String, MapperInput> map = getInputMap(conf);
    // this shouldn't happen, because it is already protected against in BasicMapReduceContext#addInput
    if (map.containsKey(namedInput)) {
        throw new IllegalArgumentException("Input already configured: " + namedInput);
    }
    map.put(namedInput, new MapperInput(inputFormatClass, inputConfigs, mapperClass));
    conf.set(INPUT_CONFIGS, GSON.toJson(map));

    job.setInputFormatClass(DelegatingInputFormat.class);
}

From source file: co.cask.cdap.internal.app.runtime.batch.dataset.output.MultipleOutputs.java

License: Apache License

/**
 * Adds a named output for the job.
 *
 * @param job               job to add the named output
 * @param namedOutput       named output name; it has to be a single word,
 *                          letters and numbers only (alphanumeric)
 * @param outputFormatClass name of the OutputFormat class.
 * @param keyClass          key class
 * @param valueClass        value class
 * @param outputConfigs     configurations for the output
 */
@SuppressWarnings("unchecked")
public static void addNamedOutput(Job job, String namedOutput, String outputFormatClass, Class<?> keyClass,
        Class<?> valueClass, Map<String, String> outputConfigs) {
    assertValidName(namedOutput);
    checkNamedOutputName(namedOutput, getNamedOutputsList(job), false);
    Configuration conf = job.getConfiguration();
    conf.set(MULTIPLE_OUTPUTS, conf.get(MULTIPLE_OUTPUTS, "") + " " + namedOutput);
    conf.set(MO_PREFIX + namedOutput + FORMAT, outputFormatClass);
    conf.setClass(MO_PREFIX + namedOutput + KEY, keyClass, Object.class);
    conf.setClass(MO_PREFIX + namedOutput + VALUE, valueClass, Object.class);
    ConfigurationUtil.setNamedConfigurations(conf, computePrefixName(namedOutput), outputConfigs);
}
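
A hypothetical caller sketch for the method above (note that MultipleOutputs here is the CDAP class from this file, not Hadoop's class of the same name; the output name, key/value classes, and configuration entry are illustrative):

import java.util.Collections;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class NamedOutputUsage {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration());
        // Registers a named output "stats" writing Text keys and LongWritable values
        MultipleOutputs.addNamedOutput(job, "stats",
                TextOutputFormat.class.getName(), Text.class, LongWritable.class,
                Collections.singletonMap("custom.output.key", "custom.output.value"));
    }
}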

From source file: co.cask.cdap.internal.app.runtime.batch.MapperWrapper.java

License: Apache License

/**
 * Wraps the mapper defined in the job with this {@link MapperWrapper} if it is defined.
 * @param job The MapReduce job
 */
public static void wrap(Job job) {
    // NOTE: we don't use job.getMapperClass() as we don't need to load the user class here
    Configuration conf = job.getConfiguration();
    String mapClass = conf.get(MRJobConfig.MAP_CLASS_ATTR, Mapper.class.getName());
    conf.set(MapperWrapper.ATTR_MAPPER_CLASS, mapClass);
    job.setMapperClass(MapperWrapper.class);
}