Example usage for org.apache.hadoop.conf Configuration set

Introduction

On this page you can find usage examples for org.apache.hadoop.conf Configuration.set.

Prototype

public void set(String name, String value) 

Document

Set the value of the name property.
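
Before the real-world usages below, here is a minimal, self-contained sketch of the call, assuming a default Configuration with no site files on the classpath; the property name and URI are illustrative only.

import org.apache.hadoop.conf.Configuration;

public class ConfigurationSetExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // set() overrides anything loaded from core-default.xml / core-site.xml
        conf.set("fs.defaultFS", "hdfs://namenode.example.com:8020");
        // get() returns the value set above
        System.out.println(conf.get("fs.defaultFS"));
    }
}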

Usage

From source file:cn.lhfei.hbase.basic.AppConfig.java

License:Apache License

public static Configuration getConfiguration() {
    Configuration conf = HBaseConfiguration.create();

    conf.set("hbase.master", "192.168.118.1286000");
    conf.set("hbase.zookeeper.quorum", "192.168.118.128");
    //conf.set("zookeeper.znode.parent", "/cloudland");   // Path must start with / character
    conf.set("zookeeper.znode.parent", "/hbase-unsecure");
    return conf;
}
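
A hedged follow-up showing how such a Configuration is typically consumed; the table name is hypothetical, and this assumes the hbase-client library and a reachable cluster.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;

public class AppConfigUsage {
    public static void main(String[] args) throws Exception {
        Configuration conf = AppConfig.getConfiguration();
        // The client reads the ZooKeeper quorum and znode parent set above
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("demo_table"))) { // hypothetical table name
            System.out.println("Connected to table: " + table.getName());
        }
    }
}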

From source file:co.cask.cdap.app.runtime.spark.distributed.SparkExecutionServiceTest.java

License:Apache License

@BeforeClass
public static void init() throws IOException {
    Configuration hConf = new Configuration();
    hConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, TEMP_FOLDER.newFolder().getAbsolutePath());
    dfsCluster = new MiniDFSCluster.Builder(hConf).numDataNodes(1).build();
    dfsCluster.waitClusterUp();
    locationFactory = new FileContextLocationFactory(dfsCluster.getFileSystem().getConf());
}
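
A setup like this is usually paired with a matching teardown; a minimal sketch, assuming dfsCluster is the static field initialized above (the method name is illustrative).

@AfterClass
public static void finish() {
    if (dfsCluster != null) {
        // Stops the in-process HDFS cluster started in init()
        dfsCluster.shutdown();
    }
}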

From source file:co.cask.cdap.common.conf.CConfiguration.java

License:Apache License

public void copyTxProperties(org.apache.hadoop.conf.Configuration destination) {
    Properties props = getProps();
    for (String property : props.stringPropertyNames()) {
        if (property.startsWith("data.tx") || property.startsWith("tx.persist")) {
            destination.set(property, get(property));
        }
    }
}

From source file:co.cask.cdap.common.conf.CConfigurationUtil.java

License:Apache License

public static void copyTxProperties(CConfiguration cConf, org.apache.hadoop.conf.Configuration destination) {
    Properties props = cConf.getProps();
    for (String property : props.stringPropertyNames()) {
        if (property.startsWith("data.tx") || property.startsWith("tx.persist")) {
            destination.set(property, cConf.get(property));
        }
    }
}

From source file:co.cask.cdap.common.conf.ConfigurationUtil.java

License:Apache License

public static <T> void set(Configuration conf, String key, Codec<T> codec, T obj) throws IOException {
    String value = new String(codec.encode(obj), Charsets.UTF_8);
    LOG.trace("Serializing {} {}", key, value);
    conf.set(key, value);
}

From source file:co.cask.cdap.common.conf.ConfigurationUtil.java

License:Apache License

/**
 * Sets all key-value pairs from the given {@link Map} into the given {@link Configuration}.
 *
 * @return the {@link Configuration} instance provided
 */
public static Configuration setAll(@Nullable Map<String, String> map, Configuration conf) {
    if (map == null) {
        return conf;
    }
    for (Map.Entry<String, String> entry : map.entrySet()) {
        conf.set(entry.getKey(), entry.getValue());
    }
    return conf;
}
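
A hedged usage sketch of the method above, copying a plain map of overrides into a fresh Configuration; the property names are ordinary Hadoop keys chosen only for illustration. Passing null simply returns the Configuration unchanged.

Map<String, String> overrides = new HashMap<>();
overrides.put("mapreduce.map.memory.mb", "2048");
overrides.put("mapreduce.reduce.memory.mb", "4096");
// setAll() writes every map entry into the Configuration and returns it
Configuration conf = ConfigurationUtil.setAll(overrides, new Configuration());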

From source file:co.cask.cdap.common.conf.ConfigurationUtil.java

License:Apache License

/**
 * Prefixes the specified configurations with the given prefix, and sets them onto the job's configuration.
 *
 * @param conf the Configuration object on which the configurations will be set on
 * @param confKeyPrefix the String to prefix the keys of the configuration
 * @param namedConf the configuration values to be set
 */
public static void setNamedConfigurations(Configuration conf, String confKeyPrefix,
        Map<String, String> namedConf) {
    for (Map.Entry<String, String> entry : namedConf.entrySet()) {
        conf.set(confKeyPrefix + entry.getKey(), entry.getValue());
    }
}
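
The natural counterpart reads prefixed entries back out by iterating the Configuration; a hypothetical sketch (not taken from the source file), assuming the same prefix convention.

public static Map<String, String> getNamedConfigurations(Configuration conf, String confKeyPrefix) {
    Map<String, String> namedConf = new HashMap<>();
    int prefixLength = confKeyPrefix.length();
    // Configuration is Iterable<Map.Entry<String, String>>, so prefixed keys can be filtered directly
    for (Map.Entry<String, String> entry : conf) {
        if (entry.getKey().startsWith(confKeyPrefix)) {
            namedConf.put(entry.getKey().substring(prefixLength), entry.getValue());
        }
    }
    return namedConf;
}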

From source file:co.cask.cdap.common.io.DFSSeekableInputStreamTest.java

License:Apache License

@BeforeClass
public static void init() throws IOException {
    Configuration hConf = new Configuration();
    hConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, TMP_FOLDER.newFolder().getAbsolutePath());
    dfsCluster = new MiniDFSCluster.Builder(hConf).numDataNodes(1).build();
    dfsCluster.waitClusterUp();
    locationFactory = new FileContextLocationFactory(dfsCluster.getFileSystem().getConf());
}

From source file:co.cask.cdap.common.io.LocationsTest.java

License:Apache License

@Test
public void absolutePathTests() throws IOException {
    // Test HDFS:
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://1.2.3.4:8020/");
    LocationFactory locationFactory = new FileContextLocationFactory(conf, TEST_BASE_PATH);
    Location location1 = locationFactory.create(TEST_PATH);
    Location location2 = Locations.getLocationFromAbsolutePath(locationFactory, location1.toURI().getPath());
    Assert.assertEquals(location1.toURI(), location2.toURI());

    // Test file:
    conf = new Configuration();
    conf.set("fs.defaultFS", "file:///");
    locationFactory = new FileContextLocationFactory(conf, TEST_BASE_PATH);
    location1 = locationFactory.create(TEST_PATH);
    location2 = Locations.getLocationFromAbsolutePath(locationFactory, location1.toURI().getPath());
    Assert.assertEquals(location1.toURI(), location2.toURI());

    // Test LocalLocation
    locationFactory = new LocalLocationFactory(new File(TEST_BASE_PATH));
    location1 = locationFactory.create(TEST_PATH);
    location2 = Locations.getLocationFromAbsolutePath(locationFactory, location1.toURI().getPath());
    Assert.assertEquals(location1.toURI(), location2.toURI());
}

From source file:co.cask.cdap.common.logging.LogCollectorTest.java

License:Apache License

@Test
public void testCollectionDFS() throws IOException {

    MiniDFSCluster dfsCluster = null;

    try {
        File dfsPath = tempFolder.newFolder();
        System.setProperty("test.build.data", dfsPath.toString());
        System.setProperty("test.cache.data", dfsPath.toString());

        System.err.println("Starting up Mini HDFS cluster...");
        Configuration hConf = new Configuration();
        CConfiguration conf = CConfiguration.create();
        //conf.setInt("dfs.block.size", 1024*1024);
        dfsCluster = new MiniDFSCluster.Builder(hConf).nameNodePort(0).numDataNodes(1).format(true)
                .manageDataDfsDirs(true).manageNameDfsDirs(true).build();
        dfsCluster.waitClusterUp();
        System.err.println("Mini HDFS is started.");

        // Add HDFS info to conf
        hConf.set("fs.defaultFS", dfsCluster.getFileSystem().getUri().toString());
        // set a root directory for log collection
        conf.set(Constants.CFG_LOG_COLLECTION_ROOT, "/logtemp");

        testCollection(conf, hConf);
    } finally {
        if (dfsCluster != null) {
            System.err.println("Shutting down Mini HDFS cluster...");
            dfsCluster.shutdown();
            System.err.println("Mini HDFS is shut down.");
        }
    }
}