Example usage for org.apache.hadoop.conf Configuration addResource

List of usage examples for org.apache.hadoop.conf Configuration addResource

Introduction

On this page you can find example usage for org.apache.hadoop.conf Configuration addResource.

Prototype

public void addResource(Configuration conf) 

Document

Add a configuration resource.
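
Below is a minimal, self-contained sketch of the overload named in the prototype (the file path and property key are illustrative). Configuration also offers addResource overloads taking a Path, URL, String, or InputStream, which the examples that follow use; in all cases, properties of a later resource override earlier ones unless they were marked final.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class AddResourceSketch {
    public static void main(String[] args) {
        // Start from an empty Configuration (no default resources loaded).
        Configuration overrides = new Configuration(false);
        // Path is illustrative; point it at any Hadoop-style *-site.xml.
        overrides.addResource(new Path("/etc/hadoop/conf/core-site.xml"));

        // addResource(Configuration) folds another Configuration in as a
        // resource, overriding previously added resources.
        Configuration conf = new Configuration();
        conf.addResource(overrides);

        System.out.println("fs.defaultFS = " + conf.get("fs.defaultFS"));
    }
}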

Usage

From source file:com.taobao.adfs.util.Utilities.java

License:Apache License

public static Configuration loadConfiguration(String prefix) {
    if (System.getProperty("conf.prefix") != null)
        prefix = System.getProperty("conf.prefix");
    if (prefix == null)
        prefix = "";
    Configuration conf = new Configuration(false);
    System.setProperty("conf", System.getProperty("conf", "conf") + ":" + getLibPath().getParent() + "/conf");
    List<File> confFiles = getConfFile(null, prefix + "-default.xml");
    confFiles.addAll(getConfFile(null, prefix + "-site.xml"));
    for (File confFile : confFiles) {
        conf.addResource(new Path(confFile.getPath()));
        logDebug(logger, "add conf file=", confFile);
    }
    if (confFiles.isEmpty())
        logDebug(logger, "no conf file is added");

    return loadConfiguration(conf);
}
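
A minimal caller sketch for the helper above; the "adfs" prefix is purely illustrative, and the loadConfiguration(Configuration) overload it delegates to is not shown on this page.

import org.apache.hadoop.conf.Configuration;
import com.taobao.adfs.util.Utilities;

public class UtilitiesUsage {
    public static void main(String[] args) {
        // Resolves <prefix>-default.xml and <prefix>-site.xml from the
        // directories named by the "conf" system property and adds each
        // file found as a configuration resource.
        Configuration conf = Utilities.loadConfiguration("adfs");
        System.out.println("loaded " + conf.size() + " properties");
    }
}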

From source file:com.taobao.datax.plugins.common.DFSUtils.java

License:Open Source License

/**
 * Get {@link Configuration}.
 * 
 * @param dir
 *            directory path in hdfs
 * 
 * @param ugi
 *            hadoop ugi
 * 
 * @param conf
 *            hadoop-site.xml path
 * 
 * @return {@link Configuration}
 * 
 * @throws java.io.IOException*/

public static Configuration getConf(String dir, String ugi, String conf) throws IOException {

    URI uri = null;
    Configuration cfg = null;
    String scheme = null;
    try {
        uri = new URI(dir);
        scheme = uri.getScheme();
        if (null == scheme) {
            throw new IOException("HDFS Path missing scheme, check path begin with hdfs://ip:port/ .");
        }

        cfg = confs.get(scheme);
    } catch (URISyntaxException e) {
        throw new IOException(e.getMessage(), e.getCause());
    }

    if (cfg == null) {
        cfg = new Configuration();

        cfg.setClassLoader(DFSUtils.class.getClassLoader());

        List<String> configs = new ArrayList<String>();
        if (!StringUtils.isBlank(conf) && new File(conf).exists()) {
            configs.add(conf);
        } else {
            /*
             * For taobao internal use e.g. if bazhen.csy start a new datax
             * job, datax will use /home/bazhen.csy/config/hadoop-site.xml
             * as configuration xml
             */
            String confDir = System.getenv("HADOOP_CONF_DIR");

            if (null == confDir) {
                //for taobao internal use, it is ugly
                configs.add(System.getProperty("user.home") + "/config/hadoop-site.xml");
            } else {
                //run in hadoop-0.19
                if (new File(confDir + "/hadoop-site.xml").exists()) {
                    configs.add(confDir + "/hadoop-site.xml");
                } else {
                    configs.add(confDir + "/core-default.xml");
                    configs.add(confDir + "/core-site.xml");
                }
            }
        }

        for (String config : configs) {
            log.info(String.format("HdfsReader use %s for hadoop configuration .", config));
            cfg.addResource(new Path(config));
        }

        /* commented by bazhen.csy */
        // log.info("HdfsReader use default ugi " +
        // cfg.get(ParamsKey.HdfsReader.ugi));

        if (uri.getScheme() != null) {
            String fsname = String.format("%s://%s:%s", uri.getScheme(), uri.getHost(), uri.getPort());
            log.info("fs.default.name=" + fsname);
            cfg.set("fs.default.name", fsname);
        }
        if (ugi != null) {
            cfg.set("hadoop.job.ugi", ugi);

            /*
             * commented by bazhen.csy log.info("use specification ugi:" +
             * cfg.get(ParamsKey.HdfsReader.ugi));
             */
        }
        confs.put(scheme, cfg);
    }

    return cfg;
}
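
A hedged usage sketch for getConf; the host, port and directory are illustrative, and passing null for ugi and conf falls back to HADOOP_CONF_DIR (or ~/config/hadoop-site.xml) as shown above.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import com.taobao.datax.plugins.common.DFSUtils;

public class DFSUtilsUsage {
    public static void main(String[] args) throws IOException {
        Configuration cfg = DFSUtils.getConf("hdfs://namenode:9000/user/datax", null, null);
        // fs.default.name was derived from the scheme, host and port of the URI.
        System.out.println("fs.default.name = " + cfg.get("fs.default.name"));
    }
}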

From source file:com.thinkbiganalytics.datalake.authorization.hdfs.HDFSUtil.java

License:Apache License

/**
 * @param configResources : Hadoop configuration resource
 */
public static Configuration getConfigurationFromResources(String configResources) throws IOException {
    boolean foundResources = false;
    final Configuration config = new Configuration();
    if (null != configResources) {
        String[] resources = configResources.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(':') + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException("Could not find any of the " + "hadoop conf" + " on the classpath");
    }
    return config;
}
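
A usage sketch for getConfigurationFromResources, assuming the HDFSUtil class above is on the classpath; the file paths are illustrative.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import com.thinkbiganalytics.datalake.authorization.hdfs.HDFSUtil;

public class HDFSUtilUsage {
    public static void main(String[] args) throws IOException {
        // A comma-separated list of site files, each added as a resource.
        Configuration conf = HDFSUtil.getConfigurationFromResources(
                "/etc/hadoop/conf/core-site.xml,/etc/hadoop/conf/hdfs-site.xml");
        System.out.println("fs.defaultFS = " + conf.get("fs.defaultFS"));
    }
}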

From source file:com.thinkbiganalytics.kerberos.KerberosTicketGenerator.java

License:Apache License

public UserGroupInformation generateKerberosTicket(KerberosTicketConfiguration kerberosTicketConfiguration)
        throws IOException {
    Configuration config = new Configuration();

    String[] resources = kerberosTicketConfiguration.getHadoopConfigurationResources().split(",");
    for (String resource : resources) {
        config.addResource(new Path(resource));
    }

    config.set("hadoop.security.authentication", "Kerberos");

    UserGroupInformation.setConfiguration(config);

    log.debug("Generating Kerberos ticket for principal: " + kerberosTicketConfiguration.getKerberosPrincipal()
            + " at key tab location: " + kerberosTicketConfiguration.getKeytabLocation());
    return UserGroupInformation.loginUserFromKeytabAndReturnUGI(
            kerberosTicketConfiguration.getKerberosPrincipal(),
            kerberosTicketConfiguration.getKeytabLocation());
}
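
A caller sketch for generateKerberosTicket. The setters on KerberosTicketConfiguration are assumed to mirror the getters used above and are hypothetical here, as are the principal, keytab and file paths.

import java.io.IOException;
import org.apache.hadoop.security.UserGroupInformation;
import com.thinkbiganalytics.kerberos.KerberosTicketConfiguration;
import com.thinkbiganalytics.kerberos.KerberosTicketGenerator;

public class KerberosTicketUsage {
    public static void main(String[] args) throws IOException {
        KerberosTicketConfiguration ktc = new KerberosTicketConfiguration();
        // Hypothetical setters mirroring the getters used in the example above.
        ktc.setHadoopConfigurationResources("/etc/hadoop/conf/core-site.xml,/etc/hadoop/conf/hdfs-site.xml");
        ktc.setKerberosPrincipal("nifi@EXAMPLE.COM");
        ktc.setKeytabLocation("/etc/security/keytabs/nifi.keytab");

        UserGroupInformation ugi = new KerberosTicketGenerator().generateKerberosTicket(ktc);
        System.out.println("logged in as " + ugi.getUserName());
    }
}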

From source file:com.thinkbiganalytics.kerberos.TestKerberosKinit.java

License:Apache License

private static Configuration createConfigurationFromList(String configurationFiles) {
    Configuration config = new Configuration();
    String[] resources = configurationFiles.split(",");
    for (String resource : resources) {
        config.addResource(new Path(resource));
    }
    return config;
}

From source file:com.thinkbiganalytics.nifi.security.ApplySecurityPolicy.java

License:Apache License

public static Configuration getConfigurationFromResources(String configResources) throws IOException {
    boolean foundResources = false;
    final Configuration config = new Configuration();
    if (null != configResources) {
        String[] resources = configResources.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(":") + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException("Could not find any of the " + "hadoop conf" + " on the classpath");
    }
    return config;
}

From source file:com.thinkbiganalytics.nifi.v2.hdfs.AbstractHadoopProcessor.java

License:Apache License

private static Configuration getConfigurationFromResources(String configResources) throws IOException {
    boolean foundResources = false;
    final Configuration config = new Configuration();
    if (null != configResources) {
        String[] resources = configResources.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(":") + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException(
                "Could not find any of the " + HADOOP_CONFIGURATION_RESOURCES.getName() + " on the classpath");
    }
    return config;
}

From source file:com.toddbodnar.simpleHive.helpers.GetConfiguration.java

public static Configuration get() {
    Configuration c = new Configuration();
    for (String file : settings.config_files_xml) {
        c.addResource(new Path(file));
    }
    return c;
}

From source file:com.trovit.hdfstree.fsinspectors.HDFSInspector.java

License:Apache License

private Configuration getHadoopConf() throws Exception {
    Configuration conf = new Configuration();
    String hadoop_home = System.getenv("HADOOP_HOME");
    if (hadoop_home == null) {
        throw new Exception("HADOOP_HOME is not defined in the system.");
    }
    conf.addResource(new Path(hadoop_home + "/conf/hdfs-site.xml"));
    conf.addResource(new Path(hadoop_home + "/conf/mapred-site.xml"));
    conf.addResource(new Path(hadoop_home + "/conf/core-site.xml"));
    return conf;
}
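
The $HADOOP_HOME/conf layout above matches older Hadoop 1.x installs. A sketch of the same idea driven by HADOOP_CONF_DIR, which on Hadoop 2+ typically points at $HADOOP_HOME/etc/hadoop; the file set is an assumption and can be adjusted per cluster.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class ConfDirInspector {
    public static Configuration getHadoopConf() {
        String confDir = System.getenv("HADOOP_CONF_DIR");
        if (confDir == null) {
            throw new IllegalStateException("HADOOP_CONF_DIR is not defined in the system.");
        }
        Configuration conf = new Configuration();
        // HADOOP_CONF_DIR already names the directory holding the site files.
        conf.addResource(new Path(confDir, "core-site.xml"));
        conf.addResource(new Path(confDir, "hdfs-site.xml"));
        conf.addResource(new Path(confDir, "mapred-site.xml"));
        return conf;
    }
}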

From source file:com.tugo.dt.ApplicationTest.java

@Test
public void testApplication1() throws IOException, Exception {
    try {
        LocalMode lma = LocalMode.newInstance();
        Configuration conf = new Configuration(false);
        conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml"));
        lma.prepareDAG(new TestApp(), conf);
        LocalMode.Controller lc = lma.getController();
        lc.run(10000); // runs for 10 seconds and quits
    } catch (ConstraintViolationException e) {
        Assert.fail("constraint violations: " + e.getConstraintViolations());
    }
}
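
The test above uses the InputStream overload of addResource. A standalone sketch follows (the classpath resource name is illustrative); Configuration parses its resources lazily, so the stream should remain open until the first property is read.

import java.io.InputStream;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;

public class StreamResourceSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // Illustrative classpath resource; any Hadoop-style XML file works.
        InputStream in = StreamResourceSketch.class.getResourceAsStream("/META-INF/properties.xml");
        conf.addResource(in);
        // Iterating forces the resource to be parsed while the stream is still open.
        for (Map.Entry<String, String> e : conf) {
            System.out.println(e.getKey() + "=" + e.getValue());
        }
    }
}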