Example usage for org.apache.hadoop.conf Configuration getStrings

Introduction

On this page you can find example usage of the org.apache.hadoop.conf.Configuration method getStrings.

Prototype

public String[] getStrings(String name, String... defaultValue) 

Document

Get the comma-delimited values of the name property as an array of Strings. If no such property is specified, the default value is returned.
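
As a quick orientation before the real-world usages below, here is a minimal, self-contained sketch of the call. The property names (my.service.admins, my.service.missing) are hypothetical and used only for illustration:

import org.apache.hadoop.conf.Configuration;

public class GetStringsExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Hypothetical property, set here only for illustration.
        conf.set("my.service.admins", "alice, bob ,carol");

        // The stored value is split on commas. getStrings does not trim the
        // individual elements (Configuration.getTrimmedStrings does), which
        // is why several of the usages below call trim() on each entry.
        String[] admins = conf.getStrings("my.service.admins", "nobody");
        for (String admin : admins) {
            System.out.println("[" + admin.trim() + "]");
        }

        // When the property is unset, the varargs default is returned as-is.
        String[] fallback = conf.getStrings("my.service.missing", "a", "b");
        System.out.println(fallback.length); // prints 2
    }
}

Because the default is a varargs parameter, the empty-array defaults seen in the Sentry examples below (new String[] {}) make an unset property come back as an empty array rather than null.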

Usage

From source file:org.apache.sentry.provider.db.generic.service.persistent.DelegateSentryStore.java

License:Apache License

public DelegateSentryStore(Configuration conf) throws Exception {
    this.privilegeOperator = new PrivilegeOperatePersistence(conf);
    this.conf = conf;
    //delegated old sentryStore
    this.delegate = new SentryStore(conf);
    adminGroups = ImmutableSet
            .copyOf(toTrimmed(Sets.newHashSet(conf.getStrings(ServerConfig.ADMIN_GROUPS, new String[] {}))));
}

From source file:org.apache.sentry.provider.db.service.thrift.SentryPolicyStoreProcessor.java

License:Apache License

SentryPolicyStoreProcessor(String name, Configuration conf, SentryStore store) throws Exception {
    super();
    this.name = name;
    this.conf = conf;
    this.sentryStore = store;
    this.notificationHandlerInvoker = new NotificationHandlerInvoker(conf, createHandlers(conf));
    adminGroups = ImmutableSet.copyOf(
            toTrimedLower(Sets.newHashSet(conf.getStrings(ServerConfig.ADMIN_GROUPS, new String[] {}))));
    Iterable<String> pluginClasses = ConfUtilties.CLASS_SPLITTER.split(
            conf.get(ServerConfig.SENTRY_POLICY_STORE_PLUGINS, ServerConfig.SENTRY_POLICY_STORE_PLUGINS_DEFAULT)
                    .trim());
    for (String pluginClassStr : pluginClasses) {
        Class<?> clazz = conf.getClassByName(pluginClassStr);
        if (!SentryPolicyStorePlugin.class.isAssignableFrom(clazz)) {
            throw new IllegalArgumentException("Sentry Plugin [" + pluginClassStr + "] is not a "
                    + SentryPolicyStorePlugin.class.getName());
        }
        SentryPolicyStorePlugin plugin = (SentryPolicyStorePlugin) clazz.newInstance();
        plugin.initialize(conf, sentryStore);
        sentryPlugins.add(plugin);
    }
    initMetrics();
}

From source file:org.apache.sqoop.ConnFactory.java

License:Apache License

/**
 * Create the ManagerFactory instances that should populate
 * the factories list.
 */
private void instantiateFactories(Configuration conf) {
    loadManagersFromConfDir(conf);
    String[] classNameArray = conf.getStrings(FACTORY_CLASS_NAMES_KEY, DEFAULT_FACTORY_CLASS_NAMES);

    for (String className : classNameArray) {
        try {
            className = className.trim(); // Ignore leading/trailing whitespace.
            ManagerFactory factory = ReflectionUtils
                    .newInstance((Class<? extends ManagerFactory>) conf.getClassByName(className), conf);
            LOG.debug("Loaded manager factory: " + className);
            factories.add(factory);
        } catch (ClassNotFoundException cnfe) {
            LOG.error("Could not load ManagerFactory " + className + " (not found)");
        }
    }
}

From source file:org.apache.tez.common.TezYARNUtils.java

License:Apache License

public static String getFrameworkClasspath(Configuration conf, boolean usingArchive) {
    StringBuilder classpathBuilder = new StringBuilder();

    // Add any additional user-specified classpath
    String additionalClasspath = conf.get(TezConfiguration.TEZ_CLUSTER_ADDITIONAL_CLASSPATH_PREFIX);
    if (additionalClasspath != null && !additionalClasspath.trim().isEmpty()) {
        classpathBuilder.append(additionalClasspath).append(File.pathSeparator);
    }

    // Add PWD:PWD/*
    classpathBuilder.append(Environment.PWD.$()).append(File.pathSeparator)
            .append(Environment.PWD.$() + File.separator + "*").append(File.pathSeparator);

    // Next add the tez libs, if specified via an archive.
    if (usingArchive) {
        // Add PWD/tezlib/*
        classpathBuilder.append(Environment.PWD.$()).append(File.separator).append(TezConstants.TEZ_TAR_LR_NAME)
                .append(File.separator).append("*").append(File.pathSeparator);

        // Add PWD/tezlib/lib/*
        classpathBuilder.append(Environment.PWD.$()).append(File.separator).append(TezConstants.TEZ_TAR_LR_NAME)
                .append(File.separator).append("lib").append(File.separator).append("*")
                .append(File.pathSeparator);
    }

    // Last add HADOOP_CLASSPATH, if it's required.
    if (conf.getBoolean(TezConfiguration.TEZ_USE_CLUSTER_HADOOP_LIBS,
            TezConfiguration.TEZ_USE_CLUSTER_HADOOP_LIBS_DEFAULT)) {
        for (String c : conf.getStrings(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
                YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
            classpathBuilder.append(c.trim()).append(File.pathSeparator);
        }
    } else {
        // Setup HADOOP_CONF_DIR after PWD and tez-libs, if it's required.
        classpathBuilder.append(Environment.HADOOP_CONF_DIR.$()).append(File.pathSeparator);
    }

    String classpath = classpathBuilder.toString();
    return StringInterner.weakIntern(classpath);
}

From source file:org.apache.tez.http.SSLFactory.java

License:Apache License

/**
 * Creates an SSLFactory.
 *
 * @param mode SSLFactory mode, client or server.
 * @param conf Hadoop configuration from where the SSLFactory configuration
 *             will be read.
 */
public SSLFactory(Mode mode, Configuration conf) {
    this.conf = conf;
    if (mode == null) {
        throw new IllegalArgumentException("mode cannot be NULL");
    }
    this.mode = mode;
    requireClientCert = conf.getBoolean(SSL_REQUIRE_CLIENT_CERT_KEY, DEFAULT_SSL_REQUIRE_CLIENT_CERT);
    Configuration sslConf = readSSLConfiguration(mode);

    Class<? extends KeyStoresFactory> klass = conf.getClass(KEYSTORES_FACTORY_CLASS_KEY,
            FileBasedKeyStoresFactory.class, KeyStoresFactory.class);
    keystoresFactory = ReflectionUtils.newInstance(klass, sslConf);

    enabledProtocols = conf.getStrings(SSL_ENABLED_PROTOCOLS, DEFAULT_SSL_ENABLED_PROTOCOLS);
}

From source file:org.elasticsearch.hadoop.yarn.util.YarnUtils.java

License:Apache License

public static Map<String, String> setupEnv(Configuration cfg) {
    Map<String, String> env = new LinkedHashMap<String, String>(); // starts empty rather than copying System.getenv()
    // add Hadoop Classpath
    for (String c : cfg.getStrings(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            YarnCompat.DEFAULT_PLATFORM_APPLICATION_CLASSPATH())) {
        addToEnv(env, Environment.CLASSPATH.name(), c.trim());
    }
    // add es-hadoop jar / current folder jars
    addToEnv(env, Environment.CLASSPATH.name(), "./*");

    //
    // some es-yarn constants
    //
    addToEnv(env, EsYarnConstants.FS_URI, cfg.get(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS));

    return env;
}

From source file:org.hdl.caffe.yarn.app.CaffeContainer.java

License:Apache License

public Map<String, String> setJavaEnv(Configuration conf, String caffeProcessorJar) {
    // Set the java environment
    Map<String, String> env = new HashMap<String, String>();

    // Start from the inherited CLASSPATH plus the jars in the current folder
    StringBuilder classPathEnv = new StringBuilder(ApplicationConstants.Environment.CLASSPATH.$$())
            .append(ApplicationConstants.CLASS_PATH_SEPARATOR).append("./*");

    // Add hadoop's jar location to classpath
    for (String c : conf.getStrings(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH)) {
        classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR);
        classPathEnv.append(c.trim());
    }
    classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR).append("./log4j.properties");

    // add the runtime classpath needed for tests to work
    if (conf.getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
        classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR);
        classPathEnv.append(System.getProperty("java.class.path"));
    }

    if (caffeProcessorJar != null) {
        classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR);
        classPathEnv.append(caffeProcessorJar);
    }
    env.put("CLASSPATH", classPathEnv.toString());
    return env;
}

From source file:org.hdl.tensorflow.yarn.util.Utils.java

License:Apache License

public static Map<String, String> setJavaEnv(Configuration conf) {
    Map<String, String> env = new HashMap<>();

    StringBuilder classPathEnv = new StringBuilder(ApplicationConstants.Environment.CLASSPATH.$$())
            .append(ApplicationConstants.CLASS_PATH_SEPARATOR).append("./*");

    for (String c : conf.getStrings(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH)) {
        classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR);
        classPathEnv.append(c.trim());
    }

    if (conf.getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
        classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR);
        classPathEnv.append(System.getProperty("java.class.path"));
    }

    env.put("CLASSPATH", classPathEnv.toString());
    return env;
}

From source file:org.schedoscope.export.ftp.FtpExportJsonMapper.java

License:Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {

    super.setup(context);
    Configuration conf = context.getConfiguration();
    hcatSchema = HCatInputFormat.getTableSchema(conf);

    tableName = conf.get(FtpUploadOutputFormat.FTP_EXPORT_TABLE_NAME);

    Set<String> anonFields = ImmutableSet
            .copyOf(conf.getStrings(BaseExportJob.EXPORT_ANON_FIELDS, new String[0]));

    String salt = conf.get(BaseExportJob.EXPORT_ANON_SALT, "");

    HCatRecordJsonSerializer serializer = new HCatRecordJsonSerializer(conf, hcatSchema);

    converter = new HCatToAvroRecordConverter(serializer, anonFields, salt);

    HCatToAvroSchemaConverter schemaConverter = new HCatToAvroSchemaConverter(anonFields);
    avroSchema = schemaConverter.convertSchema(hcatSchema, tableName);
}

From source file:org.schedoscope.export.kafka.KafkaExportMapper.java

License:Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {

    super.setup(context);
    Configuration conf = context.getConfiguration();
    hcatSchema = HCatInputFormat.getTableSchema(conf);

    keyName = conf.get(KafkaOutputFormat.KAFKA_EXPORT_KEY_NAME);
    tableName = conf.get(KafkaOutputFormat.KAFKA_EXPORT_TABLE_NAME);

    HCatUtils.checkKeyType(hcatSchema, keyName);

    Set<String> anonFields = ImmutableSet
            .copyOf(conf.getStrings(BaseExportJob.EXPORT_ANON_FIELDS, new String[0]));
    String salt = conf.get(BaseExportJob.EXPORT_ANON_SALT, "");
    HCatRecordJsonSerializer serializer = new HCatRecordJsonSerializer(conf, hcatSchema);
    converter = new HCatToAvroRecordConverter(serializer, anonFields, salt);

    HCatToAvroSchemaConverter schemaConverter = new HCatToAvroSchemaConverter(anonFields);
    avroSchema = schemaConverter.convertSchema(hcatSchema, tableName);
}