Example usage for org.apache.hadoop.conf Configuration toString


Introduction

This page collects example usages of org.apache.hadoop.conf.Configuration#toString from open source projects.

Prototype

@Override
public String toString()
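
Note that Configuration#toString() does not print property values; it returns a one-line summary of the resources loaded into the configuration, conventionally of the form "Configuration: resource1, resource2, ...". A minimal sketch of that behavior (the outputs shown in comments are indicative and can vary across Hadoop versions):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class ConfigurationToStringDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Typically prints something like:
        //   Configuration: core-default.xml, core-site.xml
        System.out.println(conf.toString());

        // Added resources are appended to the list, e.g.:
        //   Configuration: core-default.xml, core-site.xml, file:/tmp/my-site.xml
        conf.addResource(new Path("file:///tmp/my-site.xml"));
        System.out.println(conf.toString());
    }
}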

Usage

From source file:org.apache.falcon.workflow.util.OozieActionConfigurationHelper.java

License:Apache License

public static Configuration createActionConf() throws IOException {
    Configuration conf = new Configuration();
    Path confPath = new Path("file:///" + System.getProperty("oozie.action.conf.xml"));

    final boolean actionConfExists = confPath.getFileSystem(conf).exists(confPath);
    LOG.info("Oozie Action conf {} found ? {}", confPath, actionConfExists);
    if (actionConfExists) {
        LOG.info("Oozie Action conf found, adding path={}, conf={}", confPath, conf.toString());
        conf.addResource(confPath);
        dumpConf(conf, "oozie action conf ");
    }

    String tokenFile = System.getenv("HADOOP_TOKEN_FILE_LOCATION");
    if (tokenFile != null) {
        if (Shell.WINDOWS) {
            if (tokenFile.charAt(0) == '"') {
                tokenFile = tokenFile.substring(1);
            }
            if (tokenFile.charAt(tokenFile.length() - 1) == '"') {
                tokenFile = tokenFile.substring(0, tokenFile.length() - 1);
            }
        }

        conf.set("mapreduce.job.credentials.binary", tokenFile);
        System.setProperty("mapreduce.job.credentials.binary", tokenFile);
        conf.set("tez.credentials.path", tokenFile);
        System.setProperty("tez.credentials.path", tokenFile);
    }

    conf.set("datanucleus.plugin.pluginRegistryBundleCheck", "LOG");
    conf.setBoolean("hive.exec.mode.local.auto", false);

    return conf;
}

From source file:org.apache.nifi.processors.hadoop.AbstractHadoopProcessor.java

License:Apache License

private static Configuration getConfigurationFromResources(final Configuration config, String configResources)
        throws IOException {
    boolean foundResources = false;
    if (null != configResources) {
        String[] resources = configResources.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(":") + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException(
                "Could not find any of the " + HADOOP_CONFIGURATION_RESOURCES.getName() + " on the classpath");
    }
    return config;
}
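
The fallback above depends on the resource list embedded in Configuration#toString(). A minimal sketch of the parsing step in isolation, assuming the usual "Configuration: resource1, resource2" format (the literal string is illustrative, not taken from a live configuration):

static void demoToStringParsing() {
    // Illustrative only: a typical Configuration#toString() result.
    String configStr = "Configuration: core-default.xml, core-site.xml";
    // Mirrors the substring/split in getConfigurationFromResources above.
    for (String resource : configStr.substring(configStr.indexOf(":") + 1).split(",")) {
        System.out.println("[" + resource.trim() + "]");
    }
    // Prints:
    //   [core-default.xml]
    //   [core-site.xml]
}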

From source file:org.apache.nifi.processors.hadoop.AbstractHadoopProcessor.java

License:Apache License

HdfsResources resetHDFSResources(String configResources, ProcessContext context) throws IOException {
    Configuration config = new ExtendedConfiguration(getLogger());
    config.setClassLoader(Thread.currentThread().getContextClassLoader());

    getConfigurationFromResources(config, configResources);

    // give sub-classes a chance to process configuration
    preProcessConfiguration(config, context);

    // first check for timeout on HDFS connection, because FileSystem has a hard coded 15 minute timeout
    checkHdfsUriForTimeout(config);

    // disable caching of Configuration and FileSystem objects, else we cannot reconfigure the processor without a complete
    // restart
    String disableCacheName = String.format("fs.%s.impl.disable.cache",
            FileSystem.getDefaultUri(config).getScheme());
    config.set(disableCacheName, "true");

    // If kerberos is enabled, create the file system as the kerberos principal
    // -- use RESOURCES_LOCK to guarantee UserGroupInformation is accessed by only a single thread at a time
    FileSystem fs;
    UserGroupInformation ugi;
    synchronized (RESOURCES_LOCK) {
        if (SecurityUtil.isSecurityEnabled(config)) {
            String principal = context.getProperty(kerberosProperties.getKerberosPrincipal())
                    .evaluateAttributeExpressions().getValue();
            String keyTab = context.getProperty(kerberosProperties.getKerberosKeytab())
                    .evaluateAttributeExpressions().getValue();
            ugi = SecurityUtil.loginKerberos(config, principal, keyTab);
            fs = getFileSystemAsUser(config, ugi);
        } else {
            config.set("ipc.client.fallback-to-simple-auth-allowed", "true");
            config.set("hadoop.security.authentication", "simple");
            ugi = SecurityUtil.loginSimple(config);
            fs = getFileSystemAsUser(config, ugi);
        }
    }
    getLogger().debug("resetHDFSResources UGI {}", new Object[] { ugi });

    final Path workingDir = fs.getWorkingDirectory();
    getLogger().info(
            "Initialized a new HDFS File System with working dir: {} default block size: {} default replication: {} config: {}",
            new Object[] { workingDir, fs.getDefaultBlockSize(workingDir), fs.getDefaultReplication(workingDir),
                    config.toString() });

    return new HdfsResources(config, fs, ugi);
}

From source file:org.apache.oozie.servlet.MockCoordinatorEngineService.java

License:Apache License

private static CoordinatorJob createDummyCoordinatorJob(int idx, Configuration conf) {
    CoordinatorJobBean coordJob = new CoordinatorJobBean();
    coordJob.setId(JOB_ID + idx);
    coordJob.setAppName("testApp");
    coordJob.setAppPath("testAppPath");
    coordJob.setStatus(CoordinatorJob.Status.RUNNING);
    coordJob.setCreatedTime(new Date());
    coordJob.setLastModifiedTime(new Date());
    coordJob.setUser(USER);
    coordJob.setGroup(GROUP);
    coordJob.setConf(conf.toString());
    coordJob.setLastActionNumber(0);
    coordJob.setFrequency("1");
    coordJob.setExecutionOrder(Execution.FIFO);
    coordJob.setConcurrency(1);
    try {
        coordJob.setEndTime(DateUtils.parseDateOozieTZ("2009-02-03T23:59Z"));
        coordJob.setStartTime(DateUtils.parseDateOozieTZ("2009-02-01T23:59Z"));
    } catch (Exception e) {
        e.printStackTrace();
    }

    List<CoordinatorActionBean> actions = new ArrayList<CoordinatorActionBean>();
    for (int i = 0; i < idx; i++) {
        actions.add(createDummyAction(i, JOB_ID + idx));
    }

    coordJob.setActions(actions);
    return coordJob;
}

From source file:org.apache.pulsar.io.hdfs.AbstractHdfsConnector.java

License:Apache License

private static Configuration getConfig(final Configuration config, String res) throws IOException {
    boolean foundResources = false;
    if (null != res) {
        String[] resources = res.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(":") + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException("Could not find any of the " + res + " on the classpath");
    }
    return config;
}

From source file:org.apache.ranger.audit.destination.HDFSAuditDestination.java

License:Apache License

Configuration createConfiguration() {
    Configuration conf = new Configuration();
    for (Map.Entry<String, String> entry : configProps.entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        // for ease of install, the config file may contain properties with empty values; skip those
        if (StringUtils.isNotEmpty(value)) {
            conf.set(key, value);
        }
        logger.info("Adding property to HDFS config: " + key + " => " + value);
    }

    logger.info("Returning HDFS Filesystem Config: " + conf.toString());
    return conf;
}

From source file:org.apache.ranger.audit.provider.hdfs.HdfsLogDestination.java

License:Apache License

Configuration createConfiguration() {
    Configuration conf = new Configuration();
    if (configProps != null) {
        for (Map.Entry<String, String> entry : configProps.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            // for ease of install, the config file may contain properties with empty values; skip those
            if (StringUtils.isNotEmpty(value)) {
                conf.set(key, value);
            }
            mLogger.info("Adding property to HDFS config: " + key + " => " + value);
        }
    }

    mLogger.info("Returning HDFS Filesystem Config: " + conf.toString());
    return conf;
}
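
Note that in both Ranger snippets the final log line prints only Configuration#toString(), which lists the loaded resource files rather than the properties just added with set(). A minimal sketch of that behavior (the output shown in the comment is indicative):

static void demoToStringOmitsSetProperties() {
    Configuration conf = new Configuration();
    conf.set("my.key", "my.value");
    // Still prints just the resource list, e.g.:
    //   Configuration: core-default.xml, core-site.xml
    // The property set above does not appear; to dump effective key/value
    // pairs you would iterate the Configuration, as the last example on
    // this page does.
    System.out.println(conf.toString());
}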

From source file:org.apache.rya.accumulo.mr.merge.util.ToolConfigUtils.java

License:Apache License

/**
 * Gets the set of user arguments from the user's config and/or their extra supplied
 * command line arguments.  This weeds out all the automatically generated parameters created
 * from initializing a {@link Configuration} object and should only give back a set of arguments
 * provided directly by the user.
 * @param conf the {@link Configuration} provided.
 * @param args the extra arguments from the command line.
 * @return a {@link Set} of argument strings.
 * @throws IOException
 */
public static Set<String> getUserArguments(final Configuration conf, final String[] args) throws IOException {
    String[] filteredArgs = new String[] {};
    if (Arrays.asList(args).contains("-conf")) {
        // parse args
        new GenericOptionsParser(conf, args);

        final List<String> commandLineArgs = new ArrayList<>();
        for (final String arg : args) {
            if (arg.startsWith("-D")) {
                commandLineArgs.add(arg);
            }
        }
        filteredArgs = commandLineArgs.toArray(new String[0]);
    } else {
        filteredArgs = args;
    }

    // Get the supplied config name from the resource string.
    // There is no easy way to get the name, so pull it off the list of resource
    // names in the Configuration's toString() output, where it should be the last one.
    final String confString = conf.toString();
    final String resourceString = StringUtils.removeStart(confString, "Configuration: ");
    final List<String> resourceNames = Arrays.asList(StringUtils.split(resourceString, ", "));
    final String configFilename = resourceNames.get(resourceNames.size() - 1);

    final Set<String> toolArgsSet = new HashSet<>();
    final File file = new File(configFilename);
    // Check that the last resource name is actually the user's config by seeing if it's a file
    // on the system; the other resources are typically contained in jars, so this check fails
    // for them, which is also what happens when no config is supplied.
    if (file.exists()) {
        XMLConfiguration configuration = null;
        try {
            configuration = new XMLConfiguration(configFilename);
            toolArgsSet.addAll(getConfigArguments(configuration));
        } catch (final ConfigurationException e) {
            log.error("Unable to load configuration file.", e);
        }
    }

    toolArgsSet.addAll(Arrays.asList(filteredArgs));
    return Collections.unmodifiableSet(toolArgsSet);
}
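
A minimal sketch of the resource-name extraction this method relies on, assuming resources appear in load order with the user-supplied file last (the sample string is illustrative, and plain-JDK string handling stands in for the Commons StringUtils calls above):

static void demoLastResourceExtraction() {
    // Illustrative only: toString() output after a user config is added via -conf.
    String confString = "Configuration: core-default.xml, core-site.xml, /home/user/my-tool.xml";
    String resourceString = confString.replaceFirst("^Configuration: ", "");
    String[] resourceNames = resourceString.split(", ");
    // The last entry is assumed to be the user-supplied config file.
    String configFilename = resourceNames[resourceNames.length - 1];
    System.out.println(configFilename); // /home/user/my-tool.xml
}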

From source file:org.jacky.hadoop.examples.WordCount.java

License:Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    System.out.println(conf.toString());

    //conf.setQuietMode(false);
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: wordcount <in> <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "word count");
    job.setJarByClass(WordCount.class);
    job.setMapperClass(TokenizerMapper.class);
    //job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:test.SomeMainClass.java

License:Apache License

private static String dumpConfiguration(Configuration configuration) {
    StringBuilder sb = new StringBuilder("Config@" + configuration.hashCode());
    sb.append("\n");
    sb.append(configuration.toString());
    sb.append("\n");

    // configuration was already dereferenced above, so a null check here would be redundant
    Properties props = new Properties();
    for (Map.Entry<String, String> entry : configuration) {
        props.setProperty(entry.getKey(), entry.getValue());
    }

    return sb.append(props.toString()).toString();
}