Example usage for org.apache.hadoop.conf Configuration addResource

List of usage examples for org.apache.hadoop.conf Configuration addResource

Introduction

On this page you can find example usage for org.apache.hadoop.conf Configuration addResource.

Prototype

public void addResource(Configuration conf) 

Source Link

Document

Add a configuration resource.

Usage

From source file:crunch.MaxTemperature.java

License: Apache License

/**
 * Demonstrates loading multiple configuration resources: later resources
 * override earlier ones except for properties marked final, and ${...}
 * references are expanded, including from JVM system properties.
 * (The "vv"/"^^" comment pairs are book-excerpt markers — keep them.)
 */
@Test
    public void get() throws IOException {
        // Single test as an expedient for inclusion in the book

        // vv MultipleResourceConfigurationTest
        Configuration conf = new Configuration();
        conf.addResource("configuration-1.xml");
        conf.addResource("configuration-2.xml");
        // ^^ MultipleResourceConfigurationTest

        // "color" is defined in only one resource, so no override is involved
        assertThat(conf.get("color"), is("yellow"));

        // override: the later resource (configuration-2.xml) wins for "size"
        // vv MultipleResourceConfigurationTest-Override
        assertThat(conf.getInt("size", 0), is(12));
        // ^^ MultipleResourceConfigurationTest-Override

        // final properties cannot be overridden by later resources
        // vv MultipleResourceConfigurationTest-Final
        assertThat(conf.get("weight"), is("heavy"));
        // ^^ MultipleResourceConfigurationTest-Final

        // variable expansion — presumably "size-weight" is declared as
        // "${size},${weight}" in the resource; TODO confirm against the XML
        // vv MultipleResourceConfigurationTest-Expansion
        assertThat(conf.get("size-weight"), is("12,heavy"));
        // ^^ MultipleResourceConfigurationTest-Expansion

        // variable expansion with system properties: a system property named
        // "size" takes precedence when expanding ${size}
        // vv MultipleResourceConfigurationTest-SystemExpansion
        System.setProperty("size", "14");
        assertThat(conf.get("size-weight"), is("14,heavy"));
        // ^^ MultipleResourceConfigurationTest-SystemExpansion

        // system properties are not picked up as configuration properties
        // themselves — they only participate in ${...} expansion
        // vv MultipleResourceConfigurationTest-NoSystemByDefault
        System.setProperty("length", "2");
        assertThat(conf.get("length"), is((String) null));
        // ^^ MultipleResourceConfigurationTest-NoSystemByDefault

    }

From source file:crunch.MaxTemperature.java

License:Apache License

/**
 * Demonstrates loading a single configuration resource and reading values
 * with typed accessors and defaults.
 */
@Test
    public void get() throws IOException {
        // vv SingleResourceConfigurationTest
        Configuration conf = new Configuration();
        conf.addResource("configuration-1.xml");
        assertThat(conf.get("color"), is("yellow"));
        assertThat(conf.getInt("size", 0), is(10));
        // "breadth" is not defined, so the supplied default is returned
        assertThat(conf.get("breadth", "wide"), is("wide"));
        // ^^ SingleResourceConfigurationTest
    }

From source file:cz.muni.fi.xfabian7.bp.mgrid.dindex.DIndex.java

/**
 * Opens the Hadoop {@code FileSystem} described by the core-site
 * configuration file.
 *
 * @return the file system, or {@code null} when it cannot be opened
 *         (the failure is logged at SEVERE)
 */
public FileSystem getFileSystem() {
    try {
        final Configuration hadoopConf = new Configuration();
        hadoopConf.addResource(new Path(HADOOP_CORESITE_FILE_PATH));
        return FileSystem.get(hadoopConf);
    } catch (IOException ex) {
        Logger.getLogger(DIndex.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    }
}

From source file:cz.muni.fi.xfabian7.bp.mgrid.HdfsStorageBucket.java

/**
 * This method return the FileSystem/*w w w.  j av  a 2  s  . c o m*/
 *
 * @return FileSystem
 * @throws IOException
 */
public FileSystem getFileSystem() throws IOException {
    Configuration conf = new Configuration();
    conf.addResource(new Path(HADOOP_CORESITE_FILE_PATH));
    return FileSystem.get(conf);
}

From source file:de.bitocean.mm.MMAppRunner.java

public static void main(String[] args) throws Exception {

    Configuration cfg = new Configuration();

    File cfgFile = EtoshaContextLogger.getCFGFile();

    System.out.println(">>> CFG:   " + cfgFile.getAbsolutePath());
    System.out.println(">>> exists : " + cfgFile.exists());

    /**/*from w ww  .java  2s  .c o  m*/
     * according to:
     *
     * http://stackoverflow.com/questions/11478036/hadoop-configuration-property-returns-null
     *
     * we add the resource as a URI
     */
    cfg.addResource(cfgFile.getAbsoluteFile().toURI().toURL());
    cfg.reloadConfiguration();
    System.out.println(cfg);

    System.out.println(cfg.getRaw("smw.url"));
    System.out.println(cfg.get("smw.pw"));
    System.out.println(cfg.get("smw.user")); // for SMW account
    System.out.println(cfg.get("smw.env"));

    SemanticContextBridge.overWriteEnvForLocaltest = false;

    int exitCode = ToolRunner.run(cfg, new MMAppRunner(), args);

}

From source file:dz.lab.hdfs.LoadConfiguration.java

/**
 * Shows how the value of {@code PROP_NAME} evolves: unset in a fresh
 * Configuration, populated once core-site.xml is added as a resource,
 * and finally replaced by an explicit {@code set}.
 *
 * @param args unused
 */
public static void main(String[] args) {
    final Configuration conf = new Configuration();

    // Nothing has been loaded yet, so the property is absent
    System.out.println("After construction: " + conf.get(PROP_NAME));

    // Pull in the properties defined in the cluster's core-site.xml
    conf.addResource(new Path(Vars.HADOOP_HOME + "/conf/core-site.xml"));
    System.out.println("After addResource: " + conf.get(PROP_NAME));

    // A programmatic set takes precedence over loaded resources
    conf.set(PROP_NAME, "hdfs://192.168.2.72:8020/");
    System.out.println("After set: " + conf.get(PROP_NAME));
}

From source file:ece465.WordCount.java

License:Apache License

/**
 * Word-count driver: loads the Hadoop site configuration files from the
 * directory named by the {@code HADOOP_CONF_DIR} environment variable,
 * then configures and submits the MapReduce job.
 *
 * @param args expected: input path, output path (after generic options)
 * @throws Exception if job setup or submission fails
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Map<String, String> env = System.getenv();
    String confDir = env.get("HADOOP_CONF_DIR");
    // FIX: fail fast with a clear message instead of silently building
    // "null/core-site.xml" paths when the variable is unset.
    if (confDir == null || confDir.isEmpty()) {
        System.err.println("ERROR: HADOOP_CONF_DIR environment variable is not set");
        System.exit(2);
    }
    // Load the standard site files from the conf directory
    String[] siteFiles = { "core-site.xml", "hdfs-site.xml", "yarn-site.xml", "mapred-site.xml" };
    for (String siteFile : siteFiles) {
        conf.addResource(new Path(confDir + "/" + siteFile));
    }
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: wordcount <in> <out>");
        System.exit(2);
    }
    // Job.getInstance replaces the deprecated Job(Configuration, String) ctor
    Job job = Job.getInstance(conf, "word count");
    job.setJarByClass(WordCount.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    Path inputPath = new Path(otherArgs[0]);
    System.out.println(inputPath);
    Path outputPath = new Path(otherArgs[1]);
    System.out.println(outputPath);
    FileInputFormat.addInputPath(job, inputPath);
    FileOutputFormat.setOutputPath(job, outputPath);
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:edu.berkeley.confspell.HSlurper.java

/**
 * Reads the given Hadoop configuration XML file and copies its properties
 * into the supplied option set.
 *
 * @param f   configuration file on the local filesystem
 * @param res option set populated via {@code fromHConf}
 */
public void slurp(File f, OptionSet res) {
    // 'false' skips Hadoop's default resources — only this file's entries
    // are wanted. Wrapping the absolute path in a Path makes Hadoop read
    // from the filesystem rather than the classpath.
    final Configuration hconf = new Configuration(false);
    hconf.addResource(new Path(f.getAbsolutePath()));
    hconf.reloadConfiguration();
    fromHConf(res, hconf);
}

From source file:edu.brown.cs.mapreduce.AbstractHadoopClient.java

License:Open Source License

/**
 * Validates the required Hadoop environment variables, caches their values,
 * and builds a Configuration from the expected config files found under the
 * Hadoop conf directory. Exits the JVM with status 1 when a variable or a
 * config file is missing.
 *
 * @return the populated Configuration
 */
public static Configuration getConfiguration() {
    final String requiredVars[] = { AbstractHadoopClient.ENV_HADOOP_HOME, AbstractHadoopClient.ENV_HADOOP_CONF };

    // Every required environment variable must be present and non-empty
    for (final String envVar : requiredVars) {
        final String value = System.getenv(envVar);
        if (value == null || value.length() == 0) {
            System.err.println("ERROR: The environment variable '" + envVar + "' is not defined!");
            System.exit(1);
        }
        AbstractHadoopClient.conf.put(envVar, value);
    } // FOR
      //
      // Load in the configuration files that we need
      //
    final Configuration hadoopConf = new Configuration();
    final String confDir = AbstractHadoopClient.conf.get(AbstractHadoopClient.ENV_HADOOP_CONF);
    for (final String configFile : AbstractHadoopClient.CONFIG_FILES) {
        final File file = new File(confDir + "/" + configFile);
        if (!file.exists()) {
            System.err.println("ERROR: The expected config file '" + file.getPath() + "' does not exist!");
            System.exit(1);
        }
        hadoopConf.addResource(new Path(file.getAbsolutePath()));
    }
    return (hadoopConf);
}

From source file:edu.brown.cs.mapreduce.generator.GenerateData.java

License:Open Source License

/**
 * Entry point for the benchmark data generator.
 *
 * Expected arguments:
 *   args[0] - target directory for generated files
 *   args[1] - bytes wanted per file
 *   args[2] - total bytes wanted
 *
 * NOTE(review): the actual generator loop below is commented out, so as it
 * stands this program only loads the Hadoop configuration, connects to the
 * file system, and prints fs.default.name.
 *
 * @param args see above
 */
public static void main(String[] args) {
    // FIX: fail fast with a usage message instead of an
    // ArrayIndexOutOfBoundsException when too few arguments are given.
    if (args.length < 3) {
        System.err.println("Usage: GenerateData <directory> <bytes-per-file> <total-bytes>");
        System.exit(-1);
    }
    String directory = args[0];
    long bytes_file_wanted = Long.valueOf(args[1]);
    long bytes_total_wanted = Long.valueOf(args[2]);

    // Load the legacy Hadoop config files from HADOOP_HOME/conf
    Configuration conf = new Configuration();
    conf.addResource(new Path(System.getenv("HADOOP_HOME") + "/conf/hadoop-default.xml"));
    conf.addResource(new Path(System.getenv("HADOOP_HOME") + "/conf/hadoop-site.xml"));
    FileSystem fs = null;
    try {
        fs = FileSystem.get(conf);
    } catch (Exception ex) {
        ex.printStackTrace();
        System.exit(-1);
    }
    System.out.println("fs.default.name: " + conf.get("fs.default.name"));
    //System.exit(-1);

    /*
    IGenerator generators[] = { new RankingGenerator() } ; //,
                          //new UserVisitGenerator() };
    Class classes[]         = { Ranking.class }; 
                          //UserVisit.class };
            
    for (int ctr = 0; ctr < generators.length; ctr++){
       IGenerator generator = generators[ctr];
       long bytes_total_written = 0;
       int file_idx = 0;
       long total_records = 0;
       while (bytes_total_written < bytes_total_wanted) {
    long bytes_file_written = 0;
    long record_ctr = 0;
    String class_name = classes[ctr].getSimpleName();
    try {
       //
       // Create a new file for this data type
       //
       String file = directory + "/" + class_name + "s/" + class_name + String.format("%05d", file_idx++);
       Path outFile = new Path(file);
       System.err.print("CREATE: " + outFile.toUri());
       FSDataOutputStream out = fs.create(outFile);
               
       while (bytes_file_written < bytes_file_wanted) {
          String data = (classes[ctr].cast(generator.generate())).toString() + "\n";
          //System.out.print("\t[" + bytes_file_written + "]");
          //System.out.print(data);
          out.write(data.getBytes());
          bytes_file_written += data.getBytes().length;
          record_ctr++;
       } // WHILE
       out.close();
       System.err.println(" [" + record_ctr + " records]");
    } catch (Exception ex) {
       ex.printStackTrace();
       System.exit(-1);
    }
    total_records += record_ctr;
    bytes_total_written += bytes_file_written;
       } // WHILE
       System.out.println("\nTOTAL: " + total_records);
    } // FOR
    */
}