Example usage for org.apache.hadoop.conf Configuration addResource

Introduction

This page collects example usages of org.apache.hadoop.conf.Configuration.addResource, drawn from open-source projects.

Prototype

addResource is overloaded; the variants exercised by the examples below are:

public void addResource(String name)
public void addResource(Path file)
public void addResource(InputStream in)
public void addResource(Configuration conf)

Document

Add a configuration resource.
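
Configuration.addResource accepts a classpath resource name, a filesystem Path, an InputStream, or another Configuration, and resources added later override values from earlier ones unless a property is marked final. A minimal sketch of the overloads exercised by the examples below (the file names and the custom key are placeholders, not taken from any example):

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class AddResourceSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.addResource("core-site.xml");                        // name resolved against the classpath
        conf.addResource(new Path("/etc/hadoop/hdfs-site.xml"));  // explicit filesystem path
        try (InputStream in = Files.newInputStream(Paths.get("extra-site.xml"))) {
            conf.addResource(in);         // arbitrary stream
            conf.get("fs.defaultFS");     // resources are typically read lazily, so touch a property before the stream closes
        }
        Configuration overrides = new Configuration(false);
        overrides.set("my.custom.key", "value");                  // placeholder key
        conf.addResource(overrides);      // properties of another Configuration
    }
}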

Usage

From source file:io.hops.util.GroupMembershipService.java

License: Apache License

private synchronized Configuration getConfiguration(Configuration conf, String confFileName)
        throws YarnException, IOException {
    InputStream confFileInputStream = this.rmContext.getConfigurationProvider()
            .getConfigurationInputStream(conf, confFileName);
    if (confFileInputStream != null) {
        conf.addResource(confFileInputStream);
    }
    return conf;
}
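
This helper follows YARN's ConfigurationProvider refresh pattern: the provider returns an InputStream for a named configuration file, and addResource(InputStream) layers its properties onto the conf that was passed in. A hedged sketch of a call site (the invocation below is an illustrative assumption, not part of GroupMembershipService):

    // Hypothetical call site: reload yarn-site.xml through the provider-backed helper.
    Configuration refreshed = getConfiguration(new YarnConfiguration(),
            YarnConfiguration.YARN_SITE_CONFIGURATION_FILE);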

From source file:io.prestosql.plugin.hive.HdfsConfigurationInitializer.java

License: Apache License

private static Configuration readConfiguration(List<String> resourcePaths) {
    Configuration result = new Configuration(false);

    for (String resourcePath : resourcePaths) {
        Configuration resourceProperties = new Configuration(false);
        resourceProperties.addResource(new Path(resourcePath));
        copy(resourceProperties, result);
    }

    return result;
}
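
The copy helper is not shown above; because Configuration implements Iterable<Map.Entry<String, String>>, a plausible implementation (an assumption, not the actual Presto source) simply re-sets every entry on the target:

// Assumed sketch of the copy helper referenced above.
private static void copy(Configuration from, Configuration to) {
    for (java.util.Map.Entry<String, String> entry : from) {
        to.set(entry.getKey(), entry.getValue());
    }
}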

From source file:lia.analysis.nutch.NutchExample.java

License: Apache License

public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    conf.addResource("nutch-default.xml");
    NutchDocumentAnalyzer analyzer = new NutchDocumentAnalyzer(conf); //1

    TokenStream ts = analyzer.tokenStream("content", new StringReader("The quick brown fox..."));
    int position = 0;
    while (true) { // 2
        Token token = ts.next();
        if (token == null) {
            break;
        }
        int increment = token.getPositionIncrement();

        if (increment > 0) {
            position = position + increment;
            System.out.println();
            System.out.print(position + ": ");
        }

        System.out.print("[" + token.termText() + ":" + token.startOffset() + "->" + token.endOffset() + ":"
                + token.type() + "] ");
    }
    System.out.println();

    Query nutchQuery = Query.parse("\"the quick brown\"", conf); // 3
    org.apache.lucene.search.Query luceneQuery;
    luceneQuery = new QueryFilters(conf).filter(nutchQuery); // A
    System.out.println("Translated: " + luceneQuery);
}
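
Here addResource(String) resolves "nutch-default.xml" against the classpath. Because later resources override earlier ones, a site-specific file can be layered on top in the usual Hadoop/Nutch fashion (the second file name below is illustrative):

    Configuration conf = new Configuration();
    conf.addResource("nutch-default.xml"); // defaults, located on the classpath
    conf.addResource("nutch-site.xml");    // illustrative: site overrides win over the defaults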

From source file:MapReduce.DeviceCountPerCountry.java

public static void main(String args[]) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new HBaseConfiguration();

    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Device Count");
    job.setJarByClass(DeviceCountPerCountry.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("tweetdata", // input table
            sc, // Scan instance to control CF and attribute selection
            DeviceCountPerCountry.MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(DeviceCountPerCountry.MapClass.class);
    job.setReducerClass(DeviceCountPerCountry.ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    String dest = TweetUtils.OUTPUT_PREFIX + "DeviceCount";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
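
The remaining examples repeat this pattern: an HBase-flavoured Configuration plus one extra resource, TweetUtils.HBASE_CONF, which presumably names the project's HBase configuration file. On current HBase versions the deprecated new HBaseConfiguration() constructor is normally replaced by the factory method, which itself uses addResource to pull in hbase-default.xml and hbase-site.xml:

    // Equivalent, non-deprecated setup (TweetUtils.HBASE_CONF assumed to be a config resource, as in the examples).
    Configuration conf = HBaseConfiguration.create();  // adds hbase-default.xml and hbase-site.xml internally
    conf.addResource(TweetUtils.HBASE_CONF);            // layer the project-specific resource on top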

From source file:MapReduce.GenderPerTimezone.java

public static void main(String args[]) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new HBaseConfiguration();
    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Gender Per Timezone");
    job.setJarByClass(GenderPerTimezone.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("twittergenderprediction", // input table
            sc, // Scan instance to control CF and attribute selection
            GenderPerTimezone.MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(GenderPerTimezone.MapClass.class);
    job.setReducerClass(GenderPerTimezone.ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    String dest = TweetUtils.OUTPUT_PREFIX + "GenderPerTimezone";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:MapReduce.LanguageCount.java

public static void main(String[] args) throws Exception {
    Configuration conf = new HBaseConfiguration();
    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Language Count");
    job.setJarByClass(LanguageCount.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("twitteruser", // input table
            sc, // Scan instance to control CF and attribute selection
            MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    String dest = TweetUtils.OUTPUT_PREFIX + "LanguageCount";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:MapReduce.PopularHashtags.java

public static void main(String[] args) throws Exception {
    Configuration conf = new HBaseConfiguration();
    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Popular Hashtags");
    job.setJarByClass(PopularHashtags.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("tweetdata", // input table
            sc, // Scan instance to control CF and attribute selection
            MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    String dest = TweetUtils.OUTPUT_PREFIX + "PopularHashtags";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:MapReduce.SentimentAnalysis.java

public static void main(String[] args) throws Exception {
    Configuration conf = new HBaseConfiguration();
    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Sentiment Analysis");
    job.setJarByClass(SentimentAnalysis.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("tweetdata", // input table
            sc, // Scan instance to control CF and attribute selection
            MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    String dest = TweetUtils.OUTPUT_PREFIX + "SentimentAnalysis";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:MapReduce.SentimentsPerTimezone.java

public static void main(String args[]) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new HBaseConfiguration();

    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Device count per country");
    job.setJarByClass(SentimentsPerTimezone.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("twitteruser", // input table
            sc, // Scan instance to control CF and attribute selection
            SentimentsPerTimezone.MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(SentimentsPerTimezone.MapClass.class);
    job.setReducerClass(SentimentsPerTimezone.ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    String dest = TweetUtils.OUTPUT_PREFIX + "SentimentsPerTimezone";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:MapReduce.TopUserMentions.java

public static void main(String[] args) throws Exception {
    Configuration conf = new HBaseConfiguration();
    conf.addResource(TweetUtils.HBASE_CONF);
    Job job = Job.getInstance(conf, "Top User Mentions");
    job.setJarByClass(TopUserMentions.class);

    Scan sc = new Scan();
    sc.setCaching(500);
    sc.setCacheBlocks(false);

    TableMapReduceUtil.initTableMapperJob("tweetdata", // input table
            sc, // Scan instance to control CF and attribute selection
            MapClass.class, // mapper class
            Text.class, // mapper output key
            LongWritable.class, // mapper output value
            job);

    job.setMapperClass(MapClass.class);
    job.setReducerClass(ReducerClass.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    String dest = TweetUtils.OUTPUT_PREFIX + "TopUserMentions";
    if (args.length > 0) {
        dest = args[0];
    }
    File destination = new File(dest);
    FileUtil.fullyDelete(destination);
    FileOutputFormat.setOutputPath(job, new Path(dest));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}