Example usage for org.apache.hadoop.conf Configuration Configuration

Introduction

On this page you can find example usage for the no-argument constructor of org.apache.hadoop.conf.Configuration.

Prototype

public Configuration() 

Document

A new configuration.
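
The no-argument constructor loads Hadoop's default resources (core-default.xml and core-site.xml from the classpath). A minimal sketch of using it on its own, with a standard Hadoop property key and illustrative values:

// Create a Configuration backed by the default resources.
Configuration conf = new Configuration();
// Override a property in memory; this does not touch the XML files.
conf.set("mapreduce.job.reduces", "4");
int reduces = conf.getInt("mapreduce.job.reduces", 1);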

Usage

From source file:AllLab_Skeleton.Lab1.Lab1_Wordcount.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "word count");
    job.setJarByClass(Lab1_Wordcount.class);
    job.setMapperClass(WordCount_Mapper.class);
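    // Reusing the reducer as a combiner is valid for word count because
    // summing partial counts is associative and commutative.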
    job.setCombinerClass(WordCount_Reducer.class);
    job.setReducerClass(WordCount_Reducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:AllLab_Skeleton.Lab2.Lab2SecondarySort.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {

    try {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "SecondarySort");
        job.setJarByClass(Lab2SecondarySort.class);

        job.setMapperClass(Lab2Mapper.class);
        job.setMapOutputKeyClass(CompositeKeyWritable.class);
        job.setMapOutputValueClass(NullWritable.class);

        job.setPartitionerClass(Lab2Partitioner.class);
        job.setGroupingComparatorClass(Lab2GroupComparator.class);
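        // Together these implement secondary sort: records are partitioned by
        // the natural key, and the grouping comparator groups reducer input on
        // it, so each reduce call sees values ordered by the composite key.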

        job.setReducerClass(Lab2Reducer.class);
        job.setOutputKeyClass(CompositeKeyWritable.class);
        job.setOutputValueClass(NullWritable.class);

        job.setNumReduceTasks(8);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);

    } catch (IOException | InterruptedException | ClassNotFoundException ex) {
        System.out.println("Erorr Message" + ex.getMessage());
    }
}

From source file:AllLab_Skeleton.Lab4.Lab4_Std_dev.java

public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "medianstd");
    job.setJarByClass(Lab4_Std_dev.class);
    job.setMapperClass(Map.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(DoubleWritable.class);

    job.setReducerClass(Reduce.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(MedianSDCustomWritable.class);
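    // Per its name, MedianSDCustomWritable presumably carries the median and
    // standard deviation the reducer computes for each key.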
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    System.exit(job.waitForCompletion(true) ? 0 : 1);

}

From source file:AllLab_Skeleton.Lab6.BloomFilterBhavesh.java

public static void main(String[] args) throws Exception {

    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Bloom Filter");
    job.setJarByClass(BloomFilterBhavesh.class);
    job.setMapperClass(BloomFilterMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(NullWritable.class);
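    // Map-only job: with zero reducers, mapper output is written directly
    // to the output path.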
    job.setNumReduceTasks(0);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    boolean success = job.waitForCompletion(true);
    System.out.println(success);

}

From source file:AllLab_Skeleton.Lab6.BloomFilterUsingDistributedCache.java

public static void main(String[] args) throws Exception {

    Configuration conf = new Configuration();

    Job job = Job.getInstance(conf, "Bloom Filter");
    job.setJarByClass(BloomFilterUsingDistributedCache.class);
    job.setMapperClass(BloomFilterMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(NullWritable.class);

    //adding the file in the cache having the Person class records
    //job.addCacheFile(new Path("localhost:9000/bhavesh/LabAssignment/CacheInput/cache.txt").toUri());
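    // DistributedCache is deprecated in Hadoop 2.x; Job#addCacheFile (the
    // commented-out line above) is the modern replacement.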
    DistributedCache.addCacheFile(new URI(args[2]), job.getConfiguration());
    job.setNumReduceTasks(0);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    job.waitForCompletion(true);

}

From source file:AllLab_Skeleton.Lab6.ReduceSideJoin.java

public static void main(String[] args) throws Exception {

    Configuration conf = new Configuration();

    Job job = Job.getInstance(conf, "ReduceSideJoin");
    job.setJarByClass(ReduceSideJoin.class);

    // Use MultipleInputs to set which input uses what mapper
    // This will keep parsing of each data set separate from a logical
    // standpoint
    // The first two elements of the args array are the two inputs
    MultipleInputs.addInputPath(job, new Path(args[0]), TextInputFormat.class, UserJoinMapper.class);
    MultipleInputs.addInputPath(job, new Path(args[1]), TextInputFormat.class, CommentJoinMapper.class);
    job.getConfiguration().set("join.type", "leftouter");
    //job.setNumReduceTasks(0);
    job.setReducerClass(UserJoinReducer.class);

    job.setOutputFormatClass(TextOutputFormat.class);
    TextOutputFormat.setOutputPath(job, new Path(args[2]));

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    job.waitForCompletion(true);
}

From source file:alluxio.checker.CheckerUtils.java

License:Apache License

/**
 * @return if the current node can recognize Alluxio classes and filesystem
 */
public static Status performIntegrationChecks() {
    // Check if the current node can recognize Alluxio classes
    try {
        // Check if the current node can recognize Alluxio common classes
        Class.forName("alluxio.AlluxioURI");
        // Check if current node can recognize Alluxio core client classes
        Class.forName("alluxio.client.file.BaseFileSystem");
        Class.forName("alluxio.hadoop.AlluxioFileSystem");
    } catch (ClassNotFoundException e) {
        LOG.error("Failed to find Alluxio classes on classpath", e);
        return Status.FAIL_TO_FIND_CLASS;
    }

    // Check if the current node can recognize Alluxio filesystem
    try {
        FileSystem.getFileSystemClass("alluxio", new Configuration());
    } catch (Exception e) {
        LOG.error("Failed to find Alluxio filesystem", e);
        return Status.FAIL_TO_FIND_FS;
    }

    return Status.SUCCESS;
}
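
getFileSystemClass resolves a scheme through the Configuration key fs.<scheme>.impl before falling back to implementations registered via Java's ServiceLoader. A minimal sketch of an explicit mapping (the class name is taken from the last example on this page; setting it by hand is only needed when no ServiceLoader registration is present):

// Explicitly map the "alluxio" scheme to its Hadoop-compatible client.
Configuration conf = new Configuration();
conf.set("fs.alluxio.impl", "alluxio.hadoop.FileSystem");
Class<? extends org.apache.hadoop.fs.FileSystem> fsClass =
        org.apache.hadoop.fs.FileSystem.getFileSystemClass("alluxio", conf); // throws IOException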

From source file:alluxio.checker.MapReduceIntegrationChecker.java

License:Apache License

/**
 * Implements MapReduce with Alluxio integration checker.
 *
 * @return 0 for success, 2 for unable to find Alluxio classes, 1 otherwise
 */
private int run(String[] args) throws Exception {
    Configuration conf = new Configuration();
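    // GenericOptionsParser applies generic Hadoop options (-D, -conf, -fs, ...)
    // to conf; getRemainingArgs() returns the leftover arguments, here the
    // requested number of map tasks.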
    String numMaps = new GenericOptionsParser(conf, args).getRemainingArgs()[0];
    conf.set(MRJobConfig.NUM_MAPS, numMaps);
    createHdfsFilesystem(conf);

    Job job = Job.getInstance(conf, "MapReduceIntegrationChecker");
    job.setJarByClass(MapReduceIntegrationChecker.class);
    job.setMapperClass(CheckerMapper.class);
    job.setCombinerClass(CheckerReducer.class);
    job.setReducerClass(CheckerReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setInputFormatClass(EmptyInputFormat.class);
    FileOutputFormat.setOutputPath(job, mOutputFilePath);

    try {
        if (!job.waitForCompletion(true)) {
            return 1;
        }
        Status resultStatus = generateReport();
        return resultStatus.equals(Status.SUCCESS) ? 0
                : (resultStatus.equals(Status.FAIL_TO_FIND_CLASS) ? 2 : 1);
    } finally {
        if (mFileSystem.exists(mOutputFilePath)) {
            mFileSystem.delete(mOutputFilePath, true);
        }
        mFileSystem.close();
    }
}

From source file:alluxio.checker.SparkIntegrationChecker.java

License:Apache License

/**
 * @return if this Spark driver or executors can recognize Alluxio classes and filesystem
 */
private Status performIntegrationChecks() {
    // Checks if Spark driver or executors can recognize Alluxio classes
    try {
        // Checks if Spark driver or executors can recognize Alluxio common classes
        Class.forName("alluxio.AlluxioURI");
        // Checks if Spark driver or executors can recognize Alluxio core client classes
        Class.forName("alluxio.client.file.BaseFileSystem");
        Class.forName("alluxio.hadoop.AlluxioFileSystem");
    } catch (ClassNotFoundException e) {
        LOG.error("Failed to find Alluxio classes on classpath ", e);
        return Status.FAIL_TO_FIND_CLASS;
    }

    // Checks if Spark driver or executors can recognize Alluxio filesystem
    try {
        FileSystem.getFileSystemClass("alluxio", new Configuration());
    } catch (Exception e) {
        LOG.error("Failed to find Alluxio filesystem ", e);
        return Status.FAIL_TO_FIND_FS;
    }

    return Status.SUCCESS;
}

From source file:alluxio.client.hadoop.FileSystemAclIntegrationTest.java

License:Apache License

@BeforeClass
public static void beforeClass() throws Exception {
    Configuration conf = new Configuration();
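    // Map the alluxio:// scheme to Alluxio's Hadoop-compatible client; the
    // unqualified FileSystem here is evidently alluxio.hadoop.FileSystem,
    // given the fully qualified org.apache.hadoop.fs.FileSystem below.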
    conf.set("fs.alluxio.impl", FileSystem.class.getName());
    conf = HadoopConfigurationUtils.mergeAlluxioConfiguration(conf, ServerConfiguration.global());

    URI uri = URI.create(sLocalAlluxioClusterResource.get().getMasterURI());

    sTFS = org.apache.hadoop.fs.FileSystem.get(uri,
            HadoopConfigurationUtils.mergeAlluxioConfiguration(conf, ServerConfiguration.global()));
    sUfsRoot = ServerConfiguration.get(PropertyKey.MASTER_MOUNT_TABLE_ROOT_UFS);
    sUfs = UnderFileSystem.Factory.createForRoot(ServerConfiguration.global());
}