Example usage for org.apache.hadoop.mapreduce Job setSpeculativeExecution

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce Job setSpeculativeExecution.

Prototype

public void setSpeculativeExecution(boolean speculativeExecution) 

Document

Turn speculative execution on or off for this job.
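
For orientation, the snippet below is a minimal sketch of where the call usually sits when configuring a job; it is not taken from the sources listed under Usage. The class name SpeculativeExecutionExample, the identity Mapper/Reducer, and the command-line input/output paths are illustrative assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class SpeculativeExecutionExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "speculative execution example");
        job.setJarByClass(SpeculativeExecutionExample.class);

        // Identity mapper and reducer used only to keep the sketch self-contained.
        job.setMapperClass(Mapper.class);
        job.setReducerClass(Reducer.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Turn speculative execution off for both map and reduce tasks of this job.
        job.setSpeculativeExecution(false);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

With speculative execution off, the framework launches no backup attempts for slow tasks; retries after a failed attempt still happen up to the configured maximum number of attempts.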

Usage

From source file: org.apache.tez.mapreduce.TestMRRJobs.java

License: Apache License

@Test(timeout = 60000)
public void testRandomWriter() throws IOException, InterruptedException, ClassNotFoundException {

    LOG.info("\n\n\nStarting testRandomWriter().");
    if (!(new File(MiniTezCluster.APPJAR)).exists()) {
        LOG.info("MRAppJar " + MiniTezCluster.APPJAR + " not found. Not running test.");
        return;
    }

    RandomTextWriterJob randomWriterJob = new RandomTextWriterJob();
    mrrTezCluster.getConfig().set(RandomTextWriterJob.TOTAL_BYTES, "3072");
    mrrTezCluster.getConfig().set(RandomTextWriterJob.BYTES_PER_MAP, "1024");
    Job job = randomWriterJob.createJob(mrrTezCluster.getConfig());
    Path outputDir = new Path(OUTPUT_ROOT_DIR, "random-output");
    FileOutputFormat.setOutputPath(job, outputDir);
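    // Turn speculative execution off so the framework does not launch backup map attempts.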
    job.setSpeculativeExecution(false);
    job.setJarByClass(RandomTextWriterJob.class);
    job.setMaxMapAttempts(1); // speed up failures
    job.submit();
    String trackingUrl = job.getTrackingURL();
    String jobId = job.getJobID().toString();
    boolean succeeded = job.waitForCompletion(true);
    Assert.assertTrue(succeeded);
    Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
    Assert.assertTrue("Tracking URL was " + trackingUrl + " but didn't Match Job ID " + jobId,
            trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));

    // Make sure there are three files in the output-dir

    RemoteIterator<FileStatus> iterator = FileContext.getFileContext(mrrTezCluster.getConfig())
            .listStatus(outputDir);
    int count = 0;
    while (iterator.hasNext()) {
        FileStatus file = iterator.next();
        if (!file.getPath().getName().equals(FileOutputCommitter.SUCCEEDED_FILE_NAME)) {
            count++;
        }
    }
    Assert.assertEquals("Number of part files is wrong!", 3, count);

}

From source file: org.culturegraph.mf.cluster.job.merge.Union.java

License: Apache License

@Override
public int run(final String[] args) throws Exception {
    final String tmp = makeTmp();

    if (!configChecker.logAndVerify(LOG, getConf())) {
        return -1;
    }

    Job job;
    boolean ongoingMerges = true;

    job = new Job(getConf(), "initial explode");
    job.setSpeculativeExecution(false);
    job.setJarByClass(Union.class);
    AbstractJobLauncher.configurePropertyTableMapper(job, getConf(), InputTableMapper.class, Text.class,
            TextArrayWritable.class);
    configureReducer(job, ExplodeReducer.class, new Path(tmp + "explode_0"), SequenceFileOutputFormat.class);
    job.setNumReduceTasks(2);
    job.waitForCompletion(true);

    int count = 0;
    while (ongoingMerges) {

        job = new Job(getConf(), "recollect");

        job.setJarByClass(Union.class);
        configureMapper(job, RecollectMapper.class, new Path(tmp + "explode_" + count),
                SequenceFileInputFormat.class);

        configureReducer(job, RecollectReducer.class, new Path(tmp + "recollect_" + count),
                SequenceFileOutputFormat.class);
        job.setNumReduceTasks(2);
        job.waitForCompletion(true);

        job = new Job(getConf(), "explode");
        job.setJarByClass(Union.class);
        configureMapper(job, ExplodeMapper.class, new Path(tmp + "recollect_" + count),
                SequenceFileInputFormat.class);
        ++count;
        configureReducer(job, ExplodeReducer.class, new Path(tmp + "explode_" + count),
                SequenceFileOutputFormat.class);
        job.setNumReduceTasks(2);
        job.waitForCompletion(true);

        ongoingMerges = job.getCounters().getGroup(UNION_FIND).findCounter(OPEN_CLASSES).getValue() != 0;
        LOG.info("ongoingMerges=" + ongoingMerges);
    }

    job = new Job(HBaseConfiguration.create(getConf()), "collect result");
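    // Speculative execution is disabled again before the final map-only collection job is submitted.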
    job.setSpeculativeExecution(false);
    job.setJarByClass(Union.class);
    final Path path = new Path(tmp + "recollect_*");
    FileInputFormat.addInputPath(job, path);
    job.setInputFormatClass(SequenceFileInputFormat.class);
    job.setMapperClass(ResultMapper.class);
    job.setNumReduceTasks(0);
    job.setOutputFormatClass(NullOutputFormat.class);
    job.waitForCompletion(true);

    return 1;
}