Example usage for org.apache.hadoop.mapreduce Job submit

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce Job submit.

Prototype

public void submit() throws IOException, InterruptedException, ClassNotFoundException 

Document

Submit the job to the cluster and return immediately.
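
Example

A minimal, self-contained sketch of how submit() is typically used (the class name, command-line paths, and polling interval below are illustrative assumptions, not taken from the examples that follow): because submit() returns as soon as the job has been handed to the cluster, the caller must track completion itself, for example by polling the Job, whereas waitForCompletion() blocks until the job finishes.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class SubmitExample {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "submit-example");
        job.setJarByClass(SubmitExample.class);
        // No mapper or reducer is set, so Hadoop uses the identity Mapper and Reducer
        // with the default TextInputFormat/TextOutputFormat.
        FileInputFormat.addInputPath(job, new Path(args[0]));   // illustrative input path
        FileOutputFormat.setOutputPath(job, new Path(args[1])); // illustrative output path

        job.submit(); // returns immediately; the job runs asynchronously on the cluster

        // Poll the running job until it completes, then report the outcome.
        while (!job.isComplete()) {
            System.out.printf("map %.0f%% reduce %.0f%%%n",
                    job.mapProgress() * 100, job.reduceProgress() * 100);
            Thread.sleep(5000);
        }
        System.exit(job.isSuccessful() ? 0 : 1);
    }
}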

Usage

From source file: org.calrissian.accumulorecipes.eventstore.hadoop.EventInputFormatTest.java

License: Apache License

@Test
public void testNoQuery() throws Exception {

    Instance instance = new MockInstance("eventInst2");
    Connector connector = instance.getConnector("root", "".getBytes());
    AccumuloEventStore store = new AccumuloEventStore(connector);
    event = EventBuilder.create("", UUID.randomUUID().toString(), System.currentTimeMillis())
            .attr(new Attribute("key1", "val1")).attr(new Attribute("key2", false)).build();
    store.save(singleton(event));
    store.flush();
    AccumuloTestUtils.dumpTable(connector, "eventStore_shard");
    Job job = new Job(new Configuration());
    job.setJarByClass(getClass());
    job.setMapperClass(TestMapper.class);
    job.setNumReduceTasks(0);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    job.setInputFormatClass(EventInputFormat.class);
    EventInputFormat.setInputInfo(job, "root", "".getBytes(), new Authorizations());
    EventInputFormat.setMockInstance(job, "eventInst2");
    EventInputFormat.setQueryInfo(job, new Date(System.currentTimeMillis() - 50000), new Date(),
            Collections.singleton(""));
    job.setOutputFormatClass(NullOutputFormat.class);

    job.submit();
    job.waitForCompletion(true);

    System.out.println("RESULT: " + TestMapper.entry);

    assertNotNull(TestMapper.entry);
    assertEquals(TestMapper.entry.getId(), event.getId());
    assertEquals(new HashSet<Attribute>(TestMapper.entry.getAttributes()),
            new HashSet<Attribute>(event.getAttributes()));

}

From source file: org.calrissian.accumulorecipes.featurestore.hadoop.MetricsInputFormatTest.java

License: Apache License

@Test
public void test() throws IOException, ClassNotFoundException, InterruptedException, AccumuloSecurityException,
        AccumuloException, TableExistsException, TableNotFoundException {

    Instance instance = new MockInstance("metricsInst");
    Connector connector = instance.getConnector("root", "".getBytes());
    AccumuloFeatureStore store = new AccumuloFeatureStore(connector);
    store.initialize();
    store.save(singleton(metric));

    Job job = new Job(new Configuration());
    job.setJarByClass(getClass());
    job.setMapperClass(TestMapper.class);
    job.setNumReduceTasks(0);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    job.setInputFormatClass(FeaturesInputFormat.class);
    FeaturesInputFormat.setInputInfo(job, "root", "".getBytes(), new Authorizations());
    FeaturesInputFormat.setQueryInfo(job, new Date(0), new Date(), TimeUnit.MINUTES, "group", "type", "name",
            MetricFeature.class);
    FeaturesInputFormat.setMockInstance(job, "metricsInst");
    job.setOutputFormatClass(NullOutputFormat.class);

    job.submit();
    job.waitForCompletion(true);

    assertEquals(metric.getGroup(), TestMapper.metric.getGroup());
    assertEquals(metric.getType(), TestMapper.metric.getType());
    assertEquals(metric.getName(), TestMapper.metric.getName());
    assertEquals(metric.getVisibility(), TestMapper.metric.getVisibility());
    assertEquals(metric.getVector(), TestMapper.metric.getVector());

}

From source file: org.gridgain.client.hadoop.GridHadoopClientProtocolSelfTest.java

License: Open Source License

/**
 * Tests job counters retrieval.
 *
 * @throws Exception If failed.
 */
public void testJobCounters() throws Exception {
    GridGgfs ggfs = grid(0).ggfs(GridHadoopAbstractSelfTest.ggfsName);

    ggfs.mkdirs(new GridGgfsPath(PATH_INPUT));

    try (BufferedWriter bw = new BufferedWriter(
            new OutputStreamWriter(ggfs.create(new GridGgfsPath(PATH_INPUT + "/test.file"), true)))) {

        bw.write("alpha\n" + "beta\n" + "gamma\n" + "alpha\n" + "beta\n" + "gamma\n" + "alpha\n" + "beta\n"
                + "gamma\n");
    }

    Configuration conf = config(GridHadoopAbstractSelfTest.REST_PORT);

    final Job job = Job.getInstance(conf);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    job.setMapperClass(TestCountingMapper.class);
    job.setReducerClass(TestCountingReducer.class);
    job.setCombinerClass(TestCountingCombiner.class);

    FileInputFormat.setInputPaths(job, new Path(PATH_INPUT));
    FileOutputFormat.setOutputPath(job, new Path(PATH_OUTPUT));

    job.submit();

    final Counter cntr = job.getCounters().findCounter(TestCounter.COUNTER1);

    assertEquals(0, cntr.getValue());

    cntr.increment(10);

    assertEquals(10, cntr.getValue());

    // Transferring to map phase.
    setupLockFile.delete();

    // Transferring to reduce phase.
    mapLockFile.delete();

    job.waitForCompletion(false);

    assertEquals("job must end successfully", JobStatus.State.SUCCEEDED, job.getStatus().getState());

    final Counters counters = job.getCounters();

    assertNotNull("counters cannot be null", counters);
    assertEquals("wrong counters count", 3, counters.countCounters());
    assertEquals("wrong counter value", 15, counters.findCounter(TestCounter.COUNTER1).getValue());
    assertEquals("wrong counter value", 3, counters.findCounter(TestCounter.COUNTER2).getValue());
    assertEquals("wrong counter value", 3, counters.findCounter(TestCounter.COUNTER3).getValue());
}

From source file: org.gridgain.client.hadoop.GridHadoopClientProtocolSelfTest.java

License: Open Source License

/**
 * Test job submission.
 *
 * @param noCombiners Whether there are no combiners.
 * @param noReducers Whether there are no reducers.
 * @throws Exception If failed.
 */
public void checkJobSubmit(boolean noCombiners, boolean noReducers) throws Exception {
    GridGgfs ggfs = grid(0).ggfs(GridHadoopAbstractSelfTest.ggfsName);

    ggfs.mkdirs(new GridGgfsPath(PATH_INPUT));

    try (BufferedWriter bw = new BufferedWriter(
            new OutputStreamWriter(ggfs.create(new GridGgfsPath(PATH_INPUT + "/test.file"), true)))) {

        bw.write("word");
    }

    Configuration conf = config(GridHadoopAbstractSelfTest.REST_PORT);

    final Job job = Job.getInstance(conf);

    job.setJobName(JOB_NAME);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    job.setMapperClass(TestMapper.class);
    job.setReducerClass(TestReducer.class);

    if (!noCombiners)
        job.setCombinerClass(TestCombiner.class);

    if (noReducers)
        job.setNumReduceTasks(0);

    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TestOutputFormat.class);

    FileInputFormat.setInputPaths(job, new Path(PATH_INPUT));
    FileOutputFormat.setOutputPath(job, new Path(PATH_OUTPUT));

    job.submit();

    JobID jobId = job.getJobID();

    // Setup phase.
    JobStatus jobStatus = job.getStatus();
    checkJobStatus(jobStatus, jobId, JOB_NAME, JobStatus.State.RUNNING, 0.0f);
    assert jobStatus.getSetupProgress() >= 0.0f && jobStatus.getSetupProgress() < 1.0f;
    assert jobStatus.getMapProgress() == 0.0f;
    assert jobStatus.getReduceProgress() == 0.0f;

    U.sleep(2100);

    JobStatus recentJobStatus = job.getStatus();

    assert recentJobStatus.getSetupProgress() > jobStatus.getSetupProgress() : "Old="
            + jobStatus.getSetupProgress() + ", new=" + recentJobStatus.getSetupProgress();

    // Transferring to map phase.
    setupLockFile.delete();

    assert GridTestUtils.waitForCondition(new GridAbsPredicate() {
        @Override
        public boolean apply() {
            try {
                return F.eq(1.0f, job.getStatus().getSetupProgress());
            } catch (Exception e) {
                throw new RuntimeException("Unexpected exception.", e);
            }
        }
    }, 5000L);

    // Map phase.
    jobStatus = job.getStatus();
    checkJobStatus(jobStatus, jobId, JOB_NAME, JobStatus.State.RUNNING, 0.0f);
    assert jobStatus.getSetupProgress() == 1.0f;
    assert jobStatus.getMapProgress() >= 0.0f && jobStatus.getMapProgress() < 1.0f;
    assert jobStatus.getReduceProgress() == 0.0f;

    U.sleep(2100);

    recentJobStatus = job.getStatus();

    assert recentJobStatus.getMapProgress() > jobStatus.getMapProgress() : "Old=" + jobStatus.getMapProgress()
            + ", new=" + recentJobStatus.getMapProgress();

    // Transferring to reduce phase.
    mapLockFile.delete();

    assert GridTestUtils.waitForCondition(new GridAbsPredicate() {
        @Override
        public boolean apply() {
            try {
                return F.eq(1.0f, job.getStatus().getMapProgress());
            } catch (Exception e) {
                throw new RuntimeException("Unexpected exception.", e);
            }
        }
    }, 5000L);

    if (!noReducers) {
        // Reduce phase.
        jobStatus = job.getStatus();
        checkJobStatus(jobStatus, jobId, JOB_NAME, JobStatus.State.RUNNING, 0.0f);
        assert jobStatus.getSetupProgress() == 1.0f;
        assert jobStatus.getMapProgress() == 1.0f;
        assert jobStatus.getReduceProgress() >= 0.0f && jobStatus.getReduceProgress() < 1.0f;

        // Ensure that reduces progress increases.
        U.sleep(2100);

        recentJobStatus = job.getStatus();

        assert recentJobStatus.getReduceProgress() > jobStatus.getReduceProgress() : "Old="
                + jobStatus.getReduceProgress() + ", new=" + recentJobStatus.getReduceProgress();

        reduceLockFile.delete();
    }

    job.waitForCompletion(false);

    jobStatus = job.getStatus();
    checkJobStatus(job.getStatus(), jobId, JOB_NAME, JobStatus.State.SUCCEEDED, 1.0f);
    assert jobStatus.getSetupProgress() == 1.0f;
    assert jobStatus.getMapProgress() == 1.0f;
    assert jobStatus.getReduceProgress() == 1.0f;

    dumpGgfs(ggfs, new GridGgfsPath(PATH_OUTPUT));
}

From source file: org.kiji.mapreduce.TestMapReduceJob.java

License: Apache License

@Test
public void testSubmit() throws ClassNotFoundException, IOException, InterruptedException {
    Job hadoopJob = createMock(Job.class);

    // Expect that the job is submitted and that it is successful.
    hadoopJob.submit();
    expect(hadoopJob.isComplete()).andReturn(false);
    expect(hadoopJob.isComplete()).andReturn(true);
    expect(hadoopJob.isSuccessful()).andReturn(true);

    replay(hadoopJob);

    MapReduceJob job = new ConcreteMapReduceJob(hadoopJob);
    MapReduceJob.Status jobStatus = job.submit();
    assertFalse(jobStatus.isComplete());
    assertTrue(jobStatus.isComplete());
    assertTrue(jobStatus.isSuccessful());

    verify(hadoopJob);
}

From source file: org.lilyproject.indexer.master.BatchIndexBuilder.java

License: Apache License

/**
 *
 * @return the ID of the started job
 */
public static Job startBatchBuildJob(IndexDefinition index, Configuration mapReduceConf,
        Configuration hbaseConf, String zkConnectString, int zkSessionTimeout) throws Exception {

    Configuration conf = new Configuration(mapReduceConf);
    Job job = new Job(conf);

    //
    // Find and set the MapReduce job jar.
    //
    Class mapperClass = IndexingMapper.class;
    String jobJar = findContainingJar(mapperClass);
    if (jobJar == null) {
        // TODO
        throw new RuntimeException("Job jar not found for class " + mapperClass);
    }

    job.getConfiguration().set("mapred.jar", jobJar);

    //
    // Pass information about the index to be built
    //
    String indexerConfString = Base64.encodeBytes(index.getConfiguration(), Base64.GZIP);
    job.getConfiguration().set("org.lilyproject.indexer.batchbuild.indexerconf", indexerConfString);

    if (index.getShardingConfiguration() != null) {
        String shardingConfString = Base64.encodeBytes(index.getShardingConfiguration(), Base64.GZIP);
        job.getConfiguration().set("org.lilyproject.indexer.batchbuild.shardingconf", shardingConfString);
    }

    int i = 0;
    for (Map.Entry<String, String> shard : index.getSolrShards().entrySet()) {
        i++;
        job.getConfiguration().set("org.lilyproject.indexer.batchbuild.solrshard.name." + i, shard.getKey());
        job.getConfiguration().set("org.lilyproject.indexer.batchbuild.solrshard.address." + i,
                shard.getValue());
    }

    job.setNumReduceTasks(0);
    job.setOutputFormatClass(NullOutputFormat.class);

    //
    // Define the HBase scanner
    //
    FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    filterList.addFilter(new SingleColumnValueFilter(RecordCf.SYSTEM.bytes, RecordColumn.DELETED.bytes,
            CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes(true)));
    Scan scan = new Scan();
    scan.setFilter(filterList);
    scan.addColumn(RecordCf.SYSTEM.bytes, RecordColumn.DELETED.bytes);

    TableMapReduceUtil.initTableMapperJob(Table.RECORD.name, scan, IndexingMapper.class, null, null, job);

    //
    // Provide properties to connect to HBase
    //
    job.getConfiguration().set("hbase.zookeeper.quorum", hbaseConf.get("hbase.zookeeper.quorum"));
    job.getConfiguration().set("hbase.zookeeper.property.clientPort",
            hbaseConf.get("hbase.zookeeper.property.clientPort"));

    //
    // Provide Lily ZooKeeper props
    //
    job.getConfiguration().set("org.lilyproject.indexer.batchbuild.zooKeeperConnectString", zkConnectString);
    job.getConfiguration().set("org.lilyproject.indexer.batchbuild.zooKeeperSessionTimeout",
            String.valueOf(zkSessionTimeout));

    job.submit();

    return job;
}

From source file: org.mrgeo.cmd.findholes.mapreduce.FindHolesDriver.java

License: Apache License

@SuppressFBWarnings(value = "PATH_TRAVERSAL_IN", justification = "File() - name is generated in code")
public boolean runJob(String input, String output, int zoom, ProviderProperties props, Configuration conf)
        throws IOException {

    System.out.println("Input:     " + input);
    System.out.println("Output:    " + output);
    System.out.println("ZoomLevel: " + zoom);

    conf.set("zoom", Integer.toString(zoom));
    DataProviderFactory.saveProviderPropertiesToConfig(props, conf);

    MrsImageDataProvider midp = DataProviderFactory.getMrsImageDataProvider(input, AccessMode.READ, conf);
    MrsPyramidMetadata mipm = midp.getMetadataReader().read();

    System.out.println("DP = " + midp.getClass().getCanonicalName());
    System.out.println("DP resource = " + midp.getResourceName());

    LongRectangle lr = mipm.getTileBounds(zoom);
    conf.set("bounds", lr.toDelimitedString());

    AdHocDataProvider ahdp = DataProviderFactory.createAdHocDataProvider(conf);
    conf.set("adhoc.provider", ahdp.getResourceName());

    Job job = new Job(conf, "Find holes for " + input + " at zoom level " + zoom);
    conf = job.getConfiguration();

    // how to fake out loading core dependencies
    HadoopUtils.setJar(job, FindHolesDriver.class);

    job.setMapperClass(FindHolesMapper.class);
    job.setReducerClass(FindHolesReducer.class);

    job.setMapOutputKeyClass(LongWritable.class);
    job.setMapOutputValueClass(LongWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    //Properties props = new Properties();

    ImageInputFormatContext tifc = new ImageInputFormatContext(zoom, mipm.getTilesize(), input, props);
    MrsImageInputFormatProvider miifp = midp.getImageInputFormatProvider(tifc);

    // this is key for setting up the input
    job.setInputFormatClass(miifp.getInputFormat(input).getClass());

    miifp.setupJob(job, null);

    ahdp.setupJob(job);

    // now set output
    AdHocDataProvider dummy = DataProviderFactory.createAdHocDataProvider(conf);

    // mimic FileOutputFormat.setOutputPath(job, path);
    conf.set("mapred.output.dir", dummy.getResourceName());

    try {
        job.submit();

        boolean success = job.waitForCompletion(true);

        dummy.delete();

        if (success) {
            miifp.teardown(job);

            boolean[][] valid = new boolean[(int) lr.getHeight()][(int) lr.getWidth()];
            for (int y = 0; y < (int) lr.getHeight(); y++) {
                for (int x = 0; x < (int) lr.getWidth(); x++) {
                    valid[y][x] = false;
                }
            }

            final int size = ahdp.size();
            for (int i = 0; i < size; i++) {
                final InputStream stream = ahdp.get(i);
                try (BufferedReader br = new BufferedReader(new InputStreamReader(stream))) {
                    // read values out of stream
                    String line;
                    while ((line = br.readLine()) != null) {

                        // format is "y: x x x x"
                        String[] vals = line.split(":");
                        int y = Integer.parseInt(vals[0]);

                        if (vals.length == 1) {
                            continue;
                        }
                        vals = vals[1].trim().split(" ");
                        for (String v : vals) {
                            valid[y - (int) lr.getMinY()][Integer.parseInt(v) - (int) lr.getMinX()] = true;
                        }
                    }
                }
                stream.close();
            }
            ahdp.delete();
            File outFile = new File(output);
            PrintWriter pw = new PrintWriter(outFile);
            StringBuilder sbMissing = new StringBuilder();
            for (int y = 0; y < lr.getHeight(); y++) {
                // y + lr.getMinY()
                boolean m = false;
                for (int x = 0; x < lr.getWidth(); x++) {
                    // x + lr.getMinX()
                    if (valid[y][x]) {
                        pw.write("+");
                    } else {
                        m = true;
                        sbMissing.append("(").append(x + lr.getMinX()).append(",").append(y + lr.getMinY())
                                .append(") ");
                        pw.write("-");
                    }

                }
                pw.write("\n");
                if (m) {
                    sbMissing.append("\n");
                }
            }
            if (sbMissing.length() > 0) {
                pw.write("\n\n");
                pw.write(sbMissing.toString() + "\n");
            }
            pw.close();
            return true;
        }
    } catch (InterruptedException | ClassNotFoundException e) {
        throw new IOException(e);
    }

    return false;
}

From source file: org.mrgeo.ingest.IngestImageDriver.java

License: Apache License

private static boolean runJob(final String[] inputs, final String output, final Configuration config,
        final TiledInputFormatProvider<RasterWritable> formatProvider, final Bounds bounds, final Number nodata,
        final boolean categorical, final int zoomlevel, final int tilesize, final int bands,
        final Map<String, String> tags, final String protectionLevel, final Properties providerProperties)
        throws Exception {

    Configuration conf = config;
    if (conf == null) {
        conf = HadoopUtils.createConfiguration();
    }

    final Job job = new Job(conf, "IngestImage");
    conf = job.getConfiguration();

    HadoopUtils.setJar(job, IngestImageDriver.class);

    job.setMapperClass(IngestImageMapper.class);
    job.setReducerClass(IngestImageReducer.class);

    for (final String input : inputs) {
        // using FileInputFormat for convenience. It creates "mapred.input.dir" in the config
        FileInputFormat.addInputPath(job, new Path(input));
    }

    formatProvider.setupJob(job, providerProperties);

    // getInputFormat takes an image name, but we don't need it here, so we'll just send an empty string
    job.setInputFormatClass(formatProvider.getInputFormat("").getClass());

    final AdHocDataProvider metadataProvider = DataProviderFactory.createAdHocDataProvider(providerProperties);
    final AdHocDataProvider statsProvider = DataProviderFactory.createAdHocDataProvider(providerProperties);

    // get the ad hoc providers set up for map/reduce
    metadataProvider.setupJob(job);
    statsProvider.setupJob(job);

    conf.set("metadata.provider", metadataProvider.getResourceName());
    conf.set("stats.provider", statsProvider.getResourceName());
    conf.setInt("zoomlevel", zoomlevel);
    conf.setInt("tilesize", tilesize);
    conf.setFloat("nodata", nodata.floatValue());
    conf.setInt("bands", bands);

    if (categorical) {
        conf.set("classification", Classification.Categorical.name());
    } else {
        conf.set("classification", Classification.Continuous.name());
    }

    String useProtectionLevel = protectionLevel;
    {
        MrsImageDataProvider dp = DataProviderFactory.getMrsImageDataProvider(output, AccessMode.OVERWRITE,
                conf);
        useProtectionLevel = ProtectionLevelUtils.getAndValidateProtectionLevel(dp, protectionLevel);
    }

    MrsImageOutputFormatProvider provider = MrsImageDataProvider.setupMrsPyramidOutputFormat(job, output,
            bounds, zoomlevel, tilesize, useProtectionLevel, providerProperties);

    try {
        job.submit();

        final boolean success = job.waitForCompletion(true);
        if (success) {
            provider.teardown(job);

            ImageStats[] stats = ImageStats.readStats(statsProvider);
            aggregateMetadata(metadataProvider, provider, output, stats, tags, useProtectionLevel,
                    providerProperties);
        }

        return success;
    } catch (final ClassNotFoundException e) {
        throw new IOException("Error running ingest map/reduce", e);
    } catch (final InterruptedException e) {
        throw new IOException("Error running ingest map/reduce", e);
    } finally {
        statsProvider.delete();
        metadataProvider.delete();
    }
}

From source file: org.mrgeo.mapreduce.ingestvector.IngestVectorDriver.java

License: Apache License

private static boolean runJob(String[] inputs, String output, Configuration config, int zoomlevel,
        String protectionLevel, Properties providerProperties) throws IOException {

    Bounds bounds = GeotoolsVectorUtils.calculateBounds(inputs, config);

    final Job job = new Job(config);

    final String now = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date());

    final String jobName = "IngestVector_" + now + "_" + UUID.randomUUID().toString();
    job.setJobName(jobName);

    final Configuration conf = job.getConfiguration();

    int tilesize = Integer.parseInt(MrGeoProperties.getInstance().getProperty(MrGeoConstants.MRGEO_MRS_TILESIZE,
            MrGeoConstants.MRGEO_MRS_TILESIZE_DEFAULT));

    conf.setInt(MapGeometryToTiles.ZOOMLEVEL, zoomlevel);
    conf.setInt(MapGeometryToTiles.TILESIZE, tilesize);

    job.setInputFormatClass(IngestVectorGeometryInputFormat.class);

    job.setMapperClass(IngestGeometryMapper.class);
    job.setMapOutputKeyClass(TileIdWritable.class);
    job.setMapOutputValueClass(GeometryWritable.class);

    HadoopUtils.setJar(job, IngestVectorDriver.class);
    for (final String input : inputs) {
        // Source file can be on the local file system (instead of hdfs), and
        // we call the following to circumvent a bug in Hadoop 20.2
        // FileInputFormat.addInputPath.
        HadoopVectorUtils.addInputPath(job, new Path(input));
    }
    HadoopFileUtils.delete(output);

    MrsImageOutputFormatProvider ofProvider = MrsImageDataProvider.setupMrsPyramidOutputFormat(job, output,
            bounds, zoomlevel, tilesize, protectionLevel, providerProperties);

    job.setReducerClass(IngestGeometryReducer.class);

    job.setOutputKeyClass(TileIdWritable.class);
    job.setOutputValueClass(VectorTileWritable.class);

    try {
        job.submit();
        final boolean success = job.waitForCompletion(true);

        if (success) {
            ofProvider.teardown(job);
            MrsVectorPyramid.calculateMetadata(output, zoomlevel, tilesize, bounds, protectionLevel);
            return true;
        }

    } catch (final InterruptedException e) {
        e.printStackTrace();
    } catch (final ClassNotFoundException e) {
        e.printStackTrace();
    }

    return false;
}

From source file: org.mrgeo.mapreduce.ingestvector.IngestVectorDriver.java

License: Apache License

private static int calculateZoomlevel(final String[] inputs, final Configuration config) throws IOException {
    log.info("Calculating zoom level");

    final Job job = new Job(config);
    HadoopUtils.setJar(job, IngestVectorDriver.class);

    final String now = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date());

    final String jobName = "CalculateVectorZoom_" + now + "_" + UUID.randomUUID().toString();
    job.setJobName(jobName);

    final Configuration conf = job.getConfiguration();

    conf.setInt(MAX_ZOOM, zoomMax);
    conf.setInt(TILESIZE, Integer.parseInt(MrGeoProperties.getInstance()
            .getProperty(MrGeoConstants.MRGEO_MRS_TILESIZE, MrGeoConstants.MRGEO_MRS_TILESIZE_DEFAULT)));

    job.setInputFormatClass(IngestVectorGeometryInputFormat.class);
    job.setMapperClass(CalculateZoomMapper.class);
    job.setMapOutputKeyClass(TileIdZoomWritable.class);
    job.setMapOutputValueClass(LongWritable.class);

    for (final String input : inputs) {
        // Source file can be on the local file system (instead of hdfs), and
        // we call the following to circumvent a bug in Hadoop 20.2
        // FileInputFormat.addInputPath.
        HadoopVectorUtils.addInputPath(job, new Path(input));
    }

    job.setReducerClass(CalculateZoomReducer.class);
    String output = new Path(HadoopFileUtils.getTempDir(), HadoopUtils.createRandomString(40)).toString();
    try {
        FileOutputFormat.setOutputPath(job, new Path(output));

        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(LongWritable.class);

        try {
            job.submit();
            final boolean success = job.waitForCompletion(true);

            if (success) {
                return readZoomlevel(output, conf);
            }

        } catch (final InterruptedException e) {
            e.printStackTrace();
        } catch (final ClassNotFoundException e) {
            e.printStackTrace();
        }
    } finally {
        HadoopFileUtils.delete(output);
    }

    return 0;
}