Example usage for the org.apache.hadoop.mapred.JobID constructor JobID(String, int)

List of usage examples for the org.apache.hadoop.mapred.JobID constructor JobID(String, int)

Introduction

On this page you can find example usage for the org.apache.hadoop.mapred.JobID constructor JobID(String jtIdentifier, int id).

Prototype

public JobID(String jtIdentifier, int id) 

Document

Constructs a JobID object from the given jobtracker identifier and job number.
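
As a quick orientation before the real-world examples below, here is a minimal, self-contained sketch of the constructor; the identifier value "201408290000" is made up for illustration (in practice it is usually the jobtracker or cluster start timestamp):

import org.apache.hadoop.mapred.JobID;

public class JobIDExample {
    public static void main(String[] args) {
        // Construct a JobID from a jobtracker identifier and a job number.
        JobID jobId = new JobID("201408290000", 42);

        // The string form is "job_<jtIdentifier>_<id>", with the numeric part
        // zero-padded to at least four digits.
        System.out.println(jobId); // job_201408290000_0042

        // JobID.forName() parses the string form back into an equal JobID.
        JobID parsed = JobID.forName("job_201408290000_0042");
        System.out.println(parsed.equals(jobId)); // true
    }
}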

Usage

From source file: org.apache.ignite.internal.processors.hadoop.v2.GridHadoopV2TaskContext.java

License: Apache License

/**
 * @param taskInfo Task info.
 * @param job Job.
 * @param jobId Job ID.
 * @param locNodeId Local node ID.
 * @param jobConfDataInput DataInput to read the JobConf from.
 */
public GridHadoopV2TaskContext(GridHadoopTaskInfo taskInfo, GridHadoopJob job, GridHadoopJobId jobId,
        @Nullable UUID locNodeId, DataInput jobConfDataInput) throws IgniteCheckedException {
    super(taskInfo, job);
    this.locNodeId = locNodeId;

    // Before creating the JobConf instance we should set the new context class loader.
    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());

    try {
        JobConf jobConf = new JobConf();

        try {
            jobConf.readFields(jobConfDataInput);
        } catch (IOException e) {
            throw new IgniteCheckedException(e);
        }

        // For map-reduce jobs prefer local writes.
        jobConf.setBooleanIfUnset(PARAM_IGFS_PREFER_LOCAL_WRITES, true);

        jobCtx = new JobContextImpl(jobConf, new JobID(jobId.globalId().toString(), jobId.localId()));

        useNewMapper = jobConf.getUseNewMapper();
        useNewReducer = jobConf.getUseNewReducer();
        useNewCombiner = jobConf.getCombinerClass() == null;
    } finally {
        Thread.currentThread().setContextClassLoader(null);
    }
}

From source file: org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job.java

License: Apache License

/**
 * @param jobId Job ID.
 * @param jobInfo Job info.
 * @param log Logger.
 */
public HadoopV2Job(HadoopJobId jobId, final HadoopDefaultJobInfo jobInfo, IgniteLogger log) {
    assert jobId != null;
    assert jobInfo != null;

    this.jobId = jobId;
    this.jobInfo = jobInfo;

    hadoopJobID = new JobID(jobId.globalId().toString(), jobId.localId());

    HadoopClassLoader clsLdr = (HadoopClassLoader) getClass().getClassLoader();

    // Before creating the JobConf instance we should set the new context class loader.
    Thread.currentThread().setContextClassLoader(clsLdr);

    jobConf = new JobConf();

    HadoopFileSystemsUtils.setupFileSystems(jobConf);

    Thread.currentThread().setContextClassLoader(null);

    for (Map.Entry<String, String> e : jobInfo.properties().entrySet())
        jobConf.set(e.getKey(), e.getValue());

    jobCtx = new JobContextImpl(jobConf, hadoopJobID);

    rsrcMgr = new HadoopV2JobResourceManager(jobId, jobCtx, log);
}

From source file: org.apache.ignite.internal.processors.hadoop.v2.HadoopV2TaskContext.java

License: Apache License

/**
 * @param taskInfo Task info.
 * @param job Job.
 * @param jobId Job ID.
 * @param locNodeId Local node ID.
 * @param jobConfDataInput DataInput to read the JobConf from.
 */
public HadoopV2TaskContext(HadoopTaskInfo taskInfo, HadoopJob job, HadoopJobId jobId, @Nullable UUID locNodeId,
        DataInput jobConfDataInput) throws IgniteCheckedException {
    super(taskInfo, job);
    this.locNodeId = locNodeId;

    // Before creating the JobConf instance we should set the new context class loader.
    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());

    try {
        JobConf jobConf = new JobConf();

        try {
            jobConf.readFields(jobConfDataInput);
        } catch (IOException e) {
            throw new IgniteCheckedException(e);
        }

        // For map-reduce jobs prefer local writes.
        jobConf.setBooleanIfUnset(PARAM_IGFS_PREFER_LOCAL_WRITES, true);

        jobCtx = new JobContextImpl(jobConf, new JobID(jobId.globalId().toString(), jobId.localId()));

        useNewMapper = jobConf.getUseNewMapper();
        useNewReducer = jobConf.getUseNewReducer();
        useNewCombiner = jobConf.getCombinerClass() == null;
    } finally {
        Thread.currentThread().setContextClassLoader(null);
    }
}

From source file: org.apache.pig.tools.pigstats.mapreduce.SimplePigStats.java

License: Apache License

MRJobStats addMRJobStatsForNative(NativeMapReduceOper mr) {
    MRJobStats js = mroJobMap.get(mr);

    // Build a synthetic JobID for the native MapReduce operator from its
    // job ID string and the operator's sequence number.
    js.setId(new JobID(mr.getJobId(), NativeMapReduceOper.getJobNumber()));
    js.setAlias(mr);

    return js;
}

From source file: org.apache.pig.tools.pigstats.SimplePigStats.java

License: Apache License

@SuppressWarnings("deprecation")
public JobStats addJobStatsForNative(NativeMapReduceOper mr) {
    JobStats js = mroJobMap.get(mr);
    js.setId(new JobID(mr.getJobId(), NativeMapReduceOper.getJobNumber()));
    js.setAlias(mr);

    return js;
}

From source file: org.apache.tez.mapreduce.hadoop.IDConverter.java

License: Apache License

public static JobID toMRJobId(TezDAGID dagId) {
    // Map a Tez DAG onto an MR JobID: the application's cluster timestamp
    // becomes the jtIdentifier and the application number becomes the job number.
    return new JobID(Long.toString(dagId.getApplicationId().getClusterTimestamp()),
            dagId.getApplicationId().getId());
}

From source file: org.gridgain.grid.kernal.processors.hadoop.v2.GridHadoopV2Job.java

License: Open Source License

/**
 * @param jobId Job ID.
 * @param jobInfo Job info.
 * @param log Logger.
 */
public GridHadoopV2Job(GridHadoopJobId jobId, final GridHadoopDefaultJobInfo jobInfo, GridLogger log) {
    assert jobId != null;
    assert jobInfo != null;

    this.jobId = jobId;
    this.jobInfo = jobInfo;

    hadoopJobID = new JobID(jobId.globalId().toString(), jobId.localId());

    GridHadoopClassLoader clsLdr = (GridHadoopClassLoader) getClass().getClassLoader();

    // Before creating the JobConf instance we should set the new context class loader.
    Thread.currentThread().setContextClassLoader(clsLdr);

    jobConf = new JobConf();

    GridHadoopFileSystemsUtils.setupFileSystems(jobConf);

    Thread.currentThread().setContextClassLoader(null);

    for (Map.Entry<String, String> e : jobInfo.properties().entrySet())
        jobConf.set(e.getKey(), e.getValue());

    jobCtx = new JobContextImpl(jobConf, hadoopJobID);

    rsrcMgr = new GridHadoopV2JobResourceManager(jobId, jobCtx, log);
}

From source file: org.gridgain.grid.kernal.processors.hadoop.v2.GridHadoopV2TaskContext.java

License: Open Source License

/**
 * @param taskInfo Task info.
 * @param job Job.
 * @param jobId Job ID.
 * @param locNodeId Local node ID.
 * @param jobConfDataInput DataInput to read the JobConf from.
 */
public GridHadoopV2TaskContext(GridHadoopTaskInfo taskInfo, GridHadoopJob job, GridHadoopJobId jobId,
        @Nullable UUID locNodeId, DataInput jobConfDataInput) throws GridException {
    super(taskInfo, job);
    this.locNodeId = locNodeId;

    // Before creating the JobConf instance we should set the new context class loader.
    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());

    try {
        JobConf jobConf = new JobConf();

        try {
            jobConf.readFields(jobConfDataInput);
        } catch (IOException e) {
            throw new GridException(e);
        }

        // For map-reduce jobs prefer local writes.
        jobConf.setBooleanIfUnset(PARAM_GGFS_PREFER_LOCAL_WRITES, true);

        jobCtx = new JobContextImpl(jobConf, new JobID(jobId.globalId().toString(), jobId.localId()));

        useNewMapper = jobConf.getUseNewMapper();
        useNewReducer = jobConf.getUseNewReducer();
        useNewCombiner = jobConf.getCombinerClass() == null;
    } finally {
        Thread.currentThread().setContextClassLoader(null);
    }
}