Example usage for org.apache.hadoop.conf Configuration getClass

Introduction

This page lists example usages of the org.apache.hadoop.conf.Configuration.getClass method.

Prototype

public Class<?> getClass(String name, Class<?> defaultValue) 

Document

Get the value of the name property as a Class.
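
As context for the examples below, here is a minimal, self-contained sketch of how this overload is typically combined with ReflectionUtils. The property name "my.handler.class" and the Handler/DefaultHandler types are hypothetical, used only for illustration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;

public class GetClassSketch {

    // Hypothetical plugin interface and its default implementation.
    public interface Handler {
        void handle();
    }

    public static class DefaultHandler implements Handler {
        @Override
        public void handle() {
            System.out.println("default handler");
        }
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Returns the class named by "my.handler.class", or DefaultHandler if the
        // property is unset; a RuntimeException is thrown if the named class cannot be loaded.
        Class<?> handlerClass = conf.getClass("my.handler.class", DefaultHandler.class);
        // ReflectionUtils.newInstance also passes the Configuration to Configurable instances.
        Handler handler = (Handler) ReflectionUtils.newInstance(handlerClass, conf);
        handler.handle();
    }
}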

Usage

From source file:com.alibaba.wasp.LocalWaspCluster.java

License:Apache License

/**
 * Constructor.
 *
 * @param conf
 *          Configuration to use. Post construction has the master's address.
 * @param noMasters
 *          Count of masters to start.
 * @param noFServers
 *          Count of fservers to start.
 * @param masterClass
 * @param fserverClass
 * @throws java.io.IOException
 */
@SuppressWarnings("unchecked")
public LocalWaspCluster(final Configuration conf, final int noMasters, final int noFServers,
        final Class<? extends FMaster> masterClass, final Class<? extends FServer> fserverClass)
        throws IOException {
    this.conf = conf;
    // Always have masters and fservers come up on port '0' so we don't
    // clash over default ports.
    conf.set(FConstants.MASTER_PORT, "0");
    conf.set(FConstants.FSERVER_PORT, "0");
    this.masterClass = (Class<? extends FMaster>) conf.getClass(FConstants.MASTER_IMPL, masterClass);
    // Start the FMasters.
    for (int i = 0; i < noMasters; i++) {
        addMaster(new Configuration(conf), i);
    }

    // Wait for master active.
    try {
        Thread.sleep(3000);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }

    // Start the FServers.
    this.fserverClass = (Class<? extends FServer>) conf.getClass(FConstants.FSERVER_IMPL, fserverClass);

    for (int i = 0; i < noFServers; i++) {
        addFServer(new Configuration(conf), i);
    }
}

From source file:com.bah.culvert.data.index.Index.java

License:Apache License

private static DatabaseAdapter getDatabaseAdapter(Configuration conf) {
    try {
        // get the database class
        Class<?> dbAdapterClass = conf.getClass(DATABASE_ADAPTER_CONF_KEY, null);
        Configuration databaseConf = getDatabaseConfiguration(conf);

        // create the adapter
        DatabaseAdapter db = DatabaseAdapter.class.cast(dbAdapterClass.newInstance());
        db.setConf(databaseConf);

        // and then make sure it is connected
        db.verify();
        return db;
    } catch (InstantiationException e) {
        throw new RuntimeException("Error instantiating a new DatabaseAdapter object.", e);
    } catch (IllegalAccessException e) {
        throw new RuntimeException("Error instantiating a new DatabaseAdapter object.", e);
    }
}
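
For completeness, a rough sketch of the setup side: before getDatabaseAdapter is invoked, the adapter class would be registered on the Configuration. MyDatabaseAdapter is a hypothetical DatabaseAdapter subclass; DATABASE_ADAPTER_CONF_KEY is assumed to be the same constant read above.

    Configuration conf = new Configuration();
    // Register the hypothetical adapter implementation under the key that
    // getDatabaseAdapter reads, constrained to the DatabaseAdapter interface.
    conf.setClass(DATABASE_ADAPTER_CONF_KEY, MyDatabaseAdapter.class, DatabaseAdapter.class);
    DatabaseAdapter db = getDatabaseAdapter(conf);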

From source file:com.cloudera.branchreduce.impl.thrift.Client.java

License:Open Source License

@Override
public int handle(YarnClientService clientService) throws Exception {
    clientService.startAndWait();
    if (!clientService.isRunning()) {
        LOG.error("BranchReduce job did not start, exiting...");
        return 1;
    }

    Lord.Client client = null;
    while (clientService.isRunning()) {
        ApplicationReport report = clientService.getApplicationReport();
        if (report.getYarnApplicationState() == YarnApplicationState.RUNNING) {
            String originalTrackingUrl = report.getOriginalTrackingUrl();
            if (originalTrackingUrl != null && originalTrackingUrl.contains(":")) {
                System.out.println("Original Tracking URL = " + originalTrackingUrl);
                String[] pieces = originalTrackingUrl.split(":");
                TSocket socket = new TSocket(pieces[0], Integer.valueOf(pieces[1]));
                TProtocol protocol = new TBinaryProtocol(socket);
                client = new Lord.Client(protocol);
                socket.open();
                break;
            }
        }
    }

    if (client == null) {
        LOG.error("Could not connect to thrift service to get status");
        return 1;
    }

    Configuration conf = clientService.getParameters().getConfiguration();
    Class<GlobalState> globalStatusClass = (Class<GlobalState>) conf
            .getClass(BranchReduceConfig.GLOBAL_STATE_CLASS, GlobalState.class);

    boolean finished = false;
    while (!clientService.isApplicationFinished()) {
        if (!finished) {
            GlobalStatusResponse resp = client.getGlobalStatus(new GlobalStatusRequest());
            this.value = Writables.fromByteBuffer(resp.bufferForGlobalState(), globalStatusClass);
            if (resp.isFinished()) {
                LOG.info("Job finished running.");
                finished = true;
            }
            LOG.info(value);
        }
        Thread.sleep(1000);
    }

    clientService.stopAndWait();
    ApplicationReport report = clientService.getFinalReport();
    if (report.getFinalApplicationStatus() == FinalApplicationStatus.SUCCEEDED) {
        System.out.println("Job complete.");
        System.out.println(value);
        return 0;
    } else {
        System.out.println("Final app state: " + report.getFinalApplicationStatus());
        System.out.println("Last global state:");
        System.out.println(value);
        return 1;
    }
}

From source file:com.cloudera.crunch.type.avro.Avros.java

License:Open Source License

public static ReflectDataFactory getReflectDataFactory(Configuration conf) {
    return (ReflectDataFactory) ReflectionUtils
            .newInstance(conf.getClass(REFLECT_DATA_FACTORY_CLASS, ReflectDataFactory.class), conf);
}

From source file:com.cloudera.crunch.type.avro.SafeAvroSerialization.java

License:Apache License

/** Returns the specified map output deserializer.  Defaults to the final
 * output deserializer if no map output schema was specified. */
public Deserializer<AvroWrapper<T>> getDeserializer(Class<AvroWrapper<T>> c) {
    boolean isKey = AvroKey.class.isAssignableFrom(c);
    Configuration conf = getConf();
    Schema schema = isKey ? Pair.getKeySchema(AvroJob.getMapOutputSchema(conf))
            : Pair.getValueSchema(AvroJob.getMapOutputSchema(conf));

    ReflectDataFactory factory = (ReflectDataFactory) ReflectionUtils
            .newInstance(conf.getClass("crunch.reflectdatafactory", ReflectDataFactory.class), conf);
    DatumReader<T> datumReader = factory.getReader(schema);
    return new AvroWrapperDeserializer(datumReader, isKey);
}

From source file:com.cloudera.hadoop.hdfs.nfs.nfs4.FileHandleStore.java

License:Apache License

public static FileHandleStore get(Configuration conf) {
    FileHandleStore fileHandleStore;
    fileHandleStore = ReflectionUtils
            .newInstance(conf.getClass(NFS_FILEHANDLE_STORE_CLASS, WritableFileFileHandleStore.class)
                    .asSubclass(FileHandleStore.class), conf);
    try {
        fileHandleStore.initialize();
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }
    return fileHandleStore;
}

From source file:com.cloudera.sqoop.mapreduce.MergeTextMapper.java

License:Apache License

@Override
protected void setup(Context c) throws IOException, InterruptedException {
    Configuration conf = c.getConfiguration();

    Class<? extends SqoopRecord> recordClass = (Class<? extends SqoopRecord>) conf
            .getClass(MergeJob.MERGE_SQOOP_RECORD_KEY, SqoopRecord.class);
    this.record = ReflectionUtils.newInstance(recordClass, conf);

    super.setup(c);
}

From source file:com.cloudera.sqoop.testutil.InjectableConnManager.java

License:Apache License

/**
 * Allow the user to inject custom mapper, input, and output formats
 * into the importTable() process.
 */
@Override
@SuppressWarnings("unchecked")
public void importTable(ImportJobContext context) throws IOException, ImportException {

    SqoopOptions options = context.getOptions();
    Configuration conf = options.getConf();

    Class<? extends Mapper> mapperClass = (Class<? extends Mapper>) conf.getClass(MAPPER_KEY, Mapper.class);
    Class<? extends InputFormat> ifClass = (Class<? extends InputFormat>) conf.getClass(INPUT_FORMAT_KEY,
            TextInputFormat.class);
    Class<? extends OutputFormat> ofClass = (Class<? extends OutputFormat>) conf.getClass(OUTPUT_FORMAT_KEY,
            TextOutputFormat.class);

    Class<? extends ImportJobBase> jobClass = (Class<? extends ImportJobBase>) conf.getClass(IMPORT_JOB_KEY,
            ImportJobBase.class);

    String tableName = context.getTableName();

    // Instantiate the user's chosen ImportJobBase instance.
    ImportJobBase importJob = ReflectionUtils.newInstance(jobClass, conf);

    // And configure the dependencies to inject
    importJob.setOptions(options);
    importJob.setMapperClass(mapperClass);
    importJob.setInputFormatClass(ifClass);
    importJob.setOutputFormatClass(ofClass);

    importJob.runImport(tableName, context.getJarFile(), getSplitColumn(options, tableName), conf);
}

From source file:com.codefollower.lealone.test.start.AnotherHRegionServerStarter.java

License:Apache License

public static void main(String[] args) throws Exception {
    if (args.length == 0)
        args = new String[] { "start" };

    Configuration conf = HBaseConfiguration.create();
    @SuppressWarnings("unchecked")
    Class<? extends HRegionServer> regionServerClass = (Class<? extends HRegionServer>) conf
            .getClass(HConstants.REGION_SERVER_IMPL, HRegionServer.class);
    int hbaseRegionServerPort = 60020;
    int hbaseRegionServerInfoPort = 60030;
    int lealoneRegionServerTcpPort = 9092;

    //        new Thread(new MyHRegionServerCommandLine(regionServerClass, conf, args, hbaseRegionServerPort,
    //                hbaseRegionServerInfoPort, lealoneRegionServerTcpPort)).start();

    hbaseRegionServerPort = 60021;
    hbaseRegionServerInfoPort = 60031;
    lealoneRegionServerTcpPort = 9093;

    new Thread(new MyHRegionServerCommandLine(regionServerClass, conf, args, hbaseRegionServerPort,
            hbaseRegionServerInfoPort, lealoneRegionServerTcpPort)).start();
}

From source file:com.codefollower.yourbase.test.start.AnotherHRegionServerStarter.java

License:Apache License

public static void main(String[] args) throws Exception {
    if (args.length == 0)
        args = new String[] { "start" };

    Configuration conf = HBaseConfiguration.create();
    @SuppressWarnings("unchecked")
    Class<? extends HRegionServer> regionServerClass = (Class<? extends HRegionServer>) conf
            .getClass(HConstants.REGION_SERVER_IMPL, HRegionServer.class);
    int hbaseRegionServerPort = 60020;
    int hbaseRegionServerInfoPort = 60030;
    int yourbaseRegionServerTcpPort = 9092;

    //        new Thread(new MyHRegionServerCommandLine(regionServerClass, conf, args, hbaseRegionServerPort,
    //                hbaseRegionServerInfoPort, yourbaseRegionServerTcpPort)).start();

    hbaseRegionServerPort = 60021;
    hbaseRegionServerInfoPort = 60031;
    yourbaseRegionServerTcpPort = 9093;

    new Thread(new MyHRegionServerCommandLine(regionServerClass, conf, args, hbaseRegionServerPort,
            hbaseRegionServerInfoPort, yourbaseRegionServerTcpPort)).start();
}