Example usage for org.apache.hadoop.conf Configuration addResource

List of usage examples for org.apache.hadoop.conf Configuration addResource

Introduction

On this page you can find example usage of org.apache.hadoop.conf.Configuration.addResource.

Prototype

public void addResource(InputStream in)

Document

Add a configuration resource.
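
The examples below all pass an InputStream for an XML resource to addResource and then read properties back with get. A minimal, self-contained sketch of that pattern (the resource name "/my-conf.xml" and the property key "example.key" are placeholder values, not taken from the examples):

import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;

public class AddResourceSketch {
    public static void main(String[] args) {
        // false: start empty, without loading the default core-default.xml / core-site.xml resources
        Configuration conf = new Configuration(false);

        // Add an XML configuration resource from the classpath.
        // "/my-conf.xml" is a placeholder resource name.
        InputStream is = AddResourceSketch.class.getResourceAsStream("/my-conf.xml");
        conf.addResource(is);

        // Properties from the added resource are now visible through the usual getters.
        // "example.key" is a placeholder property key.
        System.out.println(conf.get("example.key", "default-value"));
    }
}

Passing false to the Configuration constructor, as several of the tests below do, keeps the default resources out of the way so that only the explicitly added file contributes properties.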

Usage

From source file:com.datatorrent.apps.ApplicationTest.java

License:Apache License

private Configuration getConfig() {
    Configuration conf = new Configuration(false);
    conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties-test.xml"));
    conf.set("dt.operator.fileOutput.prop.filePath", outputDir);
    topic = conf.get("dt.operator.kafkaInput.prop.topics");

    return conf;
}

From source file:com.datatorrent.bench.HDFSBenchmarkTest.java

@Test
public void testApplication() throws Exception {
    try {
        LocalMode lma = LocalMode.newInstance();
        Configuration conf = new Configuration(false);
        conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml"));
        lma.prepareDAG(new HDFSBenchmark(), conf);
        LocalMode.Controller lc = lma.getController();
        lc.run(10000); // runs for 10 seconds and quits
    } catch (ConstraintViolationException e) {
        Assert.fail("constraint violations: " + e.getConstraintViolations());
    }
}

From source file:com.datatorrent.benchmark.CouchBaseBenchmarkTest.java

License:Open Source License

@Test
public void testCouchBaseAppOutput() throws Exception {
    Configuration conf = new Configuration();
    InputStream is = getClass().getResourceAsStream("/dt-site-couchbase.xml");
    conf.addResource(is);

    conf.get("dt.application.CouchBaseAppOutput.operator.couchbaseOutput.store.uriString");
    conf.get("dt.application.CouchBaseAppOutput.operator.couchbaseOutput.store.password");
    conf.get("dt.application.CouchBaseAppOutput.operator.couchbaseOutput.store.bucket");
    conf.get("dt.application.couchbaseAppOutput.operator.couchbaseOutput.store.max_tuples");
    conf.get("dt.application.couchbaseAppOutput.operator.couchbaseOutput.store.queueSize");
    conf.get("dt.application.couchbaseAppOutput.operator.couchbaseOutput.store.blocktime");
    conf.get("dt.application.couchbaseAppOutput.operator.couchbaseOutput.store.timeout");
    LocalMode lm = LocalMode.newInstance();

    try {
        lm.prepareDAG(new CouchBaseAppOutput(), conf);
        LocalMode.Controller lc = lm.getController();
        //lc.setHeartbeatMonitoringEnabled(false);
        lc.run(20000);
    } catch (Exception ex) {
        logger.info(ex.getCause());
    }
    is.close();
}

From source file:com.datatorrent.benchmark.CouchBaseBenchmarkTest.java

License:Open Source License

@Test
public void testCouchBaseAppInput() throws Exception {
    Configuration conf = new Configuration();
    InputStream is = getClass().getResourceAsStream("/dt-site-couchbase.xml");
    conf.addResource(is);
    conf.get("dt.application.CouchBaseAppInput.operator.couchbaseInput.store.uriString");
    conf.get("dt.application.CouchBaseAppInput.operator.couchbaseInput.store.blocktime");
    conf.get("dt.application.CouchBaseAppInput.operator.couchbaseInput.store.timeout");
    conf.get("dt.application.CouchBaseAppInput.operator.couchbaseInput.store.bucket");
    conf.get("dt.application.CouchBaseAppInput.operator.couchbaseInput.store.password");
    LocalMode lm = LocalMode.newInstance();

    try {
        lm.prepareDAG(new CouchBaseAppInput(), conf);
        LocalMode.Controller lc = lm.getController();
        lc.run(20000);
    } catch (Exception ex) {
        logger.info(ex.getCause());
    }
    is.close();
}

From source file:com.datatorrent.benchmark.hive.HiveInsertBenchmarkTest.java

License:Apache License

@Test
public void testMethod() throws SQLException {
    Configuration conf = new Configuration();
    InputStream inputStream = null;
    try {
        inputStream = new FileInputStream("src/site/conf/dt-site-hive.xml");
    } catch (FileNotFoundException ex) {
        LOG.debug("Exception caught", ex);
    }
    conf.addResource(inputStream);

    LOG.debug("conf properties are {}", conf
            .get("dt.application.HiveInsertBenchmarkingApp.operator.HiveOperator.store.connectionProperties"));
    LOG.debug("conf dburl is {}",
            conf.get("dt.application.HiveInsertBenchmarkingApp.operator.HiveOperator.store.dbUrl"));
    LOG.debug("conf filepath is {}",
            conf.get("dt.application.HiveInsertBenchmarkingApp.operator.HiveOperator.store.filepath"));
    LOG.debug("maximum length is {}",
            conf.get("dt.application.HiveInsertBenchmarkingApp.operator.RollingFsWriter.maxLength"));
    LOG.debug("tablename is {}",
            conf.get("dt.application.HiveInsertBenchmarkingApp.operator.HiveOperator.tablename"));
    LOG.debug("permission is {}",
            conf.get("dt.application.HiveInsertBenchmarkingApp.operator.RollingFsWriter.filePermission"));
    HiveInsertBenchmarkingApp app = new HiveInsertBenchmarkingApp();
    LocalMode lm = LocalMode.newInstance();
    try {
        lm.prepareDAG(app, conf);
        LocalMode.Controller lc = lm.getController();
        lc.run(120000);
    } catch (Exception ex) {
        DTThrowable.rethrow(ex);
    }

    IOUtils.closeQuietly(inputStream);
}

From source file:com.datatorrent.benchmark.hive.HiveMapBenchmarkTest.java

License:Apache License

@Test
public void testMethod() throws SQLException {
    Configuration conf = new Configuration();
    InputStream inputStream = null;
    try {
        inputStream = new FileInputStream("src/site/conf/dt-site-hive.xml");
    } catch (FileNotFoundException ex) {
        LOG.debug("Exception caught {}", ex);
    }
    conf.addResource(inputStream);
    LOG.debug("conf properties are {}", conf.get(
            "dt.application.HiveMapInsertBenchmarkingApp.operator.HiveOperator.store.connectionProperties"));
    LOG.debug("conf dburl is {}",
            conf.get("dt.application.HiveMapInsertBenchmarkingApp.operator.HiveOperator.store.dbUrl"));
    LOG.debug("conf filepath is {}",
            conf.get("dt.application.HiveMapInsertBenchmarkingApp.operator.HiveOperator.store.filepath"));
    LOG.debug("maximum length is {}",
            conf.get("dt.application.HiveMapInsertBenchmarkingApp.operator.RollingFsMapWriter.maxLength"));
    LOG.debug("tablename is {}",
            conf.get("dt.application.HiveMapInsertBenchmarkingApp.operator.HiveOperator.tablename"));
    LOG.debug("permission is {}",
            conf.get("dt.application.HiveMapInsertBenchmarkingApp.operator.RollingFsMapWriter.filePermission"));
    LOG.debug("delimiter is {}",
            conf.get("dt.application.HiveMapInsertBenchmarkingApp.operator.RollingFsMapWriter.delimiter"));

    HiveMapInsertBenchmarkingApp app = new HiveMapInsertBenchmarkingApp();
    LocalMode lm = LocalMode.newInstance();
    try {
        lm.prepareDAG(app, conf);
        LocalMode.Controller lc = lm.getController();
        lc.run(30000);
    } catch (Exception ex) {
        DTThrowable.rethrow(ex);
    }

    IOUtils.closeQuietly(inputStream);
}

From source file:com.datatorrent.benchmark.kafka.KafkaInputBenchmarkTest.java

License:Open Source License

@Test
public void testBenchmark() {
    Configuration conf = new Configuration();
    InputStream inputStream = getClass().getResourceAsStream("/dt-site-kafka.xml");
    conf.addResource(inputStream);

    LocalMode lma = LocalMode.newInstance();

    try {
        lma.prepareDAG(new KafkaInputBenchmark(), conf);
        LocalMode.Controller lc = lma.getController();
        lc.run(30000);
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }
}

From source file:com.datatorrent.benchmark.memsql.MemsqlInputBenchmarkTest.java

License:Open Source License

@Test
public void testMethod() throws SQLException, IOException {
    Configuration conf = new Configuration();
    InputStream inputStream = new FileInputStream("src/site/conf/dt-site-memsql.xml");
    conf.addResource(inputStream);

    MemsqlStore memsqlStore = new MemsqlStore();
    memsqlStore.setDbUrl(conf.get("dt.rootDbUrl"));
    memsqlStore.setConnectionProperties(conf.get(
            "dt.application.MemsqlInputBenchmark.operator.memsqlInputOperator.store.connectionProperties"));

    AbstractMemsqlOutputOperatorTest.memsqlInitializeDatabase(memsqlStore);

    MemsqlOutputOperator outputOperator = new MemsqlOutputOperator();
    outputOperator.getStore()
            .setDbUrl(conf.get("dt.application.MemsqlInputBenchmark.operator.memsqlInputOperator.store.dbUrl"));
    outputOperator.getStore().setConnectionProperties(conf.get(
            "dt.application.MemsqlInputBenchmark.operator.memsqlInputOperator.store.connectionProperties"));
    outputOperator.setBatchSize(BATCH_SIZE);

    Random random = new Random();
    com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributeMap = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
    attributeMap.put(OperatorContext.PROCESSING_MODE, ProcessingMode.AT_LEAST_ONCE);
    attributeMap.put(OperatorContext.ACTIVATION_WINDOW_ID, -1L);
    attributeMap.put(DAG.APPLICATION_ID, APP_ID);
    OperatorContextTestHelper.TestIdOperatorContext context = new OperatorContextTestHelper.TestIdOperatorContext(
            OPERATOR_ID, attributeMap);

    long seedSize = conf.getLong("dt.seedSize", SEED_SIZE);

    outputOperator.setup(context);
    outputOperator.beginWindow(0);

    for (long valueCounter = 0; valueCounter < seedSize; valueCounter++) {
        outputOperator.input.put(random.nextInt());
    }

    outputOperator.endWindow();
    outputOperator.teardown();

    MemsqlInputBenchmark app = new MemsqlInputBenchmark();
    LocalMode lm = LocalMode.newInstance();

    try {
        lm.prepareDAG(app, conf);
        LocalMode.Controller lc = lm.getController();
        lc.run(20000);
    } catch (Exception ex) {
        DTThrowable.rethrow(ex);
    }

    IOUtils.closeQuietly(inputStream);
}

From source file:com.datatorrent.benchmark.memsql.MemsqlOutputBenchmarkTest.java

License:Open Source License

@Test
public void testMethod() throws SQLException {
    Configuration conf = new Configuration();
    InputStream inputStream = getClass().getResourceAsStream("/dt-site-memsql.xml");
    conf.addResource(inputStream);

    MemsqlStore memsqlStore = new MemsqlStore();
    memsqlStore.setDbUrl(conf.get("dt.rootDbUrl"));
    memsqlStore.setConnectionProperties(conf.get(
            "dt.application.MemsqlOutputBenchmark.operator.memsqlOutputOperator.store.connectionProperties"));

    AbstractMemsqlOutputOperatorTest.memsqlInitializeDatabase(memsqlStore);

    MemsqlOutputBenchmark app = new MemsqlOutputBenchmark();
    LocalMode lm = LocalMode.newInstance();

    try {
        lm.prepareDAG(app, conf);
        LocalMode.Controller lc = lm.getController();
        lc.run(20000);
    } catch (Exception ex) {
        DTThrowable.rethrow(ex);
    }

    IOUtils.closeQuietly(inputStream);
}

From source file:com.datatorrent.benchmark.testbench.EventClassifierAppTest.java

License:Open Source License

@Test
public void testEventClassifierApp() throws Exception {
    Logger logger = LoggerFactory.getLogger(EventClassifierAppTest.class);
    Configuration conf = new Configuration();
    LocalMode lm = LocalMode.newInstance();
    InputStream is = getClass().getResourceAsStream("/dt-site-testbench.xml");
    conf.addResource(is);
    conf.get("dt.application.EventClassifierApp.operator.hmapOper.keys");
    conf.get("dt.application.EventClassifierApp.operator.hmapOper.numKeys");
    try {
        lm.prepareDAG(new EventClassifierApp(), conf);
        LocalMode.Controller lc = lm.getController();
        lc.run(20000);
    } catch (Exception ex) {
        logger.info(ex.getMessage());
    }
    is.close();
}