Example usage for org.apache.hadoop.conf Configuration setBoolean

Introduction

This page collects usage examples for the setBoolean method of org.apache.hadoop.conf.Configuration, drawn from open-source projects.

Prototype

public void setBoolean(String name, boolean value) 

Document

Set the value of the name property to a boolean.
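
A minimal round-trip, shown as a sketch (the property names here are illustrative): setBoolean stores the value under the given name, and getBoolean reads it back, returning the supplied default when the property is absent.

import org.apache.hadoop.conf.Configuration;

public class SetBooleanExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setBoolean("example.feature.enabled", true);

        // Reads back the value stored above.
        boolean enabled = conf.getBoolean("example.feature.enabled", false); // true

        // This property was never set, so the default applies.
        boolean missing = conf.getBoolean("example.other.flag", true); // true

        System.out.println(enabled + " " + missing);
    }
}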

Usage

From source file: com.mongodb.hadoop.util.MapredMongoConfigUtil.java

License: Apache License

public static void setBSONWriteSplits(final Configuration conf, final boolean val) {
    conf.setBoolean(BSON_WRITE_SPLITS, val);
}
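
The matching read side would go through getBoolean. A sketch only; the isBSONWriteSplits name and the default value are assumptions, not necessarily the connector's actual getter:

public static boolean isBSONWriteSplits(final Configuration conf) {
    // Default chosen for illustration; the connector's real default may differ.
    return conf.getBoolean(BSON_WRITE_SPLITS, true);
}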

From source file: com.mongodb.hadoop.util.MapredMongoConfigUtil.java

License: Apache License

public static void setBSONOutputBuildSplits(final Configuration conf, final boolean val) {
    conf.setBoolean(BSON_OUTPUT_BUILDSPLITS, val);
}

From source file: com.mongodb.hadoop.util.MapredMongoConfigUtil.java

License: Apache License

public static Configuration buildConfiguration(final Map<String, Object> data) {
    Configuration newConf = new Configuration();
    for (Map.Entry<String, Object> entry : data.entrySet()) {
        String key = entry.getKey();
        Object val = entry.getValue();
        if (val instanceof String) {
            newConf.set(key, (String) val);
        } else if (val instanceof Boolean) {
            newConf.setBoolean(key, (Boolean) val);
        } else if (val instanceof Integer) {
            newConf.setInt(key, (Integer) val);
        } else if (val instanceof Float) {
            newConf.setFloat(key, (Float) val);
        } else if (val instanceof DBObject) {
            setDBObject(newConf, key, (DBObject) val);
        } else {
            throw new RuntimeException("can't convert " + val.getClass() + " into any type for Configuration");
        }
    }
    return newConf;
}
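
A hypothetical call site for buildConfiguration (the keys are illustrative, not the connector's real constants). Each value's runtime type selects the matching setter, with Boolean values routed through setBoolean:

Map<String, Object> data = new HashMap<String, Object>();
data.put("example.input.uri", "mongodb://localhost:27017/test.in"); // String -> set
data.put("example.split.create", Boolean.TRUE);                     // Boolean -> setBoolean
data.put("example.split.size", 8);                                  // Integer -> setInt
Configuration conf = MapredMongoConfigUtil.buildConfiguration(data);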

From source file: com.moz.fiji.hadoop.configurator.TestHadoopConfigurator.java

License: Apache License

@Test
public void testConfigure() {
    Configuration conf = new Configuration();
    conf.setBoolean("my.boolean.value", true);
    conf.setFloat("my.float.value", 3.1f);
    conf.setFloat("my.double.value", 1.9f);
    conf.setInt("my.int.value", 12);
    conf.set("my.string.value", "bar");
    conf.setStrings("my.string.collection", "apple", "banana");
    conf.setStrings("my.string.array", "red", "green", "blue");
    conf.setBoolean("your.boolean.value", true);
    conf.setFloat("your.float.value", 1.0f);
    conf.setFloat("your.double.value", 2.0f);
    conf.setInt("your.int.value", 1);
    conf.setLong("your.long.value", 2L);
    conf.set("your.string.value", "asdf");

    MyConfiguredClass instance = ReflectionUtils.newInstance(MyConfiguredClass.class, conf);
    assertEquals(true, instance.getBooleanValue());
    assertEquals(3.1f, instance.getFloatValue(), 1e-6f);
    assertEquals(1.9, instance.getDoubleValue(), 1e-6);
    assertEquals(12, instance.getIntValue());
    assertEquals(456L, instance.getLongValue());
    assertEquals("bar", instance.getStringValue());
    assertEquals(true, instance.getYourBoolean());
    assertEquals(1.0f, instance.getYourFloat(), 1e-6f);
    assertEquals(2.0, instance.getYourDouble(), 1e-6);
    assertEquals(1, instance.getYourInt());
    assertEquals(2L, instance.getYourLong());
}
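
ReflectionUtils.newInstance hands the Configuration to any Configurable instance via setConf, which is where values like these are read back. A hand-rolled equivalent, as a sketch (the class and field below are illustrative, not the test's actual annotation-driven MyConfiguredClass):

public class MyConfigured extends Configured {
    private boolean mBooleanValue;

    @Override
    public void setConf(Configuration conf) {
        super.setConf(conf);
        if (conf != null) {
            // Read back what the test stored with conf.setBoolean("my.boolean.value", true).
            mBooleanValue = conf.getBoolean("my.boolean.value", false);
        }
    }

    public boolean getBooleanValue() {
        return mBooleanValue;
    }
}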

From source file: com.moz.fiji.mapreduce.output.DirectFijiTableMapReduceJobOutput.java

License: Apache License

/** {@inheritDoc} */
@Override
public void configure(Job job) throws IOException {
    // sets Hadoop output format, Fiji output table and # of reducers:
    super.configure(job);

    final Configuration conf = job.getConfiguration();

    // Fiji table context:
    conf.setClass(FijiConfKeys.FIJI_TABLE_CONTEXT_CLASS, DirectFijiTableWriterContext.class,
            FijiTableContext.class);

    // Since there's no "commit" operation for an entire map task writing to a
    // Fiji table, do not use speculative execution when writing directly to a Fiji table.
    conf.setBoolean("mapred.map.tasks.speculative.execution", false);
}
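
mapred.map.tasks.speculative.execution is the MR1-era property name; on Hadoop 2+ the same switch is spelled mapreduce.map.speculative. A defensive variant that sets both keys, assuming either may be consulted depending on the cluster version:

conf.setBoolean("mapred.map.tasks.speculative.execution", false); // MR1 name
conf.setBoolean("mapreduce.map.speculative", false);              // Hadoop 2+ name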

From source file: com.netflix.bdp.s3mper.listing.BigTableGcsConsistentListingAspectTest.java

License: Apache License

@BeforeClass
public static void setUpClass() throws Exception {
    final String runId = Integer.toHexString(new Random().nextInt());

    for (final String envVar : asList(GOOGLE_APPLICATION_CREDENTIALS)) {
        if (isNullOrEmpty(System.getenv(envVar))) {
            fail("Required environment variable " + envVar + " is not defined");
        }
    }

    conf = new Configuration();

    conf.set("fs.gs.project.id", "steel-ridge-91615");
    conf.set("fs.gs.impl", "com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem");
    conf.set("fs.AbstractFileSystem.gs.impl", "com.google.cloud.hadoop.fs.gcs.GoogleHadoopFS");
    conf.set("google.cloud.auth.service.account.json.keyfile", System.getenv(GOOGLE_APPLICATION_CREDENTIALS));

    conf.setBoolean("s3mper.disable", false);
    conf.setBoolean("s3mper.failOnError", true);
    conf.setBoolean("s3mper.metastore.deleteMarker.enabled", true);
    conf.setBoolean("s3mper.reporting.disabled", true);
    conf.setLong("s3mper.listing.recheck.count", 10);
    conf.setLong("s3mper.listing.recheck.period", 1000);
    conf.setFloat("s3mper.listing.threshold", 1);
    conf.set("s3mper.metastore.name", "ConsistentListingMetastoreTest-" + runId);
    conf.setBoolean("s3mper.metastore.create", true);

    testBucket = System.getProperty("fs.test.bucket", "gs://rohan-test/");
    testPath = new Path(testBucket, System.getProperty("fs.test.path", "/test-" + runId));

    markerFs = FileSystem.get(testPath.toUri(), conf);

    Configuration deleteConf = new Configuration(conf);
    deleteConf.setBoolean("s3mper.metastore.deleteMarker.enabled", false);
    deleteFs = FileSystem.get(testPath.toUri(), deleteConf);

    meta = new BigTableMetastore();
    meta.initalize(testPath.toUri(), conf);

    Configuration janitorConf = new Configuration(conf);
    janitorConf.setBoolean("s3mper.metastore.deleteMarker.enabled", false);
}

From source file: com.netflix.bdp.s3mper.listing.ConsistentListingAspectTest.java

License: Apache License

@BeforeClass
public static void setUpClass() throws Exception {
    final String runId = Integer.toHexString(new Random().nextInt());

    for (final String envVar : asList(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)) {
        if (isNullOrEmpty(System.getenv(envVar))) {
            fail("Required environment variable " + envVar + " is not defined");
        }
    }

    conf = new Configuration();

    conf.set("fs.gs.awsAccessKeyId", System.getenv(AWS_ACCESS_KEY_ID));
    conf.set("fs.gs.awsSecretAccessKey", System.getenv(AWS_SECRET_ACCESS_KEY));

    conf.set("fs.gs.project.id", "steel-ridge-91615");
    conf.set("fs.gs.impl", "com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem");
    conf.set("fs.AbstractFileSystem.gs.impl", "com.google.cloud.hadoop.fs.gcs.GoogleHadoopFS");
    conf.set("google.cloud.auth.service.account.json.keyfile", System.getenv(GOOGLE_APPLICATION_CREDENTIALS));

    conf.setBoolean("s3mper.disable", false);
    conf.setBoolean("s3mper.failOnError", true);
    conf.setBoolean("s3mper.metastore.deleteMarker.enabled", true);
    conf.setBoolean("s3mper.reporting.disabled", true);
    conf.setLong("s3mper.listing.recheck.count", 10);
    conf.setLong("s3mper.listing.recheck.period", 1000);
    conf.setFloat("s3mper.listing.threshold", 1);
    conf.set("s3mper.metastore.name", "ConsistentListingMetastoreTest-" + runId);
    conf.set("s3mper.metastore.impl", "com.netflix.bdp.s3mper.metastore.impl.DynamoDBMetastore");
    conf.setBoolean("s3mper.metastore.create", true);

    testPath = new Path(System.getProperty("fs.test.path", "gs://rohan-test/test-" + runId));

    markerFs = FileSystem.get(testPath.toUri(), conf);

    Configuration deleteConf = new Configuration(conf);
    deleteConf.setBoolean("s3mper.metastore.deleteMarker.enabled", false);
    deleteFs = FileSystem.get(testPath.toUri(), deleteConf);

    meta = new DynamoDBMetastore();
    meta.initalize(testPath.toUri(), conf);

    while (true) {
        try {
            meta.list(asList(testPath));
            break;
        } catch (Exception e) {
            if ((e instanceof ResourceNotFoundException)
                    || (e.getCause() instanceof ResourceNotFoundException)) {
                Thread.sleep(1000);
            } else {
                throw e;
            }
        }
    }

    alert = new CloudWatchAlertDispatcher();
    alert.init(testPath.toUri(), conf);

    Configuration janitorConf = new Configuration(conf);
    janitorConf.setBoolean("s3mper.metastore.deleteMarker.enabled", false);

    janitor = new MetastoreJanitor();
    janitor.initalize(testPath.toUri(), janitorConf);
}

From source file: com.netflix.bdp.s3mper.listing.DarkloadTest.java

License: Apache License

@Test(expected = FileNotFoundException.class)
public void testDarkloadingDisabled() throws Throwable {
    Configuration noDarkloadConf = new Configuration(conf);
    noDarkloadConf.setBoolean("s3mper.failOnError", true);
    noDarkloadConf.setBoolean("s3mper.darkload", false);
    FileSystem noDarkloadFs = FileSystem.get(testPath.toUri(), noDarkloadConf);
    Path path = new Path(testPath + "/test");
    meta.add(path, false);
    noDarkloadFs.listStatus(path.getParent());
}

From source file: com.ngdata.hbaseindexer.mr.HBaseMapReduceIndexerTool.java

License: Apache License

public int run(HBaseIndexingOptions hbaseIndexingOpts, JobProcessCallback callback) throws Exception {

    if (hbaseIndexingOpts.isDryRun) {
        return new IndexerDryRun(hbaseIndexingOpts, getConf(), System.out).run();
    }

    long programStartTime = System.currentTimeMillis();
    Configuration conf = getConf();

    IndexingSpecification indexingSpec = hbaseIndexingOpts.getIndexingSpecification();

    conf.set(HBaseIndexerMapper.INDEX_COMPONENT_FACTORY_KEY, indexingSpec.getIndexerComponentFactory());
    conf.set(HBaseIndexerMapper.INDEX_CONFIGURATION_CONF_KEY,
            new String(indexingSpec.getConfiguration(), Charsets.UTF_8));
    conf.set(HBaseIndexerMapper.INDEX_NAME_CONF_KEY, indexingSpec.getIndexerName());
    conf.set(HBaseIndexerMapper.TABLE_NAME_CONF_KEY, indexingSpec.getTableName());
    HBaseIndexerMapper.configureIndexConnectionParams(conf, indexingSpec.getIndexConnectionParams());

    IndexerComponentFactory factory = IndexerComponentFactoryUtil.getComponentFactory(
            indexingSpec.getIndexerComponentFactory(),
            new ByteArrayInputStream(indexingSpec.getConfiguration()), indexingSpec.getIndexConnectionParams());
    IndexerConf indexerConf = factory.createIndexerConf();

    Map<String, String> params = indexerConf.getGlobalParams();
    String morphlineFile = params.get(MorphlineResultToSolrMapper.MORPHLINE_FILE_PARAM);
    if (hbaseIndexingOpts.morphlineFile != null) {
        morphlineFile = hbaseIndexingOpts.morphlineFile.getPath();
    }
    if (morphlineFile != null) {
        conf.set(MorphlineResultToSolrMapper.MORPHLINE_FILE_PARAM, new File(morphlineFile).getName());
        ForkedMapReduceIndexerTool.addDistributedCacheFile(new File(morphlineFile), conf);
    }

    String morphlineId = params.get(MorphlineResultToSolrMapper.MORPHLINE_ID_PARAM);
    if (hbaseIndexingOpts.morphlineId != null) {
        morphlineId = hbaseIndexingOpts.morphlineId;
    }
    if (morphlineId != null) {
        conf.set(MorphlineResultToSolrMapper.MORPHLINE_ID_PARAM, morphlineId);
    }

    conf.setBoolean(HBaseIndexerMapper.INDEX_DIRECT_WRITE_CONF_KEY, hbaseIndexingOpts.isDirectWrite());

    if (hbaseIndexingOpts.fairSchedulerPool != null) {
        conf.set("mapred.fairscheduler.pool", hbaseIndexingOpts.fairSchedulerPool);
    }

    // switch off a false warning about allegedly not implementing Tool
    // also see http://hadoop.6.n7.nabble.com/GenericOptionsParser-warning-td8103.html
    // also see https://issues.apache.org/jira/browse/HADOOP-8183
    getConf().setBoolean("mapred.used.genericoptionsparser", true);

    if (hbaseIndexingOpts.log4jConfigFile != null) {
        Utils.setLogConfigFile(hbaseIndexingOpts.log4jConfigFile, getConf());
        ForkedMapReduceIndexerTool.addDistributedCacheFile(hbaseIndexingOpts.log4jConfigFile, conf);
    }

    Job job = Job.getInstance(getConf());
    job.setJobName(getClass().getSimpleName() + "/" + HBaseIndexerMapper.class.getSimpleName());
    job.setJarByClass(HBaseIndexerMapper.class);
    //        job.setUserClassesTakesPrecedence(true);

    TableMapReduceUtil.initTableMapperJob(hbaseIndexingOpts.getScans(), HBaseIndexerMapper.class, Text.class,
            SolrInputDocumentWritable.class, job);

    // explicitly set hbase configuration on the job because the TableMapReduceUtil overwrites it with the hbase defaults
    // (see HBASE-4297 which is not really fixed in hbase 0.94.6 on all code paths)
    HBaseConfiguration.merge(job.getConfiguration(), getConf());

    int mappers = new JobClient(job.getConfiguration()).getClusterStatus().getMaxMapTasks(); // MR1
    //mappers = job.getCluster().getClusterStatus().getMapSlotCapacity(); // Yarn only
    LOG.info("Cluster reports {} mapper slots", mappers);

    LOG.info("Using these parameters: " + "reducers: {}, shards: {}, fanout: {}, maxSegments: {}",
            new Object[] { hbaseIndexingOpts.reducers, hbaseIndexingOpts.shards, hbaseIndexingOpts.fanout,
                    hbaseIndexingOpts.maxSegments });

    if (hbaseIndexingOpts.isDirectWrite()) {
        CloudSolrServer solrServer = new CloudSolrServer(hbaseIndexingOpts.zkHost);
        solrServer.setDefaultCollection(hbaseIndexingOpts.collection);

        if (hbaseIndexingOpts.clearIndex) {
            clearSolr(indexingSpec.getIndexConnectionParams());
        }

        // Run a mapper-only MR job that sends index documents directly to a live Solr instance.
        job.setOutputFormatClass(NullOutputFormat.class);
        job.setNumReduceTasks(0);
        job.submit();
        callback.jobStarted(job.getJobID().toString(), job.getTrackingURL());
        if (!ForkedMapReduceIndexerTool.waitForCompletion(job, hbaseIndexingOpts.isVerbose)) {
            return -1; // job failed
        }
        commitSolr(indexingSpec.getIndexConnectionParams());
        ForkedMapReduceIndexerTool.goodbye(job, programStartTime);
        return 0;
    } else {
        FileSystem fileSystem = FileSystem.get(getConf());

        if (fileSystem.exists(hbaseIndexingOpts.outputDir)) {
            if (hbaseIndexingOpts.overwriteOutputDir) {
                LOG.info("Removing existing output directory {}", hbaseIndexingOpts.outputDir);
                if (!fileSystem.delete(hbaseIndexingOpts.outputDir, true)) {
                    LOG.error("Deleting output directory '{}' failed", hbaseIndexingOpts.outputDir);
                    return -1;
                }
            } else {
                LOG.error("Output directory '{}' already exists. Run with --overwrite-output-dir to "
                        + "overwrite it, or remove it manually", hbaseIndexingOpts.outputDir);
                return -1;
            }
        }

        int exitCode = ForkedMapReduceIndexerTool.runIndexingPipeline(job, callback, getConf(),
                hbaseIndexingOpts.asOptions(), programStartTime, fileSystem, null, -1, // File-based parameters
                -1, // num mappers, only of importance for file-based indexing
                hbaseIndexingOpts.reducers);

        if (hbaseIndexingOpts.isGeneratedOutputDir()) {
            LOG.info("Deleting generated output directory " + hbaseIndexingOpts.outputDir);
            fileSystem.delete(hbaseIndexingOpts.outputDir, true);
        }
        return exitCode;
    }
}

From source file: com.ngdata.sep.demo.LoggingConsumer.java

License: Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
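    // SEP receives change events through HBase replication, so replication is switched on here.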
    conf.setBoolean("hbase.replication", true);

    ZooKeeperItf zk = ZkUtil.connect("localhost", 20000);
    SepModel sepModel = new SepModelImpl(zk, conf);

    final String subscriptionName = "logger";

    if (!sepModel.hasSubscription(subscriptionName)) {
        sepModel.addSubscriptionSilent(subscriptionName);
    }

    PayloadExtractor payloadExtractor = new BasePayloadExtractor(Bytes.toBytes("sep-user-demo"),
            Bytes.toBytes("info"), Bytes.toBytes("payload"));

    SepConsumer sepConsumer = new SepConsumer(subscriptionName, 0, new EventLogger(), 1, "localhost", zk, conf,
            payloadExtractor);

    sepConsumer.start();
    System.out.println("Started");

    while (true) {
        Thread.sleep(Long.MAX_VALUE);
    }
}