Example usage for org.apache.hadoop.conf Configuration setBoolean

List of usage examples for org.apache.hadoop.conf Configuration setBoolean

Introduction

This page lists example usages of the org.apache.hadoop.conf Configuration setBoolean method.

Prototype

public void setBoolean(String name, boolean value) 

Document

Set the value of the name property to a boolean.
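A minimal sketch of setting a boolean property and reading it back (the property name "example.feature.enabled" is illustrative only, not a real Hadoop key):

import org.apache.hadoop.conf.Configuration;

public class SetBooleanExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // store the boolean under a (hypothetical) property name;
        // Configuration keeps it internally as the string "true"
        conf.setBoolean("example.feature.enabled", true);

        // read it back, supplying a default for the unset case
        boolean enabled = conf.getBoolean("example.feature.enabled", false);
        System.out.println(enabled); // prints: true
    }
}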

Usage

From source file: io.prestosql.plugin.hive.s3.PrestoS3ConfigurationUpdater.java

License: Apache License

@Override
public void updateConfiguration(Configuration config) {
    // re-map filesystem schemes to match Amazon Elastic MapReduce
    config.set("fs.s3.impl", PrestoS3FileSystem.class.getName());
    config.set("fs.s3a.impl", PrestoS3FileSystem.class.getName());
    config.set("fs.s3n.impl", PrestoS3FileSystem.class.getName());

    if (awsAccessKey != null) {
        config.set(S3_ACCESS_KEY, awsAccessKey);
    }
    if (awsSecretKey != null) {
        config.set(S3_SECRET_KEY, awsSecretKey);
    }
    if (endpoint != null) {
        config.set(S3_ENDPOINT, endpoint);
    }
    if (signerType != null) {
        config.set(S3_SIGNER_TYPE, signerType.name());
    }
    config.setBoolean(S3_PATH_STYLE_ACCESS, pathStyleAccess);
    config.setBoolean(S3_USE_INSTANCE_CREDENTIALS, useInstanceCredentials);
    config.setBoolean(S3_SSL_ENABLED, sslEnabled);
    config.setBoolean(S3_SSE_ENABLED, sseEnabled);
    config.set(S3_SSE_TYPE, sseType.name());
    if (encryptionMaterialsProvider != null) {
        config.set(S3_ENCRYPTION_MATERIALS_PROVIDER, encryptionMaterialsProvider);
    }
    if (kmsKeyId != null) {
        config.set(S3_KMS_KEY_ID, kmsKeyId);
    }
    if (sseKmsKeyId != null) {
        config.set(S3_SSE_KMS_KEY_ID, sseKmsKeyId);
    }
    config.setInt(S3_MAX_CLIENT_RETRIES, maxClientRetries);
    config.setInt(S3_MAX_ERROR_RETRIES, maxErrorRetries);
    config.set(S3_MAX_BACKOFF_TIME, maxBackoffTime.toString());
    config.set(S3_MAX_RETRY_TIME, maxRetryTime.toString());
    config.set(S3_CONNECT_TIMEOUT, connectTimeout.toString());
    config.set(S3_SOCKET_TIMEOUT, socketTimeout.toString());
    config.set(S3_STAGING_DIRECTORY, stagingDirectory.toString());
    config.setInt(S3_MAX_CONNECTIONS, maxConnections);
    config.setLong(S3_MULTIPART_MIN_FILE_SIZE, multipartMinFileSize.toBytes());
    config.setLong(S3_MULTIPART_MIN_PART_SIZE, multipartMinPartSize.toBytes());
    config.setBoolean(S3_PIN_CLIENT_TO_CURRENT_REGION, pinClientToCurrentRegion);
    config.set(S3_USER_AGENT_PREFIX, userAgentPrefix);
    config.set(S3_ACL_TYPE, aclType.name());
}

From source file: io.svectors.hbase.cdc.BaseTest.java

License: Apache License

@Before
public void setUp() throws Exception {
    final Configuration hbaseConf = HBaseConfiguration.create();
    hbaseConf.setInt("replication.stats.thread.period.seconds", 5);
    hbaseConf.setLong("replication.sleep.before.failover", 2000);
    hbaseConf.setInt("replication.source.maxretriesmultiplier", 10);
    hbaseConf.setBoolean(HConstants.REPLICATION_ENABLE_KEY, true);

    // add Kafka properties; each property name is prefixed with "kafka"
    addKafkaProperties(hbaseConf);

    utility = new HBaseTestingUtility(hbaseConf);
    utility.startMiniCluster();
    numRegionServers = utility.getHBaseCluster().getRegionServerThreads().size();

    // setup kafka
    kafkaServer = new KafkaServer(utility.getZkCluster().getClientPort(), 9092);

}

From source file: it.crs4.pydoop.mapreduce.pipes.CommandLineParser.java

License: Apache License

CommandLine parse(Configuration conf, String[] args) throws IOException, ParseException {
    Parser parser = new BasicParser();
    conf.setBoolean("mapreduce.client.genericoptionsparser.used", true);
    GenericOptionsParser genericParser = new GenericOptionsParser(conf, args);
    return parser.parse(options, genericParser.getRemainingArgs());
}

From source file: it.crs4.pydoop.mapreduce.pipes.CommandLineParser.java

License: Apache License

/**
 * Set whether the job is using a Java RecordReader.
 * @param conf the configuration to modify
 * @param value the new value
 */
public static void setIsJavaRecordReader(Configuration conf, boolean value) {
    conf.setBoolean(Submitter.IS_JAVA_RR, value);
}

From source file: it.crs4.pydoop.mapreduce.pipes.CommandLineParser.java

License: Apache License

/**
 * Set whether the Mapper is written in Java.
 * @param conf the configuration to modify
 * @param value the new value
 */
public static void setIsJavaMapper(Configuration conf, boolean value) {
    conf.setBoolean(Submitter.IS_JAVA_MAP, value);
}

From source file: it.crs4.pydoop.mapreduce.pipes.CommandLineParser.java

License: Apache License

/**
 * Set whether the Reducer is written in Java.
 * @param conf the configuration to modify
 * @param value the new value
 */
public static void setIsJavaReducer(Configuration conf, boolean value) {
    conf.setBoolean(Submitter.IS_JAVA_REDUCE, value);
}

From source file: it.crs4.pydoop.mapreduce.pipes.CommandLineParser.java

License: Apache License

/**
 * Set whether the job will use a Java RecordWriter.
 * @param conf the configuration to modify
 * @param value the new value to set
 */
public static void setIsJavaRecordWriter(Configuration conf, boolean value) {
    conf.setBoolean(Submitter.IS_JAVA_RW, value);
}

From source file: it.crs4.pydoop.mapreduce.pipes.CommandLineParser.java

License: Apache License

/**
 * Set whether to keep the command file for debugging.
 * @param conf the configuration to modify
 * @param keep the new value
 */
public static void setKeepCommandFile(Configuration conf, boolean keep) {
    conf.setBoolean(Submitter.PRESERVE_COMMANDFILE, keep);
}

From source file: it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License: Apache License

/**
 * Test PipesMapRunner: verify that data is transferred from the reader.
 *
 * @throws Exception
 */
@Test
public void testRunner() throws Exception {
    // clean old password files
    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(Submitter.IS_JAVA_RR, "true");
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        job.setInputFormatClass(DummyInputFormat.class);
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);

        InputSplit isplit = isplits.get(0);

        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        RecordReader<FloatWritable, NullWritable> rReader = input_format.createRecordReader(isplit, tcontext);

        TestMapContext context = new TestMapContext(conf, taskAttemptid, rReader, null, null, null, isplit);
        // stub for client
        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationRunnableStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        // token for authorization
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
        PipesMapper<FloatWritable, NullWritable, IntWritable, Text> mapper = new PipesMapper<FloatWritable, NullWritable, IntWritable, Text>(
                context);

        initStdOut(conf);
        mapper.run(context);
        String stdOut = readStdOut(conf);

        // verify part of the translated data via stdout, which is shared
        // between the client and the test
        // check version
        assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
        // check key and value classes
        assertTrue(stdOut.contains("Key class:org.apache.hadoop.io.FloatWritable"));
        assertTrue(stdOut.contains("Value class:org.apache.hadoop.io.NullWritable"));
        // verify that all data from the reader was sent
        assertTrue(stdOut.contains("value:0.0"));
        assertTrue(stdOut.contains("value:9.0"));

    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}

From source file: it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License: Apache License

/**
 * Test org.apache.hadoop.mapreduce.pipes.Application.
 * Tests internal functions:
 *     MessageType.REGISTER_COUNTER, INCREMENT_COUNTER, STATUS, PROGRESS...
 *
 * @throws Throwable
 */

@Test
public void testApplication() throws Throwable {

    System.err.println("testApplication");

    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationStub");
        //getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationRunnableStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        System.err.println("fCommand" + fCommand.getAbsolutePath());

        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);

        TestReporter reporter = new TestReporter();
        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);
        InputSplit isplit = isplits.get(0);
        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        DummyRecordReader reader = (DummyRecordReader) input_format.createRecordReader(isplit, tcontext);

        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(Text.class);

        RecordWriter<IntWritable, Text> writer = new TestRecordWriter(
                new FileOutputStream(workSpace.getAbsolutePath() + File.separator + "outfile"));

        MapContextImpl<IntWritable, Text, IntWritable, Text> context = new MapContextImpl<IntWritable, Text, IntWritable, Text>(
                conf, taskAttemptid, null, writer, null, reporter, null);

        System.err.println("ready to launch application");
        Application<IntWritable, Text, IntWritable, Text> application = new Application<IntWritable, Text, IntWritable, Text>(
                context, reader);
        System.err.println("done");

        application.getDownlink().flush();
        application.getDownlink().mapItem(new IntWritable(3), new Text("txt"));
        application.getDownlink().flush();
        application.waitForFinish();

        // test getDownlink().mapItem();
        String stdOut = readStdOut(conf);
        assertTrue(stdOut.contains("key:3"));
        assertTrue(stdOut.contains("value:txt"));

        assertEquals(0.0, context.getProgress(), 0.01);
        assertNotNull(context.getCounter("group", "name"));

        // test status MessageType.STATUS
        assertEquals(context.getStatus(), "PROGRESS");
        // check MessageType.PROGRESS
        assertEquals(0.55f, reader.getProgress(), 0.001);
        application.getDownlink().close();
        // test MessageType.OUTPUT
        stdOut = readFile(new File(workSpace.getAbsolutePath() + File.separator + "outfile"));
        assertTrue(stdOut.contains("key:123"));
        assertTrue(stdOut.contains("value:value"));
        try {
            // try to abort
            application.abort(new Throwable());
            fail();
        } catch (IOException e) {
            // abort is expected to fail with this message
            assertEquals("pipe child exception", e.getMessage());
        }
    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}