Example usage for org.apache.hadoop.conf Configuration clear

List of usage examples for org.apache.hadoop.conf Configuration clear

Introduction

On this page you can find example usages of org.apache.hadoop.conf.Configuration.clear().

Prototype

public void clear() 

Document

Clears all keys from the configuration.
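Before the project-specific usages below, here is a minimal, self-contained sketch (not taken from any of the source files listed on this page) illustrating the effect of clear(); the property name "example.key" is invented purely for illustration.

import org.apache.hadoop.conf.Configuration;

public class ConfigurationClearSketch {
    public static void main(String[] args) {
        // Pass false to skip loading the default resources (core-default.xml, core-site.xml).
        Configuration conf = new Configuration(false);
        conf.set("example.key", "value");            // "example.key" is an illustrative name
        System.out.println(conf.get("example.key")); // prints "value"

        conf.clear();                                // removes all keys from the configuration
        System.out.println(conf.get("example.key")); // prints "null" -- the key is gone
    }
}

Several of the examples below call clear() for exactly this reason: to reset a Configuration to an empty state before populating it with a known set of properties.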

Usage

From source file:com.cloudera.recordservice.mapreduce.MapReduceTest.java

License:Apache License

@Test
public void testGetSplits() throws IOException {
    Configuration config = new Configuration();

    boolean exceptionThrown = false;
    try {
        PlanUtil.getSplits(config, new Credentials());
    } catch (IllegalArgumentException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("No input specified"));
    }
    assertTrue(exceptionThrown);

    // Set db/table and make sure it works.
    config.set(ConfVars.TBL_NAME_CONF.name, "tpch.nation");
    PlanUtil.getSplits(config, new Credentials());

    // Also set input. This should fail.
    config.set(FileInputFormat.INPUT_DIR, "/test");
    exceptionThrown = false;
    try {
        PlanUtil.getSplits(config, new Credentials());
    } catch (IllegalArgumentException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage(), e.getMessage().contains("More than one input specified"));
    }
    assertTrue(exceptionThrown);

    // Unset the table and set columns. INPUT_DIR and columns don't work now.
    config.unset(ConfVars.TBL_NAME_CONF.name);
    config.setStrings(ConfVars.COL_NAMES_CONF.name, "a");
    exceptionThrown = false;
    try {
        PlanUtil.getSplits(config, new Credentials());
    } catch (IllegalArgumentException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("Column projections can only be specified with table inputs."));
    }
    assertTrue(exceptionThrown);

    // Test some cases that work
    verifyInputSplitsTable(1, 4, "tpch.nation");
    verifyInputSplitsTable(2, 12, "rs.alltypes");
    verifyInputSplitsTable(1, 1, "tpch.nation", "n_name");
    verifyInputSplitsTable(2, 3, "rs.alltypes", "int_col", "double_col", "string_col");
    verifyInputSplitsPath(1, 1, "/test-warehouse/tpch.nation");

    // Test some cases using the config utility.
    config.clear();
    RecordServiceConfig.setInputTable(config, null, "tpch.nation", "n_nationkey", "n_comment");
    verifyInputSplits(1, 2, config);

    exceptionThrown = false;
    try {
        verifyInputSplitsTable(1, 1, "tpch.nation", "bad");
    } catch (IOException e) {
        exceptionThrown = true;
        assertTrue(e.getCause() instanceof RecordServiceException);
        RecordServiceException ex = (RecordServiceException) e.getCause();
        assertEquals(RecordServiceException.ErrorCode.INVALID_REQUEST, ex.code);
    }
    assertTrue(exceptionThrown);

    exceptionThrown = false;
    try {
        verifyInputSplitsPath(1, 1, "/test-warehouse/tpch.nation,/test-warehouse/tpch.nation");
    } catch (IllegalArgumentException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("Only reading a single directory is currently supported."));
    }
    assertTrue(exceptionThrown);
}

From source file:com.cloudera.recordservice.mapreduce.MapReduceTest.java

License:Apache License

@Test
// TODO: make this generic. This should be extensible to test all the input
// formats we support. How do we do this?
public void testReadNation() throws IOException, InterruptedException {
    Configuration config = new Configuration();
    RecordServiceInputFormat.RecordServiceRecordReader reader = new RecordServiceInputFormat.RecordServiceRecordReader();

    try {
        RecordServiceConfig.setInputTable(config, null, "tpch.nation");
        List<InputSplit> splits = PlanUtil.getSplits(config, new Credentials()).splits;
        reader.initialize(splits.get(0), new TaskAttemptContextImpl(new JobConf(config), new TaskAttemptID()));

        int numRows = 0;
        while (reader.nextKeyValue()) {
            RecordServiceRecord value = reader.getCurrentValue();
            ++numRows;

            if (numRows == 10) {
                assertEquals("INDONESIA", value.getColumnValue(1).toString());
            }
        }
        assertFalse(reader.nextKeyValue());
        assertFalse(reader.nextRecord());
        assertEquals(25, numRows);

        config.clear();
        RecordServiceConfig.setInputTable(config, "tpch", "nation", "n_comment");
        splits = PlanUtil.getSplits(config, new Credentials()).splits;
        reader.initialize(splits.get(0), new TaskAttemptContextImpl(new JobConf(config), new TaskAttemptID()));
        numRows = 0;
        while (reader.nextKeyValue()) {
            RecordServiceRecord value = reader.getCurrentValue();
            if (numRows == 12) {
                assertEquals("ously. final, express gifts cajole a", value.getColumnValue(0).toString());
            }
            ++numRows;
        }
        assertEquals(25, numRows);
    } finally {
        reader.close();
    }
}

From source file:com.firewallid.util.FIConfiguration.java

public static Configuration create() {
    Configuration conf = new Configuration();
    conf.clear();
    addFirewallIndonesiaResources(conf);
    return conf;
}

From source file:com.github.seqware.queryengine.impl.HBaseStorage.java

License:Open Source License

/**
 * Configure a HBaseConfiguration with a given set of properties
 *
 * @param config a {@link org.apache.hadoop.conf.Configuration} object.
 */
public static void configureHBaseConfig(Configuration config) {
    if (Constants.Term.HBASE_REMOTE_TESTING.getTermValue(Boolean.class)) {
        config.clear();
        Map<String, String> termValue = Constants.Term.HBASE_PROPERTIES.getTermValue(Map.class);
        for (Entry<String, String> e : termValue.entrySet()) {
            config.set(e.getKey(), e.getValue());
        }
    }
}

From source file:com.google.mr4c.content.S3CredentialsTest.java

License:Open Source License

@Test
public void testConfigurationUpdate() throws Exception {
    S3Credentials cred1 = buildS3Credentials1();
    Configuration conf = new Configuration();
    conf.clear();
    cred1.applyTo(conf);
    S3Credentials cred2 = S3Credentials.extractFrom(conf);
    assertEquals(cred1, cred2);
}

From source file:com.jivesoftware.os.rcvs.hbase094.HBaseRowColumnValueStoreInitializer.java

License:Apache License

private static org.apache.hadoop.conf.Configuration defaultHbaseConfig(
        HBase094RowColumnValueStoreConfig config) {
    org.apache.hadoop.conf.Configuration hbaseConfig = new org.apache.hadoop.conf.Configuration();
    hbaseConfig.clear();
    hbaseConfig.set("hbase.zookeeper.quorum", config.getHBaseZookeeperQuorum());
    hbaseConfig.set("hbase.zookeeper.property.clientPort", String.valueOf(config.getHBaseZookeeperPort()));
    return hbaseConfig;
}

From source file:com.jivesoftware.os.rcvs.hbase098.HBaseRowColumnValueStoreInitializer.java

License:Apache License

private static org.apache.hadoop.conf.Configuration defaultHbaseConfig(
        HBase098RowColumnValueStoreConfig config) {
    org.apache.hadoop.conf.Configuration hbaseConfig = new org.apache.hadoop.conf.Configuration();
    hbaseConfig.clear();
    hbaseConfig.set("hbase.zookeeper.quorum", config.getHBaseZookeeperQuorum());
    hbaseConfig.set("hbase.zookeeper.property.clientPort", String.valueOf(config.getHBaseZookeeperPort()));
    return hbaseConfig;
}

From source file:com.quixey.hadoop.fs.oss.OSSCredentialsTest.java

License:Apache License

@Test
public void testExtractFromURI() {
    OSSCredentials oss = new OSSCredentials();
    Configuration config = new Configuration();
    config.clear();

    oss.initialize(URI.create("oss://xyz:abc@bucket"), config);

    assertEquals(oss.getAccessKeyId(), "xyz");
    assertEquals(oss.getSecretAccessKey(), "abc");
}

From source file:com.quixey.hadoop.fs.oss.OSSCredentialsTest.java

License:Apache License

@Test
public void testExtractFromConfiguration() {
    Configuration config = new Configuration();
    config.clear();
    config.set("fs.oss.accessKeyId", "xyz");
    config.set("fs.oss.secretAccessKey", "abc");

    OSSCredentials oss = new OSSCredentials();
    oss.initialize(URI.create("oss://bucket"), config);

    assertEquals(oss.getAccessKeyId(), "xyz");
    assertEquals(oss.getSecretAccessKey(), "abc");
}

From source file:com.quixey.hadoop.fs.oss.OSSCredentialsTest.java

License:Apache License

@Test
public void testThrowIAEOnMissingKeyID() {
    OSSCredentials oss = new OSSCredentials();
    Configuration config = new Configuration();
    config.clear();

    thrown.expect(IllegalArgumentException.class);
    oss.initialize(URI.create("oss://abc@bucket"), config);
}