Example usage for org.apache.hadoop.conf.Configuration.setInt

Introduction

This page collects usage examples for the setInt method of org.apache.hadoop.conf.Configuration.

Prototype

public void setInt(String name, int value) 

Document

Set the value of the name property to an int.
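
For orientation, here is a minimal sketch of setting an int property and reading it back with getInt; the class name and property key below are made up for illustration only:

import org.apache.hadoop.conf.Configuration;

public class SetIntExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Store an int under a property name (the key is purely illustrative).
        conf.setInt("example.max.retries", 5);

        // Read it back; the second argument is the default used if the key is absent.
        int retries = conf.getInt("example.max.retries", 3);
        System.out.println("example.max.retries = " + retries); // prints: example.max.retries = 5
    }
}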

Usage

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testSetHashKeyType() {
    Configuration conf = createMock(Configuration.class);

    conf.setInt(DynamoDBConfiguration.HASH_KEY_TYPE_PROPERTY, Types.NUMBER.ordinal());

    replay(conf);
    DynamoDBQueryInputFormat.setHashKeyType(conf, Types.NUMBER);
    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testSetHashKeyValue() {
    Configuration conf = createMock(Configuration.class);
    final String VALUE = "TEST";
    Types type = Types.STRING;
    AttributeValue attr = new AttributeValue().withS(VALUE);

    conf.setInt(DynamoDBConfiguration.HASH_KEY_TYPE_PROPERTY, type.ordinal());
    conf.set(DynamoDBConfiguration.HASH_KEY_VALUE_PROPERTY, VALUE);

    replay(conf);

    DynamoDBQueryInputFormat.setHashKeyValue(conf, type, attr);

    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testSetRangeKeyType() {
    Configuration conf = createMock(Configuration.class);

    conf.setInt(DynamoDBConfiguration.RANGE_KEY_TYPE_PROPERTY, Types.NUMBER.ordinal());

    replay(conf);
    DynamoDBQueryInputFormat.setRangeKeyType(conf, Types.NUMBER);
    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testSetRangeKeyOperator() {
    Configuration conf = createMock(Configuration.class);

    conf.setInt(DynamoDBConfiguration.RANGE_KEY_OPERATOR_PROPERTY, ComparisonOperator.BETWEEN.ordinal());

    replay(conf);
    DynamoDBQueryInputFormat.setRangeKeyComparisonOperator(conf, ComparisonOperator.BETWEEN);
    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testSetRangeKeyValues() {
    Configuration conf = createMock(Configuration.class);
    final String[] VALUES = new String[] { "TEST1", "TEST2" };
    Types type = Types.STRING;

    List<AttributeValue> attrs = new ArrayList<AttributeValue>();
    for (String value : VALUES) {
        attrs.add(new AttributeValue().withS(value));
    }

    conf.setBoolean(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_PROPERTY, false);
    conf.setInt(DynamoDBConfiguration.RANGE_KEY_TYPE_PROPERTY, type.ordinal());
    conf.setStrings(DynamoDBConfiguration.RANGE_KEY_VALUES_PROPERTY, VALUES);

    replay(conf);

    DynamoDBQueryInputFormat.setRangeKeyValues(conf, type, attrs);

    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testSetRangeKeyCondition() {
    Configuration conf = createMock(Configuration.class);
    final String[] VALUES = new String[] { "TEST1", "TEST2" };
    Types type = Types.STRING;
    ComparisonOperator operator = ComparisonOperator.BETWEEN;

    List<AttributeValue> attrs = new ArrayList<AttributeValue>();
    for (String value : VALUES) {
        attrs.add(new AttributeValue().withS(value));
    }

    conf.setBoolean(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_PROPERTY, false);
    conf.setInt(DynamoDBConfiguration.RANGE_KEY_OPERATOR_PROPERTY, ComparisonOperator.BETWEEN.ordinal());
    conf.setInt(DynamoDBConfiguration.RANGE_KEY_TYPE_PROPERTY, type.ordinal());
    conf.setStrings(DynamoDBConfiguration.RANGE_KEY_VALUES_PROPERTY, VALUES);

    replay(conf);

    DynamoDBQueryInputFormat.setRangeKeyCondition(conf, type, operator, attrs);

    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testSetRangeKeyInterpolateMinValue() {
    Configuration conf = createMock(Configuration.class);
    final String VALUE = "TEST";
    Types type = Types.STRING;
    AttributeValue attr = new AttributeValue().withS(VALUE);

    conf.setBoolean(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_PROPERTY, true);
    conf.setInt(DynamoDBConfiguration.RANGE_KEY_TYPE_PROPERTY, type.ordinal());
    conf.set(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_MIN_VALUE_PROPERTY, VALUE);

    replay(conf);

    DynamoDBQueryInputFormat.setRangeKeyInterpolateMinValue(conf, type, attr);

    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testSetRangeKeyInterpolateMaxValue() {
    Configuration conf = createMock(Configuration.class);
    final String VALUE = "TEST";
    Types type = Types.STRING;
    AttributeValue attr = new AttributeValue().withS(VALUE);

    conf.setBoolean(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_PROPERTY, true);
    conf.setInt(DynamoDBConfiguration.RANGE_KEY_TYPE_PROPERTY, type.ordinal());
    conf.set(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_MAX_VALUE_PROPERTY, VALUE);

    replay(conf);

    DynamoDBQueryInputFormat.setRangeKeyInterpolateMaxValue(conf, type, attr);

    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testSetRangeKeyInterpolateRange() {
    Configuration conf = createMock(Configuration.class);
    final String MIN_VALUE = "TEST1";
    final String MAX_VALUE = "TEST2";
    Types type = Types.STRING;

    AttributeValue min_attr = new AttributeValue().withS(MIN_VALUE);
    AttributeValue max_attr = new AttributeValue().withS(MAX_VALUE);

    conf.setBoolean(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_PROPERTY, true);
    conf.setInt(DynamoDBConfiguration.RANGE_KEY_TYPE_PROPERTY, type.ordinal());
    conf.set(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_MIN_VALUE_PROPERTY, MIN_VALUE);

    conf.setBoolean(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_PROPERTY, true);
    conf.setInt(DynamoDBConfiguration.RANGE_KEY_TYPE_PROPERTY, type.ordinal());
    conf.set(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_MAX_VALUE_PROPERTY, MAX_VALUE);

    replay(conf);

    DynamoDBQueryInputFormat.setRangeKeyInterpolateRange(conf, type, min_attr, max_attr);

    verify(conf);

}

From source file: com.wipro.ats.bdre.datagen.mr.Driver.java

License: Apache License

/**
 * @param args the CLI arguments
 */
@Override
public int run(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Configuration conf = getConf();
    GetGeneralConfig generalConfig = new GetGeneralConfig();
    GeneralConfig gc = generalConfig.byConigGroupAndKey("imconfig", "common.default-fs-name");
    conf.set("fs.defaultFS", gc.getDefaultVal());

    String processId = args[0];
    Path outputDir = new Path(ResolvePath.replaceVars(args[1]));

    Properties dataProps = Config.getDataProperties(processId);
    Properties tableProps = Config.getTableProperties(processId);

    TableUtil tableUtil = new TableUtil();
    Table table = tableUtil.formTableFromConfig(processId);
    FileSystem fs = FileSystem.get(conf);
    LOGGER.info("Default FS =" + conf.get("fs.defaultFS"));
    //set in the conf for mappers to use
    conf.set(Config.SEPARATOR_KEY, tableProps.getProperty("separator"));
    conf.set(Config.PID_KEY, processId);
    conf.setLong(Config.NUM_ROWS_KEY, Long.parseLong(dataProps.getProperty("numRows")));
    conf.setInt(Config.NUM_SPLITS_KEY, Integer.parseInt(dataProps.getProperty("numSplits")));

    Job job = Job.getInstance(conf);
    Path mrOutputPath = new Path(outputDir.toString() + "/MROUT/" + table.getTableName());

    FileOutputFormat.setOutputPath(job, mrOutputPath);
    job.setJobName("Datagen-" + table.getTableName());
    job.setJarByClass(Driver.class);
    job.setMapperClass(RecordGenMapper.class);
    job.setNumReduceTasks(0);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setInputFormatClass(RangeInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);
    job.waitForCompletion(true);

    //merge and create a single file

    Path srcDir = mrOutputPath;
    Path destFile = new Path(outputDir.toString() + "/" + table.getTableName());
    FileUtil.copyMerge(fs, srcDir, fs, destFile, true, conf, "");

    //Return file info oozie params
    RegisterFileInfo registerFileInfo = new RegisterFileInfo();
    registerFileInfo.setBatchId(null);
    registerFileInfo.setCreationTs(new Timestamp(new Date().getTime()));
    registerFileInfo.setFileHash("0");
    registerFileInfo.setFileSize(0L);
    registerFileInfo.setPath(destFile.toString());
    registerFileInfo.setSubProcessId(Integer.parseInt(processId));
    OozieUtil oozieUtil = new OozieUtil();
    oozieUtil.persistBeanData(registerFileInfo, false);
    return 0;
}
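
The Driver above stores numSplits and numRows in the Configuration so the map tasks can read them back. For context, here is a minimal sketch of the task-side read with Configuration.getInt and getLong; the mapper class, its generic types, and the property keys below are illustrative only (the real project uses the Config constants shown in the Driver, whose actual key strings are not part of this excerpt):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class ReadBackMapper extends Mapper<LongWritable, NullWritable, Text, Text> {

    private int numSplits;
    private long numRows;

    @Override
    protected void setup(Context context) {
        Configuration conf = context.getConfiguration();
        // The second argument is the default returned when the key is absent.
        // The keys here are placeholders standing in for Config.NUM_SPLITS_KEY and Config.NUM_ROWS_KEY.
        numSplits = conf.getInt("datagen.num.splits", 1);
        numRows = conf.getLong("datagen.num.rows", 0L);
    }
}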