Example usage for org.apache.hadoop.conf Configuration getInt

List of usage examples for org.apache.hadoop.conf Configuration getInt

Introduction

On this page you can find example usages of org.apache.hadoop.conf.Configuration.getInt.

Prototype

public int getInt(String name, int defaultValue) 

Document

Get the value of the name property as an int.
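A minimal, self-contained sketch of the call; the property name is illustrative and not one used by the projects below:

import org.apache.hadoop.conf.Configuration;

public class GetIntExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Property not set yet: getInt returns the supplied default.
        int unset = conf.getInt("example.mapper.threads", 4); // 4

        // After setInt, getInt parses and returns the stored value.
        conf.setInt("example.mapper.threads", 8);
        int set = conf.getInt("example.mapper.threads", 4); // 8

        System.out.println(unset + " " + set);
    }
}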

Usage

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormat.java

License: Apache License

public static ComparisonOperator getRangeKeyComparisonOperator(Configuration conf) {
    return ComparisonOperator.values()[conf.getInt(DynamoDBConfiguration.RANGE_KEY_OPERATOR_PROPERTY,
            ComparisonOperator.EQ.ordinal())];
}
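The example above recovers a ComparisonOperator enum that was stored in the Configuration as its ordinal. Below is a minimal sketch of that round-trip pattern; the property key and enum are illustrative stand-ins, not the actual DynamoDBConfiguration constants:

import org.apache.hadoop.conf.Configuration;

public class OrdinalRoundTripExample {

    // Illustrative key; the real code uses DynamoDBConfiguration.RANGE_KEY_OPERATOR_PROPERTY.
    private static final String OPERATOR_KEY = "example.range.key.operator";

    enum Operator { EQ, LT, GT, BETWEEN }

    static void setOperator(Configuration conf, Operator op) {
        // Store the enum as its int ordinal.
        conf.setInt(OPERATOR_KEY, op.ordinal());
    }

    static Operator getOperator(Configuration conf) {
        // EQ is the default when the property has never been set.
        return Operator.values()[conf.getInt(OPERATOR_KEY, Operator.EQ.ordinal())];
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        setOperator(conf, Operator.BETWEEN);
        System.out.println(getOperator(conf)); // BETWEEN
    }
}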

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testGetHashKeyType() {
    Configuration conf = createMock(Configuration.class);
    expect(conf.getInt(DynamoDBConfiguration.HASH_KEY_TYPE_PROPERTY, Types.STRING.ordinal()))
            .andReturn(Types.NUMBER.ordinal());

    replay(conf);
    Types type = DynamoDBQueryInputFormat.getHashKeyType(conf);
    assertEquals(Types.NUMBER, type);
    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testGetHashKeyValue() {
    Configuration conf = createMock(Configuration.class);
    final String VALUE = "TEST";
    Types type = Types.STRING;

    expect(conf.getInt(DynamoDBConfiguration.HASH_KEY_TYPE_PROPERTY, Types.STRING.ordinal()))
            .andReturn(type.ordinal());
    expect(conf.get(DynamoDBConfiguration.HASH_KEY_VALUE_PROPERTY)).andReturn(VALUE);
    expect(conf.getInt(DynamoDBConfiguration.HASH_KEY_TYPE_PROPERTY, Types.STRING.ordinal()))
            .andReturn(type.ordinal());
    expect(conf.get(DynamoDBConfiguration.HASH_KEY_VALUE_PROPERTY)).andReturn(null);

    replay(conf);

    assertEquals(VALUE, DynamoDBQueryInputFormat.getHashKeyValue(conf).getS());
    assertNull(DynamoDBQueryInputFormat.getHashKeyValue(conf));

    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testGetRangeKeyType() {
    Configuration conf = createMock(Configuration.class);
    expect(conf.getInt(DynamoDBConfiguration.RANGE_KEY_TYPE_PROPERTY, Types.STRING.ordinal()))
            .andReturn(Types.NUMBER.ordinal());

    replay(conf);
    Types type = DynamoDBQueryInputFormat.getRangeKeyType(conf);
    assertEquals(Types.NUMBER, type);
    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testGetRangeKeyOperator() {
    Configuration conf = createMock(Configuration.class);
    expect(conf.getInt(DynamoDBConfiguration.RANGE_KEY_OPERATOR_PROPERTY, ComparisonOperator.EQ.ordinal()))
            .andReturn(ComparisonOperator.BETWEEN.ordinal());

    replay(conf);
    ComparisonOperator operator = DynamoDBQueryInputFormat.getRangeKeyComparisonOperator(conf);
    assertEquals(ComparisonOperator.BETWEEN, operator);
    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testGetRangeKeyValues() {
    Configuration conf = createMock(Configuration.class);
    final String[] VALUES = new String[] { "TEST1", "TEST2" };
    Types type = Types.STRING;

    List<AttributeValue> attrs = new ArrayList<AttributeValue>();
    for (String value : VALUES) {
        attrs.add(new AttributeValue().withS(value));
    }

    expect(conf.getInt(DynamoDBConfiguration.RANGE_KEY_TYPE_PROPERTY, Types.STRING.ordinal()))
            .andReturn(type.ordinal());
    expect(conf.getStrings(DynamoDBConfiguration.RANGE_KEY_VALUES_PROPERTY)).andReturn(VALUES);

    replay(conf);

    Collection<AttributeValue> results = DynamoDBQueryInputFormat.getRangeKeyValues(conf);
    int i = 0;
    for (AttributeValue result : results) {
        assertEquals(VALUES[i++], result.getS());
    }

    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testGetRangeKeyInterpolateMinValue() {
    Configuration conf = createMock(Configuration.class);
    final String VALUE = "TEST";
    Types type = Types.STRING;

    expect(conf.getInt(DynamoDBConfiguration.RANGE_KEY_TYPE_PROPERTY, Types.STRING.ordinal()))
            .andReturn(type.ordinal());
    expect(conf.get(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_MIN_VALUE_PROPERTY)).andReturn(VALUE);

    replay(conf);

    AttributeValue attr = DynamoDBQueryInputFormat.getRangeKeyInterpolateMinValue(conf);
    assertEquals(VALUE, attr.getS());

    verify(conf);
}

From source file: com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBQueryInputFormatTest.java

License: Apache License

@Test
public void testGetRangeKeyInterpolateMaxValue() {
    Configuration conf = createMock(Configuration.class);
    final String VALUE = "TEST";
    Types type = Types.STRING;

    expect(conf.getInt(DynamoDBConfiguration.RANGE_KEY_TYPE_PROPERTY, Types.STRING.ordinal()))
            .andReturn(type.ordinal());
    expect(conf.get(DynamoDBConfiguration.RANGE_KEY_INTERPOLATE_MAX_VALUE_PROPERTY)).andReturn(VALUE);

    replay(conf);

    AttributeValue attr = DynamoDBQueryInputFormat.getRangeKeyInterpolateMaxValue(conf);
    assertEquals(VALUE, attr.getS());

    verify(conf);
}

From source file: com.xiaomi.linden.hadoop.indexing.job.LindenJob.java

License: Apache License

@Override
public int run(String[] strings) throws Exception {
    Configuration conf = getConf();
    String dir = conf.get(LindenJobConfig.INPUT_DIR, null);
    logger.info("input dir:" + dir);
    Path inputPath = new Path(StringUtils.unEscapeString(dir));
    Path outputPath = new Path(conf.get(LindenJobConfig.OUTPUT_DIR));
    String indexPath = conf.get(LindenJobConfig.INDEX_PATH);

    FileSystem fs = FileSystem.get(conf);
    if (fs.exists(outputPath)) {
        fs.delete(outputPath, true);
    }
    if (fs.exists(new Path(indexPath))) {
        fs.delete(new Path(indexPath), true);
    }

    int numShards = conf.getInt(LindenJobConfig.NUM_SHARDS, 1);
    Shard[] shards = createShards(indexPath, numShards);

    Shard.setIndexShards(conf, shards);

    // empty trash
    (new Trash(conf)).expunge();

    Job job = Job.getInstance(conf, "linden-hadoop-indexing");
    job.setJarByClass(LindenJob.class);
    job.setMapperClass(LindenMapper.class);
    job.setCombinerClass(LindenCombiner.class);
    job.setReducerClass(LindenReducer.class);
    job.setMapOutputKeyClass(Shard.class);
    job.setMapOutputValueClass(IntermediateForm.class);
    job.setOutputKeyClass(Shard.class);
    job.setOutputValueClass(Text.class);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(IndexUpdateOutputFormat.class);
    job.setReduceSpeculativeExecution(false);
    job.setNumReduceTasks(numShards);

    String lindenSchemaFile = conf.get(LindenJobConfig.SCHEMA_FILE_URL);
    if (lindenSchemaFile == null) {
        throw new IOException("no schema file is found");
    }
    logger.info("Adding schema file: " + lindenSchemaFile);
    job.addCacheFile(new URI(lindenSchemaFile + "#lindenSchema"));
    String lindenPropertiesFile = conf.get(LindenJobConfig.LINDEN_PROPERTIES_FILE_URL);
    if (lindenPropertiesFile == null) {
        throw new IOException("no linden properties file is found");
    }
    logger.info("Adding linden properties file: " + lindenPropertiesFile);
    job.addCacheFile(new URI(lindenPropertiesFile + "#lindenProperties"));

    FileInputFormat.setInputPaths(job, inputPath);
    FileOutputFormat.setOutputPath(job, outputPath);

    Path[] inputs = FileInputFormat.getInputPaths(job);
    StringBuilder buffer = new StringBuilder(inputs[0].toString());
    for (int i = 1; i < inputs.length; i++) {
        buffer.append(",");
        buffer.append(inputs[i].toString());
    }
    logger.info("mapreduce.input.dir = " + buffer.toString());
    logger.info("mapreduce.output.dir = " + FileOutputFormat.getOutputPath(job).toString());
    logger.info("mapreduce.job.num.reduce.tasks = " + job.getNumReduceTasks());
    logger.info(shards.length + " shards = " + conf.get(LindenJobConfig.INDEX_SHARDS));
    logger.info("mapreduce.input.format.class = " + job.getInputFormatClass());
    logger.info("mapreduce.output.format.class = " + job.getOutputFormatClass());
    logger.info("mapreduce.cluster.temp.dir = " + conf.get(MRJobConfig.TEMP_DIR));

    job.waitForCompletion(true);
    if (!job.isSuccessful()) {
        throw new RuntimeException("Job failed");
    }
    return 0;
}

From source file: com.xiaomi.linden.hadoop.indexing.reduce.FileSystemDirectory.java

License: Apache License

/**
 * Constructor.
 * @param fs the file system holding the directory
 * @param directory the path backing this directory
 * @param create whether to create the directory before validating it
 * @param conf configuration used to read io.file.buffer.size
 * @throws IOException if the path does not resolve to a directory
 */
public FileSystemDirectory(FileSystem fs, Path directory, boolean create, Configuration conf)
        throws IOException {

    this.fs = fs;
    this.directory = directory;
    this.ioFileBufferSize = conf.getInt("io.file.buffer.size", 4096);

    if (create) {
        create();
    }

    boolean isDir = false;
    try {
        FileStatus status = fs.getFileStatus(directory);
        if (status != null) {
            isDir = status.isDirectory();
        }
    } catch (IOException e) {
        // file does not exist, isDir already set to false
    }
    if (!isDir) {
        throw new IOException(directory + " is not a directory");
    }
}
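The constructor above reads the standard Hadoop property io.file.buffer.size, falling back to 4096 bytes when it is unset. A minimal sketch of the same idiom, passing the looked-up value on to FileSystem.open (the path here is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class IoBufferSizeExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Same lookup as FileSystemDirectory: 4096 bytes if the property is unset.
        int ioFileBufferSize = conf.getInt("io.file.buffer.size", 4096);

        FileSystem fs = FileSystem.get(conf);
        // Illustrative path; open() accepts an explicit buffer size.
        try (FSDataInputStream in = fs.open(new Path("/tmp/example.txt"), ioFileBufferSize)) {
            System.out.println("first byte: " + in.read());
        }
    }
}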