Example usage for org.apache.hadoop.conf Configuration getInt

Introduction

This page lists usage examples for org.apache.hadoop.conf.Configuration.getInt.

Prototype

public int getInt(String name, int defaultValue) 

Document

Get the value of the name property as an int. If no such property exists, the supplied defaultValue is returned; a value that cannot be parsed as an int causes a NumberFormatException.
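For orientation, here is a minimal, self-contained sketch of that behavior (the property name example.retries and the class name GetIntExample are illustrative, not taken from any of the sources below):

import org.apache.hadoop.conf.Configuration;

public class GetIntExample {
    public static void main(String[] args) {
        // false: do not load core-default.xml / core-site.xml resources.
        Configuration conf = new Configuration(false);

        // Property absent: the supplied default is returned.
        System.out.println(conf.getInt("example.retries", 3)); // prints 3

        // Property present: the stored value wins over the default.
        conf.setInt("example.retries", 7);
        System.out.println(conf.getInt("example.retries", 3)); // prints 7

        // A value that cannot be parsed as an int makes getInt throw.
        conf.set("example.retries", "not-a-number");
        try {
            conf.getInt("example.retries", 3);
        } catch (NumberFormatException e) {
            System.out.println("Unparseable int value: " + e.getMessage());
        }
    }
}

Several of the examples below lean on exactly this contract: passing a default of 0 and asserting on the returned count verifies that a property was explicitly set.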

Usage

From source file: com.mortardata.pig.storage.DynamoDBStorage.java

License: Apache License
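Here getInt supplies the maximum number of retries per batch write and the minimum batch size, each falling back to a named default constant; the batch size is then range-checked against DynamoDB's per-request limit.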

/**
 * FRONTEND and BACKEND
 **/
@Override
public void setStoreLocation(String location, Job job) throws IOException {
    this.hadoopJobInfo = loadHadoopJobInfo(job);
    Configuration conf = this.hadoopJobInfo.getJobConfiguration();
    this.maxRetryWaitMilliseconds = conf.getLong(MAX_RETRY_WAIT_MILLISECONDS_PROPERTY,
            MAX_RETRY_WAIT_MILLISECONDS_DEFAULT);
    this.maxNumRetriesPerBatchWrite = conf.getInt(MAX_NUM_RETRIES_PER_BATCH_WRITE_PROPERTY,
            MAX_NUM_RETRIES_PER_BATCH_WRITE);
    this.throughputWritePercent = (double) conf.getFloat(THROUGHPUT_WRITE_PERCENT_PROPERTY,
            THROUGHPUT_WRITE_PERCENT_DEFAULT);
    if (this.throughputWritePercent < 0.1 || this.throughputWritePercent > 1.5) {
        throw new IOException(THROUGHPUT_WRITE_PERCENT_PROPERTY + " must be between 0.1 and 1.5.  Got: "
                + this.throughputWritePercent);
    }

    this.minBatchSize = conf.getInt(MINIMUM_BATCH_SIZE_PROPERTY, MINIMUM_BATCH_SIZE_DEFAULT);
    if (this.minBatchSize < 1 || this.minBatchSize > DYNAMO_MAX_ITEMS_IN_BATCH_WRITE_REQUEST) {
        throw new IOException(MINIMUM_BATCH_SIZE_PROPERTY + " must be between 1 and "
                + DYNAMO_MAX_ITEMS_IN_BATCH_WRITE_REQUEST + ". Got: " + this.minBatchSize);
    }
}

From source file: com.moz.fiji.hadoop.configurator.ConfigurationMethod.java

License: Apache License
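This reflection helper injects configuration values into methods annotated with @HadoopConf, dispatching on the parameter type; int parameters are filled from conf.getInt with the annotation's default value.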

/**
 * Calls an object's method with the value read from a Configuration instance.
 *
 * @param instance The object to populate.
 * @param conf The configuration to read from.
 * @throws IllegalAccessException If the method cannot be called on the object.
 * @throws HadoopConfigurationException If there is a problem with the annotation definition.
 */
public void call(Object instance, Configuration conf) throws IllegalAccessException {
    final String key = getKey();
    if (null == key) {
        throw new HadoopConfigurationException("Missing 'key' attribute of @HadoopConf on "
                + instance.getClass().getName() + "." + mMethod.getName());
    }

    if (!mMethod.isAccessible()) {
        mMethod.setAccessible(true);
    }

    final Class<?>[] parameterTypes = mMethod.getParameterTypes();
    if (1 != parameterTypes.length) {
        throw new HadoopConfigurationException(
                "Methods annotated with @HadoopConf must have exactly one parameter: "
                        + instance.getClass().getName() + "." + mMethod.getName());
    }

    final Class<?> parameterType = parameterTypes[0];

    try {
        try {
            if (boolean.class == parameterType) {
                mMethod.invoke(instance, conf.getBoolean(key, Boolean.parseBoolean(getDefault())));
            } else if (float.class == parameterType) {
                mMethod.invoke(instance, conf.getFloat(key, Float.parseFloat(getDefault())));
            } else if (double.class == parameterType) {
                // Note: this source reads double parameters via getFloat,
                // so the value is parsed and passed with float precision.
                mMethod.invoke(instance, conf.getFloat(key, Float.parseFloat(getDefault())));
            } else if (int.class == parameterType) {
                mMethod.invoke(instance, conf.getInt(key, Integer.parseInt(getDefault())));
            } else if (long.class == parameterType) {
                mMethod.invoke(instance, conf.getLong(key, Long.parseLong(getDefault())));
            } else if (parameterType.isAssignableFrom(String.class)) {
                mMethod.invoke(instance, conf.get(key, getDefault()));
            } else if (parameterType.isAssignableFrom(Collection.class)) {
                mMethod.invoke(instance, conf.getStringCollection(key));
            } else if (String[].class == parameterType) {
                mMethod.invoke(instance, new Object[] { conf.getStrings(key) });
            } else {
                throw new HadoopConfigurationException(
                        "Unsupported method parameter type annotated by @HadoopConf: "
                                + instance.getClass().getName() + "." + mMethod.getName());
            }
        } catch (NumberFormatException e) {
            mMethod.invoke(instance, getDefault());
        }
    } catch (InvocationTargetException e) {
        throw new HadoopConfigurationException(e);
    }
}

From source file: com.moz.fiji.hadoop.configurator.ConfigurationVariable.java

License: Apache License
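The field-injection counterpart of the previous example: int fields annotated with @HadoopConf are populated via conf.getInt, using the field's current value as the default.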

/**
 * Populates an object's field with the value read from a Configuration instance.
 *
 * @param instance The object to populate.
 * @param conf The configuration to read from.
 * @throws IllegalAccessException If the field cannot be set on the object.
 * @throws HadoopConfigurationException If there is a problem with the annotation definition.
 */
public void setValue(Object instance, Configuration conf) throws IllegalAccessException {
    final String key = getKey();
    if (null == key) {
        throw new HadoopConfigurationException("Missing 'key' attribute of @HadoopConf on "
                + instance.getClass().getName() + "." + mField.getName());
    }
    if (null == conf.get(key) && mAnnotation.defaultValue().isEmpty()) {
        // Nothing set in the configuration, and no default value
        // specified. Just leave the field alone.
        return;
    }

    if (!mField.isAccessible()) {
        mField.setAccessible(true);
    }

    try {
        if (boolean.class == mField.getType()) {
            mField.setBoolean(instance, conf.getBoolean(key, getDefaultBoolean(instance)));
        } else if (float.class == mField.getType()) {
            mField.setFloat(instance, conf.getFloat(key, getDefaultFloat(instance)));
        } else if (double.class == mField.getType()) {
            // As in ConfigurationMethod, double fields are read via getFloat here.
            mField.setDouble(instance, conf.getFloat(key, getDefaultDouble(instance)));
        } else if (int.class == mField.getType()) {
            mField.setInt(instance, conf.getInt(key, getDefaultInt(instance)));
        } else if (long.class == mField.getType()) {
            mField.setLong(instance, conf.getLong(key, getDefaultLong(instance)));
        } else if (mField.getType().isAssignableFrom(String.class)) {
            mField.set(instance, conf.get(key, getDefaultString(instance)));
        } else if (mField.getType().isAssignableFrom(Collection.class)) {
            mField.set(instance, conf.getStringCollection(key));
        } else if (String[].class == mField.getType()) {
            mField.set(instance, conf.getStrings(key));
        } else {
            throw new HadoopConfigurationException("Unsupported field type annotated by @HadoopConf: "
                    + instance.getClass().getName() + "." + mField.getName());
        }
    } catch (NumberFormatException e) {
        // That's okay. The default value for the field will be kept.
    }
}

From source file: com.moz.fiji.mapreduce.kvstore.KeyValueStoreReaderFactory.java

License: Apache License
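getInt recovers the number of serialized KeyValueStore bindings from the configuration, which bounds the deserialization loop that follows.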

/**
 * Creates a KeyValueStoreReaderFactory backed by store bindings specified in a Configuration.
 *
 * @param conf the Configuration from which a set of KeyValueStore bindings should
 *     be deserialized and initialized.
 * @throws IOException if there is an error deserializing or initializing a
 *     KeyValueStore instance.
 */
private KeyValueStoreReaderFactory(Configuration conf) throws IOException {
    Map<String, KeyValueStore<?, ?>> keyValueStores = new HashMap<String, KeyValueStore<?, ?>>();
    int numKvStores = conf.getInt(KeyValueStoreConfigSerializer.CONF_KEY_VALUE_STORE_COUNT,
            KeyValueStoreConfigSerializer.DEFAULT_KEY_VALUE_STORE_COUNT);
    for (int i = 0; i < numKvStores; i++) {
        KeyValueStoreConfiguration kvStoreConf = KeyValueStoreConfiguration.createInConfiguration(conf, i);

        Class<? extends KeyValueStore> kvStoreClass = kvStoreConf
                .<KeyValueStore>getClass(KeyValueStoreConfigSerializer.CONF_CLASS, null, KeyValueStore.class);

        String kvStoreName = kvStoreConf.get(KeyValueStoreConfigSerializer.CONF_NAME, "");

        if (null != kvStoreClass) {
            KeyValueStore<?, ?> kvStore = ReflectionUtils.newInstance(kvStoreClass, conf);
            if (null != kvStore) {
                kvStore.initFromConf(kvStoreConf);
                if (kvStoreName.isEmpty()) {
                    LOG.warn("Deserialized KeyValueStore not bound to a name; ignoring.");
                    continue;
                }
                keyValueStores.put(kvStoreName, kvStore);
            }
        }
    }

    mKeyValueStores = Collections.unmodifiableMap(keyValueStores);
    mKVStoreReaderCache = Maps.newConcurrentMap();
}

From source file: com.moz.fiji.mapreduce.kvstore.TestKeyValueStoreConfiguration.java

License: Apache License
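A round-trip test: a value written with setInt is read back with getInt, both through the isolated view and through the namespaced key on the parent configuration.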

@Test
public void testStoreInt() {
    Configuration parent = new Configuration(false);
    KeyValueStoreConfiguration isolated = KeyValueStoreConfiguration.createInConfiguration(parent, 0);
    isolated.setInt("foo-key", 123);
    assertEquals(123, isolated.getInt("foo-key", 0));

    // Check that this value is stored in the namespace on the parent:
    Configuration delegate = isolated.getDelegate();
    assertEquals(123, delegate.getInt(KeyValueStoreConfiguration.confKeyAtIndex("foo-key", 0), 0));
}

From source file: com.moz.fiji.mapreduce.TestFijiBulkImportJobBuilder.java

License: Apache License
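Here getInt, with a default of 0, asserts that exactly one KeyValueStore binding was serialized into the bulk-import job's configuration.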

@Test
public void testBuildWithKeyValueStore() throws Exception {
    final FijiMapReduceJob mrjob = FijiBulkImportJobBuilder.create().withConf(getConf())
            .withInput(MapReduceJobInputs.newTextMapReduceJobInput(new Path(mTempPath, "input")))
            .withBulkImporter(KVStoreBulkImporter.class).withOutput(MapReduceJobOutputs
                    .newHFileMapReduceJobOutput(mTable.getURI(), new Path(mTempPath, "output"), 10))
            .build();

    final Job job = mrjob.getHadoopJob();
    // Verify that everything else is what we expected as in the previous test
    // (except the bulk importer class name)...
    assertEquals(TextInputFormat.class, job.getInputFormatClass());
    assertEquals(BulkImportMapper.class, job.getMapperClass());
    assertEquals(KVStoreBulkImporter.class,
            job.getConfiguration().getClass(FijiConfKeys.FIJI_BULK_IMPORTER_CLASS, null));
    assertEquals(IdentityReducer.class, job.getReducerClass());
    assertEquals(10, job.getNumReduceTasks());
    assertEquals(FijiHFileOutputFormat.class, job.getOutputFormatClass());
    assertEquals(TotalOrderPartitioner.class, job.getPartitionerClass());

    // KeyValueStore-specific checks here.
    final Configuration confOut = job.getConfiguration();
    assertEquals(1, confOut.getInt(KeyValueStoreConfigSerializer.CONF_KEY_VALUE_STORE_COUNT, 0));
    assertEquals(EmptyKeyValueStore.class.getName(),
            confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "0."
                    + KeyValueStoreConfigSerializer.CONF_CLASS));
    assertEquals("foostore", confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "0."
            + KeyValueStoreConfigSerializer.CONF_NAME));
}

From source file: com.moz.fiji.mapreduce.TestFijiGatherJobBuilder.java

License: Apache License
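The same store-count assertion via getInt, this time for a gather job whose UnconfiguredKeyValueStore is overridden with an EmptyKeyValueStore.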

@Test
public void testEmptyKeyValueStore() throws Exception {
    // We override UnconfiguredKeyValueStore with EmptyKeyValueStore; this should succeed.
    final FijiMapReduceJob gatherJob = FijiGatherJobBuilder.create().withConf(getConf())
            .withInputTable(mTable.getURI()).withGatherer(UnconfiguredKVGatherer.class)
            .withCombiner(MyCombiner.class).withReducer(MyReducer.class)
            .withOutput(MapReduceJobOutputs.newTextMapReduceJobOutput(new Path("mypath"), 10))
            .withStore("foostore", EmptyKeyValueStore.builder().build()).build();

    // Verify that the MR Job was configured correctly.
    final Job job = gatherJob.getHadoopJob();
    final Configuration conf = job.getConfiguration();
    assertEquals(1, conf.getInt(KeyValueStoreConfigSerializer.CONF_KEY_VALUE_STORE_COUNT, 0));
    assertEquals(EmptyKeyValueStore.class.getName(),
            conf.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "0."
                    + KeyValueStoreConfigSerializer.CONF_CLASS));
    assertEquals("foostore", conf.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "0."
            + KeyValueStoreConfigSerializer.CONF_NAME));
}

From source file: com.moz.fiji.mapreduce.TestFijiMapReduceJobBuilder.java

License: Apache License
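With one store bound in the mapper and one in the reducer, getInt is expected to report a serialized store count of 2.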

@Test
public void testBuild() throws Exception {
    final FijiMapReduceJob job = FijiMapReduceJobBuilder.create().withConf(mConf)
            .withInput(MapReduceJobInputs.newTextMapReduceJobInput(new Path("/path/to/my/input")))
            .withMapper(MyMapper.class).withReducer(MyReducer.class)
            .withOutput(MapReduceJobOutputs.newTextMapReduceJobOutput(new Path("/path/to/my/output"), 16))
            .build();

    final Job hadoopJob = job.getHadoopJob();
    assertEquals(TextInputFormat.class, hadoopJob.getInputFormatClass());
    assertEquals(MyMapper.class, hadoopJob.getMapperClass());
    assertEquals(MyReducer.class, hadoopJob.getReducerClass());
    assertEquals(16, hadoopJob.getNumReduceTasks());
    assertEquals(TextOutputFormat.class, hadoopJob.getOutputFormatClass());

    // KeyValueStore-specific checks here.
    Configuration confOut = hadoopJob.getConfiguration();
    assertEquals(2, confOut.getInt(KeyValueStoreConfigSerializer.CONF_KEY_VALUE_STORE_COUNT, 0));
    assertEquals(EmptyKeyValueStore.class.getName(),
            confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "0."
                    + KeyValueStoreConfigSerializer.CONF_CLASS));
    assertEquals("mapperMap", confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "0."
            + KeyValueStoreConfigSerializer.CONF_NAME));
    assertEquals(EmptyKeyValueStore.class.getName(),
            confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "1."
                    + KeyValueStoreConfigSerializer.CONF_CLASS));
    assertEquals("reducerMap", confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "1."
            + KeyValueStoreConfigSerializer.CONF_NAME));
}

From source file: com.moz.fiji.mapreduce.TestFijiMapReduceJobBuilder.java

License: Apache License
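After overriding the mapper's store from an XML bindings file, the store count read back with getInt remains 2; only the store class at index 0 changes.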

@Test
public void testBuildWithXmlKVStores() throws Exception {
    // Test that we can override default configuration KeyValueStores from an XML file.
    final InputStream xmlStores = Resources.openSystemResource("com.moz.fiji/mapreduce/test-kvstores.xml");

    // This file needs to exist before we build the job, or else
    // we can't build the job; it's referenced by a key-value store that checks
    // for its presence.
    final File tmpFile = new File("/tmp/foo.seq");
    if (tmpFile.createNewFile()) {
        // We created this temp file, we're responsible for deleting it.
        tmpFile.deleteOnExit();
    }

    LOG.info("Building job...");
    final FijiMapReduceJob job = FijiMapReduceJobBuilder.create().withConf(mConf)
            .withInput(MapReduceJobInputs.newTextMapReduceJobInput(new Path("/path/to/my/input")))
            .withMapper(MyMapper.class).withReducer(MyReducer.class)
            .withOutput(MapReduceJobOutputs.newTextMapReduceJobOutput(new Path("/path/to/my/output"), 16))
            .withStoreBindings(xmlStores).build();

    xmlStores.close();

    LOG.info("Verifying job configuration...");
    final Job hadoopJob = job.getHadoopJob();
    assertEquals(TextInputFormat.class, hadoopJob.getInputFormatClass());
    assertEquals(MyMapper.class, hadoopJob.getMapperClass());
    assertEquals(MyReducer.class, hadoopJob.getReducerClass());
    assertEquals(16, hadoopJob.getNumReduceTasks());
    assertEquals(TextOutputFormat.class, hadoopJob.getOutputFormatClass());

    // KeyValueStore-specific checks here.
    // We override mapperMap with a SeqFileKeyValueStore.
    Configuration confOut = hadoopJob.getConfiguration();
    assertEquals(2, confOut.getInt(KeyValueStoreConfigSerializer.CONF_KEY_VALUE_STORE_COUNT, 0));
    assertEquals(SeqFileKeyValueStore.class.getName(),
            confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "0."
                    + KeyValueStoreConfigSerializer.CONF_CLASS));
    assertEquals("mapperMap", confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "0."
            + KeyValueStoreConfigSerializer.CONF_NAME));
    assertEquals(EmptyKeyValueStore.class.getName(),
            confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "1."
                    + KeyValueStoreConfigSerializer.CONF_CLASS));
    assertEquals("reducerMap", confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "1."
            + KeyValueStoreConfigSerializer.CONF_NAME));
}

From source file: com.moz.fiji.mapreduce.TestFijiProduceJobBuilder.java

License: Apache License
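The produce-job variant of the same check: getInt confirms a single serialized store binding before the per-store class and name assertions.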

@Test
public void testEmptyKeyValueStore() throws ClassNotFoundException, IOException {
    // We override UnconfiguredKeyValueStore with EmptyKeyValueStore; this should succeed.
    FijiMapReduceJob produceJob = FijiProduceJobBuilder.create().withConf(getConf())
            .withInputTable(mTable.getURI()).withProducer(UnconfiguredKVProducer.class)
            .withStore("foostore", EmptyKeyValueStore.get())
            .withOutput(MapReduceJobOutputs.newDirectFijiTableMapReduceJobOutput(mTable.getURI())).build();

    // Verify that the MR Job was configured correctly.
    Job job = produceJob.getHadoopJob();
    Configuration confOut = job.getConfiguration();
    assertEquals(1, confOut.getInt(KeyValueStoreConfigSerializer.CONF_KEY_VALUE_STORE_COUNT, 0));
    assertEquals(EmptyKeyValueStore.class.getName(),
            confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "0."
                    + KeyValueStoreConfigSerializer.CONF_CLASS));
    assertEquals("foostore", confOut.get(KeyValueStoreConfiguration.KEY_VALUE_STORE_NAMESPACE + "0."
            + KeyValueStoreConfigSerializer.CONF_NAME));
}