List of usage examples for org.apache.hadoop.conf.Configuration.getBoolean
public boolean getBoolean(String name, boolean defaultValue)
Gets the value of the name property as a boolean. If no such property is specified, or if the specified value is not a valid boolean, then defaultValue is returned.
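For orientation, here is a minimal standalone sketch of that fallback behaviour (the property name example.feature.enabled is purely illustrative and not taken from the examples below):

import org.apache.hadoop.conf.Configuration;

public class GetBooleanExample {
    public static void main(String[] args) {
        // Empty Configuration: no *-site.xml defaults are loaded.
        Configuration conf = new Configuration(false);

        // The key is unset, so the supplied default (false) is returned.
        boolean beforeSet = conf.getBoolean("example.feature.enabled", false);

        // After setBoolean, the stored "true" string is parsed back as a boolean.
        conf.setBoolean("example.feature.enabled", true);
        boolean afterSet = conf.getBoolean("example.feature.enabled", false);

        System.out.println(beforeSet + " " + afterSet); // prints: false true
    }
}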
From source file: com.moz.fiji.mapreduce.kvstore.TestKeyValueStoreConfiguration.java
License: Apache License

@Test
public void testStoreBoolean() {
    final boolean expected = true;
    Configuration parent = new Configuration(false);
    KeyValueStoreConfiguration isolated = KeyValueStoreConfiguration.createInConfiguration(parent, 0);
    isolated.setBoolean("foo-key", expected);
    assertEquals(expected, isolated.getBoolean("foo-key", !expected));

    // Check that this value is stored in the namespace on the parent:
    Configuration delegate = isolated.getDelegate();
    assertEquals(expected,
        delegate.getBoolean(KeyValueStoreConfiguration.confKeyAtIndex("foo-key", 0), !expected));
}
From source file: com.moz.fiji.mapreduce.output.TestFijiTableMapReduceJobOutput.java
License: Apache License

/** Test that mapper speculative execution is disabled for FijiTableMapReduceJobOutput. */
@Test
public void testSpecExDisabled() throws Exception {
    final Fiji fiji = getFiji();
    final FijiTableLayout layout = FijiTableLayout
        .createUpdatedLayout(FijiTableLayouts.getLayout(FijiTableLayouts.SIMPLE), null);
    fiji.createTable("table", layout);
    FijiURI tableURI = FijiURI.newBuilder(fiji.getURI()).withTableName("table").build();

    final Job job = new Job();
    new DirectFijiTableMapReduceJobOutput(tableURI).configure(job);

    final Configuration conf = job.getConfiguration();
    boolean isMapSpecExEnabled = conf.getBoolean("mapred.map.tasks.speculative.execution", true);
    assertFalse(isMapSpecExEnabled);
}
From source file: com.moz.fiji.schema.tools.TestFijiToolLauncher.java
License: Apache License

@Test
public void testSetsOptionsParsedFlag() throws Exception {
    Configuration conf = new Configuration();
    assertFalse(conf.getBoolean("mapred.used.genericoptionsparser", false));
    assertFalse(conf.getBoolean("mapreduce.client.genericoptionsparser.used", false));

    FijiToolLauncher launcher = new FijiToolLauncher();
    launcher.setConf(conf);

    FijiTool tool = launcher.getToolForName("help");
    launcher.run(tool, new String[0]);

    // Make sure Hadoop 1.x and Hadoop 2.x flags are both enabled by the launcher.
    assertTrue(conf.getBoolean("mapred.used.genericoptionsparser", false));
    assertTrue(conf.getBoolean("mapreduce.client.genericoptionsparser.used", false));
}
From source file: com.navercorp.pinpoint.common.hbase.ConnectionFactoryBean.java
License: Apache License

public ConnectionFactoryBean(Configuration configuration) {
    Objects.requireNonNull(configuration, "configuration must not be null");
    try {
        warmUp = configuration.getBoolean("hbase.client.warmup.enable", false);
        connection = ConnectionFactory.createConnection(configuration);
    } catch (IOException e) {
        throw new HbaseSystemException(e);
    }
}
From source file: com.navercorp.pinpoint.common.hbase.ConnectionFactoryBean.java
License: Apache License

public ConnectionFactoryBean(Configuration configuration, ExecutorService executorService) {
    Objects.requireNonNull(configuration, "configuration must not be null");
    Objects.requireNonNull(executorService, "executorService must not be null");
    try {
        warmUp = configuration.getBoolean("hbase.client.warmup.enable", false);
        connection = ConnectionFactory.createConnection(configuration, executorService);
    } catch (IOException e) {
        throw new HbaseSystemException(e);
    }
}
From source file: com.navercorp.pinpoint.common.hbase.HBaseAsyncOperationFactory.java
License: Apache License

public static HBaseAsyncOperation create(Configuration configuration) throws IOException {
    boolean enableAsyncMethod = configuration.getBoolean(ENABLE_ASYNC_METHOD, DEFAULT_ENABLE_ASYNC_METHOD);
    if (!enableAsyncMethod) {
        return DisabledHBaseAsyncOperation.INSTANCE;
    }

    int queueSize = configuration.getInt(ASYNC_IN_QUEUE_SIZE, DEFAULT_ASYNC_IN_QUEUE_SIZE);
    if (configuration.get(ASYNC_PERIODIC_FLUSH_TIME, null) == null) {
        configuration.setInt(ASYNC_PERIODIC_FLUSH_TIME, DEFAULT_ASYNC_PERIODIC_FLUSH_TIME);
    }
    if (configuration.get(ASYNC_RETRY_COUNT, null) == null) {
        configuration.setInt(ASYNC_RETRY_COUNT, DEFAULT_ASYNC_RETRY_COUNT);
    }
    return new HBaseAsyncTemplate(configuration, queueSize);
}
From source file: com.nearinfinity.honeycomb.hbase.HTableProvider.java
License: Apache License

public HTableProvider(final Configuration configuration) {
    String hTableName = configuration.get(ConfigConstants.TABLE_NAME);
    long writeBufferSize = configuration.getLong(ConfigConstants.WRITE_BUFFER,
        ConfigConstants.DEFAULT_WRITE_BUFFER);
    int poolSize = configuration.getInt(ConfigConstants.TABLE_POOL_SIZE,
        ConfigConstants.DEFAULT_TABLE_POOL_SIZE);
    boolean autoFlush = configuration.getBoolean(ConfigConstants.AUTO_FLUSH,
        ConfigConstants.DEFAULT_AUTO_FLUSH);

    tableName = hTableName;
    tablePool = new HTablePool(configuration, poolSize, new HTableFactory(writeBufferSize, autoFlush));
}
From source file: com.netease.news.classifier.naivebayes.ThetaMapper.java
License: Apache License

@Override
protected void setup(Context ctx) throws IOException, InterruptedException {
    super.setup(ctx);
    Configuration conf = ctx.getConfiguration();
    float alphaI = conf.getFloat(ALPHA_I, 1.0f);
    Map<String, Vector> scores = BayesUtils.readScoresFromCache(conf);
    if (conf.getBoolean(TRAIN_COMPLEMENTARY, false)) {
        trainer = new ComplementaryThetaTrainer(scores.get(TrainNaiveBayesJob.WEIGHTS_PER_FEATURE),
            scores.get(TrainNaiveBayesJob.WEIGHTS_PER_LABEL), alphaI);
    } else {
        trainer = new StandardThetaTrainer(scores.get(TrainNaiveBayesJob.WEIGHTS_PER_FEATURE),
            scores.get(TrainNaiveBayesJob.WEIGHTS_PER_LABEL), alphaI);
    }
}
From source file: com.netflix.aegisthus.tools.StorageHelper.java
License: Apache License

public StorageHelper(Configuration config) {
    this.config = config;
    debug = config.getBoolean(CFG_STORAGE_DEBUG, false);
}
From source file: com.netflix.bdp.s3mper.listing.ConsistentListingAspect.java
License: Apache License

private void updateConfig(Configuration conf) {
    disabled = conf.getBoolean("s3mper.disable", disabled);
    if (disabled) {
        log.warn("S3mper Consistency explicitly disabled.");
        return;
    }

    darkload = conf.getBoolean("s3mper.darkload", darkload);
    failOnError = conf.getBoolean("s3mper.failOnError", failOnError);
    taskFailOnError = conf.getBoolean("s3mper.task.failOnError", taskFailOnError);
    checkTaskListings = conf.getBoolean("s3mper.listing.task.check", checkTaskListings);
    failOnTimeout = conf.getBoolean("s3mper.failOnTimeout", failOnTimeout);
    delistDeleteMarkedFiles = conf.getBoolean("s3mper.listing.delist.deleted", delistDeleteMarkedFiles);
    trackDirectories = conf.getBoolean("s3mper.listing.directory.tracking", trackDirectories);

    fileThreshold = conf.getFloat("s3mper.listing.threshold", fileThreshold);

    recheckCount = conf.getLong("s3mper.listing.recheck.count", recheckCount);
    recheckPeriod = conf.getLong("s3mper.listing.recheck.period", recheckPeriod);
    taskRecheckCount = conf.getLong("s3mper.listing.task.recheck.count", taskRecheckCount);
    taskRecheckPeriod = conf.getLong("s3mper.listing.task.recheck.period", taskRecheckPeriod);

    statOnMissingFile = conf.getBoolean("s3mper.listing.statOnMissingFile", false);
}