List of usage examples for org.apache.hadoop.conf.Configuration#getBoolean

public boolean getBoolean(String name, boolean defaultValue)

Get the value of the name property as a boolean. If no such property is specified, or if the specified value is not a valid boolean, then defaultValue is returned.
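As a quick orientation before the examples below, here is a minimal self-contained sketch of how getBoolean behaves; the property names my.feature.enabled and my.other.flag are invented for illustration:

import org.apache.hadoop.conf.Configuration;

public class GetBooleanExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // "my.feature.enabled" is a hypothetical property name used only in this sketch.
        conf.set("my.feature.enabled", "true");
        // The property is set to a valid boolean, so getBoolean returns true.
        boolean enabled = conf.getBoolean("my.feature.enabled", false);
        // "my.other.flag" was never set, so the supplied default (false) is returned.
        boolean missing = conf.getBoolean("my.other.flag", false);
        System.out.println(enabled + " " + missing); // prints: true false
    }
}

In recent Hadoop releases the true/false comparison is case-insensitive, and any value that does not parse as a boolean also falls back to defaultValue.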
From source file:com.aliyun.odps.volume.VolumeFSInputStream.java
License:Apache License
public VolumeFSInputStream(String path, VolumeFSClient volumeClient, Long fileLength, Configuration conf)
        throws IOException {
    this.path = path;
    this.volumeFSClient = volumeClient;
    this.seekOptimization = conf.getBoolean(VolumeFileSystemConfigKeys.ODPS_VOLUME_SEEK_OPTIMIZATION_ENABLED,
            false);
    if (this.seekOptimization) {
        this.blockSize = conf.getLong(VolumeFileSystemConfigKeys.ODPS_VOLUME_BLOCK_SIZE,
                VolumeFSConstants.DEFAULT_VOLUME_BLOCK_SIZE);
    }
    this.fileLength = fileLength;
    this.closed = false;
    this.uuid = UUID.randomUUID().toString();
    buffer_block_dir = new File(conf.get(VolumeFileSystemConfigKeys.ODPS_VOLUME_BLOCK_BUFFER_DIR,
            VolumeFSConstants.DEFAULT_VOLUME_BLOCK_BUFFER_DIR));
    if (!buffer_block_dir.exists() && !buffer_block_dir.mkdirs()) {
        throw new IOException("Cannot create Volume block buffer directory: " + buffer_block_dir);
    }
    if (seekOptimization) {
        executorService = Executors.newFixedThreadPool(1);
    }
}
From source file:com.asakusafw.runtime.mapreduce.simple.SimpleJobRunner.java
License:Apache License
private KeyValueSorter.Options getSorterOptions(Configuration configuration) {
    long bufferSize = configuration.getLong(KEY_BUFFER_SIZE, -1);
    if (bufferSize < 0) {
        bufferSize = DEFAULT_BUFFER_SIZE;
    } else {
        bufferSize = Math.max(MIN_BUFFER_SIZE, Math.min(MAX_BUFFER_SIZE, bufferSize));
    }
    File temporaryDirectory = null;
    String tempdirString = configuration.get(KEY_TEMPORARY_LOCATION);
    if (tempdirString != null) {
        temporaryDirectory = new File(tempdirString);
        if (temporaryDirectory.mkdirs() == false && temporaryDirectory.isDirectory() == false) {
            LOG.warn(MessageFormat.format("failed to prepare shuffle temporary directory: {0}={1}",
                    KEY_TEMPORARY_LOCATION, temporaryDirectory));
        }
    }
    boolean compress = configuration.getBoolean(KEY_COMPRESS_BLOCK, DEFAULT_COMPRESS_BLOCK);
    KeyValueSorter.Options options = new KeyValueSorter.Options()
            .withBufferSize((int) bufferSize)
            .withTemporaryDirectory(temporaryDirectory)
            .withCompressBlock(compress);
    return options;
}
From source file:com.asakusafw.runtime.stage.configurator.AutoLocalStageConfigurator.java
License:Apache License
private void localize(Job job) {
    Configuration conf = job.getConfiguration();

    // reset job-tracker
    conf.set(KEY_JOBTRACKER, DEFAULT_JOBTRACKER);

    // replace local directories
    String tmpDir = conf.get(KEY_TEMPORARY_DIRECTORY, "");
    if (tmpDir.isEmpty()) {
        String name = System.getProperty("user.name", "asakusa");
        tmpDir = String.format("/tmp/hadoop-%s/autolocal", name);
    } else if (tmpDir.length() > 1 && tmpDir.endsWith("/")) {
        tmpDir = tmpDir.substring(0, tmpDir.length() - 1);
    }
    if (conf.getBoolean(KEY_DIRECTORY_QUALIFIER, true)) {
        String qualifier = UUID.randomUUID().toString();
        tmpDir = String.format("%s/%s", tmpDir, qualifier);
    }
    LOG.info(MessageFormat.format("Substituting temporary dir: job={0}, target={1}",
            job.getJobName(), tmpDir));
    conf.set(KEY_LOCAL_DIR, tmpDir + "/mapred/local");
    conf.set(KEY_STAGING_DIR, tmpDir + "/mapred/staging");
}
From source file:com.asakusafw.runtime.stage.optimizer.LibraryCopySuppressionConfigurator.java
License:Apache License
@Override
public void configure(Job job) throws IOException, InterruptedException {
    Configuration conf = job.getConfiguration();
    if (conf.getBoolean(KEY_ENABLED, DEFAULT_ENABLED) == false) {
        return;
    }
    // activates only if application launcher is used
    if (conf.getBoolean(ApplicationLauncher.KEY_LAUNCHER_USED, false) == false) {
        return;
    }
    if (JobCompatibility.isLocalMode(job) == false) {
        return;
    }
    String libraries = conf.get(KEY_CONF_LIBRARIES);
    if (libraries == null || libraries.isEmpty()) {
        return;
    }
    Set<String> loaded = new HashSet<>();
    ClassLoader loader = conf.getClassLoader();
    if (loader instanceof URLClassLoader) {
        for (URL url : ((URLClassLoader) loader).getURLs()) {
            try {
                loaded.add(url.toURI().toString());
            } catch (URISyntaxException e) {
                LOG.warn(MessageFormat.format("Failed to analyze classpath: {0}", url));
            }
        }
    }
    if (loaded.isEmpty()) {
        return;
    }
    StringBuilder result = new StringBuilder();
    for (String library : libraries.split(",")) { //$NON-NLS-1$
        if (loaded.contains(library)) {
            if (LOG.isDebugEnabled()) {
                LOG.debug(MessageFormat.format("Keep library: {0}", library)); //$NON-NLS-1$
            }
        } else {
            if (LOG.isDebugEnabled()) {
                LOG.debug(MessageFormat.format("Suppress library: {0}", library)); //$NON-NLS-1$
            }
            if (result.length() != 0) {
                result.append(',');
            }
            result.append(library);
        }
    }
    if (result.length() > 0) {
        conf.set(KEY_CONF_LIBRARIES, result.toString());
    } else {
        if (CONFIGURATION_UNSET != null) {
            try {
                CONFIGURATION_UNSET.invoke(conf, KEY_CONF_LIBRARIES);
                return;
            } catch (Exception e) {
                LOG.warn(MessageFormat.format("Failed to invoke {0}", CONFIGURATION_UNSET), e);
            }
        }
        String newLibraries = selectLibraries(libraries);
        conf.set(KEY_CONF_LIBRARIES, newLibraries);
    }
}
From source file:com.asakusafw.runtime.util.cache.HadoopFileCacheRepository.java
License:Apache License
/**
 * Creates a new instance.
 * @param configuration the current configuration
 * @param repository the cache root path (must be absolute)
 * @param lockProvider the cache lock provider
 * @param retryStrategy the retry strategy
 */
public HadoopFileCacheRepository(Configuration configuration, Path repository,
        LockProvider<? super Path> lockProvider, RetryStrategy retryStrategy) {
    if (repository.toUri().getScheme() == null) {
        throw new IllegalArgumentException(
                MessageFormat.format("Cache repository location must contain the scheme: {0}", repository));
    }
    this.configuration = configuration;
    this.repository = repository;
    this.lockProvider = lockProvider;
    this.retryStrategy = retryStrategy;
    this.checkBeforeDelete = configuration.getBoolean(KEY_CHECK_BEFORE_DELETE, DEFAULT_CHECK_BEFORE_DELETE);
}
From source file:com.asakusafw.runtime.workaround.snappyjava.MacSnappyJavaWorkaround.java
License:Apache License
@Override
public void configure(Job job) throws IOException, InterruptedException {
    Configuration conf = job.getConfiguration();
    if (conf.getBoolean(KEY_ENABLED, DEFAULT_ENABLED) == false) {
        return;
    }
    install(conf.getBoolean(KEY_SKIP_ON_UNKNOWN, DEFAULT_SKIP_ON_UNKNOWN));
}
From source file:com.atlantbh.nutch.filter.xpath.DOMContentUtils.java
License:Apache License
public void setConf(Configuration conf) {
    // forceTags is used to override configurable tag ignoring, later on
    Collection<String> forceTags = new ArrayList<String>(1);

    this.conf = conf;
    linkParams.clear();
    linkParams.put("a", new LinkParams("a", "href", 1));
    linkParams.put("area", new LinkParams("area", "href", 0));
    if (conf.getBoolean("parser.html.form.use_action", true)) {
        linkParams.put("form", new LinkParams("form", "action", 1));
        if (conf.get("parser.html.form.use_action") != null)
            forceTags.add("form");
    }
    linkParams.put("frame", new LinkParams("frame", "src", 0));
    linkParams.put("iframe", new LinkParams("iframe", "src", 0));
    linkParams.put("script", new LinkParams("script", "src", 0));
    linkParams.put("link", new LinkParams("link", "href", 0));
    linkParams.put("img", new LinkParams("img", "src", 0));

    // remove unwanted link tags from the linkParams map
    String[] ignoreTags = conf.getStrings("parser.html.outlinks.ignore_tags");
    for (int i = 0; ignoreTags != null && i < ignoreTags.length; i++) {
        if (!forceTags.contains(ignoreTags[i]))
            linkParams.remove(ignoreTags[i]);
    }
}
From source file:com.bah.culvert.data.index.Index.java
License:Apache License
/**
 * Get the contents of a key that might be binary.
 * @param isBinarySettingKey Tells us whether or not the field is binary.
 * @param potentiallyBinaryEncodedSetting The actual field name that might
 *        contain binary data.
 * @param conf The configuration to retrieve from
 * @return The decoded value to return.
 */
private static byte[] getBinaryConfSetting(String isBinarySettingKey, String potentiallyBinaryEncodedSetting,
        Configuration conf) {
    String value = conf.get(potentiallyBinaryEncodedSetting);
    boolean isBase64 = conf.getBoolean(isBinarySettingKey, false);
    if (isBase64) {
        return Base64.decodeBase64(value.getBytes());
    } else {
        return value.getBytes();
    }
}
From source file:com.bah.lucene.BlockCacheDirectoryFactoryV1.java
License:Apache License
public BlockCacheDirectoryFactoryV1(Configuration configuration, long totalNumberOfBytes) {
    // setup block cache
    // 134,217,728 is the slab size, therefore there are 16,384 blocks
    // in a slab when using a block size of 8,192
    int numberOfBlocksPerSlab = 16384;
    int blockSize = BlockDirectory.BLOCK_SIZE;
    int slabCount = configuration.getInt(BLUR_SHARD_BLOCKCACHE_SLAB_COUNT, -1);
    slabCount = getSlabCount(slabCount, numberOfBlocksPerSlab, blockSize, totalNumberOfBytes);
    Cache cache;
    if (slabCount >= 1) {
        BlockCache blockCache;
        boolean directAllocation = configuration.getBoolean(BLUR_SHARD_BLOCKCACHE_DIRECT_MEMORY_ALLOCATION,
                true);
        int slabSize = numberOfBlocksPerSlab * blockSize;
        LOG.info(MessageFormat.format(
                "Number of slabs of block cache [{0}] with direct memory allocation set to [{1}]",
                slabCount, directAllocation));
        LOG.info(MessageFormat.format(
                "Block cache target memory usage, slab size of [{0}] will allocate [{1}] slabs and use ~[{2}] bytes",
                slabSize, slabCount, ((long) slabCount * (long) slabSize)));
        try {
            long totalMemory = (long) slabCount * (long) numberOfBlocksPerSlab * (long) blockSize;
            blockCache = new BlockCache(directAllocation, totalMemory, slabSize);
        } catch (OutOfMemoryError e) {
            if ("Direct buffer memory".equals(e.getMessage())) {
                System.err.println(
                        "The max direct memory is too low. Either increase by setting (-XX:MaxDirectMemorySize=<size>g -XX:+UseLargePages) or disable direct allocation by (blur.shard.blockcache.direct.memory.allocation=false) in blur-site.properties");
                System.exit(1);
            }
            throw e;
        }
        cache = new BlockDirectoryCache(blockCache);
    } else {
        cache = BlockDirectory.NO_CACHE;
    }
    _cache = cache;
}
From source file:com.bah.lucene.BlockCacheDirectoryFactoryV2.java
License:Apache License
public BlockCacheDirectoryFactoryV2(Configuration configuration, long totalNumberOfBytes) {
    final int fileBufferSizeInt = configuration.getInt(BLUR_SHARD_BLOCK_CACHE_V2_FILE_BUFFER_SIZE, 8192);
    LOG.info(MessageFormat.format("{0}={1}", BLUR_SHARD_BLOCK_CACHE_V2_FILE_BUFFER_SIZE, fileBufferSizeInt));
    final int cacheBlockSizeInt = configuration.getInt(BLUR_SHARD_BLOCK_CACHE_V2_CACHE_BLOCK_SIZE, 8192);
    LOG.info(MessageFormat.format("{0}={1}", BLUR_SHARD_BLOCK_CACHE_V2_CACHE_BLOCK_SIZE, cacheBlockSizeInt));
    final Map<String, Integer> cacheBlockSizeMap = new HashMap<String, Integer>();
    for (Entry<String, String> prop : configuration) {
        String key = prop.getKey();
        if (key.startsWith(BLUR_SHARD_BLOCK_CACHE_V2_CACHE_BLOCK_SIZE_PREFIX)) {
            String value = prop.getValue();
            int cacheBlockSizeForFile = Integer.parseInt(value);
            String fieldType = key.substring(BLUR_SHARD_BLOCK_CACHE_V2_CACHE_BLOCK_SIZE_PREFIX.length());
            cacheBlockSizeMap.put(fieldType, cacheBlockSizeForFile);
            LOG.info(MessageFormat.format("{0}={1} for file type [{2}]", key, cacheBlockSizeForFile,
                    fieldType));
        }
    }
    final STORE store = STORE.valueOf(configuration.get(BLUR_SHARD_BLOCK_CACHE_V2_STORE, OFF_HEAP));
    LOG.info(MessageFormat.format("{0}={1}", BLUR_SHARD_BLOCK_CACHE_V2_STORE, store));
    final Set<String> cachingFileExtensionsForRead = getSet(
            configuration.get(BLUR_SHARD_BLOCK_CACHE_V2_READ_CACHE_EXT, DEFAULT_VALUE));
    LOG.info(MessageFormat.format("{0}={1}", BLUR_SHARD_BLOCK_CACHE_V2_READ_CACHE_EXT,
            cachingFileExtensionsForRead));
    final Set<String> nonCachingFileExtensionsForRead = getSet(
            configuration.get(BLUR_SHARD_BLOCK_CACHE_V2_READ_NOCACHE_EXT, DEFAULT_VALUE));
    LOG.info(MessageFormat.format("{0}={1}", BLUR_SHARD_BLOCK_CACHE_V2_READ_NOCACHE_EXT,
            nonCachingFileExtensionsForRead));
    final boolean defaultReadCaching = configuration.getBoolean(BLUR_SHARD_BLOCK_CACHE_V2_READ_DEFAULT, true);
    LOG.info(MessageFormat.format("{0}={1}", BLUR_SHARD_BLOCK_CACHE_V2_READ_DEFAULT, defaultReadCaching));
    final Set<String> cachingFileExtensionsForWrite = getSet(
            configuration.get(BLUR_SHARD_BLOCK_CACHE_V2_WRITE_CACHE_EXT, DEFAULT_VALUE));
    LOG.info(MessageFormat.format("{0}={1}", BLUR_SHARD_BLOCK_CACHE_V2_WRITE_CACHE_EXT,
            cachingFileExtensionsForWrite));
    final Set<String> nonCachingFileExtensionsForWrite = getSet(
            configuration.get(BLUR_SHARD_BLOCK_CACHE_V2_WRITE_NOCACHE_EXT, DEFAULT_VALUE));
    LOG.info(MessageFormat.format("{0}={1}", BLUR_SHARD_BLOCK_CACHE_V2_WRITE_NOCACHE_EXT,
            nonCachingFileExtensionsForWrite));
    final boolean defaultWriteCaching = configuration.getBoolean(BLUR_SHARD_BLOCK_CACHE_V2_WRITE_DEFAULT, true);
    LOG.info(MessageFormat.format("{0}={1}", BLUR_SHARD_BLOCK_CACHE_V2_WRITE_DEFAULT, defaultWriteCaching));
    Size fileBufferSize = new Size() {
        @Override
        public int getSize(CacheDirectory directory, String fileName) {
            return fileBufferSizeInt;
        }
    };
    Size cacheBlockSize = new Size() {
        @Override
        public int getSize(CacheDirectory directory, String fileName) {
            String ext = getExt(fileName);
            Integer size = cacheBlockSizeMap.get(ext);
            if (size != null) {
                return size;
            }
            return cacheBlockSizeInt;
        }
    };
    FileNameFilter readFilter = new FileNameFilter() {
        @Override
        public boolean accept(CacheDirectory directory, String fileName) {
            String ext = getExt(fileName);
            if (cachingFileExtensionsForRead.contains(ext)) {
                return true;
            } else if (nonCachingFileExtensionsForRead.contains(ext)) {
                return false;
            }
            return defaultReadCaching;
        }
    };
    FileNameFilter writeFilter = new FileNameFilter() {
        @Override
        public boolean accept(CacheDirectory directory, String fileName) {
            String ext = getExt(fileName);
            if (cachingFileExtensionsForWrite.contains(ext)) {
                return true;
            } else if (nonCachingFileExtensionsForWrite.contains(ext)) {
                return false;
            }
            return defaultWriteCaching;
        }
    };
    Quiet quiet = new Quiet() {
        @Override
        public boolean shouldBeQuiet(CacheDirectory directory, String fileName) {
            Thread thread = Thread.currentThread();
            String name = thread.getName();
            if (name.startsWith(SHARED_MERGE_SCHEDULER)) {
                return true;
            }
            return false;
        }
    };
    _cache = new BaseCache(totalNumberOfBytes, fileBufferSize, cacheBlockSize, readFilter, writeFilter, quiet,
            store);
}