List of usage examples for org.apache.hadoop.conf.Configuration.get

public String get(String name, String defaultValue)

Returns the value of the name property. If no such property exists, defaultValue is returned.
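A minimal usage sketch (the property name "my.example.key" and the default value are made up for illustration):

import org.apache.hadoop.conf.Configuration;

public class ConfigurationGetExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Returns "fallback" unless my.example.key is set in the loaded resources.
        String value = conf.get("my.example.key", "fallback");
        System.out.println(value);
    }
}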
From source file: andromache.config.CassandraConfigHelper.java
License: Apache License

public static int getOutputRpcPort(Configuration conf) {
    if (conf.get(OUTPUT_THRIFT_PORT, "").equals("")) {
        return DEFAULT_THRIFT_PORT;
    }
    return Integer.parseInt(conf.get(OUTPUT_THRIFT_PORT));
}
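Since the stored value is numeric, the same lookup can be expressed with Configuration.getInt, which returns the default when the key is unset. A sketch, assuming OUTPUT_THRIFT_PORT is never set to an empty string (the case the original guards against):

public static int getOutputRpcPort(Configuration conf) {
    // getInt falls back to DEFAULT_THRIFT_PORT when the key is absent,
    // but unlike the original it would throw on an empty-string value.
    return conf.getInt(OUTPUT_THRIFT_PORT, DEFAULT_THRIFT_PORT);
}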
From source file: backup.datanode.DataNodeRestoreProcessor.java
License: Apache License

public DataNodeRestoreProcessor(Configuration conf, DataNode datanode) throws Exception {
    _closer = Closer.create();
    _datanode = datanode;
    _restoreThroughput = Metrics.METRICS.meter(RESTORE_THROUGHPUT);
    _bytesPerChecksum = conf.getInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY,
        DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT);
    _checksumType = Type.valueOf(
        conf.get(DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY, DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT));
    int threads = conf.getInt(DFS_BACKUP_DATANODE_RESTORE_BLOCK_HANDLER_COUNT_KEY,
        DFS_BACKUP_DATANODE_RESTORE_BLOCK_HANDLER_COUNT_DEFAULT);
    long pauseOnError = conf.getLong(DFS_BACKUP_DATANODE_RESTORE_ERROR_PAUSE_KEY,
        DFS_BACKUP_DATANODE_RESTORE_ERROR_PAUSE_DEFAULT);
    _backupStore = _closer.register(BackupStore.create(BackupUtil.convert(conf)));
    _restoreBlocks = new ArrayBlockingQueue<>(threads);
    _executorService = Executors.newCachedThreadPool();
    _closer.register((Closeable) () -> _executorService.shutdownNow());
    for (int t = 0; t < threads; t++) {
        _executorService.submit(Executable.createDaemon(LOG, pauseOnError, _running, () -> restoreBlocks()));
    }
}
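The Type.valueOf(conf.get(...)) line is a common way to read an enum-valued setting. Configuration.getEnum covers the same pattern directly; a sketch, assuming Type here is an enum with a CRC32C constant matching the string default (getEnum takes the enum constant itself rather than its string name):

// Parses the stored string with Enum.valueOf, returning the given
// constant when the key is unset.
Type checksumType = conf.getEnum(DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY, Type.CRC32C);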
From source file: backup.namenode.NameNodeBackupBlockCheckProcessor.java
License: Apache License

public NameNodeBackupBlockCheckProcessor(Configuration conf, NameNodeRestoreProcessor processor,
        NameNode namenode, UserGroupInformation ugi) throws Exception {
    String[] nnStorageLocations = conf.getStrings(DFS_NAMENODE_NAME_DIR);
    URI uri = new URI(nnStorageLocations[0]);
    _reportPath = new File(new File(uri.getPath()).getParent(), "backup-reports");
    _reportPath.mkdirs();
    if (!_reportPath.exists()) {
        throw new IOException("Report path " + _reportPath + " does not exist");
    }
    this.ugi = ugi;
    this.namenode = namenode;
    this.conf = conf;
    this.processor = processor;
    backupStore = BackupStore.create(BackupUtil.convert(conf));
    this.fileSystem = (DistributedFileSystem) FileSystem.get(conf);
    this.ignorePath = conf.get(DFS_BACKUP_IGNORE_PATH_FILE_KEY, DFS_BACKUP_IGNORE_PATH_FILE_DEFAULT);
    this.batchSize = conf.getInt(DFS_BACKUP_REMOTE_BACKUP_BATCH_KEY, DFS_BACKUP_REMOTE_BACKUP_BATCH_DEFAULT);
    this.checkInterval = conf.getLong(DFS_BACKUP_NAMENODE_BLOCK_CHECK_INTERVAL_KEY,
        DFS_BACKUP_NAMENODE_BLOCK_CHECK_INTERVAL_DEFAULT);
    this.initInterval = conf.getLong(DFS_BACKUP_NAMENODE_BLOCK_CHECK_INTERVAL_DELAY_KEY,
        DFS_BACKUP_NAMENODE_BLOCK_CHECK_INTERVAL_DELAY_DEFAULT);
    start();
}
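Alongside get, this constructor leans on the typed variants (getStrings, getInt, getLong). conf.getStrings splits a comma-delimited value into an array; a small sketch of that behavior (the directory values are made up):

Configuration conf = new Configuration();
conf.set("dfs.namenode.name.dir", "/data/nn1,/data/nn2"); // hypothetical value
String[] dirs = conf.getStrings("dfs.namenode.name.dir"); // ["/data/nn1", "/data/nn2"]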
From source file: be.uantwerpen.adrem.bigfim.AprioriPhaseMapper.java
License: Apache License

@Override
public void setup(Context context) throws IOException {
    Configuration conf = context.getConfiguration();
    delimiter = conf.get(DELIMITER_KEY, " ");
    Path[] localCacheFiles = getLocalCacheFiles(conf);
    countTrie = new ItemSetTrie.SupportCountItemsetTrie(-1);
    if (localCacheFiles != null) {
        String filename = localCacheFiles[0].toString();
        phase = readCountTrieFromItemSetsFile(filename, countTrie) + 1;
        singletons = getSingletonsFromCountTrie(countTrie);
    }
}
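The delimiter read here with conf.get(DELIMITER_KEY, " ") defaults to a single space unless the driver sets it on the job configuration. A hypothetical driver-side counterpart (the literal key behind DELIMITER_KEY is project-specific and shown as "delimiter" only for illustration):

Configuration conf = new Configuration();
conf.set("delimiter", "\t"); // mappers then pick this up in setup()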
From source file: be.uantwerpen.adrem.bigfim.ComputeTidListMapper.java
License: Apache License

@Override
public void setup(Context context) throws IOException {
    Configuration conf = context.getConfiguration();
    delimiter = conf.get(DELIMITER_KEY, " ");
    Path[] localCacheFiles = getLocalCacheFiles(conf);
    if (localCacheFiles != null) {
        String filename = localCacheFiles[0].toString();
        phase = readCountTrieFromItemSetsFile(filename, countTrie) + 1;
        singletons = getSingletonsFromCountTrie(countTrie);
    }
    id = context.getTaskAttemptID().getTaskID().getId();
}
From source file: be.uantwerpen.adrem.disteclat.ItemReaderReducer.java
License: Apache License

@Override
public void setup(Context context) {
    Configuration conf = context.getConfiguration();
    mos = new MultipleOutputs<IntWritable, Writable>(context);
    numberOfMappers = parseInt(conf.get(NUMBER_OF_MAPPERS_KEY, "1"));
    minSup = conf.getInt(MIN_SUP_KEY, -1);
    shortFisFilename = createPath(getJobAbsoluteOutputDir(context), OShortFIs, OShortFIs + "-1");
}
From source file: be.ugent.intec.halvade.utils.HalvadeConf.java
License: Open Source License

public static boolean getReportAllVariant(Configuration conf) {
    String s = conf.get(reportAllVariant, "false");
    if (s.equalsIgnoreCase("true"))
        return true;
    else
        return false;
}
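Configuration.getBoolean expresses this lookup directly; a minimal sketch, assuming the stored value is "true"/"false" (getBoolean matches those case-insensitively and returns the default for anything else, including an unset key):

public static boolean getReportAllVariant(Configuration conf) {
    return conf.getBoolean(reportAllVariant, false);
}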
From source file: boa.datagen.SeqProjectCombiner.java
License: Apache License

public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    conf.set("fs.default.name", "hdfs://boa-njt/");
    FileSystem fileSystem = FileSystem.get(conf);
    String base = conf.get("fs.default.name", "");
    HashMap<String, String> sources = new HashMap<String, String>();
    HashSet<String> marks = new HashSet<String>();
    FileStatus[] files = fileSystem.listStatus(new Path(base + "tmprepcache/2015-07"));
    for (int i = 0; i < files.length; i++) {
        FileStatus file = files[i];
        String name = file.getPath().getName();
        if (name.startsWith("projects-") && name.endsWith(".seq")) {
            System.out.println("Reading file " + i + " in " + files.length + ": " + name);
            SequenceFile.Reader r = new SequenceFile.Reader(fileSystem, file.getPath(), conf);
            final Text key = new Text();
            final BytesWritable value = new BytesWritable();
            try {
                while (r.next(key, value)) {
                    String s = key.toString();
                    if (marks.contains(s))
                        continue;
                    Project p = Project.parseFrom(
                        CodedInputStream.newInstance(value.getBytes(), 0, value.getLength()));
                    if (p.getCodeRepositoriesCount() > 0 && p.getCodeRepositories(0).getRevisionsCount() > 0)
                        marks.add(s);
                    sources.put(s, name);
                }
            } catch (Exception e) {
                System.err.println(name);
                e.printStackTrace();
            }
            r.close();
        }
    }
    SequenceFile.Writer w = SequenceFile.createWriter(fileSystem, conf,
        new Path(base + "repcache/2015-07/projects.seq"), Text.class, BytesWritable.class);
    for (int i = 0; i < files.length; i++) {
        FileStatus file = files[i];
        String name = file.getPath().getName();
        if (name.startsWith("projects-") && name.endsWith(".seq")) {
            System.out.println("Reading file " + i + " in " + files.length + ": " + name);
            SequenceFile.Reader r = new SequenceFile.Reader(fileSystem, file.getPath(), conf);
            final Text key = new Text();
            final BytesWritable value = new BytesWritable();
            try {
                while (r.next(key, value)) {
                    String s = key.toString();
                    if (sources.get(s).equals(name))
                        w.append(key, value);
                }
            } catch (Exception e) {
                System.err.println(name);
                e.printStackTrace();
            }
            r.close();
        }
    }
    w.close();
    fileSystem.close();
}
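Note that fs.default.name is the deprecated name for the default-filesystem key; on Hadoop 2+ the same setting is normally written with fs.defaultFS, which the built-in deprecation mapping keeps interchangeable with the old key:

conf.set("fs.defaultFS", "hdfs://boa-njt/"); // modern spelling of the key used above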
From source file: cascading.flow.hadoop.util.HadoopUtil.java
License: Open Source License

public static void initLog4j(Configuration configuration) {
    String values = configuration.get("log4j.logger", null);
    if (values == null || values.length() == 0)
        return;
    if (!Util.hasClass("org.apache.log4j.Logger")) {
        LOG.info("org.apache.log4j.Logger is not in the current CLASSPATH, not setting log4j.logger properties");
        return;
    }
    String[] elements = values.split(",");
    for (String element : elements)
        LogUtil.setLog4jLevel(element.split("="));
}
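Given the splits on "," and then "=", the log4j.logger property is expected to hold comma-separated logger=level pairs. A hypothetical value for illustration (the logger names and levels are made up):

configuration.set("log4j.logger", "cascading=INFO,org.apache.hadoop=WARN");
initLog4j(configuration); // applies INFO to cascading and WARN to org.apache.hadoop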
From source file: cascading.flow.tez.Hadoop2TezFlowStep.java
License: Open Source License

protected String getOutputPath(Configuration sinkConf) {
    return sinkConf.get("mapred.output.dir", sinkConf.get("mapreduce.output.fileoutputformat.outputdir"));
}
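Because the two-argument get only consults the default when the first key is unset, this prefers the legacy MRv1 key mapred.output.dir and falls back to the MRv2 key, returning null when neither is set. The same fallback chain written as a hypothetical helper (firstSet is not part of the Hadoop API):

static String firstSet(Configuration conf, String... keys) {
    for (String key : keys) {
        String value = conf.get(key); // single-argument get returns null when unset
        if (value != null)
            return value;
    }
    return null;
}

// Usage: firstSet(sinkConf, "mapred.output.dir", "mapreduce.output.fileoutputformat.outputdir");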