List of usage examples for org.apache.hadoop.conf Configuration getInt
public int getInt(String name, int defaultValue)
Gets the value of the name property as an int, returning defaultValue if the property is not set.
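Before the sourced examples below, a minimal self-contained sketch of the call; the property names used here ("my.example.size", "my.unset.size") are hypothetical and chosen only for illustration.

import org.apache.hadoop.conf.Configuration;

public class GetIntSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Hypothetical property, set in code for this sketch.
        conf.setInt("my.example.size", 8192);

        // Property is present: getInt parses and returns its value (8192).
        int size = conf.getInt("my.example.size", 4096);

        // Property is absent: getInt falls back to the supplied default (42).
        int missing = conf.getInt("my.unset.size", 42);

        System.out.println(size + " " + missing);
    }
}

Run with hadoop-common on the classpath; the same default-value pattern appears throughout the examples below.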
From source file:crunch.MaxTemperature.java
License:Apache License
@Test
public void get() throws IOException {
    // Single test as an expedient for inclusion in the book
    // vv MultipleResourceConfigurationTest
    Configuration conf = new Configuration();
    conf.addResource("configuration-1.xml");
    conf.addResource("configuration-2.xml");
    // ^^ MultipleResourceConfigurationTest

    assertThat(conf.get("color"), is("yellow"));

    // override
    // vv MultipleResourceConfigurationTest-Override
    assertThat(conf.getInt("size", 0), is(12));
    // ^^ MultipleResourceConfigurationTest-Override

    // final properties cannot be overridden
    // vv MultipleResourceConfigurationTest-Final
    assertThat(conf.get("weight"), is("heavy"));
    // ^^ MultipleResourceConfigurationTest-Final

    // variable expansion
    // vv MultipleResourceConfigurationTest-Expansion
    assertThat(conf.get("size-weight"), is("12,heavy"));
    // ^^ MultipleResourceConfigurationTest-Expansion

    // variable expansion with system properties
    // vv MultipleResourceConfigurationTest-SystemExpansion
    System.setProperty("size", "14");
    assertThat(conf.get("size-weight"), is("14,heavy"));
    // ^^ MultipleResourceConfigurationTest-SystemExpansion

    // system properties are not picked up
    // vv MultipleResourceConfigurationTest-NoSystemByDefault
    System.setProperty("length", "2");
    assertThat(conf.get("length"), is((String) null));
    // ^^ MultipleResourceConfigurationTest-NoSystemByDefault
}
From source file:crunch.MaxTemperature.java
License:Apache License
@Test
public void get() throws IOException {
    // vv SingleResourceConfigurationTest
    Configuration conf = new Configuration();
    conf.addResource("configuration-1.xml");

    assertThat(conf.get("color"), is("yellow"));
    assertThat(conf.getInt("size", 0), is(10));
    assertThat(conf.get("breadth", "wide"), is("wide"));
    // ^^ SingleResourceConfigurationTest
}
From source file:DAAL.CovarianceDenseStep1Mapper.java
License:Open Source License
@Override
public void setup(Context context) {
    index = context.getTaskAttemptID().getTaskID().getId();
    Configuration conf = context.getConfiguration();
    totalTasks = conf.getInt("mapred.map.tasks", 0);
}
From source file:de.l3s.concatgz.io.ImmediateOutput.java
License:Open Source License
public ImmediateOutput(TaskInputOutputContext context, boolean flushOnWrite) throws IOException {
    this.context = context;
    this.flushOnWrite = flushOnWrite;
    Configuration conf = context.getConfiguration();
    this.dir = getPath(conf);
    this.fs = FileSystem.newInstance(conf);
    this.bufferSize = conf.getInt("io.file.buffer.size", 4096);
    this.replication = getReplication(conf);
    String idPrefix = getIdPrefix(conf);
    file = "" + context.getTaskAttemptID().getTaskID().getId();
    while (file.length() < 5)
        file = "0" + file;
    if (idPrefix.length() > 0)
        file = idPrefix + "-" + file;
    file = "-" + file;
}
From source file:de.l3s.concatgz.io.ImmediateOutput.java
License:Open Source License
public static short getReplication(Configuration conf) {
    return (short) conf.getInt(REPLICATION_KEY, 2);
}
From source file:de.rwhq.hdfs.index.LineRecordReader.java
License:Apache License
public void initialize(InputSplit genericSplit, TaskAttemptContext context) throws IOException {
    FileSplit split = (FileSplit) genericSplit;
    Configuration job = context.getConfiguration();
    this.maxLineLength = job.getInt("mapred.linerecordreader.maxlength", Integer.MAX_VALUE);
    start = split.getStart();
    end = start + split.getLength();
    final Path file = split.getPath();
    compressionCodecs = new CompressionCodecFactory(job);
    final CompressionCodec codec = compressionCodecs.getCodec(file);

    // open the file and seek to the start of the split
    FileSystem fs = file.getFileSystem(job);
    fileIn = fs.open(split.getPath());
    boolean skipFirstLine = false;
    if (codec != null) {
        in = new LineReader(codec.createInputStream(fileIn), job);
        end = Long.MAX_VALUE;
    } else {
        if (start != 0) {
            skipFirstLine = true;
            --start;
            fileIn.seek(start);
        }
        in = new LineReader(fileIn, job);
    }
    if (skipFirstLine) {
        // skip first line and re-establish "start".
        start += in.readLine(new Text(), 0, (int) Math.min((long) Integer.MAX_VALUE, end - start));
    }
    this.pos = start;
}
From source file:de.tudarmstadt.lt.n2n.hadoop.GoogleSyntacticsJobDkbd.java
License:Apache License
@Override
public AnalysisEngineDescription buildMapperEngine(Configuration conf) throws ResourceInitializationException {
    try {
        String extractorConfigurationFiles = conf.get(SHARED_CONSTANTS.PARAM_EXTRACTORCONFIGS);
        String[] extractorConfigurationFilesArr = extractorConfigurationFiles.split(",");
        for (int i = 0; i < extractorConfigurationFilesArr.length; i++) {
            String extractorConfigurationFileName = new File(extractorConfigurationFilesArr[i]).getName();
            for (Path p : DistributedCache.getLocalCacheFiles(conf))
                if (p.getName().contains(extractorConfigurationFileName))
                    extractorConfigurationFilesArr[i] = p.toString();
        }

        int maxlength = conf.getInt(SHARED_CONSTANTS.PARAM_MAXIMUM_PATHLENGTH, -1);

        AggregateBuilder builder = new AggregateBuilder();
        // builder.add(AnalysisEngineFactory.createEngineDescription(MetaDataAnnotator.class));
        builder.add(AnalysisEngineFactory.createEngineDescription(JoBimRelationPipeline
                .createGoogleSyntacticsRelationEngine(true/* create_tokens */, true/* create_sentences */,
                        true/* create_dependencies */, true/* create_new_relations */,
                        true/* create_dependency_path */, false/* ignore_nn_relations */,
                        maxlength/* dependecy_path_maxlength */, false/* create_detailed_output */,
                        extractorConfigurationFilesArr/* extractor_configuration */,
                        SHARED_CONSTANTS.HADOOP_CAS_CONSUMER_OUTPUT_FILENAME/* output_destination */)));
        return builder.createAggregateDescription();
    } catch (IOException e) {
        throw new ResourceInitializationException(e);
    }
}
From source file:de.tudarmstadt.ukp.dkpro.bigdata.collocations.CollocMapper.java
License:Apache License
@Override
protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Configuration conf = context.getConfiguration();
    this.window = conf.getInt(CollocDriver.WINDOW_SIZE, 3);
    this.windowMode = Window.valueOf(conf.get(CollocDriver.WINDOW_TYPE, Window.SENTENCE.name()));
    this.emitUnigrams = conf.getBoolean(CollocDriver.EMIT_UNIGRAMS, CollocDriver.DEFAULT_EMIT_UNIGRAMS);
    this.metadata = new ResourceMetaData_impl();
    final Element aElement;
    final XMLParser aParser = org.apache.uima.UIMAFramework.getXMLParser();
    // try {
    //     this.metadata = aParser.parseResourceMetaData(new XMLInputSource(new StringInputStream(
    //             Metadata.getMetadata()), new File(".")));
    // }
    // catch (final InvalidXMLException e1) {
    //     // TODO Auto-generated catch block
    //     e1.printStackTrace();
    // }
    if (log.isInfoEnabled()) {
        // log.info("Max Ngram size is {}", this.maxShingleSize);
        log.info("Emit Unitgrams is {}", emitUnigrams);
        log.info("Window Mode is {}", this.windowMode.name());
        log.info("Window Size is {}", window);
        log.info("Emit Unitgrams is {}", emitUnigrams);
    }
}
From source file:de.tudarmstadt.ukp.dkpro.bigdata.collocations.CollocReducer.java
License:Apache License
@Override
protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Configuration conf = context.getConfiguration();
    this.minSupport = conf.getInt(MIN_SUPPORT, DEFAULT_MIN_SUPPORT);
    boolean emitUnigrams = conf.getBoolean(CollocDriver.EMIT_UNIGRAMS, CollocDriver.DEFAULT_EMIT_UNIGRAMS);
    emitUnigrams = true;
    log.info("Min support is {}", minSupport);
    log.info("Emit Unitgrams is {}", emitUnigrams);
}
From source file:diamondmapreduce.NLineRecordReader.java
License:Apache License
@Override
public void initialize(InputSplit genericSplit, TaskAttemptContext context) throws IOException, InterruptedException {
    FileSplit split = (FileSplit) genericSplit;
    final Path file = split.getPath();
    Configuration conf = context.getConfiguration();
    this.maxLineLength = conf.getInt("mapreduce.input.linerecordreader.line.maxlength", Integer.MAX_VALUE);
    FileSystem fs = file.getFileSystem(conf);
    start = split.getStart();
    end = start + split.getLength();
    boolean skipFirstLine = false;
    FSDataInputStream filein = fs.open(split.getPath());
    if (start != 0) {
        skipFirstLine = true;
        --start;
        filein.seek(start);
    }
    in = new LineReader(filein, conf);
    if (skipFirstLine) {
        start += in.readLine(new Text(), 0, (int) Math.min((long) Integer.MAX_VALUE, end - start));
    }
    this.pos = start;
}