List of usage examples for org.apache.hadoop.conf.Configuration
public Configuration()
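Before the examples, a minimal sketch of what the no-argument constructor gives you: a Configuration preloaded with core-default.xml and core-site.xml from the classpath, whose properties can then be read or overridden (the property names below are standard Hadoop keys):

    // Minimal sketch of Configuration basics.
    Configuration conf = new Configuration();                 // loads core-default.xml and core-site.xml
    String fsUri = conf.get("fs.defaultFS");                  // read a property (null if unset)
    conf.set("mapreduce.job.reduces", "4");                   // override a property programmatically
    int reducers = conf.getInt("mapreduce.job.reduces", 1);   // typed read with a default value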
From source file: WriteFileFunctionTest.java
License: Open Source License

static void writeFile(String filename) throws Exception {
    Head head = new Head();
    FieldMap fieldMap = new FieldMap();
    // Fields 0-5 are single bytes; field 6 is a variable-length string.
    for (short i = 0; i < 6; i++) {
        fieldMap.addField(new Field(ConstVar.FieldType_Byte, ConstVar.Sizeof_Byte, i));
    }
    fieldMap.addField(new Field(ConstVar.FieldType_String, 0, (short) 6));
    head.setFieldMap(fieldMap);

    Configuration conf = new Configuration();
    FormatDataFile fd = new FormatDataFile(conf);
    fd.create(filename, head);

    // Three records of 7 fields each; the byte fields start at 0, 10, and 20.
    byte[] bases = { 0, 10, 20 };
    String[] strings = { "testtest", "testtesttest", "testttesttestest" };
    for (int r = 0; r < bases.length; r++) {
        Record record = new Record((short) 7);
        for (short i = 0; i < 6; i++) {
            record.addValue(new FieldValue((byte) (bases[r] + i), i));
        }
        record.addValue(new FieldValue(strings[r], (short) 6));
        fd.addRecord(record);
    }
    fd.close();
}
From source file: WriteFileFunctionTest.java
License: Open Source License

static void getRecordByValue(String filename) throws Exception {
    Configuration conf = new Configuration();
    FormatDataFile fd3 = new FormatDataFile(conf);
    fd3.open(filename);

    FieldValue[] values = new FieldValue[1];
    values[0] = new FieldValue((byte) 1, (short) 1);
    Record[] records = fd3.getRecordByOrder(values, values.length);
    if (records != null) {
        // The test expects a null result for this value; a non-null result is
        // flagged and then dumped. (The original iterated unconditionally,
        // which would throw a NullPointerException in the expected null case.)
        System.out.println("should get null");
        for (int j = 0; j < records.length; j++) {
            ArrayList<FieldValue> vals = records[j].fieldValues();
            for (int k = 0; k < vals.size(); k++) {
                System.out.print(vals.get(k).toObject() + "\t");
            }
            System.out.println();
        }
    }
}
From source file: WriteFileFunctionTest.java
License: Open Source License

static void getRecordByLine(String filename, int line) throws Exception {
    Configuration conf = new Configuration();
    FormatDataFile fd2 = new FormatDataFile(conf);
    fd2.open(filename);

    // Line -1 is out of range, so a null record is expected here.
    Record record = fd2.getRecordByLine(-1);
    if (record != null) {
        System.out.println("should get null, line -1");
        fd2.close();
        return;
    }
    Record re = fd2.getRecordByLine(line);
    ArrayList<FieldValue> vals = re.fieldValues();
    for (int i = 0; i < vals.size(); i++) {
        System.out.print(vals.get(i).toObject() + "\t");
    }
    System.out.println();
    fd2.close();  // close on the normal path as well
}
From source file: Edge.java
License: Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: edge <in> <out>");
        System.exit(2);
    }
    Path tempDir = new Path("/temp/edge");

    // First job: word count into a temporary directory.
    Job job = new Job(conf, "word count");
    job.setJarByClass(Edge.class);
    job.setMapperClass(SplitMapper.class);
    job.setCombinerClass(DuplicateCombiner.class);
    //job.setSortComparatorClass(DecentComparator.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, tempDir);

    // Second job: consumes the first job's output and writes the final result.
    if (job.waitForCompletion(true)) {
        Job job2 = new Job(conf, "edge");
        job2.setJarByClass(Edge.class);
        job2.setMapperClass(SwitchMapper.class);
        job2.setSortComparatorClass(DecentComparator.class);
        job2.setReducerClass(SwitchReducer.class);
        job2.setOutputKeyClass(Text.class);
        job2.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job2, tempDir);
        FileOutputFormat.setOutputPath(job2, new Path(otherArgs[1]));
        System.exit(job2.waitForCompletion(true) ? 0 : 1);
    }
    // The first job failed; calling waitForCompletion again on a completed job
    // would throw, so exit with a failure code directly.
    System.exit(1);
}
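The intermediate directory /temp/edge is left behind after the second job finishes. A possible cleanup step, shown as a hedged sketch (it assumes tempDir lives on the default FileSystem named by conf, and is not part of the original source), would be:

    // Hedged sketch: remove the intermediate directory once job2 has consumed it.
    FileSystem fs = FileSystem.get(conf);
    fs.delete(tempDir, true);  // true = recursive delete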
From source file: LinkedGraph.java
License: Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: linkedgraph <in> <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "Graph");
    job.setJarByClass(LinkedGraph.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
From source file: BwaInterpreter.java
License: Open Source License

private void combineOutputSamFiles(String outputHdfsDir, List<String> returnedValues) {
    try {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path finalHdfsOutputFile = new Path(outputHdfsDir + "/FullOutput.sam");
        FSDataOutputStream outputFinalStream = fs.create(finalHdfsOutputFile, true);

        // Iterate over the resulting files in HDFS and aggregate them into a single file.
        for (int i = 0; i < returnedValues.size(); i++) {
            LOG.info("JMAbuin:: SparkBWA :: Returned file ::" + returnedValues.get(i));
            BufferedReader br = new BufferedReader(
                    new InputStreamReader(fs.open(new Path(returnedValues.get(i)))));
            String line = br.readLine();
            while (line != null) {
                // Keep SAM "@" header lines only from the first file; copy all
                // alignment lines from every file.
                if (i == 0 || !line.startsWith("@")) {
                    //outputFinalStream.writeBytes(line + "\n");
                    outputFinalStream.write((line + "\n").getBytes());
                }
                line = br.readLine();
            }
            br.close();
            fs.delete(new Path(returnedValues.get(i)), true);
        }
        outputFinalStream.close();
        fs.close();
    } catch (IOException e) {
        e.printStackTrace();
        LOG.error(e.toString());
    }
}
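For plain concatenation, Hadoop 2.x offered FileUtil.copyMerge (removed in Hadoop 3.x). A hedged sketch, not the project's code, with partsDir as a hypothetical directory holding only the part files; note copyMerge copies every file verbatim, so duplicate SAM "@" header lines would survive, which is why the loop above filters them manually:

    // Hedged sketch (Hadoop 2.x only): merge every file under partsDir into one file.
    FileSystem fs = FileSystem.get(conf);
    FileUtil.copyMerge(fs, partsDir,                                   // source dir with part files
                       fs, new Path(outputHdfsDir + "/FullOutput.sam"),
                       true,                                           // delete sources after merge
                       conf, null);                                    // no separator string appended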
From source file: TestStringRelevance.java
License: Apache License

public TestStringRelevance() throws IOException {
    fs = FileSystem.get(new Configuration());
    Relevance.TEST_MODE = true;
}
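FileSystem.get(conf) resolves whatever fs.defaultFS names, so in a test environment it may silently point at a live HDFS cluster. A hedged alternative sketch (not the original test's code) that pins the test to the local filesystem instead:

    // Hedged sketch: request the local filesystem explicitly so the test
    // never depends on the fs.defaultFS setting.
    Configuration conf = new Configuration();
    LocalFileSystem localFs = FileSystem.getLocal(conf);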
From source file: LinkReverser.java
License: Apache License

public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new Configuration(), new LinkReverser(), args);
    System.exit(res);
}
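ToolRunner.run parses the generic Hadoop options (-D, -files, -libjars, ...) into the Configuration and injects it into the Tool via setConf before invoking run. The LinkReverser body is not shown in the source; a hedged skeleton of the contract it must satisfy:

    // Hedged skeleton; the real LinkReverser implementation is not shown above.
    public class LinkReverser extends Configured implements Tool {
        @Override
        public int run(String[] args) throws Exception {
            Configuration conf = getConf();  // already populated by ToolRunner
            // ... job setup and submission would go here ...
            return 0;                        // becomes the process exit code
        }
    }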
From source file: DescSorter.java
License: Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        System.err.println("Usage: flights <in> [<in>...] <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "AvgDelays");
    job.setJarByClass(DescSorter.class);
    job.setMapperClass(FlightMapper.class);
    job.setMapOutputKeyClass(CompositeKey.class);
    job.setMapOutputValueClass(IntWritable.class);

    // Secondary-sort wiring: partition on the natural key, order keys within a
    // partition with the sort comparator, and group values per natural key.
    job.setPartitionerClass(CompositeKeyPartitioner.class);
    job.setSortComparatorClass(SortComparator.class);
    job.setGroupingComparatorClass(GroupingComparator.class);

    job.setReducerClass(AvgDelayReducer.class);
    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(Text.class);

    // All arguments except the last are input paths; the last is the output path.
    for (int i = 0; i < otherArgs.length - 1; ++i) {
        FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
    }
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
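A hedged sketch of what the partitioner half of that wiring typically looks like; CompositeKey and its getNaturalKey() accessor are assumptions, since the class itself is not shown in the source:

    // Hedged sketch; CompositeKey/getNaturalKey() are assumed, not from the source.
    // Partitioning on the natural key alone keeps every composite key sharing it
    // on the same reducer, so the sort comparator can order them there.
    public static class CompositeKeyPartitioner extends Partitioner<CompositeKey, IntWritable> {
        @Override
        public int getPartition(CompositeKey key, IntWritable value, int numPartitions) {
            return (key.getNaturalKey().hashCode() & Integer.MAX_VALUE) % numPartitions;
        }
    }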
From source file: CalculateHistogram.java
License: Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: CalculateHistogram <in> <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "MRDT - Generate Histogram");
    job.setJarByClass(CalculateHistogram.class);
    job.setMapperClass(HistogramMap.class);
    job.setReducerClass(HistogramReduce.class);
    //job.setOutputValueClass(HistogramBucket.class);
    //job.setMapOutputKeyClass(LongWritable.class);
    //job.setMapOutputValueClass(Text.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
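The commented-out lines hint at an earlier setup where the map output types differed from the job's final output types. Hadoop assumes the mapper's output classes match setOutputKeyClass/setOutputValueClass unless declared otherwise, so that setup needs the explicit calls. A hedged sketch (the types mirror the commented-out lines; they are not confirmed by the source):

    // Hedged sketch: declare map output types only when they differ from the
    // job's final output types; otherwise Hadoop assumes they match.
    job.setMapOutputKeyClass(LongWritable.class);
    job.setMapOutputValueClass(Text.class);
    job.setOutputKeyClass(LongWritable.class);        // hypothetical final key type
    job.setOutputValueClass(HistogramBucket.class);   // project-specific value type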