Usage examples for org.apache.hadoop.mapred.FileInputFormat.setInputPaths
public static void setInputPaths(JobConf conf, Path... inputPaths)
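Before the project examples below, here is a minimal, self-contained sketch of the call itself. The varargs overload replaces any previously configured input paths for the job; the class name and the paths /user/data/a, /user/data/b, /user/data/c are hypothetical placeholders, not from the examples.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;

public class SetInputPathsSketch {
    public static void main(String[] args) {
        JobConf conf = new JobConf(SetInputPathsSketch.class);
        // setInputPaths replaces the job's entire input path list with
        // exactly the paths passed here (hypothetical paths).
        FileInputFormat.setInputPaths(conf,
                new Path("/user/data/a"), new Path("/user/data/b"));
        // To append a path without replacing the list, use addInputPath:
        FileInputFormat.addInputPath(conf, new Path("/user/data/c"));
    }
}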
From source file:org.sf.xrime.algorithms.partitions.connected.bi.EdgeSetMinorJoin.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, EdgeSetMinorJoin.class);
    conf.setJobName("EdgeSetMinorJoin");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(EdgeSet.class);
    conf.setMapperClass(MapClass.class);
    // A combiner is permitted here.
    conf.setCombinerClass(ReduceClass.class);
    conf.setReducerClass(ReduceClass.class);
    // Make the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(GzipCodec.class);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.bi.EdgeSetSummarize.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, EdgeSetSummarize.class);
    conf.setJobName("EdgeSetSummarize");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(EdgeSet.class);
    conf.setMapperClass(MapClass.class);
    conf.setCombinerClass(ReduceClass.class);
    conf.setReducerClass(ReduceClass.class);
    // Make the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(GzipCodec.class);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.bi.SpanningConvergenceTest.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, SpanningConvergenceTest.class);
    conf.setJobName("SpanningConvergenceTest");

    // The keys are vertex identifiers (strings).
    conf.setOutputKeyClass(Text.class);
    // The values are integers (Writable).
    conf.setOutputValueClass(IntWritable.class);
    conf.setMapperClass(MapClass.class);
    // Make the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(0);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.bi.SpanningTreeGenerate.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, SpanningTreeGenerate.class);
    conf.setJobName("SpanningTreeGenerate");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(LabeledAdjSetVertex.class);
    conf.setMapperClass(MapClass.class);
    // No combiner is permitted, since the reducer's logic depends on the
    // completeness of its input.
    conf.setReducerClass(ReduceClass.class);
    // Make the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    conf.setMapOutputCompressorClass(GzipCodec.class);
    conf.setCompressMapOutput(true);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.bi.SpanningTreeRootChoose.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, SpanningTreeRootChoose.class);
    conf.setJobName("SpanningTreeRootChoose");

    // This is necessary, since the map output types differ from the job's
    // final output types.
    conf.setMapOutputKeyClass(Text.class);
    conf.setMapOutputValueClass(LabeledAdjSetVertex.class);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);
    conf.setMapperClass(MapClass.class);
    // Since (k2, v2) differs from (k3, v3), no combiner is permitted.
    conf.setReducerClass(ReduceClass.class);
    // The input data was generated with WritableSerialization.
    conf.setInputFormat(SequenceFileInputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    // Only one reducer is permitted; otherwise the largest value will be wrong.
    conf.setNumReduceTasks(1);
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(GzipCodec.class);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.bi.Tree2EdgeSet.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, Tree2EdgeSet.class);
    conf.setJobName("Tree2EdgeSet");

    conf.setMapOutputKeyClass(Text.class);
    conf.setMapOutputValueClass(PathAsVertexesList.class);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(EdgeSet.class);
    conf.setMapperClass(MapClass.class);
    // No combiner is permitted, since the reducer's logic depends on the
    // completeness of its input.
    conf.setReducerClass(ReduceClass.class);
    // Make the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(GzipCodec.class);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.strongly.BackwardTrimPartA.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, BackwardTrimPartA.class);
    conf.setJobName("BackwardTrimPartA");

    // The keys are vertex identifiers (strings).
    conf.setOutputKeyClass(Text.class);
    // The values are vertexes (Writable).
    conf.setOutputValueClass(LabeledAdjBiSetVertex.class);
    conf.setMapperClass(MapClass.class);
    // No combiners, no reducers.
    // Make the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(0);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.partitions.connected.strongly.BackwardTrimPartB.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, BackwardTrimPartB.class);
    conf.setJobName("BackwardTrimPartB");

    // The keys are vertex identifiers (strings).
    conf.setOutputKeyClass(Text.class);
    // The values are vertexes (Writable).
    conf.setOutputValueClass(LabeledAdjBiSetVertex.class);
    conf.setMapperClass(MapClass.class);
    // No combiner is permitted.
    conf.setReducerClass(ReduceClass.class);
    // Make the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    conf.setMapOutputCompressorClass(GzipCodec.class);
    conf.setCompressMapOutput(true);
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}