List of usage examples for org.apache.hadoop.mapred FileInputFormat setInputPaths
public static void setInputPaths(JobConf conf, Path... inputPaths)
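Before the project-specific examples below, here is a minimal sketch of the call in isolation. The class name, job name, paths, and identity mapper/reducer are placeholders chosen for illustration, not taken from any of the listed sources. setInputPaths takes a varargs list of Path objects (there is also an overload taking a comma-separated String) and sets them as the job's input paths, replacing any paths configured earlier on the JobConf.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;

public class SetInputPathsSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative pass-through job using the old mapred API.
        JobConf jobConf = new JobConf(SetInputPathsSketch.class);
        jobConf.setJobName("setInputPaths-sketch"); // placeholder name
        jobConf.setInputFormat(TextInputFormat.class);
        jobConf.setOutputFormat(TextOutputFormat.class);
        jobConf.setMapperClass(IdentityMapper.class);
        jobConf.setReducerClass(IdentityReducer.class);
        jobConf.setOutputKeyClass(LongWritable.class);
        jobConf.setOutputValueClass(Text.class);
        // Varargs form: every Path becomes an input; the call replaces any
        // input paths previously set on this JobConf. The paths are placeholders.
        FileInputFormat.setInputPaths(jobConf, new Path("/data/in1"), new Path("/data/in2"));
        FileOutputFormat.setOutputPath(jobConf, new Path("/data/out"));
        JobClient.runJob(jobConf);
    }
}

The examples that follow show the same call embedded in real jobs from the X-RIME graph-mining project.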
From source file:org.sf.xrime.algorithms.BC.Pajek2LabeledAdjBiSetVertexTransformer.java
License:Apache License
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(conf, Pajek2LabeledAdjBiSetVertexTransformer.class);
    jobConf.setJobName("tansfer_pajek2LabeledAdjBiSetvert");
    jobConf.setMapperClass(MapClass.class);
    jobConf.setReducerClass(ReduceClass.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);
    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(Text.class);
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(LabeledAdjBiSetVertex.class);
    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);
    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(reducerNum);
    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.BCApproximation.BCBackwardStep.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    try {
        context.setParameter("distance", Integer.toString(dist));
        jobConf = new JobConf(context, BCBackwardStep.class);
        jobConf.setJobName("BC");
        jobConf.setMapperClass(BCBackwardMapper.class);
        jobConf.setReducerClass(BCBackwardReducer.class);
        //jobConf.setNumMapTasks(getMapperNum());
        jobConf.setNumMapTasks(1);
        //jobConf.setNumReduceTasks(getReducerNum());
        jobConf.setNumReduceTasks(1);
        jobConf.setMapOutputValueClass(LabeledAdjBiSetVertex.class);
        jobConf.setOutputKeyClass(Text.class);
        jobConf.setOutputValueClass(LabeledAdjBiSetVertex.class);
        jobConf.setInputFormat(SequenceFileInputFormat.class);
        jobConf.setOutputFormat(SequenceFileOutputFormat.class);
        FileInputFormat.setInputPaths(jobConf, context.getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, context.getDestination().getPath());
        this.runningJob = JobClient.runJob(jobConf);
        if (dist > 0) {
            end = false;
        } else {
            end = true;
        }
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    }
}
From source file:org.sf.xrime.algorithms.BCApproximation.BCForwardStep.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    try {
        jobConf = new JobConf(context, BCForwardStep.class);
        jobConf.setJobName("BC");
        jobConf.setInputFormat(SequenceFileInputFormat.class);
        jobConf.setOutputFormat(SequenceFileOutputFormat.class);
        jobConf.setMapperClass(BCForwardMapper.class);
        jobConf.setReducerClass(BCForwardReducer.class);
        //jobConf.setNumMapTasks(getMapperNum());
        jobConf.setNumMapTasks(1);
        //jobConf.setNumReduceTasks(getReducerNum());
        jobConf.setNumReduceTasks(1);
        jobConf.setMapOutputValueClass(LabeledAdjBiSetVertex.class);
        jobConf.setOutputKeyClass(Text.class);
        jobConf.setOutputValueClass(LabeledAdjBiSetVertex.class);
        FileInputFormat.setInputPaths(jobConf, context.getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, context.getDestination().getPath());
        jobConf.set(maxDistance, "0");
        jobConf.set(continueFileKey, continueFlagFile());
        this.runningJob = JobClient.runJob(jobConf);
        if (client == null) {
            client = FileSystem.get(jobConf);
        }
        if (client.exists(new Path(continueFlagFile()))) {
            end = false;
            client.delete(new Path(continueFlagFile()), true);
        } else {
            end = true;
        }
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    }
}
From source file:org.sf.xrime.algorithms.BCApproximation.Pajek2LabeledAdjBiSetVertexTransformer.java
License:Apache License
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(conf, Pajek2LabeledAdjBiSetVertexTransformer.class);
    jobConf.setJobName("tansfer_pajek2LabeledAdjBiSetvert");
    jobConf.setMapperClass(MapClass.class);
    jobConf.setReducerClass(ReduceClass.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);
    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(Text.class);
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(LabeledAdjBiSetVertex.class);
    FileInputFormat.setInputPaths(jobConf, srcPath);
    FileOutputFormat.setOutputPath(jobConf, destPath);
    jobConf.setNumMapTasks(mapperNum);
    jobConf.setNumReduceTasks(reducerNum);
    try {
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.BFS.alg_1.BFSStep.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    try {
        context.setParameter(continueFileKey, continueFlagFile());
        jobConf = new JobConf(context, BFSStep.class);
        jobConf.setJobName("BFS");
        FileInputFormat.setInputPaths(jobConf, context.getSource().getPath());
        jobConf.setInputFormat(SequenceFileInputFormat.class);
        jobConf.setMapperClass(BFSMapper.class);
        jobConf.setNumMapTasks(getMapperNum());
        jobConf.setMapOutputValueClass(LabeledAdjVertex.class);
        jobConf.setReducerClass(BFSReducer.class);
        jobConf.setNumReduceTasks(getReducerNum());
        jobConf.setOutputKeyClass(Text.class);
        jobConf.setOutputValueClass(LabeledAdjVertex.class);
        FileOutputFormat.setOutputPath(jobConf, context.getDestination().getPath());
        jobConf.setOutputFormat(SequenceFileOutputFormat.class);
        this.runningJob = JobClient.runJob(jobConf);
        if (client == null) {
            client = FileSystem.get(jobConf);
        }
        if (client.exists(new Path(continueFlagFile()))) {
            end = false;
            client.delete(new Path(continueFlagFile()), true);
        } else {
            end = true;
        }
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    }
}
From source file:org.sf.xrime.algorithms.BFS.alg_2.BFSStep.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    try {
        JobConf jobConf = new JobConf(context, BFSStep.class);
        jobConf.setJobName("BFS");
        FileInputFormat.setInputPaths(jobConf, context.getSource().getPath());
        jobConf.setInputFormat(SequenceFileInputFormat.class);
        jobConf.setMapperClass(BFSMapper.class);
        jobConf.setNumMapTasks(getMapperNum());
        jobConf.setMapOutputValueClass(LabeledAdjVertex.class);
        // jobConf.setCombinerClass(BFSCombineClass.class);
        jobConf.setReducerClass(BFSReducer.class);
        jobConf.setNumReduceTasks(getReducerNum());
        jobConf.setOutputKeyClass(Text.class);
        jobConf.setOutputValueClass(LabeledAdjVertex.class);
        FileOutputFormat.setOutputPath(jobConf, context.getDestination().getPath());
        jobConf.setOutputFormat(SequenceFileOutputFormat.class);
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    } catch (IllegalAccessException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.clique.maximal.AllMaximalCliquesGenerate.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, AllMaximalCliquesGenerate.class);
    conf.setJobName("AllMaximalCliquesGenerate");
    conf.setMapOutputKeyClass(Text.class);
    conf.setMapOutputValueClass(SetOfVertexSets.class);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);
    conf.setMapperClass(MapClass.class);
    // Combiner is not permitted.
    conf.setReducerClass(ReduceClass.class);
    // Makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    // Enable compression.
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(GzipCodec.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.clique.maximal.InducedNeighborhoodGenerate.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, InducedNeighborhoodGenerate.class);
    conf.setJobName("InducedNeighborhoodGenerate");
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(LabeledAdjSetVertex.class);
    conf.setMapperClass(MapClass.class);
    // No combiner is permitted, since the logic of the reducer depends on the
    // completeness of information.
    conf.setReducerClass(ReduceClass.class);
    // Makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    // Enable compression.
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(GzipCodec.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.clique.maximal.StrongNeighborhoodGenerate.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, StrongNeighborhoodGenerate.class);
    conf.setJobName("StrongNeighborhoodGenerate");
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(LabeledAdjSetVertex.class);
    conf.setMapperClass(MapClass.class);
    // No combiner is permitted, since the logic of the reducer depends on the
    // completeness of information.
    conf.setReducerClass(ReduceClass.class);
    // Makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    // Enable compression.
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(GzipCodec.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}
From source file:org.sf.xrime.algorithms.clique.maximal.WeakNeighborhoodGenerate.java
License:Apache License
@Override
public void execute() throws ProcessorExecutionException {
    JobConf conf = new JobConf(context, WeakNeighborhoodGenerate.class);
    conf.setJobName("WeakNeighborhoodGenerate");
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(LabeledAdjSetVertex.class);
    conf.setMapperClass(MapClass.class);
    // No combiner is permitted, since the logic of the reducer depends on the
    // completeness of information.
    conf.setReducerClass(ReduceClass.class);
    // Makes the file format suitable for machine processing.
    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    // Enable compression.
    conf.setCompressMapOutput(true);
    conf.setMapOutputCompressorClass(GzipCodec.class);
    try {
        FileInputFormat.setInputPaths(conf, getSource().getPath());
        FileOutputFormat.setOutputPath(conf, getDestination().getPath());
    } catch (IllegalAccessException e1) {
        throw new ProcessorExecutionException(e1);
    }
    conf.setNumMapTasks(getMapperNum());
    conf.setNumReduceTasks(getReducerNum());
    try {
        this.runningJob = JobClient.runJob(conf);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}