List of usage examples for org.apache.hadoop.mapred FileInputFormat setInputPaths
public static void setInputPaths(JobConf conf, Path... inputPaths)
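Before the examples, here is a minimal, self-contained driver sketch showing the varargs overload in isolation (the class name, job name, and all paths are hypothetical). Note that setInputPaths replaces any previously configured inputs on the JobConf, whereas FileInputFormat.addInputPath appends to them.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;

public class SetInputPathsExample {
    public static void main(String[] args) throws Exception {
        JobConf job = new JobConf(SetInputPathsExample.class);
        job.setJobName("setInputPaths-example"); // hypothetical job name

        // The Path... overload replaces any previously configured inputs.
        FileInputFormat.setInputPaths(job, new Path("/data/in1"), new Path("/data/in2"));
        // FileInputFormat.addInputPath(job, new Path("/data/in3")); // appends instead

        // The old mapred API defaults to identity map/reduce: TextInputFormat
        // yields <LongWritable, Text> records, so declare matching output types.
        job.setInputFormat(TextInputFormat.class);
        job.setOutputFormat(TextOutputFormat.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Text.class);
        FileOutputFormat.setOutputPath(job, new Path("/data/out"));

        JobClient.runJob(job);
    }
}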
From source file:hibench.DataGenerator.java
License:Apache License
public void sumUpZipf(Path fin, Path fout, ZipfRandom zipf) throws IOException {
    LOG.info("Summing up Zipfian Id Distribution...");

    JobConf job = new JobConf(WebDataGen.class);
    String jobname = fin.getName() + " -> " + fout.getName();
    job.setJobName(jobname);

    zipf.setJobConf(job);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setMapperClass(SumUpZipfMapper.class);
    job.setReducerClass(SumUpZipfReducer.class);
    job.setNumReduceTasks(1); // Important to sequentially accumulate the required space

    job.setInputFormat(NLineInputFormat.class);
    job.setOutputFormat(TextOutputFormat.class);

    DataPaths.checkHdfsFile(fout, false);

    FileInputFormat.setInputPaths(job, fin);
    FileOutputFormat.setOutputPath(job, fout);

    LOG.info("Running Job: " + jobname);
    LOG.info("Dummy file: " + fin);
    LOG.info("Zipfian sum up file as Output: " + fout);
    JobClient.runJob(job);
    LOG.info("Finished Running Job: " + jobname);
}
From source file:hibench.DataGenerator.java
License:Apache License
public void createZipf(Path fin, Path fout, ZipfRandom zipf) throws IOException {
    LOG.info("Creating Zipfian Id Distribution...");

    JobConf job = new JobConf(WebDataGen.class);
    String jobname = fin.getName() + " -> " + fout.getName();
    job.setJobName(jobname);

    zipf.setJobConf(job);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setMapperClass(CreateZipfDistrMapper.class);
    job.setNumReduceTasks(0);

    job.setInputFormat(NLineInputFormat.class);
    job.setOutputFormat(TextOutputFormat.class);

    FileInputFormat.setInputPaths(job, fin);
    FileOutputFormat.setOutputPath(job, fout);

    DataPaths.checkHdfsFile(fout, false);

    LOG.info("Running Job: " + jobname);
    LOG.info("Zipfian Sum File: " + fin);
    LOG.info("Zipfian Id distribution as Output: " + fout);
    RunningJob jobCreateZipf = JobClient.runJob(job);
    LOG.info("Finished Running Job: " + jobname);

    long vElems = jobCreateZipf.getCounters().getCounter(MAP_OUTPUT_RECORDS);
    LOG.info("Created " + vElems + " virtual zipfian elements");
    zipf.setVirtElems(vElems);
}
From source file:hibench.DataGenerator.java
License:Apache License
public void createHtmlPages(Path dummy, HtmlConf html) throws IOException {
    LOG.info("Creating Html Pages...");

    Path fout = new Path(dummy.getParent(), "tmp");

    JobConf job = new JobConf(WebDataGen.class);
    String jobname = "Create html pages to " + fout.getName();
    job.setJobName(jobname);

    html.setJobConf(job);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setMapperClass(CreateHtmlPagesMapper.class);
    job.setNumReduceTasks(0);

    FileInputFormat.setInputPaths(job, dummy);

    // first create result files under tmp folder
    FileOutputFormat.setOutputPath(job, fout);

    // begin from dummy file
    job.setInputFormat(NLineInputFormat.class);

    // use MultipleTextOutputFormat to produce three out files defined
    // in PathConf, i.e., LINK, PAGE_ZLINK_TABLE, PAGE_ZWORD_TABLE
    job.setOutputFormat(HtmlMultipleTextOutputFormat.class);

    LOG.info("Running Job: " + jobname);
    LOG.info("Dummy file: " + dummy);
    LOG.info("Multiple result Html files as <links, words, urls>");
    JobClient.runJob(job);
    LOG.info("Finished Running Job: " + jobname);

    // Move result files under tmp into parent path
    // and remove the empty tmp path finally
    DataPaths.moveFilesToParent(fout);
}
From source file:hibench.PageRankDataGenerator.java
License:Apache License
private void createPageRankNodes() throws IOException {
    LOG.info("Creating PageRank nodes...", null);

    JobConf job = new JobConf(WebDataGen.class);
    String jobname = "Create " + paths.dname + " pagerank nodes";
    job.setJobName(jobname);

    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(Text.class);

    FileInputFormat.setInputPaths(job, paths.getPath(DataPaths.LINKS));
    job.setInputFormat(TextInputFormat.class);

    if (options.PAGERANK_NODE_BALANCE) {
        /***
         * Balance the output order of nodes, to prevent the running
         * of pagerank bench from potential data skew
         */
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(NullWritable.class);
        job.setMapperClass(BalancedLinkNodesMapper.class);
        job.setReducerClass(BalancedLinkNodesReducer.class);
        // job.setPartitionerClass(ModulusPartitioner.class);

        if (options.reds > 0) {
            job.setNumReduceTasks(options.reds);
        } else {
            job.setNumReduceTasks(DataOptions.getMaxNumReduce());
        }
    } else {
        job.setMapOutputKeyClass(Text.class);
        job.setMapperClass(OutputLinkNodesMapper.class);
        job.setNumReduceTasks(0);
    }

    if (options.SEQUENCE_OUT) {
        job.setOutputFormat(SequenceFileOutputFormat.class);
    } else {
        job.setOutputFormat(TextOutputFormat.class);
    }

    if (null != options.codecClass) {
        job.set("mapred.output.compression.type", "BLOCK");
        FileOutputFormat.setCompressOutput(job, true);
        FileOutputFormat.setOutputCompressorClass(job, options.codecClass);
    }

    FileOutputFormat.setOutputPath(job, paths.getResult(DataPaths.VERTICALS));

    LOG.info("Running Job: " + jobname);
    LOG.info("Links file " + paths.getPath(DataPaths.LINKS) + " as input");
    LOG.info("Vertices file " + paths.getResult(DataPaths.VERTICALS) + " as output");
    JobClient.runJob(job);
    LOG.info("Finished Running Job: " + jobname);

    LOG.info("Cleaning temp files...");
    paths.cleanTempFiles(paths.getResult(DataPaths.VERTICALS));
}
From source file:hibench.PageRankDataGenerator.java
License:Apache License
/***
 * Create pagerank edge table, output link A->B as <A, B> pairs
 * @throws IOException
 */
private void createPageRankLinks() throws IOException {
    LOG.info("Creating PageRank links", null);

    JobConf job = new JobConf(WebDataGen.class);
    String jobname = "Create " + paths.dname + " pagerank links";
    job.setJobName(jobname);

    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(Text.class);
    job.setMapOutputKeyClass(Text.class);
    job.setNumReduceTasks(0);

    FileInputFormat.setInputPaths(job, paths.getPath(DataPaths.T_LINK_PAGE));
    job.setInputFormat(TextInputFormat.class);
    job.setMapperClass(OutputLinkEdgesMapper.class);

    if (options.SEQUENCE_OUT) {
        job.setOutputFormat(SequenceFileOutputFormat.class);
    } else {
        job.setOutputFormat(TextOutputFormat.class);
    }

    if (null != options.codecClass) {
        job.set("mapred.output.compression.type", "BLOCK");
        FileOutputFormat.setCompressOutput(job, true);
        FileOutputFormat.setOutputCompressorClass(job, options.codecClass);
    }

    FileOutputFormat.setOutputPath(job, paths.getResult(DataPaths.EDGES));

    LOG.info("Running Job: " + jobname);
    LOG.info("Table link-page " + paths.getPath(DataPaths.T_LINK_PAGE) + " as input");
    LOG.info("Edges file " + paths.getResult(DataPaths.EDGES) + " as output");
    JobClient.runJob(job);
    LOG.info("Finished Running Job: " + jobname);

    LOG.info("Cleaning temp files...");
    paths.cleanTempFiles(paths.getResult(DataPaths.EDGES));
}
From source file:hitune.analysis.mapreduce.processor.HadoopMetrics.java
License:Apache License
@Override
public void run() {
    // TODO Auto-generated method stub
    long timestamp = System.currentTimeMillis();
    JobConf conf = new JobConf(this.conf, HadoopMetrics.class);
    try {
        conf.setJobName(this.getClass().getSimpleName() + timestamp);
        conf.setInputFormat(MultiSequenceFileInputFormat.class);
        conf.setMapperClass(HadoopMetrics.MapClass.class);
        conf.setReducerClass(SystemLog.ReduceClass.class);

        Class<? extends WritableComparable> outputKeyClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputKeyClass))
                .asSubclass(WritableComparable.class);
        Class<? extends Writable> outputValueClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputValueClass))
                .asSubclass(Writable.class);
        conf.setMapOutputKeyClass(outputKeyClass);
        conf.setMapOutputValueClass(outputValueClass);

        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(TextArrayWritable.class);
        conf.setOutputFormat(CSVFileOutputFormat.class);

        String outputPaths = conf.get(AnalysisProcessorConfiguration.reportfolder) + "/"
                + conf.get(AnalysisProcessorConfiguration.reportfile);
        String temp_outputPaths = getTempOutputDir(outputPaths);

        if (this.inputfiles != null) {
            log.debug("inputPaths:" + inputfiles);
            FileInputFormat.setInputPaths(conf, inputfiles);
            FileOutputFormat.setOutputPath(conf, new Path(temp_outputPaths));
            try {
                JobClient.runJob(conf);
                moveResults(conf, outputPaths, temp_outputPaths);
            } catch (IOException e) {
                // TODO Auto-generated catch block
                log.warn("For " + getOutputFileName() + " :JOB fails!");
                log.warn(e);
                e.printStackTrace();
                this.MOVE_DONE = false;
            }
        } else {
            log.warn("For " + getOutputFileName() + " :No input path!");
        }
    } catch (Exception e) {
        log.warn("Job preparation failure!");
        log.warn(e);
        e.printStackTrace();
    }
}
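Note that this processor (and those below) pass a String field, inputfiles, straight to setInputPaths. That compiles against the setInputPaths(JobConf, String) overload, which accepts a comma-separated list of paths. A hedged sketch of that variant follows; the class name, helper, and directory names are hypothetical, and inputfiles being a comma-separated String is an assumption inferred from the call site.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;

// Hypothetical helper illustrating the comma-separated String overload
// that the processors in this listing appear to rely on.
public class InputListSketch {
    public static void configureInputs(JobConf conf, List<String> dirs) {
        // setInputPaths(JobConf, String) splits the list on commas and
        // replaces any inputs configured earlier on this JobConf.
        FileInputFormat.setInputPaths(conf, String.join(",", dirs));
    }

    public static void main(String[] args) {
        JobConf conf = new JobConf(InputListSketch.class);
        configureInputs(conf, Arrays.asList("/logs/day1", "/logs/day2"));
        System.out.println(conf.get("mapred.input.dir")); // the stored path list
    }
}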
From source file:hitune.analysis.mapreduce.processor.HistoryLog.java
License:Apache License
public void run() {
    // TODO Auto-generated method stub
    long timestamp = System.currentTimeMillis();
    JobConf conf = new JobConf(this.conf, HistoryLog.class);
    try {
        conf.setJobName(this.getClass().getSimpleName() + timestamp);
        conf.setInputFormat(MultiSequenceFileInputFormat.class);
        conf.setMapperClass(HistoryLog.MapClass.class);
        conf.setReducerClass(HistoryLog.ReduceClass.class);
        conf.setOutputKeyClass(Text.class);

        Class<? extends WritableComparable> outputKeyClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputKeyClass))
                .asSubclass(WritableComparable.class);
        Class<? extends Writable> outputValueClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputValueClass))
                .asSubclass(Writable.class);
        conf.setMapOutputKeyClass(outputKeyClass);
        conf.setMapOutputValueClass(outputValueClass);

        conf.setOutputValueClass(TextArrayWritable.class);
        conf.setOutputFormat(CSVFileOutputFormat.class);

        String outputPaths = conf.get(AnalysisProcessorConfiguration.reportfolder) + "/"
                + conf.get(AnalysisProcessorConfiguration.reportfile);
        String temp_outputPaths = getTempOutputDir(outputPaths);

        if (this.inputfiles != null) {
            log.debug("inputPaths:" + inputfiles);
            FileInputFormat.setInputPaths(conf, inputfiles);
            FileOutputFormat.setOutputPath(conf, new Path(temp_outputPaths));
            try {
                JobClient.runJob(conf);
                moveResults(conf, outputPaths, temp_outputPaths);
            } catch (IOException e) {
                // TODO Auto-generated catch block
                log.warn("For " + getOutputFileName() + " :JOB fails!");
                log.warn(e);
                e.printStackTrace();
                this.MOVE_DONE = false;
            }
        } else {
            log.warn("For " + getOutputFileName() + " :No input path!");
        }
    } catch (Exception e) {
        log.warn("Job preparation failure!");
        log.warn(e);
        e.printStackTrace();
    }
}
From source file:hitune.analysis.mapreduce.processor.InstrumentDataflow.java
License:Apache License
@Override
public void run() {
    // TODO Auto-generated method stub
    long timestamp = System.currentTimeMillis();
    JobConf conf = new JobConf(this.conf, InstrumentDataflow.class);
    try {
        conf.setJobName(this.getClass().getSimpleName() + timestamp);
        conf.setInputFormat(MultiSequenceFileInputFormat.class);
        conf.setMapperClass(InstrumentDataflow.MapClass.class);
        conf.setReducerClass(InstrumentDataflow.ReduceClass.class);
        conf.setOutputKeyClass(Text.class);

        Class<? extends WritableComparable> outputKeyClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputKeyClass))
                .asSubclass(WritableComparable.class);
        Class<? extends Writable> outputValueClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputValueClass))
                .asSubclass(Writable.class);
        conf.setMapOutputKeyClass(outputKeyClass);
        conf.setMapOutputValueClass(outputValueClass);

        conf.setOutputValueClass(TextArrayWritable.class);
        conf.setOutputFormat(CSVFileOutputFormat.class);

        String outputPaths = conf.get(AnalysisProcessorConfiguration.reportfolder) + "/"
                + conf.get(AnalysisProcessorConfiguration.reportfile);
        String temp_outputPaths = getTempOutputDir(outputPaths);

        if (this.inputfiles != null) {
            log.debug("inputPaths:" + inputfiles);
            FileInputFormat.setInputPaths(conf, inputfiles);
            FileOutputFormat.setOutputPath(conf, new Path(temp_outputPaths));
            //FileInputFormat.setInputPathFilter(conf, evtFileFilter.class);
            //conf.setNumReduceTasks(1);
            try {
                JobClient.runJob(conf);
                moveResults(conf, outputPaths, temp_outputPaths);
            } catch (IOException e) {
                // TODO Auto-generated catch block
                log.warn("For " + getOutputFileName() + " :JOB fails!");
                log.warn(e);
                e.printStackTrace();
                this.MOVE_DONE = false;
            }
        } else {
            log.warn("For " + getOutputFileName() + " :No input path!");
        }
    } catch (Exception e) {
        log.warn("Job preparation failure!");
        log.warn(e);
        e.printStackTrace();
    }
}
From source file:hitune.analysis.mapreduce.processor.InstrumentSamplingTop.java
License:Apache License
@Override
public void run() {
    // TODO Auto-generated method stub
    long timestamp = System.currentTimeMillis();
    try {
        JobConf conf = new JobConf(this.conf, InstrumentSamplingTop.class);
        conf.setJobName(this.getClass().getSimpleName() + "_1_" + timestamp);
        conf.setInputFormat(MultiSequenceFileInputFormat.class);
        conf.setMapperClass(InstrumentSamplingTop.MapClass.class);
        conf.setReducerClass(InstrumentSamplingTop.ReduceClass.class);

        Class<? extends WritableComparable> outputKeyClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputKeyClass))
                .asSubclass(WritableComparable.class);
        Class<? extends Writable> outputValueClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputValueClass))
                .asSubclass(Writable.class);
        conf.setMapOutputKeyClass(outputKeyClass);
        conf.setMapOutputValueClass(outputValueClass);
        conf.setOutputKeyClass(outputKeyClass);
        conf.setOutputValueClass(outputValueClass);

        conf.setOutputFormat(SequenceFileOutputFormat.class);

        String outputPaths = conf.get(AnalysisProcessorConfiguration.reportfolder) + "/"
                + conf.get(AnalysisProcessorConfiguration.reportfile);
        String temp_outputPaths = getTempOutputDir(outputPaths);

        if (this.inputfiles != null) {
            log.debug("inputPaths:" + inputfiles);
            FileInputFormat.setInputPaths(conf, inputfiles);
            FileOutputFormat.setOutputPath(conf, new Path(outputPaths + "_1_" + timestamp));
            try {
                //first job
                JobClient.runJob(conf);

                JobConf secondconf = new JobConf(this.conf, InstrumentSamplingTop.class);
                secondconf.setJobName(this.getClass().getSimpleName() + "_2_" + timestamp);
                secondconf.setInputFormat(SequenceFileInputFormat.class);
                secondconf.setMapperClass(IdentityMapper.class);
                secondconf.setReducerClass(InstrumentSamplingTop.TopClass.class);

                secondconf.setMapOutputKeyClass(outputKeyClass);
                secondconf.setMapOutputValueClass(outputValueClass);
                secondconf.setOutputKeyClass(Text.class);
                secondconf.setOutputValueClass(TextArrayWritable.class);
                secondconf.setOutputFormat(CSVFileOutputFormat.class);

                FileInputFormat.setInputPaths(secondconf, outputPaths + "_1_" + timestamp);
                FileOutputFormat.setOutputPath(secondconf, new Path(temp_outputPaths));

                //second job to get ranking list
                JobClient.runJob(secondconf);
                moveResults(secondconf, outputPaths, temp_outputPaths);

                Path temp = new Path(outputPaths + "_1_" + timestamp);
                temp.getFileSystem(conf).delete(temp);
            } catch (IOException e) {
                // TODO Auto-generated catch block
                log.warn("For " + getOutputFileName() + " :JOB fails!");
                log.warn(e);
                e.printStackTrace();
                this.MOVE_DONE = false;
            }
        } else {
            log.warn("For " + getOutputFileName() + " :No input path!");
        }
    } catch (Exception e) {
        log.warn("Job preparation failure!");
        log.warn(e);
        e.printStackTrace();
    }
}
From source file:hitune.analysis.mapreduce.processor.SystemLog.java
License:Apache License
@Override
public void run() {
    // TODO Auto-generated method stub
    long timestamp = System.currentTimeMillis();
    JobConf conf = new JobConf(this.conf, SystemLog.class);
    try {
        conf.setJobName(this.getClass().getSimpleName() + timestamp);
        conf.setInputFormat(MultiSequenceFileInputFormat.class);
        conf.setMapperClass(SystemLog.MapClass.class);
        conf.setReducerClass(SystemLog.ReduceClass.class);

        Class<? extends WritableComparable> outputKeyClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputKeyClass))
                .asSubclass(WritableComparable.class);
        Class<? extends Writable> outputValueClass = Class
                .forName(conf.get(AnalysisProcessorConfiguration.mapoutputValueClass))
                .asSubclass(Writable.class);
        conf.setMapOutputKeyClass(outputKeyClass);
        conf.setMapOutputValueClass(outputValueClass);

        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(TextArrayWritable.class);
        conf.setOutputFormat(CSVFileOutputFormat.class);

        String outputPaths = conf.get(AnalysisProcessorConfiguration.reportfolder) + "/"
                + conf.get(AnalysisProcessorConfiguration.reportfile);
        String temp_outputPaths = getTempOutputDir(outputPaths);

        if (this.inputfiles != null) {
            log.debug("inputPaths:" + inputfiles);
            FileInputFormat.setInputPaths(conf, inputfiles);
            FileOutputFormat.setOutputPath(conf, new Path(temp_outputPaths));
            try {
                JobClient.runJob(conf);
                moveResults(conf, outputPaths, temp_outputPaths);
            } catch (IOException e) {
                // TODO Auto-generated catch block
                log.warn("For " + getOutputFileName() + " :JOB fails!");
                log.warn(e);
                e.printStackTrace();
                this.MOVE_DONE = false;
            }
        } else {
            log.warn("For " + getOutputFileName() + " :No input path!");
        }
    } catch (Exception e) {
        log.warn("Job preparation failure!");
        log.warn(e);
        e.printStackTrace();
    }
}