Example usage for org.apache.hadoop.conf Configuration setLong

Introduction

This page collects example usages of org.apache.hadoop.conf.Configuration#setLong.

Prototype

public void setLong(String name, long value) 

Document

Set the value of the name property to a long.
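
A minimal, self-contained sketch of the round trip (the property name "my.example.threshold" is made up for illustration): setLong stores the value, and getLong reads it back, with a default returned when the property is unset.

import org.apache.hadoop.conf.Configuration;

public class SetLongExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Store a long value under a hypothetical property name.
        conf.setLong("my.example.threshold", 512 * 1024 * 1024L);
        // Read it back; the second argument is the default used when the property is unset.
        long threshold = conf.getLong("my.example.threshold", 0L);
        System.out.println("threshold = " + threshold); // prints threshold = 536870912
    }
}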

Usage

From source file: tv.icntv.grade.film.grade.time.TimeReducer.java

License: Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration configuration = context.getConfiguration();
    // Raise the task timeout to 100 minutes (the value is in milliseconds).
    // Note: setting this in setup() only changes the task-local copy of the configuration;
    // "mapred.task.timeout" is the legacy key ("mapreduce.task.timeout" in Hadoop 2+).
    configuration.setLong("mapred.task.timeout", 6000000L);
    scoreValue = Double.parseDouble(configuration.get(scoreKey, "10.0"));
}

From source file: tv.icntv.grade.film.recommend.CFRecommendJob.java

License: Apache License

@Override
public int run(String[] strings) throws Exception {
    Configuration configuration = getConf();
    configuration.setLong("mapred.min.split.size", 512 * 1024 * 1024L);
    HadoopUtils.deleteIfExist(strings[1]);
    Job timeJob = new Job(configuration, "calculate film time middle job");
    MapReduceUtils.initMapperJob(TimeMaper.class, Text.class, Text.class, this.getClass(), timeJob,
            getPaths(strings[0].split(",")));
    timeJob.setCombinerClass(TimeCombiner.class);
    MapReduceUtils.initReducerJob(new Path(strings[1]), TimeReducer.class, timeJob);
    timeJob.waitForCompletion(true);

    HadoopUtils.deleteIfExist(strings[3]);
    HadoopUtils.deleteIfExist(strings[4]);
    return ToolRunner.run(configuration, new RecommenderJob(), strings[2].split(" "));
}

From source file: tv.icntv.grade.film.recommend.CorrelateJob.java

License: Apache License

@Override
public int run(String[] strings) throws Exception {
    Configuration configuration = getConf();
    HadoopUtils.deleteIfExist(strings[1]);
    Job correlate = new Job(configuration, "icntv correlate job");
    MapReduceUtils.initMapperJob(UserHistoryMapper.class, Text.class, Text.class, this.getClass(), correlate,
            getPaths(strings[0].split(",")));
    MapReduceUtils.initReducerJob(new Path(strings[1]), UserHistoryReducer.class, correlate);
    if (!correlate.waitForCompletion(true)) {
        return 1;
    }
    Parameters parameter = getParameter(strings[2]);
    HadoopUtils.deleteIfExist(parameter.get("output"));
    PFPGrowth.runPFPGrowth(parameter, configuration);
    String output = parameter.get("output") + "/frequentpatterns";
    long count = HadoopUtils.count(new Path(output), new PathFilter() {
        @Override
        public boolean accept(Path path) {
            return path.getName().matches("part-r-\\d*");
        }
    });
    if (count == 0) {
        return 1;
    }
    configuration.setLong("icntv.correlate.total.size", count);
    HadoopUtils.deleteIfExist(strings[3]);
    Job result = new Job(configuration, "correlate result calculate");
    MapReduceUtils.initMapperJob(CorrelateInputMapper.class, Text.class, Text.class, this.getClass(), result,
            new Path(output));
    result.setInputFormatClass(SequenceFileInputFormat.class);
    MapReduceUtils.initReducerJob(new Path(strings[3]), CorrelateOutPutReducer.class, result);
    result.waitForCompletion(true);
    return 0;
}

From source file: tv.icntv.grade.film.recommend.CorrelateResultJob.java

License: Apache License

@Override
public int run(String[] strings) throws Exception {
    Configuration configuration = super.getConf();
    String output = strings[0] + "/frequentpatterns";
    long count = HadoopUtils.count(new Path(output), new PathFilter() {
        @Override
        public boolean accept(Path path) {
            return path.getName().matches("part-r-\\d*");
        }
    });
    System.out.println("count =" + count);
    if (count == 0) {
        return 1;
    }

    configuration.setLong("icntv.correlate.total.size", count);
    Job result = new Job(configuration, "correlate result calculate");
    MapReduceUtils.initMapperJob(CorrelateInputMapper.class, Text.class, Text.class, this.getClass(), result,
            new Path(output));
    result.setInputFormatClass(SequenceFileInputFormat.class);
    MapReduceUtils.initReducerJob(new Path(strings[1]), CorrelateOutPutReducer.class, result);
    result.waitForCompletion(true);
    return 0;
}

From source file: tv.icntv.grade.film.recommend.TopNJob.java

License: Apache License

@Override
public int run(String[] strings) throws Exception {
    Configuration configuration = getConf();
    configuration.setLong("mapred.min.split.size", 512 * 1024 * 1024L);
    Job numJob = new Job(configuration, "calculate film program seed num job ");
    Path[] paths = getPaths(strings[0].split(","));
    HadoopUtils.deleteIfExist(strings[1]);
    MapReduceUtils.initMapperJob(NumCountMapper.class, Text.class, Text.class, this.getClass(), numJob, paths);
    MapReduceUtils.initReducerJob(new Path(strings[1]), NumCountReducer.class, numJob);
    numJob.waitForCompletion(true);
    Job programeSets = new Job(configuration, "calculate program set num job");
    HadoopUtils.deleteIfExist(strings[2]);
    MapReduceUtils.initMapperJob(NumProgramSetsMapper.class, Text.class, Text.class, this.getClass(),
            programeSets, new Path(strings[1]));
    programeSets.setCombinerClass(NumProgramSetCombiner.class);
    MapReduceUtils.initReducerJob(new Path(strings[2]), NumProgramSetsReducer.class, programeSets);
    return programeSets.waitForCompletion(true) ? 0 : 1;
}

From source file: tv.icntv.recommend.algorithm.CorrelateJob.java

License: Apache License

/**
 * Expected arguments:
 * new String[]{
 *     String.format(configuration.get(sourceProperty), date),
 *     middleDirectory,
 *     sb.toString(),
 *     String.format(configuration.get(targetResultProperty), date)
 * }
 * @param strings job arguments as described above
 * @return 0 on success, 1 on failure
 * @throws Exception
 */
@Override
public int run(String[] strings) throws Exception {
    Configuration configuration = getConf();
    Date date = getDateAdd(-1);

    String middleDirectory = String.format(configuration.get(correlateInputProperty), date);
    StringBuilder sb = new StringBuilder();
    sb.append("minSupport=").append(configuration.get(minSupportProperty, "3")).append(split)
            .append("maxHeapSize=1024").append(split).append("splitterPattern='[\t ]'").append(split)
            .append("input=").append(middleDirectory).append(split).append("output=")
            .append(String.format(configuration.get(fpGrowthProperty), date));

    HadoopUtils.deleteIfExist(middleDirectory);
    Job correlate = new Job(configuration, "???fp-growth");
    MapReduceUtils.initMapperJob(UserHistoryMapper.class, Text.class, Text.class, this.getClass(), correlate,
            getInput(configuration, -1));//new Path(String.format(configuration.get(sourceProperty),date))
    correlate.setReducerClass(UserHistoryReducer.class);
    correlate.setOutputKeyClass(NullWritable.class);
    correlate.setOutputValueClass(Text.class);
    FileOutputFormat.setOutputPath(correlate, new Path(middleDirectory));
    if (!correlate.waitForCompletion(true)) {
        return 1;
    }
    Parameters parameter = getParameter(sb.toString());
    HadoopUtils.deleteIfExist(parameter.get("output"));
    PFPGrowth.runPFPGrowth(parameter, configuration);
    String output = parameter.get("output") + "/frequentpatterns";
    long count = HadoopUtils.count(new Path(output), new PathFilter() {
        @Override
        public boolean accept(Path path) {
            return path.getName().matches("part-r-\\d*");
        }
    });
    if (count == 0) {
        return 1;
    }
    String resultPath = String.format(configuration.get(targetResultProperty), date);
    // Publish the count to the job's tasks; this must happen before the Job below is created.
    configuration.setLong("icntv.correlate.total.size", count);
    HadoopUtils.deleteIfExist(resultPath);
    Job result = new Job(configuration, "correlate result calculate");
    MapReduceUtils.initMapperJob(CorrelateInputMapper.class, Text.class, Text.class, this.getClass(), result,
            new Path(output));
    result.setInputFormatClass(SequenceFileInputFormat.class);

    MapReduceUtils.initReducerJob(new Path(resultPath), CorrelateOutPutReducer.class, result);
    result.waitForCompletion(true);
    return 0;
}