Example usage for org.apache.hadoop.mapred FileInputFormat setInputPaths

List of usage examples for org.apache.hadoop.mapred FileInputFormat setInputPaths

Introduction

In this page you can find the example usage for org.apache.hadoop.mapred FileInputFormat setInputPaths.

Prototype

public static void setInputPaths(JobConf conf, Path... inputPaths) 

Source Link

Document

Sets the given array of Paths as the list of inputs for the map-reduce job.

Usage

From source file:org.sf.xrime.algorithms.HITS.DeliveryStep.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    try {
        // Record the flag-file path so mappers/reducers can signal that
        // another iteration is required.
        context.setParameter(continueFileKey, continueFlagFile());

        jobConf = new JobConf(context, DeliveryStep.class);
        jobConf.setJobName("Delivery");

        // The source graph is the input of this job.
        FileInputFormat.setInputPaths(jobConf, context.getSource().getPath());

        jobConf.setInputFormat(SequenceFileInputFormat.class);
        jobConf.setMapperClass(DeliveryMapper.class);
        jobConf.setNumMapTasks(getMapperNum());
        jobConf.setMapOutputValueClass(ObjectWritable.class);

        jobConf.setReducerClass(DeliveryReducer.class);
        jobConf.setNumReduceTasks(getReducerNum());

        jobConf.setOutputKeyClass(Text.class);
        jobConf.setOutputValueClass(LabeledAdjBiSetVertex.class);

        FileOutputFormat.setOutputPath(jobConf, context.getDestination().getPath());
        jobConf.setOutputFormat(SequenceFileOutputFormat.class);

        this.runningJob = JobClient.runJob(jobConf);

        if (client == null) {
            client = FileSystem.get(jobConf);
        }

        // The job creates the flag file when the iteration has not yet
        // converged; consume (delete) it and continue, otherwise stop.
        if (client.exists(new Path(continueFlagFile()))) {
            end = false;
            client.delete(new Path(continueFlagFile()), true);
        } else {
            end = true;
        }
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    } catch (IllegalAccessException e) {
        // Previously swallowed via printStackTrace(), which let execute()
        // return as if it succeeded while leaving the step un-run.
        // Propagate so callers observe the failure.
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.HITS.NormalizeStep.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    try {
        // Record the flag-file path so tasks can signal that another
        // iteration is required.
        context.setParameter(continueFileKey, continueFlagFile());

        // Publish the hub/authority sums produced by the summing pass so
        // the normalization mappers can divide scores by them.
        context.setParameter(HITSSummer.hubCounterKey, Double.toString(summer.getHubSum()));
        context.setParameter(HITSSummer.authorityCounterKey, Double.toString(summer.getAuthoritySum()));

        jobConf = new JobConf(conf, NormalizeStep.class);
        jobConf.setJobName("Normalize");

        FileInputFormat.setInputPaths(jobConf, context.getSource().getPath());

        jobConf.setInputFormat(SequenceFileInputFormat.class);
        jobConf.setMapperClass(NormalizeMapper.class);
        jobConf.setNumMapTasks(getMapperNum());
        jobConf.setMapOutputValueClass(LabeledAdjBiSetVertex.class);

        // NOTE(review): a NormalizeReducer was deliberately disabled here
        // in the original code; no reducer class is configured.

        jobConf.setOutputKeyClass(Text.class);
        jobConf.setOutputValueClass(LabeledAdjBiSetVertex.class);

        FileOutputFormat.setOutputPath(jobConf, context.getDestination().getPath());
        jobConf.setOutputFormat(SequenceFileOutputFormat.class);

        this.runningJob = JobClient.runJob(jobConf);

        if (client == null) {
            client = FileSystem.get(jobConf);
        }

        // The job creates the flag file when the iteration has not yet
        // converged; consume (delete) it and continue, otherwise stop.
        if (client.exists(new Path(continueFlagFile()))) {
            end = false;
            client.delete(new Path(continueFlagFile()), true);
        } else {
            end = true;
        }
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    } catch (IllegalAccessException e) {
        // Previously swallowed via printStackTrace(), which let execute()
        // return as if it succeeded while leaving the step un-run.
        // Propagate so callers observe the failure.
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.kcore.undirected.ElementRemoval.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    // Derive the job configuration from the graph-algorithm context.
    JobConf jobConf = new JobConf(context, ElementRemoval.class);
    jobConf.setJobName("ElementRemoval");

    // Sequence files on both ends keep the data machine-processable.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    // Keys are vertex identifiers (strings); values are the vertexes.
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(AdjSetVertex.class);

    jobConf.setMapperClass(MapClass.class);
    // A combiner would be permitted here, but is not used for now.
    jobConf.setReducerClass(ReduceClass.class);

    jobConf.setNumMapTasks(getMapperNum());
    jobConf.setNumReduceTasks(getReducerNum());

    // Compress intermediate map output to reduce shuffle traffic.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    try {
        FileInputFormat.setInputPaths(jobConf, getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, getDestination().getPath());
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IllegalAccessException e) {
        throw new ProcessorExecutionException(e);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.layout.circular.CoordinatesCalculation.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(context, CoordinatesCalculation.class);
    jobConf.setJobName("CoordinatesCalculation");

    // Sequence files on both ends keep the data machine-processable.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(LabeledAdjSetVertex.class);

    // Map-only job: neither combiner nor reducer is configured.
    jobConf.setMapperClass(MapClass.class);

    // NOTE(review): the original comment claimed "Only one mapper is
    // permitted", yet the count comes from getMapperNum(); behavior is
    // kept as-is — confirm the intended mapper count.
    jobConf.setNumMapTasks(getMapperNum());
    jobConf.setNumReduceTasks(0);

    try {
        FileInputFormat.setInputPaths(jobConf, getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, getDestination().getPath());
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IllegalAccessException e) {
        throw new ProcessorExecutionException(e);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.layout.circular.SequentialNumAssign.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(context, SequentialNumAssign.class);
    jobConf.setJobName("SequentialNumAssign");

    // Sequence files on both ends keep the data machine-processable.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(LabeledAdjSetVertex.class);

    jobConf.setMapperClass(MapClass.class);
    // No combiner is configured; ReduceClass handles the reduce side.
    jobConf.setReducerClass(ReduceClass.class);

    // The task counts here do not matter for correctness.
    jobConf.setNumMapTasks(getMapperNum());
    jobConf.setNumReduceTasks(getReducerNum());

    try {
        FileInputFormat.setInputPaths(jobConf, getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, getDestination().getPath());
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IllegalAccessException e) {
        throw new ProcessorExecutionException(e);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.layout.ellipse.CoordinatesCalculation.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(context, CoordinatesCalculation.class);
    jobConf.setJobName("CoordinatesCalculation");

    // Sequence files on both ends keep the data machine-processable.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(LabeledAdjSetVertex.class);

    // Map-only job: neither combiner nor reducer is configured.
    jobConf.setMapperClass(MapClass.class);

    // NOTE(review): the original comment claimed "Only one mapper is
    // permitted", yet the count comes from getMapperNum(); behavior is
    // kept as-is — confirm the intended mapper count.
    jobConf.setNumMapTasks(getMapperNum());
    jobConf.setNumReduceTasks(0);

    try {
        FileInputFormat.setInputPaths(jobConf, getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, getDestination().getPath());
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IllegalAccessException e) {
        throw new ProcessorExecutionException(e);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.layout.gfr.AttractiveForceDisp.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(context, AttractiveForceDisp.class);
    jobConf.setJobName("AttractiveForceDisp");

    // Sequence files on both ends keep the data machine-processable.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(LabeledAdjSetVertex.class);

    jobConf.setMapperClass(MapClass.class);
    // No combiner: the reducer's logic requires the complete set of
    // values for a key, so partial pre-aggregation would break it.
    jobConf.setReducerClass(ReduceClass.class);

    jobConf.setNumMapTasks(getMapperNum());
    jobConf.setNumReduceTasks(getReducerNum());

    // Compress intermediate map output to reduce shuffle traffic.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    try {
        FileInputFormat.setInputPaths(jobConf, getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, getDestination().getPath());
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IllegalAccessException e) {
        throw new ProcessorExecutionException(e);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.layout.gfr.CoordinatesAdjust.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(context, CoordinatesAdjust.class);
    jobConf.setJobName("CoordinatesAdjust");

    // Sequence files on both ends keep the data machine-processable.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(LabeledAdjSetVertex.class);

    // Map-only job: neither combiner nor reducer is configured.
    jobConf.setMapperClass(MapClass.class);
    jobConf.setNumMapTasks(getMapperNum());
    jobConf.setNumReduceTasks(0);

    try {
        FileInputFormat.setInputPaths(jobConf, getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, getDestination().getPath());
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IllegalAccessException e) {
        throw new ProcessorExecutionException(e);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.layout.gfr.DisplacementSummarize.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(context, DisplacementSummarize.class);
    jobConf.setJobName("DisplacementSummarize");

    // Sequence files on both ends keep the data machine-processable.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(LabeledAdjSetVertex.class);

    jobConf.setMapperClass(MapClass.class);
    // No combiner: the reducer's logic requires the complete set of
    // values for a key, so partial pre-aggregation would break it.
    jobConf.setReducerClass(ReduceClass.class);

    jobConf.setNumMapTasks(getMapperNum());
    jobConf.setNumReduceTasks(getReducerNum());

    // Compress intermediate map output to reduce shuffle traffic.
    jobConf.setCompressMapOutput(true);
    jobConf.setMapOutputCompressorClass(GzipCodec.class);

    try {
        FileInputFormat.setInputPaths(jobConf, getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, getDestination().getPath());
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IllegalAccessException e) {
        throw new ProcessorExecutionException(e);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}

From source file:org.sf.xrime.algorithms.layout.gfr.RandomInitialLayoutGenerate.java

License:Apache License

@Override
public void execute() throws ProcessorExecutionException {
    JobConf jobConf = new JobConf(context, RandomInitialLayoutGenerate.class);
    jobConf.setJobName("RandomInitialLayoutGenerate");

    // Sequence files on both ends keep the data machine-processable.
    jobConf.setInputFormat(SequenceFileInputFormat.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    // Keys are vertex identifiers (strings); values are the vertexes.
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(LabeledAdjSetVertex.class);

    // Map-only job: neither combiner nor reducer is configured.
    jobConf.setMapperClass(MapClass.class);

    // A single mapper keeps the random-number generation well-behaved.
    jobConf.setNumMapTasks(1);
    jobConf.setNumReduceTasks(0);

    try {
        FileInputFormat.setInputPaths(jobConf, getSource().getPath());
        FileOutputFormat.setOutputPath(jobConf, getDestination().getPath());
        this.runningJob = JobClient.runJob(jobConf);
    } catch (IllegalAccessException e) {
        throw new ProcessorExecutionException(e);
    } catch (IOException e) {
        throw new ProcessorExecutionException(e);
    }
}