Example usage for org.apache.hadoop.conf Configuration getInt

Introduction

This page presents example usages of the org.apache.hadoop.conf.Configuration.getInt method, collected from open-source projects.

Prototype

public int getInt(String name, int defaultValue) 

Document

Get the value of the name property as an int. If no such property exists, the provided default value is returned; if a value exists but is not a valid int, a NumberFormatException is thrown.
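
A minimal, self-contained sketch of this behavior follows: read an int-valued property with a fallback default, then write one back with setInt. The property name my.app.num.threads is hypothetical and used only for illustration.

import org.apache.hadoop.conf.Configuration;

public class GetIntExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Nothing is set under this key, so getInt returns the default (4)
        int threads = conf.getInt("my.app.num.threads", 4);
        System.out.println("threads = " + threads); // prints: threads = 4

        // After setInt, getInt returns the stored value and ignores the default
        conf.setInt("my.app.num.threads", 8);
        System.out.println(conf.getInt("my.app.num.threads", 4)); // prints: 8
    }
}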

Usage

From source file:edu.umn.cs.spatialHadoop.nasa.HTTPFileSystem.java

License:Open Source License

@Override
public void initialize(URI uri, Configuration conf) throws IOException {
    super.initialize(uri, conf);
    // get host information from uri (overrides info in conf)
    String host = uri.getHost();
    host = (host == null) ? conf.get("fs.http.host", null) : host;
    if (host == null) {
        throw new IOException("Invalid host specified");
    }
    conf.set("fs.http.host", host);

    // get port information from uri, (overrides info in conf)
    int port = uri.getPort();
    port = (port == -1) ? DEFAULT_PORT : port;
    conf.setInt("fs.http.host.port", port);

    setConf(conf);
    this.uri = uri;
    retries = conf.getInt(HTTP_RETRIES, 3);
}
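
A design note on the snippet above: the configuration key is kept in a class constant (HTTP_RETRIES) so that readers and writers of the property cannot drift apart, while the default number of attempts (3) is supplied inline at the getInt call site.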

From source file:edu.umn.cs.spatialHadoop.visualization.CanvasOutputFormat.java

License:Open Source License

protected static void mergeImages(final Configuration conf, final Path outPath)
        throws IOException, InterruptedException {
    final int width = conf.getInt("width", 1000);
    final int height = conf.getInt("height", 1000);
    final Rectangle inputMBR = (Rectangle) OperationsParams.getShape(conf, InputMBR);

    final boolean vflip = conf.getBoolean("vflip", true);

    // List all output files resulting from reducers
    final FileSystem outFs = outPath.getFileSystem(conf);
    final FileStatus[] resultFiles = outFs.listStatus(outPath, new PathFilter() {
        @Override
        public boolean accept(Path path) {
            return path.toUri().getPath().contains("part-");
        }
    });

    if (resultFiles.length == 0) {
        System.err.println("Error! Couldn't find any partial output. Exiting!");
        return;
    }
    System.out.println(System.currentTimeMillis() + ": Merging " + resultFiles.length + " layers into one");
    List<Canvas> intermediateLayers = Parallel.forEach(resultFiles.length,
            new Parallel.RunnableRange<Canvas>() {
                @Override
                public Canvas run(int i1, int i2) {
                    Plotter plotter = Plotter.getPlotter(conf);
                    // The canvas that contains the merge of all assigned layers
                    Canvas finalLayer = null;
                    Canvas tempLayer = plotter.createCanvas(1, 1, new Rectangle());
                    for (int i = i1; i < i2; i++) {
                        FileStatus resultFile = resultFiles[i];
                        try {
                            FSDataInputStream inputStream = outFs.open(resultFile.getPath());
                            while (inputStream.getPos() < resultFile.getLen()) {
                                if (tempLayer == finalLayer) {
                                    // More than one layer. Create a separate final layer to merge
                                    finalLayer = plotter.createCanvas(width, height, inputMBR);
                                    plotter.merge(finalLayer, tempLayer);
                                }
                                tempLayer.readFields(inputStream);

                                if (finalLayer == null) {
                                    // First layer. Treat it as a final layer to avoid merging
                                    // if it is the only layer
                                    finalLayer = tempLayer;
                                } else {
                                    // More than one layer; merge into the final layer
                                    plotter.merge(finalLayer, tempLayer);
                                }
                            }
                            inputStream.close();
                        } catch (IOException e) {
                            System.err.println("Error reading " + resultFile);
                            e.printStackTrace();
                        }
                    }
                    return finalLayer;
                }
            }, conf.getInt("parallel", Runtime.getRuntime().availableProcessors()));

    // Merge all intermediate layers into one final layer
    Plotter plotter = Plotter.getPlotter(conf);
    Canvas finalLayer;
    if (intermediateLayers.size() == 1) {
        finalLayer = intermediateLayers.get(0);
    } else {
        finalLayer = plotter.createCanvas(width, height, inputMBR);
        for (Canvas intermediateLayer : intermediateLayers) {
            plotter.merge(finalLayer, intermediateLayer);
        }
    }

    // Finally, write the resulting image to the given output path
    System.out.println(System.currentTimeMillis() + ": Writing final image");
    outFs.delete(outPath, true); // Delete old (non-combined) images
    FSDataOutputStream outputFile = outFs.create(outPath);
    plotter.writeImage(finalLayer, outputFile, vflip);
    outputFile.close();
}
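
Note the last argument to Parallel.forEach above: conf.getInt("parallel", Runtime.getRuntime().availableProcessors()) lets users cap the merge parallelism through the configuration, defaulting to one worker per available processor.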

From source file:edu.umn.cs.spatialHadoop.visualization.Plotter.java

License:Open Source License

/**
 * Configures this plotter according to the MapReduce program.
 * @param conf the job configuration to read the visualization settings from
 */
public void configure(Configuration conf) {
    this.inputMBR = (Rectangle) OperationsParams.getShape(conf, "mbr");
    this.imageWidth = conf.getInt("width", 1000);
    this.imageHeight = conf.getInt("height", 1000);
}

From source file:edu.umn.cs.spatialHadoop.visualization.RasterOutputFormat.java

License:Open Source License

protected static void mergeImages(final Configuration conf, final Path outPath)
        throws IOException, InterruptedException {
    final int width = conf.getInt("width", 1000);
    final int height = conf.getInt("height", 1000);
    final Rectangle inputMBR = (Rectangle) OperationsParams.getShape(conf, InputMBR);

    final boolean vflip = conf.getBoolean("vflip", true);

    // List all output files resulting from reducers
    final FileSystem outFs = outPath.getFileSystem(conf);
    final FileStatus[] resultFiles = outFs.listStatus(outPath, new PathFilter() {
        @Override
        public boolean accept(Path path) {
            return path.toUri().getPath().contains("part-");
        }
    });

    if (resultFiles.length == 0) {
        System.err.println("Error! Couldn't find any partial output. Exiting!");
        return;
    }
    System.out.println(System.currentTimeMillis() + ": Merging " + resultFiles.length + " layers into one");
    Vector<RasterLayer> intermediateLayers = Parallel.forEach(resultFiles.length,
            new Parallel.RunnableRange<RasterLayer>() {
                @Override
                public RasterLayer run(int i1, int i2) {
                    Rasterizer rasterizer = Rasterizer.getRasterizer(conf);
                    // The raster layer that contains the merge of all assigned layers
                    RasterLayer finalLayer = null;
                    RasterLayer tempLayer = rasterizer.createRaster(1, 1, new Rectangle());
                    for (int i = i1; i < i2; i++) {
                        FileStatus resultFile = resultFiles[i];
                        try {
                            FSDataInputStream inputStream = outFs.open(resultFile.getPath());
                            while (inputStream.getPos() < resultFile.getLen()) {
                                if (tempLayer == finalLayer) {
                                    // More than one layer. Create a separate final layer to merge
                                    finalLayer = rasterizer.createRaster(width, height, inputMBR);
                                    rasterizer.merge(finalLayer, tempLayer);
                                }
                                tempLayer.readFields(inputStream);

                                if (finalLayer == null) {
                                    // First layer. Treat it as a final layer to avoid merging
                                    // if it is the only layer
                                    finalLayer = tempLayer;
                                } else {
                                    // More than one layer; merge into the final layer
                                    rasterizer.merge(finalLayer, tempLayer);
                                }
                            }
                            inputStream.close();
                        } catch (IOException e) {
                            System.err.println("Error reading " + resultFile);
                            e.printStackTrace();
                        }
                    }
                    return finalLayer;
                }
            });

    // Merge all intermediate layers into one final layer
    Rasterizer rasterizer = Rasterizer.getRasterizer(conf);
    RasterLayer finalLayer;
    if (intermediateLayers.size() == 1) {
        finalLayer = intermediateLayers.elementAt(0);
    } else {
        finalLayer = rasterizer.createRaster(width, height, inputMBR);
        for (RasterLayer intermediateLayer : intermediateLayers) {
            rasterizer.merge(finalLayer, intermediateLayer);
        }
    }

    // Finally, write the resulting image to the given output path
    System.out.println(System.currentTimeMillis() + ": Writing final image");
    outFs.delete(outPath, true); // Delete old (non-combined) images
    FSDataOutputStream outputFile = outFs.create(outPath);
    rasterizer.writeImage(finalLayer, outputFile, vflip);
    outputFile.close();
}

From source file:edu.umn.cs.spatialHadoop.visualization.SingleLevelPlot.java

License:Open Source License

/**
 * Generates a single level using a MapReduce job and returns the created job.
 * @param inFiles the input file(s) containing the shapes to plot
 * @param outFile the path of the output image
 * @param plotterClass the plotter to use for drawing the shapes
 * @param params additional operation parameters
 * @return the created job
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
public static Job plotMapReduce(Path[] inFiles, Path outFile, Class<? extends Plotter> plotterClass,
        OperationsParams params) throws IOException, InterruptedException, ClassNotFoundException {
    Plotter plotter;
    try {
        plotter = plotterClass.newInstance();
    } catch (InstantiationException e) {
        throw new RuntimeException("Error creating plotter", e);
    } catch (IllegalAccessException e) {
        throw new RuntimeException("Error creating plotter", e);
    }

    Job job = new Job(params, "SingleLevelPlot");
    job.setJarByClass(SingleLevelPlot.class);
    job.setJobName("SingleLevelPlot");
    // Set plotter
    Configuration conf = job.getConfiguration();
    Plotter.setPlotter(conf, plotterClass);
    // Set input file MBR
    Rectangle inputMBR = (Rectangle) params.getShape("mbr");
    Rectangle drawRect = (Rectangle) params.getShape("rect");
    if (inputMBR == null)
        inputMBR = drawRect != null ? drawRect : FileMBR.fileMBR(inFiles, params);
    OperationsParams.setShape(conf, InputMBR, inputMBR);
    if (drawRect != null)
        OperationsParams.setShape(conf, SpatialInputFormat3.InputQueryRange, drawRect);

    // Adjust width and height if aspect ratio is to be kept
    int imageWidth = conf.getInt("width", 1000);
    int imageHeight = conf.getInt("height", 1000);
    if (params.getBoolean("keepratio", true)) {
        // Adjust width and height to maintain aspect ratio
        if (inputMBR.getWidth() / inputMBR.getHeight() > (double) imageWidth / imageHeight) {
            // Fix width and change height
            imageHeight = (int) (inputMBR.getHeight() * imageWidth / inputMBR.getWidth());
            // Make divisible by two for compatibility with ffmpeg
            if (imageHeight % 2 == 1)
                imageHeight--;
            conf.setInt("height", imageHeight);
        } else {
            imageWidth = (int) (inputMBR.getWidth() * imageHeight / inputMBR.getHeight());
            conf.setInt("width", imageWidth);
        }
    }

    boolean merge = conf.getBoolean("merge", true);
    // Set input and output
    job.setInputFormatClass(SpatialInputFormat3.class);
    SpatialInputFormat3.setInputPaths(job, inFiles);
    if (conf.getBoolean("output", true)) {
        if (merge) {
            job.setOutputFormatClass(CanvasOutputFormat.class);
            conf.setClass("mapred.output.committer.class", CanvasOutputFormat.ImageWriterOld.class,
                    org.apache.hadoop.mapred.OutputCommitter.class);
        } else {
            job.setOutputFormatClass(ImageOutputFormat.class);
        }
        CanvasOutputFormat.setOutputPath(job, outFile);
    } else {
        job.setOutputFormatClass(NullOutputFormat.class);
    }

    // Set mapper and reducer based on the partitioning scheme
    String partition = conf.get("partition", "none");
    ClusterStatus clusterStatus = new JobClient(new JobConf()).getClusterStatus();
    if (partition.equalsIgnoreCase("none")) {
        LOG.info("Using no-partition plot");
        job.setMapperClass(NoPartitionPlotMap.class);
        job.setCombinerClass(NoPartitionPlotCombine.class);
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(plotter.getCanvasClass());
        if (merge) {
            int numSplits = new SpatialInputFormat3().getSplits(job).size();
            job.setReducerClass(NoPartitionPlotReduce.class);
            // Set number of reduce tasks according to cluster status
            int maxReduce = Math.max(1, clusterStatus.getMaxReduceTasks() * 7 / 8);
            job.setNumReduceTasks(Math.max(1, Math.min(maxReduce, numSplits / maxReduce)));
        } else {
            job.setNumReduceTasks(0);
        }
    } else {
        LOG.info("Using repartition plot");
        Partitioner partitioner;
        if (partition.equals("pixel")) {
            // Special case for pixel level partitioning as it depends on the
            // visualization parameters
            partitioner = new GridPartitioner(inputMBR, imageWidth, imageHeight);
        } else if (partition.equals("grid")) {
            int numBlocks = 0;
            for (Path in : inFiles) {
                FileSystem fs = in.getFileSystem(params);
                long size = FileUtil.getPathSize(fs, in);
                long blockSize = fs.getDefaultBlockSize(in);
                numBlocks += Math.ceil(size / (double) blockSize);
            }
            int numPartitions = numBlocks * 1000;
            int gridSize = (int) Math.ceil(Math.sqrt(numPartitions));
            partitioner = new GridPartitioner(inputMBR, gridSize, gridSize);
        } else {
            // Use a standard partitioner as created by the indexer
            partitioner = Indexer.createPartitioner(inFiles, outFile, conf, partition);
        }
        Shape shape = params.getShape("shape");
        job.setMapperClass(RepartitionPlotMap.class);
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(shape.getClass());
        job.setReducerClass(RepartitionPlotReduce.class);
        // Set number of reducers according to cluster size
        job.setNumReduceTasks(Math.max(1, clusterStatus.getMaxReduceTasks() * 9 / 10));
        Partitioner.setPartitioner(conf, partitioner);
    }

    // Use multithreading in case the job is running locally
    conf.setInt(LocalJobRunner.LOCAL_MAX_MAPS, Runtime.getRuntime().availableProcessors());

    // Start the job
    if (params.getBoolean("background", false)) {
        // Run in background
        job.submit();
    } else {
        job.waitForCompletion(params.getBoolean("verbose", false));
    }
    return job;
}
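
This example also shows the read-adjust-write roundtrip on the image dimensions: width and height are read with getInt (defaulting to 1000), recomputed to preserve the input MBR's aspect ratio, and written back with setInt so that downstream components, such as Plotter.configure above, observe the adjusted values.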

From source file:edu.usc.pgroup.louvain.hadoop.MapCommunity.java

License:Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration configuration = context.getConfiguration();

    verbose = configuration.getBoolean(LouvainMR.VERBOSE, false);
    nb_pass = configuration.getInt(LouvainMR.NB_PASS, 0);
    precision = configuration.getDouble(LouvainMR.PRECISION, 0.000001);
    display_level = configuration.getInt(LouvainMR.DISPLAY_LEVEL, -1);
    outpath = configuration.get(LouvainMR.OUT_PATH);

    System.out.println("verbose = " + verbose);
    System.out.println("display_level = " + display_level);
    System.out.println("outpath = " + outpath);

    super.setup(context);

}

From source file:edu.usc.pgroup.louvain.hadoop.ReduceCommunity.java

License:Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration configuration = context.getConfiguration();
    verbose = configuration.getBoolean(LouvainMR.VERBOSE, false);
    precision = configuration.getDouble(LouvainMR.PRECISION, 0.000001);
    display_level = configuration.getInt(LouvainMR.DISPLAY_LEVEL, -1);
    this.outpath = configuration.get(LouvainMR.OUT_PATH);
    System.out.println("verbose = " + verbose);
    System.out.println("display_level = " + display_level);
    System.out.println("outpath = " + outpath);

    super.setup(context);
}

From source file:eu.scape_project.spacip.Spacip.java

License:Apache License

/**
 * Start Hadoop job
 *
 * @param conf Hadoop job configuration
 */
public static void startHadoopJob(Configuration conf) {
    try {
        Job job = new Job(conf, "spacip_" + conf.getInt("num_items_per_task", 0));

        // local debugging (pseudo-distributed)
        //             job.getConfiguration().set("mapred.job.tracker", "local");
        //             job.getConfiguration().set("fs.default.name", "file:///");

        job.setJarByClass(Spacip.class);

        job.setMapperClass(Spacip.ContainerProcessingMapper.class);
        // No reducer needed
        job.setNumReduceTasks(0);

        job.setInputFormatClass(TextInputFormat.class);

        MultipleOutputs.addNamedOutput(job, "keyfilmapping", TextOutputFormat.class, Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job, "tomarinput", TextOutputFormat.class, Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job, "error", TextOutputFormat.class, Text.class, Text.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(ObjectWritable.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(ObjectWritable.class);

        TextInputFormat.addInputPath(job, new Path(config.getDirStr()));
        String outpath = StringUtils.normdir(conf.get("joboutput_hdfs_path", "spacip_joboutput"))
                + System.currentTimeMillis();
        FileOutputFormat.setOutputPath(job, new Path(outpath));
        LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);
        job.waitForCompletion(true);
        // print output path (taverna integration)
        System.out.print(outpath);
        System.exit(0);
    } catch (Exception e) {
        logger.error("I/O error", e);
    }
}
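
Here getInt serves an unusual purpose: conf.getInt("num_items_per_task", 0) embeds the configured batch size (default 0) into the job name, making individual runs easier to identify. The next example uses the same trick.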

From source file:eu.scape_project.tpid.TomarPrepareInputdata.java

License:Apache License

/**
 * Start Hadoop job
 *
 * @param conf Hadoop job configuration
 */
public static void startHadoopJob(Configuration conf) {
    try {
        Job job = new Job(conf, "tpid_" + conf.getInt("num_items_per_task", 0));
        if (conf.getBoolean("pseudo_distributed", false)) {
            job.getConfiguration().set("mapred.job.tracker", "local");
            job.getConfiguration().set("fs.default.name", "file:///");
        }
        job.setJarByClass(TomarPrepareInputdata.class);

        job.setMapperClass(TomarPrepareInputdata.ContainerProcessingMapper.class);
        // No reducer needed
        job.setNumReduceTasks(0);

        job.setInputFormatClass(TextInputFormat.class);

        MultipleOutputs.addNamedOutput(job, "keyfilmapping", TextOutputFormat.class, Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job, "tomarinput", TextOutputFormat.class, Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job, "error", TextOutputFormat.class, Text.class, Text.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(ObjectWritable.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(ObjectWritable.class);

        TextInputFormat.addInputPath(job, new Path(config.getInputStr()));
        String outpath = StringUtils.normdir(conf.get("joboutput_hdfs_path", "tpid_joboutput"))
                + System.currentTimeMillis();
        FileOutputFormat.setOutputPath(job, new Path(outpath));
        LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);
        job.waitForCompletion(true);
        // print output path (taverna integration)
        System.out.print(outpath);
        System.exit(0);
    } catch (Exception e) {
        LOG.error("I/O error", e);
    }
}

From source file:ezbake.amino.impl.dataloader.WarehausNumberLoader.java

License:Apache License

@Override
public void setConfig(Configuration config) {
    super.setConfig(config);
    number2Max = config.getInt("number2-max", 500);
    logger.info("Number2-max set to " + number2Max);
}