Example usage for org.apache.hadoop.conf Configuration getFloat

Introduction

This page collects example usages of org.apache.hadoop.conf.Configuration.getFloat, drawn from open-source projects.

Prototype

public float getFloat(String name, float defaultValue) 

Document

Get the value of the name property as a float. If the property is not set, defaultValue is returned; if the stored value cannot be parsed as a float, a NumberFormatException is thrown.
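
A minimal, self-contained sketch of the call pattern (the property names are illustrative, not standard Hadoop keys):

import org.apache.hadoop.conf.Configuration;

public class GetFloatExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.setFloat("example.buffer.percent", 0.75f); // hypothetical key

        // Property is set: the stored value is returned.
        float set = conf.getFloat("example.buffer.percent", 0.5f);      // 0.75f

        // Property is absent: the supplied default is returned.
        float missing = conf.getFloat("example.missing.percent", 0.5f); // 0.5f

        System.out.println(set + ", " + missing);
    }
}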

Usage

From source file:org.apache.tez.runtime.library.conf.TestUnorderedPartitionedKVEdgeConfigurer.java

License:Apache License

@Test
public void testCommonConf() {

    Configuration fromConf = new Configuration(false);
    fromConf.set("test.conf.key.1", "confkey1");
    fromConf.setBoolean(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD, false);
    fromConf.setFloat(TezJobConfig.TEZ_RUNTIME_SHUFFLE_INPUT_BUFFER_PERCENT, 0.11f);
    fromConf.setInt(TezJobConfig.TEZ_RUNTIME_UNORDERED_OUTPUT_BUFFER_SIZE_MB, 123);
    fromConf.set("io.shouldExist", "io");
    Map<String, String> additionalConfs = new HashMap<String, String>();
    additionalConfs.put("test.key.2", "key2");
    additionalConfs.put(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD_BYTES, "1111");
    additionalConfs.put(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT, "0.22f");
    additionalConfs.put(TezJobConfig.TEZ_RUNTIME_UNORDERED_OUTPUT_MAX_PER_BUFFER_SIZE_BYTES, "2222");
    additionalConfs.put("file.shouldExist", "file");

    UnorderedPartitionedKVEdgeConfigurer.Builder builder = UnorderedPartitionedKVEdgeConfigurer
            .newBuilder("KEY", "VALUE", "PARTITIONER", null).setAdditionalConfiguration("fs.shouldExist", "fs")
            .setAdditionalConfiguration("test.key.1", "key1")
            .setAdditionalConfiguration(TezJobConfig.TEZ_RUNTIME_IO_FILE_BUFFER_SIZE, "3333")
            .setAdditionalConfiguration(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT, "0.33f")
            .setAdditionalConfiguration(additionalConfs).setFromConfiguration(fromConf);

    UnorderedPartitionedKVEdgeConfigurer configuration = builder.build();

    byte[] outputBytes = configuration.getOutputPayload();
    byte[] inputBytes = configuration.getInputPayload();

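    // Round-trip: the configs were serialized to payload bytes above and are rebuilt below.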
    OnFileUnorderedPartitionedKVOutputConfiguration rebuiltOutput = new OnFileUnorderedPartitionedKVOutputConfiguration();
    rebuiltOutput.fromByteArray(outputBytes);
    ShuffledUnorderedKVInputConfiguration rebuiltInput = new ShuffledUnorderedKVInputConfiguration();
    rebuiltInput.fromByteArray(inputBytes);

    Configuration outputConf = rebuiltOutput.conf;
    Configuration inputConf = rebuiltInput.conf;

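    // Output side: input-only shuffle keys must not leak into the output configuration.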
    assertEquals(false, outputConf.getBoolean(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD, true));
    assertEquals(1111, outputConf.getInt(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD_BYTES, 0));
    assertEquals(3333, outputConf.getInt(TezJobConfig.TEZ_RUNTIME_IO_FILE_BUFFER_SIZE, 0));
    assertNull(outputConf.get(TezJobConfig.TEZ_RUNTIME_SHUFFLE_INPUT_BUFFER_PERCENT));
    assertNull(outputConf.get(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT));
    assertNull(outputConf.get(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT));
    assertEquals(123, outputConf.getInt(TezJobConfig.TEZ_RUNTIME_UNORDERED_OUTPUT_BUFFER_SIZE_MB, 0));
    assertEquals(2222,
            outputConf.getInt(TezJobConfig.TEZ_RUNTIME_UNORDERED_OUTPUT_MAX_PER_BUFFER_SIZE_BYTES, 0));
    assertEquals("io", outputConf.get("io.shouldExist"));
    assertEquals("file", outputConf.get("file.shouldExist"));
    assertEquals("fs", outputConf.get("fs.shouldExist"));

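    // Input side: the float fractions set earlier should round-trip through getFloat.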
    assertEquals(false, inputConf.getBoolean(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD, true));
    assertEquals(1111, inputConf.getInt(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD_BYTES, 0));
    assertEquals(3333, inputConf.getInt(TezJobConfig.TEZ_RUNTIME_IO_FILE_BUFFER_SIZE, 0));
    assertEquals(0.11f, inputConf.getFloat(TezJobConfig.TEZ_RUNTIME_SHUFFLE_INPUT_BUFFER_PERCENT, 0.0f),
            0.001f);
    assertEquals(0.22f, inputConf.getFloat(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT, 0.0f),
            0.001f);
    assertEquals(0.33f, inputConf.getFloat(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT, 0.0f), 0.001f);
    assertNull(inputConf.get(TezJobConfig.TEZ_RUNTIME_UNORDERED_OUTPUT_BUFFER_SIZE_MB));
    assertNull(inputConf.get(TezJobConfig.TEZ_RUNTIME_UNORDERED_OUTPUT_MAX_PER_BUFFER_SIZE_BYTES));
    assertEquals("io", inputConf.get("io.shouldExist"));
    assertEquals("file", inputConf.get("file.shouldExist"));
    assertEquals("fs", inputConf.get("fs.shouldExist"));

}

From source file:org.apache.tez.runtime.library.conf.TestUnorderedUnpartitionedKVEdgeConfigurer.java

License:Apache License

@Test
public void testCommonConf() {

    Configuration fromConf = new Configuration(false);
    fromConf.set("test.conf.key.1", "confkey1");
    fromConf.setBoolean(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD, false);
    fromConf.setFloat(TezJobConfig.TEZ_RUNTIME_SHUFFLE_INPUT_BUFFER_PERCENT, 0.11f);
    fromConf.set("io.shouldExist", "io");
    Map<String, String> additionalConfs = new HashMap<String, String>();
    additionalConfs.put("test.key.2", "key2");
    additionalConfs.put(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD_BYTES, "1111");
    additionalConfs.put(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT, "0.22f");
    additionalConfs.put("file.shouldExist", "file");

    UnorderedUnpartitionedKVEdgeConfigurer.Builder builder = UnorderedUnpartitionedKVEdgeConfigurer
            .newBuilder("KEY", "VALUE").setAdditionalConfiguration("fs.shouldExist", "fs")
            .setAdditionalConfiguration("test.key.1", "key1")
            .setAdditionalConfiguration(TezJobConfig.TEZ_RUNTIME_IO_FILE_BUFFER_SIZE, "3333")
            .setAdditionalConfiguration(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT, "0.33f")
            .setAdditionalConfiguration(additionalConfs).setFromConfiguration(fromConf);

    UnorderedUnpartitionedKVEdgeConfigurer configuration = builder.build();

    byte[] outputBytes = configuration.getOutputPayload();
    byte[] inputBytes = configuration.getInputPayload();

    OnFileUnorderedKVOutputConfiguration rebuiltOutput = new OnFileUnorderedKVOutputConfiguration();
    rebuiltOutput.fromByteArray(outputBytes);
    ShuffledUnorderedKVInputConfiguration rebuiltInput = new ShuffledUnorderedKVInputConfiguration();
    rebuiltInput.fromByteArray(inputBytes);

    Configuration outputConf = rebuiltOutput.conf;
    Configuration inputConf = rebuiltInput.conf;

    assertEquals(false, outputConf.getBoolean(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD, true));
    assertEquals(1111, outputConf.getInt(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD_BYTES, 0));
    assertEquals(3333, outputConf.getInt(TezJobConfig.TEZ_RUNTIME_IO_FILE_BUFFER_SIZE, 0));
    assertNull(outputConf.get(TezJobConfig.TEZ_RUNTIME_SHUFFLE_INPUT_BUFFER_PERCENT));
    assertNull(outputConf.get(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT));
    assertNull(outputConf.get(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT));
    assertEquals("io", outputConf.get("io.shouldExist"));
    assertEquals("file", outputConf.get("file.shouldExist"));
    assertEquals("fs", outputConf.get("fs.shouldExist"));

    assertEquals(false, inputConf.getBoolean(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD, true));
    assertEquals(1111, inputConf.getInt(TezJobConfig.TEZ_RUNTIME_IFILE_READAHEAD_BYTES, 0));
    assertEquals(3333, inputConf.getInt(TezJobConfig.TEZ_RUNTIME_IO_FILE_BUFFER_SIZE, 0));
    assertEquals(0.11f, inputConf.getFloat(TezJobConfig.TEZ_RUNTIME_SHUFFLE_INPUT_BUFFER_PERCENT, 0.0f),
            0.001f);
    assertEquals(0.22f, inputConf.getFloat(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT, 0.0f),
            0.001f);
    assertEquals(0.33f, inputConf.getFloat(TezJobConfig.TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT, 0.0f), 0.001f);
    assertEquals("io", inputConf.get("io.shouldExist"));
    assertEquals("file", inputConf.get("file.shouldExist"));
    assertEquals("fs", inputConf.get("fs.shouldExist"));

}

From source file:org.apache.tez.runtime.library.shuffle.common.impl.SimpleFetchedInputAllocator.java

License:Apache License

public SimpleFetchedInputAllocator(String uniqueIdentifier, Configuration conf, long maxTaskAvailableMemory,
        long memoryAvailable) {
    this.conf = conf;
    this.maxAvailableTaskMemory = maxTaskAvailableMemory;
    this.initialMemoryAvailable = memoryAvailable;

    this.fileNameAllocator = new TezTaskOutputFiles(conf, uniqueIdentifier);
    this.localDirAllocator = new LocalDirAllocator(TezRuntimeFrameworkConfigs.LOCAL_DIRS);

    // Setup configuration
    final float maxInMemCopyUse = conf.getFloat(
            TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_BUFFER_PERCENT,
            TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_BUFFER_PERCENT_DEFAULT);
    if (maxInMemCopyUse > 1.0 || maxInMemCopyUse < 0.0) {
        throw new IllegalArgumentException("Invalid value for "
                + TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_BUFFER_PERCENT + ": " + maxInMemCopyUse);
    }

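    // Requested memory: TEZ_RUNTIME_TASK_MEMORY (defaulting to the task budget,
    // capped at Integer.MAX_VALUE) scaled by the fetch-buffer fraction.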
    long memReq = (long) (conf.getLong(Constants.TEZ_RUNTIME_TASK_MEMORY,
            Math.min(maxAvailableTaskMemory, Integer.MAX_VALUE)) * maxInMemCopyUse);

    if (memReq <= this.initialMemoryAvailable) {
        this.memoryLimit = memReq;
    } else {
        this.memoryLimit = initialMemoryAvailable;
    }

    LOG.info("RequestedMem=" + memReq + ", Allocated: " + this.memoryLimit);

    final float singleShuffleMemoryLimitPercent = conf.getFloat(
            TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT,
            TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT_DEFAULT);
    if (singleShuffleMemoryLimitPercent <= 0.0f || singleShuffleMemoryLimitPercent > 1.0f) {
        throw new IllegalArgumentException(
                "Invalid value for " + TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT + ": "
                        + singleShuffleMemoryLimitPercent);
    }

    this.maxSingleShuffleLimit = (long) (memoryLimit * singleShuffleMemoryLimitPercent);

    LOG.info("SimpleInputManager -> " + "MemoryLimit: " + this.memoryLimit + ", maxSingleMemLimit: "
            + this.maxSingleShuffleLimit);
}
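
The read-then-validate pattern above is easy to factor out. A minimal sketch, not part of the Tez source (the helper name and bounds are illustrative):

// Sketch: read a float with getFloat and enforce a [0, 1] range,
// mirroring the checks in the constructor above.
static float getValidatedFraction(Configuration conf, String key, float defaultValue) {
    float value = conf.getFloat(key, defaultValue);
    if (value < 0.0f || value > 1.0f) {
        throw new IllegalArgumentException("Invalid value for " + key + ": " + value);
    }
    return value;
}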

From source file:org.avenir.regress.LogisticRegressionJob.java

License:Apache License

/**
 * Checks whether the regression has converged according to the configured criteria.
 * @param conf the job configuration
 * @return CONVERGED or NOT_CONVERGED
 * @throws IOException
 */
private int checkConvergence(Configuration conf) throws IOException {
    int status = 0;
    List<String> lines = Utility.getFileLines(conf, "coeff.file.path");

    String convCriteria = conf.get("convergence.criteria", ITER_LIMIT);
    if (convCriteria.equals(ITER_LIMIT)) {
        int iterLimit = conf.getInt("iteration.limit", 10);
        status = lines.size() < iterLimit ? NOT_CONVERGED : CONVERGED;
    } else {
        double[] prevCoeff = Utility.doubleArrayFromString(lines.get(lines.size() - 2));
        double[] curCoeff = Utility.doubleArrayFromString(lines.get(lines.size() - 1));
        LogisticRegressor regressor = new LogisticRegressor(prevCoeff);
        regressor.setAggregates(curCoeff);
        regressor.setConvergeThreshold((double) conf.getFloat("convergence.threshold", (float) 5.0));
        if (convCriteria.equals(ALL_BELOW_THRESHOLD)) {
            status = regressor.isAllConverged() ? CONVERGED : NOT_CONVERGED;
        } else if (convCriteria.equals(AVERAGE_BELOW_THRESHOLD)) {
            status = regressor.isAverageConverged() ? CONVERGED : NOT_CONVERGED;
        } else {
            throw new IllegalArgumentException("Invalid convergence criteria:" + convCriteria);
        }
    }

    return status;
}
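
Note the cast when reading convergence.threshold: the value is read with getFloat and widened to double for setConvergeThreshold. Configuration only gained a getDouble accessor in later Hadoop releases, so the float-plus-cast idiom is common in code of this vintage.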

From source file:org.creativecommons.nutch.CCQueryFilter.java

License:Apache License

public void setConf(Configuration conf) {
    this.conf = conf;
    setBoost(conf.getFloat("query.cc.boost", 0.0f));
}

From source file:org.data2semantics.giraph.pagerank.numerical.RandomWalkWorkerContext.java

License:Apache License

@Override
public void preApplication() throws InstantiationException, IllegalAccessException {
    Configuration configuration = getContext().getConfiguration();
    MAX_SUPERSTEPS = configuration.getInt(RandomWalkComputation.MAX_SUPERSTEPS, DEFAULT_MAX_SUPERSTEPS);
    TELEPORTATION_PROBABILITY = configuration.getFloat(RandomWalkComputation.TELEPORTATION_PROBABILITY,
            DEFAULT_TELEPORTATION_PROBABILITY);
    SOURCES = initializeSources(configuration);
}

From source file:org.dennisit.graph.RandomWalkWorkerContext.java

License:Apache License

@Override
public void preApplication() throws InstantiationException, IllegalAccessException {
    Configuration configuration = getContext().getConfiguration();
    MAX_SUPERSTEPS = configuration.getInt(RandomWalkVertex.MAX_SUPERSTEPS, DEFAULT_MAX_SUPERSTEPS);
    TELEPORTATION_PROBABILITY = configuration.getFloat(RandomWalkVertex.TELEPORTATION_PROBABILITY,
            DEFAULT_TELEPORTATION_PROBABILITY);
    SOURCES = initializeSources(configuration);
}

From source file:org.hedera.io.input.WikiRevisionInputFormat.java

License:Apache License

/** 
 * This code is copied from StreamWikiDumpNewInputFormat.java by Yusuke Matsubara.
 * Thanks to Tu Meteora for adjusting the code to the new mapreduce framework
 * @param jc the job context
 * @param file the file to split
 * @param splitSize the target split size in bytes
 * @throws IOException
 */
public List<InputSplit> getSplits(JobContext jc, FileStatus file, long splitSize) throws IOException {

    List<InputSplit> splits = new ArrayList<InputSplit>();
    Path path = file.getPath();

    LOG.info("Splitting file " + path.getName());

    Configuration conf = jc.getConfiguration();
    configure(conf);

    long length = file.getLen();
    FileSystem fs = file.getPath().getFileSystem(conf);
    BlockLocation[] blkLocations = fs.getFileBlockLocations(file, 0, length);
    if ((length != 0) && isSplitable(jc, path)) {
        long bytesRemaining = length;

        SeekableInputStream in = SeekableInputStream.getInstance(path, 0, length, fs, this.compressionCodecs);
        SplitCompressionInputStream is = in.getSplitCompressionInputStream();
        long start = 0;
        long skip = 0;
        if (is != null) {
            start = is.getAdjustedStart();
            length = is.getAdjustedEnd();
            is.close();
            in = null;
        }
        FileSplit split = null;
        Set<Long> processedPageEnds = new HashSet<Long>();
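        // The skip factor (default 1.2) bounds the loop below: keep carving
        // look-ahead splits while bytesRemaining / splitSize exceeds it.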
        float factor = conf.getFloat(KEY_SKIP_FACTOR, 1.2F);

        READLOOP: while (((double) bytesRemaining) / splitSize > factor && bytesRemaining > 0) {
            // prepare matcher
            ByteMatcher matcher;
            {
                long st = Math.min(start + skip + splitSize, length - 1);
                split = makeSplit(path, st, Math.min(splitSize, length - st), blkLocations);
                if (in != null)
                    in.close();
                if (split.getLength() <= 1) {
                    break;
                }
                in = SeekableInputStream.getInstance(split, fs, this.compressionCodecs);
            }
            matcher = new ByteMatcher(in);

            // read until the next page end in the look-ahead split
            while (!matcher.readUntilMatch(END_PAGE_TAG, null, split.getStart() + split.getLength(), null)) {
                if (matcher.getPos() >= length || split.getLength() == length - split.getStart())
                    break READLOOP;
                split = makeSplit(path, split.getStart(),
                        Math.min(split.getLength() + splitSize, length - split.getStart()), blkLocations);
            }
            if (matcher.getLastUnmatchPos() > 0 && matcher.getPos() > matcher.getLastUnmatchPos()
                    && !processedPageEnds.contains(matcher.getPos())) {
                splits.add(makeSplit(path, start, matcher.getPos() - start, blkLocations));
                processedPageEnds.add(matcher.getPos());
                long newstart = Math.max(matcher.getLastUnmatchPos(), start);
                bytesRemaining = length - newstart;
                start = newstart;
                skip = 0;
            } else {
                skip = matcher.getPos() - start;
            }
        }

        if (bytesRemaining > 0 && !processedPageEnds.contains(length)) {
            splits.add(makeSplit(path, length - bytesRemaining, bytesRemaining,
                    blkLocations[blkLocations.length - 1].getHosts()));
        }
        if (in != null)
            in.close();
    } else if (length != 0) {
        splits.add(makeSplit(path, 0, length, blkLocations));
    } else {
        //Create empty hosts array for zero length files
        splits.add(makeSplit(path, 0, length, new String[0]));
    }
    return splits;
}

From source file:org.mrgeo.data.tile.TiledInputFormatContext.java

License:Apache License

public static TiledInputFormatContext load(final Configuration conf) {
    TiledInputFormatContext context = new TiledInputFormatContext();
    context.inputs = new HashSet<String>();
    String strInputs = conf.get(INPUTS);
    if (strInputs != null) {
        String[] confInputs = strInputs.split(",");
        for (String confInput : confInputs) {
            context.inputs.add(confInput);
        }
    }
    context.zoomLevel = conf.getInt(ZOOM_LEVEL, 1);
    context.tileSize = conf.getInt(TILE_SIZE, MrGeoConstants.MRGEO_MRS_TILESIZE_DEFAULT_INT);
    String confBounds = conf.get(BOUNDS);
    if (confBounds != null) {
        context.bounds = Bounds.fromDelimitedString(confBounds);
    }
    context.includeEmptyTiles = conf.getBoolean(INCLUDE_EMPTY_TILES, false);
    if (context.includeEmptyTiles) {
        context.fillValue = conf.getFloat(FILL_VALUE, Float.NaN);
    }
    int providerPropertyCount = conf.getInt(PROVIDER_PROPERTY_COUNT, 0);
    if (providerPropertyCount > 0) {
        context.inputProviderProperties = new Properties();
        for (int i = 0; i < providerPropertyCount; i++) {
            String key = conf.get(PROVIDER_PROPERTY_KEY + i);
            String value = conf.get(PROVIDER_PROPERTY_VALUE + i);
            context.inputProviderProperties.setProperty(key, value);
        }
    }
    return context;
}
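
Note the Float.NaN default above: it serves as a "not configured" sentinel, since callers can test the result with Float.isNaN. A minimal sketch of that idiom (the key name is hypothetical):

float fill = conf.getFloat("fill.value", Float.NaN); // hypothetical key
if (Float.isNaN(fill)) {
    // no fill value was configured; skip filling
}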

From source file:org.mrgeo.hdfs.ingest.format.IngestImageSplittingInputFormat.java

License:Apache License

@Override
public List<InputSplit> getSplits(final JobContext context) throws IOException {
    final List<InputSplit> splits = new LinkedList<InputSplit>();
    // mapred.input.dir
    final Path[] inputs = FileInputFormat.getInputPaths(context);

    final Configuration conf = context.getConfiguration();

    int tilesize = -1;
    try {
        //metadata = HadoopUtils.getMetadata(conf);
        Map<String, MrsImagePyramidMetadata> meta = HadoopUtils.getMetadata(context.getConfiguration());
        if (!meta.isEmpty()) {
            MrsImagePyramidMetadata metadata = meta.values().iterator().next();
            tilesize = metadata.getTilesize();
        }
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }

    if (tilesize < 0) {
        tilesize = conf.getInt("tilesize", -1);
        if (tilesize < 1) {
            throw new MrsImageException(
                    "Error, no \"tilesize\" or \"metadata\" parameter in configuration, tilesize needs to be calculated & set before map/reduce");
        }

    }

    final int zoomlevel = conf.getInt("zoomlevel", -1);

    // get the tilesize in bytes (default to 3 band, 1 byte per band)
    final long tilebytes = conf.getLong("tilebytes", tilesize * tilesize * 3 * 1);

    if (zoomlevel < 1) {
        throw new MrsImageException(
                "Error, no \"zoomlevel\" parameter in configuration, zoomlevel needs to be calculated & set before map/reduce");
    }

    // get the spill buffer percent, then take 95% of it for extra padding...
    double spillpct = conf.getFloat("io.sort.spill.percent", (float) 0.8) * 0.95;
    long spillsize = (long) (conf.getFloat("io.sort.mb", 200) * spillpct) * 1024 * 1024;
    log.info("Spill size for splitting is: " + spillsize + "b");

    Map<String, Bounds> lookup = new HashMap<>();

    final String adhocname = conf.get(IngestImageDriver.INGEST_BOUNDS_LOCATION, null);
    if (adhocname != null) {
        AdHocDataProvider dp = DataProviderFactory.getAdHocDataProvider(adhocname,
                DataProviderFactory.AccessMode.READ, conf);
        InputStream is = dp.get(IngestImageDriver.INGEST_BOUNDS_FILE);
        BufferedReader reader = new BufferedReader(new InputStreamReader(is));

        String line;
        while ((line = reader.readLine()) != null) {
            String[] data = line.split("\\|");
            if (data.length == 2) {
                lookup.put(data[0], Bounds.fromDelimitedString(data[1]));
            }
        }
        is.close();
    }
    //log.info("Creating splits for: " + output.toString());
    for (final Path input : inputs) {
        final FileSystem fs = HadoopFileUtils.getFileSystem(conf, input);
        LongRectangle bounds = null;

        if (lookup.containsKey(input.toString())) {
            Bounds b = lookup.get(input.toString());
            bounds = TMSUtils.boundsToTile(b.getTMSBounds(), zoomlevel, tilesize).toLongRectangle();
        } else {
            log.info("  reading: " + input.toString());
            log.info("    zoomlevel: " + zoomlevel);

            final AbstractGridCoverage2DReader reader = GeotoolsRasterUtils.openImage(input.toString());

            if (reader != null) {
                try {
                    bounds = GeotoolsRasterUtils.calculateTiles(reader, tilesize, zoomlevel);
                } finally {
                    try {
                        GeotoolsRasterUtils.closeStreamFromReader(reader);
                    } catch (Exception e) {
                        e.printStackTrace();
                        throw new IOException(e);
                    }
                }
            }
        }

        if (bounds != null) {
            final long minTx = bounds.getMinX();
            final long maxTx = bounds.getMaxX();
            final long minTy = bounds.getMinY();
            final long maxTy = bounds.getMaxY();

            final long width = bounds.getWidth();
            final long height = bounds.getHeight();

            final long totaltiles = width * height;

            final FileStatus status = fs.getFileStatus(input);

            // for now, we'll just use the 1st block location for the split.
            // we can get more sophisticated later...
            final BlockLocation[] blocks = fs.getFileBlockLocations(status, 0, 0);

            String location = null;
            if (blocks.length > 0) {
                final String hosts[] = blocks[0].getHosts();
                if (hosts.length > 0) {
                    location = hosts[0];
                }
            }

            // long filelen = status.getLen();
            final long totalbytes = totaltiles * tilebytes;

            // if uncompressed tile sizes are greater than the spillsize, break it
            // into pieces
            if (totalbytes > spillsize) {
                final long numsplits = (totalbytes / spillsize) + 1;

                final long splitrange = (totaltiles / numsplits);
                long leftovers = totaltiles - (numsplits * splitrange);

                long start = 0;
                long end = 0;

                for (int i = 0; i < numsplits; i++) {
                    end = start + splitrange;
                    if (leftovers > 0) {
                        end++;
                        leftovers--;
                    }

                    final long sy = (start / width);
                    final long sx = (start - (sy * width));

                    // since the tile range is inclusive, calculate with end-1
                    final long ey = ((end - 1) / width);
                    final long ex = ((end - 1) - (ey * width));

                    // System.out.println("start: " + start + " end: " + end);
                    // System.out.println("  sx: " + sx + " sy: " + sy);
                    // System.out.println("  ex: " + ex + " ey: " + ey);
                    splits.add(new IngestImageSplit(input.toString(), minTx + sx, minTx + ex, minTy + sy,
                            minTy + ey, (end - start), bounds, zoomlevel, tilesize, location));

                    start = end;
                }
            } else {
                splits.add(new IngestImageSplit(input.toString(), minTx, maxTx, minTy, maxTy,
                        (maxTx + 1 - minTx) * (maxTy + 1 - minTy), bounds, zoomlevel, tilesize, location));
            }
        }
    }

    return splits;
}
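
As an aside, io.sort.mb and io.sort.spill.percent are the legacy Hadoop 1.x key names; Hadoop 2 deprecates them in favor of mapreduce.task.io.sort.mb and mapreduce.map.sort.spill.percent, and Configuration's deprecation table resolves either spelling, so the getFloat calls above keep working.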