Example usage for org.apache.hadoop.conf Configuration getInt

Introduction

This page collects example usages of org.apache.hadoop.conf.Configuration.getInt.

Prototype

public int getInt(String name, int defaultValue) 

Document

Get the value of the name property as an int. If no such property exists, the supplied defaultValue is returned; if the property is set but its value is not a valid int, a NumberFormatException is thrown.
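
A minimal sketch of the call (the property name example.retries is hypothetical):

import org.apache.hadoop.conf.Configuration;

public class GetIntExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // The property is not set anywhere, so the default (3) is returned.
        int retries = conf.getInt("example.retries", 3);

        // After an explicit setInt, getInt parses the stored string value.
        conf.setInt("example.retries", 5);
        retries = conf.getInt("example.retries", 3); // now 5
        System.out.println(retries);
    }
}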

Usage

From source file:com.gemstone.gemfire.cache.hdfs.internal.hoplog.mapreduce.GFOutputFormat.java

License:Apache License

/**
 * Creates an instance of {@link ClientCache} by connecting to the GF cluster
 * through a GF server.
 */
public ClientCache createGFWriterUsingServer(Configuration conf) {
    String server = conf.get(SERVER_HOST);
    // If the server port is not provided, assume the default server port, 40404.
    int port = conf.getInt(SERVER_PORT, CacheServer.DEFAULT_PORT);

    // create gemfire client cache instance
    ClientCacheFactory ccf = new ClientCacheFactory();
    ccf.addPoolServer(server, port);
    ClientCache cache = ccf.create();
    return cache;
}
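
For context, a hedged sketch of how a caller might populate these properties before creating the writer (the host value is made up; SERVER_HOST and SERVER_PORT are the constants referenced above, assumed to be accessible, and GFOutputFormat is assumed to have a no-arg constructor):

Configuration conf = new Configuration();
conf.set(GFOutputFormat.SERVER_HOST, "gemfire-host.example.com");
// SERVER_PORT is deliberately left unset, so the getInt call above falls
// back to CacheServer.DEFAULT_PORT (40404).
ClientCache cache = new GFOutputFormat().createGFWriterUsingServer(conf);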

From source file:com.geneix.bottle.WordRecordReader.java

License:Apache License

public void initialize(InputSplit genericSplit, TaskAttemptContext context) throws IOException {
    if (LOG.isInfoEnabled()) {
        LOG.info("Initializing WordRecordReader");
    }
    FileSplit split = (FileSplit) genericSplit;
    Configuration job = context.getConfiguration();
    this.maxWordLength = job.getInt(MAX_WORD_LENGTH, Integer.MAX_VALUE);
    start = split.getStart();
    end = start + split.getLength();
    final Path file = split.getPath();

    // open the file and seek to the start of the split
    final FileSystem fs = file.getFileSystem(job);
    fileIn = fs.open(file);

    CompressionCodec codec = new CompressionCodecFactory(job).getCodec(file);
    if (null != codec) {
        throw new IOException("Cannot handle compressed files right now");
    } else {
        fileIn.seek(start);
        in = new WordReader(fileIn, job);
        filePosition = fileIn;
    }
    // If this is not the first split, we always throw away the first record,
    // because every split except the last reads one extra line in the
    // next() method.
    if (start != 0) {
        start += in.readWord(new Text(), 0, maxBytesToConsume(start));
    }
    this.pos = start;
}

From source file:com.github.hdl.tensorflow.yarn.app.TFApplicationRpcServer.java

License:Apache License

@Override
public void run() {
    Configuration conf = new Configuration();
    YarnRPC rpc = YarnRPC.create(conf);
    InetSocketAddress address = new InetSocketAddress(rpcAddress, rpcPort);
    this.server = rpc.getServer(TensorFlowCluster.class, this, address, conf, null,
            conf.getInt(YarnConfiguration.RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT,
                    YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT));

    this.server.start();
}

From source file:com.github.ygf.pagerank.InLinks.java

License:Apache License

public int run(String[] args) throws Exception {
    if (args.length != 3) {
        System.out.println("Usage: InLinks <links-simple-sorted.txt> <titles-dir> <output-dir>");
        ToolRunner.printGenericCommandUsage(System.out);
        return 2;
    }

    Path linksFile = new Path(args[0]);
    Path titlesDir = new Path(args[1]);
    Path outputDir = new Path(args[2]);

    Configuration conf = getConf();

    // Do not create _SUCCESS files: MapFileOutputFormat.getReaders would
    // otherwise try to read _SUCCESS as another MapFile directory.
    conf.set("mapreduce.fileoutputcommitter.marksuccessfuljobs", "false");

    // Default values of the parameters of the algorithm.
    conf.setInt("inlinks.top_results", conf.getInt("inlinks.top_results", 100));

    conf.set("inlinks.titles_dir", titlesDir.toString());

    computeInLinks(conf, linksFile, outputDir);
    summarizeResults(conf, outputDir);

    return 0;
}
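
A note on the idiom above: conf.setInt(name, conf.getInt(name, default)) pins a default into the job configuration, so later readers (for example, map and reduce tasks) see a concrete value and need no fallback of their own. A minimal sketch using the same property:

Configuration conf = new Configuration();
// If "inlinks.top_results" was not supplied (e.g. via -D on the command
// line), write the fallback 100 into the configuration; otherwise keep
// the value that was supplied.
conf.setInt("inlinks.top_results", conf.getInt("inlinks.top_results", 100));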

From source file:com.github.ygf.pagerank.PageRank.java

License:Apache License

public int run(String[] args) throws Exception {
    if (args.length != 3) {
        System.out.println("Usage: PageRank <links-simple-sorted.txt> <titles-dir> <output-dir>");
        ToolRunner.printGenericCommandUsage(System.out);
        return 2;
    }

    Path linksFile = new Path(args[0]);
    Path titlesDir = new Path(args[1]);
    Path outputDir = new Path(args[2]);

    Configuration conf = getConf();

    // Do not create _SUCCESS files: MapFileOutputFormat.getReaders would
    // otherwise try to read _SUCCESS as another MapFile directory.
    conf.set("mapreduce.fileoutputcommitter.marksuccessfuljobs", "false");

    // Default values of the parameters of the algorithm.
    conf.setInt("pagerank.block_size", conf.getInt("pagerank.block_size", 10000));
    conf.setInt("pagerank.max_iterations", conf.getInt("pagerank.max_iterations", 2));
    conf.setFloat("pagerank.damping_factor", conf.getFloat("pagerank.damping_factor", 0.85f));
    conf.setInt("pagerank.top_results", conf.getInt("pagerank.top_results", 100));

    conf.set("pagerank.titles_dir", titlesDir.toString());
    int numPages = getNumPages(conf, titlesDir);
    conf.setLong("pagerank.num_pages", numPages);

    createTransitionMatrix(conf, linksFile, outputDir);

    int maxIters = Integer.parseInt(conf.get("pagerank.max_iterations"));
    for (int iter = 1; iter <= maxIters; iter++) {
        conf.setInt("pagerank.iteration", iter);
        pageRankIteration(iter, conf, outputDir);
        cleanPreviousIteration(iter, conf, outputDir);
    }

    summarizeResults(maxIters, conf, outputDir);

    return 0;
}

From source file:com.google.appengine.tools.mapreduce.BlobstoreInputFormat.java

License:Apache License

@Override
public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {
    Configuration configuration = context.getConfiguration();
    String blobKey = configuration.get(BLOB_KEYS);
    int shardCount = configuration.getInt(SHARD_COUNT, DEFAULT_SHARD_COUNT);
    long blobSize = blobKeyToSize.apply(blobKey);
    return getSplits(blobKey, blobSize, shardCount);
}

From source file:com.google.appengine.tools.mapreduce.BlobstoreInputFormatTest.java

License:Apache License

/**
 * Tests that the public {@code getSplits} passes the expected arguments,
 * taken from the {@code JobContext}, to the package-private one.
 */
public void test_getSplits() throws Exception {
    String blobKey = "blobKey";
    int shardCount = 3;
    long blobSize = 1024;
    IMocksControl control = EasyMock.createControl();

    @SuppressWarnings("unchecked")
    Function<String, Long> blobKeyToSize = control.createMock(Function.class);

    JobContext jobContext = control.createMock(JobContext.class);
    Configuration configuration = control.createMock(Configuration.class);
    EasyMock.expect(jobContext.getConfiguration()).andReturn(configuration).anyTimes();
    EasyMock.expect(configuration.get(BlobstoreInputFormat.BLOB_KEYS)).andReturn(blobKey);
    EasyMock.expect(
            configuration.getInt(BlobstoreInputFormat.SHARD_COUNT, BlobstoreInputFormat.DEFAULT_SHARD_COUNT))
            .andReturn(shardCount);
    EasyMock.expect(blobKeyToSize.apply(blobKey)).andReturn(blobSize);
    // The expectations above are what we are testing.

    control.replay();
    BlobstoreInputFormat inputFormat = new BlobstoreInputFormat();

    inputFormat.setBlobKeyToSize(blobKeyToSize);
    assertSplits(inputFormat.getSplits(jobContext), blobKey, blobSize, shardCount);

    control.verify();
}

From source file:com.google.appengine.tools.mapreduce.MapReduceXmlTest.java

License:Apache License

public void testTemplateIsIntact() {
    MapReduceXml mrXml = new MapReduceXml(inputStreamFromString(SAMPLE_CONFIGURATION_XML));
    String templateString = mrXml.getTemplateAsXmlString("Bar");
    ConfigurationTemplatePreprocessor preprocessor = new ConfigurationTemplatePreprocessor(templateString);
    String configString = preprocessor.preprocess(new HashMap<String, String>());
    Configuration conf = ConfigurationXmlUtil.getConfigurationFromXml(configString);
    assertEquals(2, conf.getInt("Baz", 0));
}

From source file:com.google.appengine.tools.mapreduce.RangeInputFormat.java

License:Apache License

@Override
public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    long shardCount = conf.getInt(SHARD_COUNT_KEY, DEFAULT_SHARD_COUNT);
    long rangeStart = getNonNegativeLong(conf, RANGE_START_KEY);
    long rangeEnd = getNonNegativeLong(conf, RANGE_END_KEY);
    if (rangeStart >= rangeEnd) {
        throw new InvalidConfigurationException("Invalid range. Start: " + rangeStart + " >= end: " + rangeEnd);
    }

    double increment = ((double) rangeEnd - rangeStart) / shardCount;
    ArrayList<InputSplit> splits = new ArrayList<InputSplit>();
    for (int i = 0; i < shardCount - 1; i++) {
        splits.add(new RangeInputSplit(rangeStart + Math.round(i * increment),
                rangeStart + Math.round((i + 1) * increment)));
    }

    // Make sure that the final split ends exactly at rangeEnd.
    splits.add(new RangeInputSplit(rangeStart + Math.round((shardCount - 1) * increment), rangeEnd));

    return splits;
}

From source file:com.google.cloud.bigtable.hbase.adapters.Adapters.java

License:Open Source License

public static PutAdapter createPutAdapter(Configuration config) {
    return new PutAdapter(config.getInt("hbase.client.keyvalue.maxsize", -1));
}