Example usage for org.apache.hadoop.conf Configuration setInt

List of usage examples for org.apache.hadoop.conf Configuration setInt

Introduction

On this page you can find example usages of org.apache.hadoop.conf.Configuration.setInt.

Prototype

public void setInt(String name, int value) 

Document

Set the value of the name property to an int.
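
Values set this way are stored internally as strings and read back with the matching Configuration.getInt(name, defaultValue), which returns the default when the property is unset. A minimal, self-contained sketch (the property name io.file.buffer.size is only illustrative; only hadoop-common is assumed on the classpath):

import org.apache.hadoop.conf.Configuration;

public class SetIntExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Store an int; Configuration keeps it internally as the string "131072".
        conf.setInt("io.file.buffer.size", 128 * 1024);

        // Read it back; the second argument is the default returned
        // when the property is not set at all.
        int bufferSize = conf.getInt("io.file.buffer.size", 4096);
        System.out.println("buffer size = " + bufferSize); // prints 131072
    }
}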

Usage

From source file:com.basho.riak.hadoop.config.RiakConfig.java

License:Apache License

/**
 * Set the size of the hadoop cluster, this is used by the
 * {@link RiakInputFormat} to try and optimize the number of
 * {@link InputSplit}s to create
 * 
 * @param conf
 *            the {@link Configuration} to store the hadoop cluster size in
 * @param hadoopClusterSize
 *            the size of the hadoop cluster
 * @return the {@link Configuration} updated with the passed
 *         <code>hadoopClusterSize</code>
 */
public static Configuration setHadoopClusterSize(Configuration conf, int hadoopClusterSize) {
    conf.setInt(CLUSTER_SIZE_PROPERTY, hadoopClusterSize);
    return conf;

}
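
The value stored above is read back with the matching Configuration.getInt. A hedged sketch of a companion getter (the method name and default value are assumptions, not taken from the RiakConfig source):

/**
 * Hypothetical companion to the setter above; the default of 1 is
 * an assumption and not part of the RiakConfig source shown here.
 */
public static int getHadoopClusterSize(Configuration conf) {
    return conf.getInt(CLUSTER_SIZE_PROPERTY, 1);
}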

From source file:com.bigdata.diane.MiniTestDFSIO.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    int testType = TEST_TYPE_READ;
    int bufferSize = DEFAULT_BUFFER_SIZE;
    int fileSize = 1;
    int nrFiles = 1;
    /* String resFileName = DEFAULT_RES_FILE_NAME; */
    boolean isSequential = false;
    System.out.println("Hey look its diane's progream!!!!!!!!!");
    System.out.println("Hey look its diane's progream!!!!!!!!!");
    System.out.println("Hey look its diane's progream!!!!!!!!!");
    System.out.println("Hey look its diane's progream!!!!!!!!!");
    System.out.println("Hey look its diane's progream!!!!!!!!!");
    System.out.println("Hey look its diane's progream!!!!!!!!!");
    System.out.println("Hey look its diane's progream!!!!!!!!!");
    System.out.println("Hey look its diane's progream!!!!!!!!!");
    System.out.println("Hey look its diane's progream!!!!!!!!!");
    System.out.println("Hey look its diane's progream!!!!!!!!!");

    String className = MiniTestDFSIO.class.getSimpleName();
    String usage = "Usage: " + className + " -read | -write | -clean "
            + "[-nrFiles N] [-fileSize MB] [-resFile resultFileName] " + "[-bufferSize Bytes] ";

    /* System.out.println(version); */
    if (args.length == 0) {
        System.err.println(usage);
        return -1;
    }
    for (int i = 0; i < args.length; i++) { // parse command line (only -write, -nrFiles, and -fileSize are handled in this trimmed-down version)
        if (args[i].startsWith("-write")) {
            testType = TEST_TYPE_WRITE;
        } else if (args[i].equals("-nrFiles")) {
            nrFiles = Integer.parseInt(args[++i]);
        } else if (args[i].equals("-fileSize")) {
            fileSize = Integer.parseInt(args[++i]);
        }
    }

    try {
        Configuration fsConfig = new Configuration(getConf());
        fsConfig.setInt("test.io.file.buffer.size", bufferSize);
        FileSystem fs = FileSystem.get(fsConfig);

        if (isSequential) {
            long tStart = System.currentTimeMillis();
            sequentialTest(fs, testType, fileSize, nrFiles);
            long execTime = System.currentTimeMillis() - tStart;
            String resultLine = "Seq Test exec time sec: " + (float) execTime / 1000;
            LOG.info(resultLine);
            return 0;
        }

        createControlFile(fs, fileSize, nrFiles, fsConfig);
        /*   long tStart = System.currentTimeMillis(); */
        if (testType == TEST_TYPE_WRITE)
            writeTest(fs, fsConfig);
    } catch (Exception e) {
        System.err.print(StringUtils.stringifyException(e));
        return -1;
    }
    return 0;
}

From source file:com.blackberry.logtools.LogTools.java

License:Apache License

public void runMRJob(boolean quiet, boolean silent, Configuration conf, ArrayList<String> D_options, String out,
        Logger LOG, String field_separator, String queue_name, String[] args, String job, Tool tool)
        throws Exception {

    logConsole(quiet, silent, info, "Running Mapreduce job & Calling " + job);

    if (out.equals("-")) {
        //Uncompress results to be able to read to stdout
        D_options.add("-Dmapreduce.output.fileoutputformat.compress=false");
    }

    try {
        conf.set("zk.connect.string", System.getenv("ZK_CONNECT_STRING"));
        conf.setBoolean("mapreduce.output.fileoutputformat.compress", true);
        conf.set("mapred.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        conf.setInt("mapred.max.split.size", 256 * 1024 * 1024);
        conf.set("logdriver.output.field.separator", field_separator);
        conf.set("mapred.job.queue.name", StringEscapeUtils.escapeJava(queue_name));

        dOpts(D_options, silent, out, conf);

        //Now run JOB and send arguments
        LOG.info("Sending args to " + job + ": {}", args);
        ToolRunner.run(conf, tool, args);
    } catch (IOException e) {
        if (e.toString().contains("Failed to find any Kerberos")) {
            logConsole(true, true, error, "No/bad Kerberos ticket - please authenticate.");
            System.exit(1);
        } else if (e.toString().contains("Permission denied")) {
            logConsole(true, true, error, "Permission denied.");
            System.err.println("; Please go to https://go/itforms and filled out the Hadoop Onboarding Form "
                    + "to get access to the requested data.  Paste the following data into the ticket to help with your request:\n"
                    + "Error Message" + e);
            System.exit(1);
        } else if (e.toString().contains("quota") && e.toString().contains("exceeded")) {
            logConsole(true, true, error, "Disk quota Exceeded.");
            System.exit(1);
        }
        logConsole(true, true, error,
                "\n\tError running mapreduce job." + generalError() + "\n\tCommand stopped");
        e.printStackTrace();
        System.exit(1);
    }
}

From source file:com.cloudera.crunch.WordCountHBaseTest.java

License:Open Source License

@Before
public void setUp() throws Exception {
    Configuration conf = hbaseTestUtil.getConfiguration();
    File tmpDir = File.createTempFile("logdir", "");
    tmpDir.delete();
    tmpDir.mkdir();
    tmpDir.deleteOnExit();
    conf.set("hadoop.log.dir", tmpDir.getAbsolutePath());
    conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
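    // A port of -1 disables the embedded master and regionserver web UIs,
    // which avoids port conflicts when tests run concurrently.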
    conf.setInt("hbase.master.info.port", -1);
    conf.setInt("hbase.regionserver.info.port", -1);
    hbaseTestUtil.startMiniZKCluster();
    hbaseTestUtil.startMiniCluster();
    hbaseTestUtil.startMiniMapReduceCluster();
}

From source file:com.cloudera.integration.oracle.goldengate.ldv.mapreduce.lib.input.LengthDelimitedInputFormatTest.java

@Test
public void test() throws IOException, InterruptedException {
    Configuration conf = new Configuration(false);
    conf.set("fs.default.name", "file:///");
    conf.setInt(Constants.RECORD_PREFIX_LENGTH, 4);
    conf.setInt(Constants.FIELD_PREFIX_LENGTH, 4);

    Path path = new Path(tempFile.getAbsoluteFile().toURI());

    TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
    LengthDelimitedInputFormat inputFormat = ReflectionUtils.newInstance(LengthDelimitedInputFormat.class,
            conf);
    try (LengthDelimitedRecordReader reader = (LengthDelimitedRecordReader) inputFormat.createRecordReader(null,
            context)) {
        FileSplit split = new FileSplit(path, 0, tempFile.length(), null);
        reader.initialize(split, context);

        while (reader.nextKeyValue()) {
            LengthDelimitedWritable writable = reader.getCurrentValue();
            Assert.assertNotNull(writable);
            Timestamp timestamp = new Timestamp(writable.getTimestamp().get());

            Assert.assertEquals("2014-12-31 23:06:06.255", timestamp.toString());
            FieldValueWritable[] writables = writable.getWritables();
            for (int i = 0; i < chars.length(); i++) {
                String value = chars.substring(0, i);
                FieldValueWritable fieldValueWritable = writables[i];
                Assert.assertEquals(value, fieldValueWritable.getData());
            }

            //          System.out.println(reader.getCurrentValue());
        }
    }

}

From source file:com.cloudera.knittingboar.conf.cmdline.ModelTrainerCmdLineDriver.java

License:Apache License

public Configuration generateDebugConfigurationObject() {

    Configuration c = new Configuration();

    // feature vector size
    c.setInt("com.cloudera.knittingboar.setup.FeatureVectorSize", 10000);

    c.setInt("com.cloudera.knittingboar.setup.numCategories", 20);

    c.setInt("com.cloudera.knittingboar.setup.BatchSize", 200);

    c.setInt("com.cloudera.knittingboar.setup.NumberPasses", 1);

    // local input split path
    c.set("com.cloudera.knittingboar.setup.LocalInputSplitPath", "hdfs://127.0.0.1/input/0");

    // setup 20newsgroups
    c.set("com.cloudera.knittingboar.setup.RecordFactoryClassname",
            "com.cloudera.knittingboar.records.TwentyNewsgroupsRecordFactory");

    return c;

}

From source file:com.cloudera.knittingboar.metrics.Test20NewsApplyModel.java

License:Apache License

public Configuration generateDebugConfigurationObject() {

    Configuration c = new Configuration();

    // feature vector size
    c.setInt("com.cloudera.knittingboar.setup.FeatureVectorSize", 10000);

    c.setInt("com.cloudera.knittingboar.setup.numCategories", 20);

    c.setInt("com.cloudera.knittingboar.setup.BatchSize", 500);

    // local input split path
    c.set("com.cloudera.knittingboar.setup.LocalInputSplitPath", "hdfs://127.0.0.1/input/0");

    // setup 20newsgroups
    c.set("com.cloudera.knittingboar.setup.RecordFactoryClassname",
            RecordFactory.TWENTYNEWSGROUPS_RECORDFACTORY);

    return c;

}

From source file:com.cloudera.knittingboar.metrics.Test20NewsNoSaveModel.java

License:Apache License

public Configuration generateDebugConfigurationObject() {

    Configuration c = new Configuration();

    // feature vector size
    c.setInt("com.cloudera.knittingboar.setup.FeatureVectorSize", 10000);

    c.setInt("com.cloudera.knittingboar.setup.numCategories", 20);

    c.setInt("com.cloudera.knittingboar.setup.BatchSize", 200);

    // local input split path
    c.set("com.cloudera.knittingboar.setup.LocalInputSplitPath", "hdfs://127.0.0.1/input/0");

    // setup 20newsgroups
    c.set("com.cloudera.knittingboar.setup.RecordFactoryClassname",
            "com.cloudera.knittingboar.records.TwentyNewsgroupsRecordFactory");

    return c;

}

From source file:com.cloudera.knittingboar.metrics.TestRCV1ApplyModel.java

License:Apache License

public Configuration generateDebugConfigurationObject() {

    Configuration c = new Configuration();

    // feature vector size
    c.setInt("com.cloudera.knittingboar.setup.FeatureVectorSize", 10000);

    c.setInt("com.cloudera.knittingboar.setup.numCategories", 2);

    c.setInt("com.cloudera.knittingboar.setup.BatchSize", 200);

    // local input split path
    c.set("com.cloudera.knittingboar.setup.LocalInputSplitPath", "hdfs://127.0.0.1/input/0");

    // setup RCV1
    c.set("com.cloudera.knittingboar.setup.RecordFactoryClassname", RecordFactory.RCV1_RECORDFACTORY);

    return c;

}

From source file:com.cloudera.knittingboar.sgd.olr.TestBaseOLRTest20Newsgroups.java

License:Apache License

public Configuration generateDebugConfigurationObject() {

    Configuration c = new Configuration();

    // feature vector size
    c.setInt("com.cloudera.knittingboar.setup.FeatureVectorSize", 10000);

    c.setInt("com.cloudera.knittingboar.setup.numCategories", 20);

    c.setInt("com.cloudera.knittingboar.setup.BatchSize", 200);

    // local input split path
    c.set("com.cloudera.knittingboar.setup.LocalInputSplitPath", "hdfs://127.0.0.1/input/0");

    // setup 20newsgroups
    c.set("com.cloudera.knittingboar.setup.RecordFactoryClassname",
            RecordFactory.TWENTYNEWSGROUPS_RECORDFACTORY);

    return c;

}