Example usage for org.apache.hadoop.conf Configuration setInt

List of usage examples for org.apache.hadoop.conf Configuration setInt

Introduction

On this page you can find example usage for org.apache.hadoop.conf Configuration setInt.

Prototype

public void setInt(String name, int value) 

Source Link

Document

Set the value of the name property to an int.
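
A minimal sketch of the round trip (the property name my.example.count is invented for illustration): setInt stores the value under the given name, and getInt reads it back, falling back to the supplied default if the property is unset.

Configuration conf = new Configuration();
conf.setInt("my.example.count", 42);            // set the property to 42
int count = conf.getInt("my.example.count", 0); // returns 42; 0 would be the default if unset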

Usage

From source file:com.mellanox.r4h.TestHFlush.java

License:Apache License

/**
 * The test calls
 * {@link #doTheJob(Configuration, String, long, short, boolean, EnumSet)}
 * while requiring the semantics of {@link SyncFlag#UPDATE_LENGTH}.
 * Similar to {@link #hFlush_03()}, it writes a file with a custom block
 * size so the writes happen across block and checksum
 * boundaries.
 */
@Test
public void hSyncUpdateLength_03() throws IOException {
    Configuration conf = new HdfsConfiguration();
    int customPerChecksumSize = 400;
    int customBlockSize = customPerChecksumSize * 3;
    // Modify default filesystem settings
    conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, customPerChecksumSize);
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, customBlockSize);

    doTheJob(conf, fName, customBlockSize, (short) 2, true, EnumSet.of(SyncFlag.UPDATE_LENGTH));
}

From source file:com.mellanox.r4h.TestHFlush.java

License:Apache License

/** This creates a slow writer and checks to see
 * if pipeline heartbeats work fine.
 */
@Test
public void testPipelineHeartbeat() throws Exception {
    final int DATANODE_NUM = 2;
    final int fileLen = 6;
    Configuration conf = new HdfsConfiguration();
    final int timeout = 2000;
    conf.setInt(DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY, timeout);

    final Path p = new Path("/pipelineHeartbeat/foo");
    System.out.println("p=" + p);

    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(DATANODE_NUM).build();
    try {
        DistributedFileSystem fs = (DistributedFileSystem) cluster.getFileSystem();

        byte[] fileContents = AppendTestUtil.initBuffer(fileLen);

        // create a new file.
        FSDataOutputStream stm = AppendTestUtil.createFile(fs, p, DATANODE_NUM);

        stm.write(fileContents, 0, 1);
        Thread.sleep(timeout);
        stm.hflush();
        System.out.println("Wrote 1 byte and hflush " + p);

        // write another byte
        Thread.sleep(timeout);
        stm.write(fileContents, 1, 1);
        stm.hflush();

        stm.write(fileContents, 2, 1);
        Thread.sleep(timeout);
        stm.hflush();

        stm.write(fileContents, 3, 1);
        Thread.sleep(timeout);
        stm.write(fileContents, 4, 1);
        stm.hflush();

        stm.write(fileContents, 5, 1);
        Thread.sleep(timeout);
        stm.close();

        // verify that entire file is good
        AppendTestUtil.checkFullFile(fs, p, fileLen, fileContents, "Failed to slowly write to a file");
    } finally {
        cluster.shutdown();
    }
}

From source file:com.ML_Hadoop.K_meansClustering.K_meansClusteringMapReduce.java

public static void main(String[] args) throws Exception {
    int iteration = 0, num_of_iteration = 30;
    int feature_size = 2;
    FileSystem fs;
    int number_of_clusters = 2;

    do {
        Configuration conf = new Configuration();
        fs = FileSystem.get(conf);

        Job job = new Job(conf, "K_meansClusteringMapReduce");
        job.setJarByClass(K_meansClusteringMapReduce.class);

        conf = job.getConfiguration(); // mandatory: Job copies the Configuration at construction, so the setInt() calls below must go to the Job's own copy

        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(FloatArrayWritable.class);

        job.setMapperClass(K_meansClusteringMap.class);
        job.setReducerClass(K_meansClusteringReduce.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        job.setNumReduceTasks(1); // set number of reducers to one.

        FileInputFormat.addInputPath(job, new Path(args[0]));
        Path out = new Path(args[1]);
        if (fs.exists(out))
            fs.delete(out, true);

        FileOutputFormat.setOutputPath(job, out);
        number_of_clusters = Integer.parseInt(args[2]);
        num_of_iteration = Integer.parseInt(args[3]);
        feature_size = Integer.parseInt(args[4]);

        conf.setInt("number_of_clusters", number_of_clusters);
        conf.setInt("feature_size", feature_size);
        conf.setInt("current_iteration_num", iteration);

        try {
            job.waitForCompletion(true);
            iteration++;
        } catch (IOException e) {
            e.printStackTrace();
        }
    } while (iteration < num_of_iteration);

}

From source file:com.ML_Hadoop.MultipleLinearRegression.MultipleLinearRegressionMapReduce.java

public static void main(String[] args) throws Exception {
    String[] theta;
    int iteration = 0, num_of_iteration = 1;
    int feature_size = 0, input_data_size = 0;
    FileSystem fs;
    Float alpha = 0.1f;

    do {
        Configuration conf = new Configuration();
        fs = FileSystem.get(conf);

        Job job = new Job(conf, "LinearRegressionMapReduce");
        job.setJarByClass(MultipleLinearRegressionMapReduce.class);

        // needed for propagating "theta": Job copies the Configuration at construction,
        // so the values set below must go to the Job's own copy
        conf = job.getConfiguration();

        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(FloatWritable.class);

        job.setMapperClass(MultipleLinearRegressionMap.class);
        job.setReducerClass(MultipleLinearRegressionReduce.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        job.setNumReduceTasks(1); // set mapred.reduce.tasks = 1 (only one reducer)

        FileInputFormat.addInputPath(job, new Path(args[0]));
        Path out = new Path(args[1]);
        if (fs.exists(out))
            fs.delete(out, true);

        FileOutputFormat.setOutputPath(job, out);
        alpha = Float.parseFloat(args[2]);
        num_of_iteration = Integer.parseInt(args[3]);
        feature_size = Integer.parseInt(args[4]);
        input_data_size = Integer.parseInt(args[5]);
        conf.setFloat("alpha", alpha);
        conf.setInt("feature_size", feature_size);
        conf.setInt("input_data_size", input_data_size);
        conf.setInt("iteration", iteration);

        theta = new String[feature_size];

        if (iteration == 0) { // first iteration
            for (int i = 0; i < theta.length; i++)
                theta[i] = "0.0";
            conf.setStrings("theta", theta);
        } else {
            try {
                String uri = "/user/hduser/theta.txt";
                fs = FileSystem.get(conf);
                //FSDataInputStream in = fs.open(new Path(uri));
                BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(new Path(uri))));
                theta = br.readLine().split(",");
            } catch (Exception e) {
                // if theta.txt cannot be read, theta keeps its null entries
            }
            conf.setStrings("theta", theta);
        }

        for (int i = 0; i < theta.length; i++)
            System.out.println("In MapRedce main function: theta[ " + i + " ]" + theta[i]);

        try {
            job.waitForCompletion(true);
            iteration++;
        } catch (IOException e) {
            e.printStackTrace();
        }
    } while (iteration < num_of_iteration);

}

From source file:com.ML_Hadoop.NaiveBayesClassifier_Continuous_Features.NaiveBayesClassifierMapReduce_Continuous_Features.java

/**
 * @param args
 * @throws IOException 
 * @throws ClassNotFoundException 
 * @throws InterruptedException 
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    int number_of_classes = 1;
    int number_of_features = 1;
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    Job job = new Job(conf, "NaiveBayesClassifierMapReduce_Continuous_Features");
    job.setJarByClass(NaiveBayesClassifierMapReduce_Continuous_Features.class);

    conf = job.getConfiguration(); // mandatory: Job copies the Configuration at construction, so the setInt() calls below must go to the Job's own copy

    job.setOutputKeyClass(LongWritable.class);
    job.setOutputValueClass(FloatArrayWritable.class);

    job.setMapOutputKeyClass(LongWritable.class);
    job.setMapOutputValueClass(MapArrayWritable.class);

    job.setMapperClass(NaiveBayesClassifierMap_Continuous_Features.class);
    job.setReducerClass(NaiveBayesClassifierReduce_Continuous_Features.class);

    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    job.setNumReduceTasks(1);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    Path out = new Path(args[1]);
    if (fs.exists(out))
        fs.delete(out, true);
    FileOutputFormat.setOutputPath(job, out);
    number_of_classes = Integer.parseInt(args[2]);
    number_of_features = Integer.parseInt(args[3]);
    conf.setInt("number_of_classes", number_of_classes);
    conf.setInt("number_of_features", number_of_features);

    try {
        job.waitForCompletion(true);

    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:com.mongodb.hadoop.util.MapredMongoConfigUtil.java

License:Apache License

public static void setLimit(final Configuration conf, final int limit) {
    conf.setInt(INPUT_LIMIT, limit);
}

From source file:com.mongodb.hadoop.util.MapredMongoConfigUtil.java

License:Apache License

public static void setSkip(final Configuration conf, final int skip) {
    conf.setInt(INPUT_SKIP, skip);
}

From source file:com.mongodb.hadoop.util.MapredMongoConfigUtil.java

License:Apache License

public static void setSplitSize(final Configuration conf, final int value) {
    conf.setInt(INPUT_SPLIT_SIZE, value);
}

From source file:com.mongodb.hadoop.util.MapredMongoConfigUtil.java

License:Apache License

public static Configuration buildConfiguration(final Map<String, Object> data) {
    Configuration newConf = new Configuration();
    for (Map.Entry<String, Object> entry : data.entrySet()) {
        String key = entry.getKey();
        Object val = entry.getValue();
        if (val instanceof String) {
            newConf.set(key, (String) val);
        } else if (val instanceof Boolean) {
            newConf.setBoolean(key, (Boolean) val);
        } else if (val instanceof Integer) {
            newConf.setInt(key, (Integer) val);
        } else if (val instanceof Float) {
            newConf.setFloat(key, (Float) val);
        } else if (val instanceof DBObject) {
            setDBObject(newConf, key, (DBObject) val);
        } else {
            throw new RuntimeException("can't convert " + val.getClass() + " into any type for Configuration");
        }
    }
    return newConf;
}

From source file:com.moz.fiji.hadoop.configurator.TestHadoopConfigurator.java

License:Apache License

@Test
public void testConfigure() {
    Configuration conf = new Configuration();
    conf.setBoolean("my.boolean.value", true);
    conf.setFloat("my.float.value", 3.1f);
    conf.setFloat("my.double.value", 1.9f);
    conf.setInt("my.int.value", 12);
    conf.set("my.string.value", "bar");
    conf.setStrings("my.string.collection", "apple", "banana");
    conf.setStrings("my.string.array", "red", "green", "blue");
    conf.setBoolean("your.boolean.value", true);
    conf.setFloat("your.float.value", 1.0f);
    conf.setFloat("your.double.value", 2.0f);
    conf.setInt("your.int.value", 1);
    conf.setLong("your.long.value", 2L);
    conf.set("your.string.value", "asdf");

    MyConfiguredClass instance = ReflectionUtils.newInstance(MyConfiguredClass.class, conf);
    assertEquals(true, instance.getBooleanValue());
    assertEquals(3.1f, instance.getFloatValue(), 1e-6f);
    assertEquals(1.9, instance.getDoubleValue(), 1e-6);
    assertEquals(12, instance.getIntValue());
    assertEquals(456L, instance.getLongValue());
    assertEquals("bar", instance.getStringValue());
    assertEquals(true, instance.getYourBoolean());
    assertEquals(1.0f, instance.getYourFloat(), 1e-6f);
    assertEquals(2.0, instance.getYourDouble(), 1e-6);
    assertEquals(1, instance.getYourInt());
    assertEquals(2L, instance.getYourLong());
}