Example usage for org.apache.hadoop.conf Configuration set

List of usage examples for org.apache.hadoop.conf Configuration set

Introduction

On this page you can find example usages of org.apache.hadoop.conf.Configuration.set.

Prototype

public void set(String name, String value) 

Document

Set the value of the name property.
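
Usage is straightforward: set(name, value) stores the value under the given name, and a later get(name) returns it. A minimal, self-contained sketch (the property key "my.custom.key" is invented for illustration):

import org.apache.hadoop.conf.Configuration;

public class ConfigurationSetExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // store a value under a name; "my.custom.key" is a hypothetical key
        conf.set("my.custom.key", "my-value");
        // reading the same name back returns the value just set
        System.out.println(conf.get("my.custom.key")); // prints: my-value
    }
}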

Usage

From source file:co.nubetech.hiho.mapreduce.TestMySQLLoadMapper.java

License: Apache License

@Test
public final void testSetup() throws Exception {

    Context context = mock(Context.class);
    MySQLLoadDataMapper mapper = new MySQLLoadDataMapper() {
        protected void connect(String curl, String u, String p) {
        }
    };
    Configuration conf = new Configuration();
    String url = "jdbc:mysql://localhost:3306/hiho";
    String usrname = "root";
    String password = "newpwd";
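    // MySQLLoadDataMapper.setup() reads these three properties via context.getConfiguration()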
    conf.set(DBConfiguration.URL_PROPERTY, url);
    conf.set(DBConfiguration.USERNAME_PROPERTY, usrname);
    conf.set(DBConfiguration.PASSWORD_PROPERTY, password);
    when(context.getConfiguration()).thenReturn(conf);
    mapper.setup(context);
    verify(context, times(3)).getConfiguration();
}

From source file:co.nubetech.hiho.mapreduce.TestMySQLLoadMapper.java

License: Apache License

/**
 * @param tablename name of the table to load data into
 * @throws IOException
 * @throws SQLException
 * @throws InterruptedException
 */
private void runMapper(String tablename) throws IOException, SQLException, InterruptedException {
    Context context = mock(Context.class);
    MySQLLoadDataMapper mapper = new MySQLLoadDataMapper();
    FSDataInputStream val = new FSDataInputStream(new MyInputStream());
    Connection con = mock(Connection.class);
    com.mysql.jdbc.Statement stmt = mock(com.mysql.jdbc.Statement.class);
    mapper.setConnection(con);
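    // inject the mocked connection so map() never touches a real database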
    String query = "load data local infile 'abc.txt' into table tablename " + QUERY_SUFFIX
            + " (col1,col2,col3)";
    when(con.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE)).thenReturn(stmt);
    Configuration conf = new Configuration();
    conf.set(HIHOConf.LOAD_QUERY_SUFFIX, QUERY_SUFFIX);
    conf.setBoolean(HIHOConf.LOAD_KEY_IS_TABLENAME, true);
    conf.setBoolean(HIHOConf.LOAD_HAS_HEADER, true);
    when(context.getConfiguration()).thenReturn(conf);
    when(stmt.executeUpdate(query)).thenReturn(10);
    Counter counter = mock(Counter.class);
    when(context.getCounter("MySQLLoadCounters", "ROWS_INSERTED_TABLE_tablename")).thenReturn(counter);
    when(context.getCounter("MySQLLoadCounters", "ROWS_INSERTED_TOTAL")).thenReturn(counter);
    mapper.map(new Text(tablename), val, context);
    verify(stmt).setLocalInfileInputStream(val);
    verify(stmt).executeUpdate(query);
    verify(counter, times(2)).increment(10);
}

From source file:co.nubetech.hiho.mapreduce.TestOracleLoadMapper.java

License: Apache License

@Test
public final void testSetup() throws Exception {
    Mapper.Context context = mock(Mapper.Context.class);
    OracleLoadMapper mapper = new OracleLoadMapper();
    FTPClient ftpClient = mock(FTPClient.class);
    Configuration conf = new Configuration();
    String ip = "192.168.128.8";
    String portno = "21";
    String user = "nube";
    String password = "nube123";
    String externalDirectory = "dir";
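    // connection details that setup() forwards to the injected FTPClient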
    conf.set(HIHOConf.ORACLE_FTP_ADDRESS, ip);
    conf.set(HIHOConf.ORACLE_FTP_PORT, portno);
    conf.set(HIHOConf.ORACLE_FTP_USER, user);
    conf.set(HIHOConf.ORACLE_FTP_PASSWORD, password);
    conf.set(HIHOConf.ORACLE_EXTERNAL_TABLE_DIR, externalDirectory);
    when(context.getConfiguration()).thenReturn(conf);
    mapper.setFtpClient(ftpClient);
    mapper.setup(context);
    verify(ftpClient).connect(ip, Integer.parseInt(portno));
    verify(ftpClient).login(user, password);
    verify(ftpClient).changeWorkingDirectory(externalDirectory);
}

From source file:co.nubetech.hiho.merge.MergeJob.java

License: Apache License

@Override
public int run(String[] args) throws Exception {
    populateConfiguration(args);
    try {
        checkMandatoryConfs();
    } catch (HIHOException e1) {
        e1.printStackTrace();
        throw new Exception(e1);
    }

    Class inputFormatClass = Class.forName(inputFormat);
    Class outputFormatClass = Class.forName(outputFormat);
    Class inputKeyClass = Class.forName(inputKeyClassName);
    Class inputValueClass = Class.forName(inputValueClassName);

    Configuration conf = getConf();
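    // store both input locations in the job configuration (read back later to tell old records from new)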
    conf.set(HIHOConf.MERGE_OLD_PATH, oldPath);
    conf.set(HIHOConf.MERGE_NEW_PATH, newPath);

    Job job = new Job(conf);
    job.setJobName("Merge job");
    job.setJarByClass(MergeJob.class);

    if (mergeBy.equals("key")) {
        job.setMapperClass(MergeKeyMapper.class);
        job.setReducerClass(MergeKeyReducer.class);

    } else if (mergeBy.equals("value")) {
        job.setMapperClass(MergeValueMapper.class);
        job.setReducerClass(MergeValueReducer.class);
    }

    job.setInputFormatClass(inputFormatClass);
    DelimitedTextInputFormat.setProperties(job, delimiter, column);
    job.setMapOutputKeyClass(HihoTuple.class);
    job.setMapOutputValueClass(HihoValue.class);

    job.setOutputKeyClass(inputKeyClass);
    job.setOutputValueClass(inputValueClass);
    FileInputFormat.setInputPaths(job, oldPath + "," + newPath);
    job.setOutputFormatClass(outputFormatClass);
    FileOutputFormat.setOutputPath(job, new Path(outputPath));

    try {
        logger.debug("Output format class is " + job.getOutputFormatClass());
        logger.debug("Class is " + ReflectionUtils
                .newInstance(job.getOutputFormatClass(), job.getConfiguration()).getClass().getName());
        job.waitForCompletion(false);
        if (job.isComplete()) {
            Counters counters = job.getCounters();
            totalRecordsOld = counters.findCounter(MergeRecordCounter.TOTAL_RECORDS_OLD).getValue();
            totalRecordsNew = counters.findCounter(MergeRecordCounter.TOTAL_RECORDS_NEW).getValue();
            badRecords = counters.findCounter(MergeRecordCounter.BAD_RECORD).getValue();
            output = counters.findCounter(MergeRecordCounter.OUTPUT).getValue();
            logger.info("Total old records read are: " + totalRecordsOld);
            logger.info("Total new records read are: " + totalRecordsNew);
            logger.info("Bad Records are: " + badRecords);
            logger.info("Output records are: " + output);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

    return 0;
}

From source file:com.addthis.hydra.task.output.HDFSOutputWrapperFactory.java

License: Apache License

@JsonCreator
public HDFSOutputWrapperFactory(@JsonProperty(value = "hdfsUrl", required = true) String hdfsUrl,
        @JsonProperty(value = "dir", required = true) Path dir) throws IOException {
    Configuration config = new Configuration();
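    // point the client at the target cluster; fs.automatic.close=false keeps the FileSystem open past JVM shutdown hooks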
    config.set("fs.defaultFS", hdfsUrl);
    config.set("fs.automatic.close", "false");
    config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    this.fileSystem = FileSystem.get(config);
    this.dir = dir;
}

From source file:com.aerospike.hadoop.mapreduce.AerospikeConfigUtil.java

License: Apache License

public static void setInputHost(Configuration conf, String host) {
    log.info("setting " + INPUT_HOST + " to " + host);
    conf.set(INPUT_HOST, host);
}

From source file:com.aerospike.hadoop.mapreduce.AerospikeConfigUtil.java

License: Apache License

public static void setInputNamespace(Configuration conf, String namespace) {
    log.info("setting " + INPUT_NAMESPACE + " to " + namespace);
    conf.set(INPUT_NAMESPACE, namespace);
}

From source file:com.aerospike.hadoop.mapreduce.AerospikeConfigUtil.java

License: Apache License

public static void setInputSetName(Configuration conf, String setname) {
    log.info("setting " + INPUT_SETNAME + " to " + setname);
    conf.set(INPUT_SETNAME, setname);
}

From source file:com.aerospike.hadoop.mapreduce.AerospikeConfigUtil.java

License: Apache License

public static void setInputBinNames(Configuration conf, String bins) {
    log.info("setting " + INPUT_BINNAMES + " to " + bins);
    conf.set(INPUT_BINNAMES, bins);
}

From source file:com.aerospike.hadoop.mapreduce.AerospikeConfigUtil.java

License: Apache License

public static void setInputOperation(Configuration conf, String operation) {
    if (!operation.equals("scan") && !operation.equals("numrange"))
        throw new UnsupportedOperationException("input operation must be 'scan' or 'numrange'");
    log.info("setting " + INPUT_OPERATION + " to " + operation);
    conf.set(INPUT_OPERATION, operation);
}