List of usage examples for org.apache.hadoop.conf.Configuration.set
public void set(String name, String value)
Sets the value of the name property.
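A minimal standalone sketch of the call, separate from the project examples below; the property name and value are placeholders:

import org.apache.hadoop.conf.Configuration;

public class ConfigurationSetExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Store a value under an arbitrary, illustrative property name.
        conf.set("example.property.name", "example-value");
        // Read it back; get(name) returns null for a property that was never set.
        System.out.println(conf.get("example.property.name"));
    }
}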
From source file:co.nubetech.hiho.job.TestDBQueryInputJob.java
License:Apache License
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForInputTableNameAndInputQuery() throws HIHOException {
    Configuration conf = new Configuration();
    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, "value");
    conf.set(DBConfiguration.URL_PROPERTY, "value");
    conf.set(DBConfiguration.USERNAME_PROPERTY, "value");
    conf.set(DBConfiguration.PASSWORD_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_PATH, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_STRATEGY, "DELIMITED");
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, "value");
    // conf.set(DBConfiguration.INPUT_QUERY, "value");
    // conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO, "hive");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO_PATH, "value");
    conf.set(HIHOConf.HIVE_DRIVER, "value");
    conf.set(HIHOConf.HIVE_URL, "value");
    conf.set(HIHOConf.HIVE_PARTITION_BY, "value:value");
    conf.set(HIHOConf.HIVE_TABLE_NAME, "value");
    new DBQueryInputJob().checkMandatoryConfs(conf);
}
From source file:co.nubetech.hiho.job.TestDBQueryInputJob.java
License:Apache License
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForHiveLoadToPath() throws HIHOException {
    Configuration conf = new Configuration();
    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, "value");
    conf.set(DBConfiguration.URL_PROPERTY, "value");
    conf.set(DBConfiguration.USERNAME_PROPERTY, "value");
    conf.set(DBConfiguration.PASSWORD_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_PATH, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_STRATEGY, "DELIMITED");
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, "value");
    conf.set(DBConfiguration.INPUT_QUERY, "value");
    conf.set(DBConfiguration.INPUT_BOUNDING_QUERY, "value");
    conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO, "hive");
    // conf.set(HIHOConf.INPUT_OUTPUT_LOADTO_PATH, "value");
    conf.set(HIHOConf.HIVE_DRIVER, "value");
    conf.set(HIHOConf.HIVE_URL, "value");
    conf.set(HIHOConf.HIVE_PARTITION_BY, "value:value");
    conf.set(HIHOConf.HIVE_TABLE_NAME, "value");
    new DBQueryInputJob().checkMandatoryConfs(conf);
}
From source file:co.nubetech.hiho.job.TestDBQueryInputJob.java
License:Apache License
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForHiveDriver() throws HIHOException {
    Configuration conf = new Configuration();
    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, "value");
    conf.set(DBConfiguration.URL_PROPERTY, "value");
    conf.set(DBConfiguration.USERNAME_PROPERTY, "value");
    conf.set(DBConfiguration.PASSWORD_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_PATH, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_STRATEGY, "DELIMITED");
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, "value");
    conf.set(DBConfiguration.INPUT_QUERY, "value");
    conf.set(DBConfiguration.INPUT_BOUNDING_QUERY, "value");
    conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO, "hive");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO_PATH, "value");
    // conf.set(HIHOConf.HIVE_DRIVER, "value");
    conf.set(HIHOConf.HIVE_URL, "value");
    conf.set(HIHOConf.HIVE_PARTITION_BY, "value:value");
    conf.set(HIHOConf.HIVE_TABLE_NAME, "value");
    new DBQueryInputJob().checkMandatoryConfs(conf);
}
From source file:co.nubetech.hiho.job.TestDBQueryInputJob.java
License:Apache License
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForHiveUrl() throws HIHOException {
    Configuration conf = new Configuration();
    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, "value");
    conf.set(DBConfiguration.URL_PROPERTY, "value");
    conf.set(DBConfiguration.USERNAME_PROPERTY, "value");
    conf.set(DBConfiguration.PASSWORD_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_PATH, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_STRATEGY, "DELIMITED");
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, "value");
    conf.set(DBConfiguration.INPUT_QUERY, "value");
    conf.set(DBConfiguration.INPUT_BOUNDING_QUERY, "value");
    conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO, "hive");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO_PATH, "value");
    conf.set(HIHOConf.HIVE_DRIVER, "value");
    // conf.set(HIHOConf.HIVE_URL, "value");
    conf.set(HIHOConf.HIVE_PARTITION_BY, "value:value");
    conf.set(HIHOConf.HIVE_TABLE_NAME, "value");
    new DBQueryInputJob().checkMandatoryConfs(conf);
}
From source file:co.nubetech.hiho.job.TestDBQueryInputJob.java
License:Apache License
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForHiveOutputTableNameInCaseOfMultiPartition() throws HIHOException {
    Configuration conf = new Configuration();
    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, "value");
    conf.set(DBConfiguration.URL_PROPERTY, "value");
    conf.set(DBConfiguration.USERNAME_PROPERTY, "value");
    conf.set(DBConfiguration.PASSWORD_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_PATH, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_STRATEGY, "DELIMITED");
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, "value");
    conf.set(DBConfiguration.INPUT_QUERY, "value");
    conf.set(DBConfiguration.INPUT_BOUNDING_QUERY, "value");
    conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO, "hive");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO_PATH, "value");
    conf.set(HIHOConf.HIVE_DRIVER, "value");
    conf.set(HIHOConf.HIVE_URL, "value");
    conf.set(HIHOConf.HIVE_PARTITION_BY, "value:value:us,india");
    // conf.set(HIHOConf.HIVE_TABLE_NAME, "value");
    new DBQueryInputJob().checkMandatoryConfs(conf);
}
From source file:co.nubetech.hiho.job.TestDBQueryInputJob.java
License:Apache License
@Test(expected = HIHOException.class)
public void testCheckMandatoryConfsForInputBoundingQuery() throws HIHOException {
    Configuration conf = new Configuration();
    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, "value");
    conf.set(DBConfiguration.URL_PROPERTY, "value");
    conf.set(DBConfiguration.USERNAME_PROPERTY, "value");
    conf.set(DBConfiguration.PASSWORD_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_PATH, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_STRATEGY, "DELIMITED");
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, "value");
    conf.set(DBConfiguration.INPUT_QUERY, "value");
    // conf.set(DBConfiguration.INPUT_BOUNDING_QUERY, "value");
    conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO, "hive");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO_PATH, "value");
    conf.set(HIHOConf.HIVE_DRIVER, "value");
    conf.set(HIHOConf.HIVE_URL, "value");
    conf.set(HIHOConf.HIVE_PARTITION_BY, "value:value:us,india");
    conf.set(HIHOConf.HIVE_TABLE_NAME, "value");
    new DBQueryInputJob().checkMandatoryConfs(conf);
}
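The six tests above differ mainly in which mandatory property is left out. A hedged sketch of how the repeated setup could be factored into a helper inside TestDBQueryInputJob; the helper and test names are hypothetical, and Configuration.unset assumes a Hadoop release that provides it:

// Hypothetical helper, not part of the original test class: build the full
// mandatory configuration, then remove the single property under test.
private Configuration confWithout(String missingKey) {
    Configuration conf = new Configuration();
    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, "value");
    conf.set(DBConfiguration.URL_PROPERTY, "value");
    conf.set(DBConfiguration.USERNAME_PROPERTY, "value");
    conf.set(DBConfiguration.PASSWORD_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_PATH, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_STRATEGY, "DELIMITED");
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, "value");
    conf.set(DBConfiguration.INPUT_QUERY, "value");
    conf.set(DBConfiguration.INPUT_BOUNDING_QUERY, "value");
    conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, "value");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO, "hive");
    conf.set(HIHOConf.INPUT_OUTPUT_LOADTO_PATH, "value");
    conf.set(HIHOConf.HIVE_DRIVER, "value");
    conf.set(HIHOConf.HIVE_URL, "value");
    conf.set(HIHOConf.HIVE_PARTITION_BY, "value:value");
    conf.set(HIHOConf.HIVE_TABLE_NAME, "value");
    conf.unset(missingKey); // requires a Hadoop version that has Configuration.unset
    return conf;
}

@Test(expected = HIHOException.class)
public void testMissingHiveUrl() throws HIHOException {
    new DBQueryInputJob().checkMandatoryConfs(confWithout(HIHOConf.HIVE_URL));
}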
From source file:co.nubetech.hiho.mapreduce.lib.db.apache.DataDrivenDBInputFormat.java
License:Apache License
/**
 * Set the user-defined bounding query to use with a user-defined query.
 * This *must* include the substring "$CONDITIONS"
 * (DataDrivenDBInputFormat.SUBSTITUTE_TOKEN) inside the WHERE clause,
 * so that DataDrivenDBInputFormat knows where to insert split clauses.
 * e.g., "SELECT foo FROM mytable WHERE $CONDITIONS"
 * This will be expanded to something like:
 *   SELECT foo FROM mytable WHERE (id > 100) AND (id < 250)
 * inside each split.
 */
public static void setBoundingQuery(Configuration conf, String query) {
    if (null != query) {
        // If the user's setting a query, warn if they don't allow conditions.
        if (query.indexOf(SUBSTITUTE_TOKEN) == -1) {
            LOG.warn("Could not find " + SUBSTITUTE_TOKEN + " token in query: " + query
                    + "; splits may not partition data.");
        }
    }
    conf.set(DBConfiguration.INPUT_BOUNDING_QUERY, query);
}
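A short usage sketch based on the example given in the comment above; the table and column names are placeholders:

Configuration conf = new Configuration();
// Keep the $CONDITIONS token so the framework can substitute per-split
// range predicates into the WHERE clause; otherwise the warning above fires.
DataDrivenDBInputFormat.setBoundingQuery(conf, "SELECT foo FROM mytable WHERE $CONDITIONS");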
From source file:co.nubetech.hiho.mapreduce.TestDBInputDelimMapper.java
License:Apache License
@Test
public final void testMapperValidValues() throws IOException, InterruptedException {
    Mapper.Context context = mock(Mapper.Context.class);
    Configuration conf = new Configuration();
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, ",");
    when(context.getConfiguration()).thenReturn(conf);
    DBInputDelimMapper mapper = new DBInputDelimMapper();
    ColumnInfo intColumn = new ColumnInfo(0, Types.INTEGER, "intColumn");
    ColumnInfo stringColumn = new ColumnInfo(1, Types.VARCHAR, "stringColumn");
    ColumnInfo dateColumn = new ColumnInfo(1, Types.DATE, "dateColumn");
    ColumnInfo longColumn = new ColumnInfo(1, Types.BIGINT, "longColumn");
    ColumnInfo booleanColumn = new ColumnInfo(1, Types.BOOLEAN, "booleanColumn");
    ColumnInfo doubleColumn = new ColumnInfo(1, Types.DOUBLE, "doubleColumn");
    ColumnInfo charColumn = new ColumnInfo(1, Types.CHAR, "charColumn");
    ColumnInfo timeColumn = new ColumnInfo(1, Types.TIME, "timeColumn");
    ColumnInfo timeStampColumn = new ColumnInfo(1, Types.TIMESTAMP, "timeStampColumn");
    ColumnInfo floatColumn = new ColumnInfo(1, Types.FLOAT, "floatColumn");
    ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();
    columns.add(intColumn);
    columns.add(stringColumn);
    columns.add(dateColumn);
    columns.add(longColumn);
    columns.add(booleanColumn);
    columns.add(doubleColumn);
    columns.add(charColumn);
    columns.add(timeColumn);
    columns.add(timeStampColumn);
    columns.add(floatColumn);
    ArrayList<Comparable> values = new ArrayList<Comparable>();
    values.add(new Integer(12));
    values.add(new String("sam"));
    values.add(new Date());
    values.add(new Long(26564l));
    values.add(true);
    values.add(1.235);
    values.add('a');
    values.add(new Time(new Date().getTime()));
    values.add(new Time(new Date().getTime()));
    values.add(new Float(1.0f));
    GenericDBWritable val = new GenericDBWritable(columns, values);
    LongWritable key = new LongWritable(1);
    mapper.map(key, val, context);
    Text outkey = new Text();
    Text outval = new Text();
    StringBuilder builder = new StringBuilder();
    builder.append(new Integer(12) + "," + new String("sam") + "," + new Date() + ","
            + new Long(26564l) + "," + true + "," + 1.235 + "," + 'a' + ","
            + new Time(new Date().getTime()) + "," + new Time(new Date().getTime()) + ","
            + new Float(1.0f));
    outval.set(builder.toString());
    verify(context).write(outkey, outval);
}
From source file:co.nubetech.hiho.mapreduce.TestDBInputDelimMapper.java
License:Apache License
@Test
public final void testMapperValidValuesDelmiter() throws IOException, InterruptedException {
    Mapper.Context context = mock(Mapper.Context.class);
    Configuration conf = new Configuration();
    String delimiter = "DELIM";
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, delimiter);
    when(context.getConfiguration()).thenReturn(conf);
    DBInputDelimMapper mapper = new DBInputDelimMapper();
    ColumnInfo intColumn = new ColumnInfo(0, Types.INTEGER, "intColumn");
    ColumnInfo stringColumn = new ColumnInfo(1, Types.VARCHAR, "stringColumn");
    ColumnInfo dateColumn = new ColumnInfo(1, Types.DATE, "dateColumn");
    ColumnInfo longColumn = new ColumnInfo(1, Types.BIGINT, "longColumn");
    ColumnInfo booleanColumn = new ColumnInfo(1, Types.BOOLEAN, "booleanColumn");
    ColumnInfo doubleColumn = new ColumnInfo(1, Types.DOUBLE, "doubleColumn");
    ColumnInfo charColumn = new ColumnInfo(1, Types.CHAR, "charColumn");
    ColumnInfo timeColumn = new ColumnInfo(1, Types.TIME, "timeColumn");
    ColumnInfo timeStampColumn = new ColumnInfo(1, Types.TIMESTAMP, "timeStampColumn");
    ColumnInfo floatColumn = new ColumnInfo(1, Types.FLOAT, "floatColumn");
    ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();
    columns.add(intColumn);
    columns.add(stringColumn);
    columns.add(dateColumn);
    columns.add(longColumn);
    columns.add(booleanColumn);
    columns.add(doubleColumn);
    columns.add(charColumn);
    columns.add(timeColumn);
    columns.add(timeStampColumn);
    columns.add(floatColumn);
    ArrayList<Comparable> values = new ArrayList<Comparable>();
    values.add(new Integer(12));
    values.add(new String("sam"));
    values.add(new Date());
    values.add(new Long(26564l));
    values.add(true);
    values.add(1.235);
    values.add('a');
    values.add(new Time(new Date().getTime()));
    values.add(new Time(new Date().getTime()));
    values.add(new Float(1.0f));
    GenericDBWritable val = new GenericDBWritable(columns, values);
    LongWritable key = new LongWritable(1);
    mapper.map(key, val, context);
    Text outkey = new Text();
    Text outval = new Text();
    StringBuilder builder = new StringBuilder();
    builder.append(new Integer(12) + delimiter + new String("sam") + delimiter + new Date() + delimiter
            + new Long(26564l) + delimiter + true + delimiter + 1.235 + delimiter + 'a' + delimiter
            + new Time(new Date().getTime()) + delimiter + new Time(new Date().getTime()) + delimiter
            + new Float(1.0f));
    outval.set(builder.toString());
    verify(context).write(outkey, outval);
}
From source file:co.nubetech.hiho.mapreduce.TestDBInputDelimMapper.java
License:Apache License
@Test
public final void testMapperNullValues() throws IOException, InterruptedException {
    Mapper.Context context = mock(Mapper.Context.class);
    Configuration conf = new Configuration();
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, ",");
    when(context.getConfiguration()).thenReturn(conf);
    DBInputDelimMapper mapper = new DBInputDelimMapper();
    ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();
    ArrayList values = new ArrayList();
    GenericDBWritable val = new GenericDBWritable(columns, values);
    LongWritable key = new LongWritable(1);
    mapper.map(key, val, context);
    Text outkey = new Text();
    Text outval = new Text();
    verify(context).write(outkey, outval);
}
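The three mapper tests stub context.getConfiguration() so that DBInputDelimMapper can look up the delimiter they set. A hypothetical sketch of that lookup, not the actual mapper code; the fallback default is an assumption:

// Illustrative only: how a mapper might read back the property the tests set.
Configuration conf = context.getConfiguration();
String delimiter = conf.get(HIHOConf.INPUT_OUTPUT_DELIMITER, ","); // "," default is assumed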