List of usage examples for org.apache.hadoop.conf Configuration get
public String get(String name)
name: the name of the property to look up. Returns the value of the name property, or null
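Before the project-specific examples below, here is a minimal, self-contained sketch of that behaviour. The property names are made up for illustration, and the two-argument overload get(name, defaultValue) is shown alongside for contrast.

import org.apache.hadoop.conf.Configuration;

public class ConfigurationGetSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("example.greeting", "hello"); // hypothetical property name

        // get(name) returns the stored value, or null when the key is absent
        String present = conf.get("example.greeting"); // "hello"
        String missing = conf.get("example.not.set");  // null

        // get(name, defaultValue) substitutes a default instead of returning null
        String fallback = conf.get("example.not.set", "default-value");

        System.out.println(present + ", " + missing + ", " + fallback);
    }
}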
if no such property exists.
From source file:co.nubetech.hiho.job.sf.ExportSalesForceJob.java
License:Apache License
public void checkMandatoryConfs(Configuration conf) throws HIHOException {
    if (inputPath == null) {
        throw new HIHOException("The provided inputPath is empty, please specify inputPath");
    }
    if (conf.get(HIHOConf.SALESFORCE_USERNAME) == null) {
        throw new HIHOException("The SalesForce UserName is not specified, please specify SalesForce UserName");
    }
    if (conf.get(HIHOConf.SALESFORCE_PASSWORD) == null) {
        throw new HIHOException("The SalesForce Password is not specified, please specify SalesForce Password");
    }
    if (conf.get(HIHOConf.SALESFORCE_SOBJECTYPE) == null) {
        throw new HIHOException(
                "The SalesForce SOBJECTYPE is not specified, please specify SalesForce SOBJECTYPE");
    }
    if (conf.get(HIHOConf.SALESFORCE_HEADERS) == null) {
        throw new HIHOException("The SalesForce Headers is not specified, please specify SalesForce Headers");
    }
}
From source file:co.nubetech.hiho.job.sf.TestExportSalesForceJob.java
License:Apache License
@Test
public void testPopulateConfiguration() {
    String[] args = new String[] { "-inputPath", "input", "-sfUserName", "sfaccount@hotmail.com",
            "-sfPassword", "tryc,cl,cg123avIotXX9dBlGy3iNiGytlrwy", "-sfObjectType", "Account",
            "-sfHeaders", "AccountNumber,Name,BillingState,Phone" };
    ExportSalesForceJob exportSalesForceJob = new ExportSalesForceJob();
    Configuration conf = new Configuration();
    exportSalesForceJob.populateConfiguration(args, conf);
    assertEquals("sfaccount@hotmail.com", conf.get(HIHOConf.SALESFORCE_USERNAME));
    assertEquals("tryc,cl,cg123avIotXX9dBlGy3iNiGytlrwy", conf.get(HIHOConf.SALESFORCE_PASSWORD));
    assertEquals("Account", conf.get(HIHOConf.SALESFORCE_SOBJECTYPE));
    assertEquals("AccountNumber,Name,BillingState,Phone", conf.get(HIHOConf.SALESFORCE_HEADERS));
}
From source file:co.nubetech.hiho.job.TestDBQueryInputJob.java
License:Apache License
@Test
public void testPopulateConfigurations() throws HIHOException {
    Configuration conf = new Configuration();
    String[] args = new String[] { "-jdbcDriver", "com.mysql.jdbc.Driver",
            "-jdbcUrl", "jdbc:mysql://localhost:3306/", "-jdbcUsername", "root", "-jdbcPassword", "root",
            "-outputPath", "/tmp", "-outputStrategy", "DELIMITED", "-delimiter", "|",
            "-numberOfMappers", "2", "-inputTableName", "table", "-inputFieldNames", "field1,field2",
            "-inputOrderBy", "field1", "-inputLoadTo", "pig", "-inputLoadToPath", "/tmp",
            "-hiveDriver", "org.apache.hadoop.hive.jdbc.HiveDriver",
            "-hiveUrl", "jdbc:hive://localhost:10000/", "-hiveUsername", "hive", "-hivePassword", "hive",
            "-hivePartitionBy", "country:string:india", "-hiveIfNotExists", "true",
            "-hiveTableName", "table", "-hiveSortedBy", "id", "-hiveClusteredBy", "country:2",
            "-inputQuery", "select * from student",
            "-inputBoundingQuery", "select min(id), max(id) from student" };
    new DBQueryInputJob().populateConfiguration(args, conf);
    assertEquals("com.mysql.jdbc.Driver", conf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
    assertEquals("jdbc:mysql://localhost:3306/", conf.get(DBConfiguration.URL_PROPERTY));
    assertEquals("root", conf.get(DBConfiguration.USERNAME_PROPERTY));
    assertEquals("root", conf.get(DBConfiguration.PASSWORD_PROPERTY));
    assertEquals("/tmp", conf.get(HIHOConf.INPUT_OUTPUT_PATH));
    assertEquals("DELIMITED", conf.get(HIHOConf.INPUT_OUTPUT_STRATEGY));
    assertEquals("|", conf.get(HIHOConf.INPUT_OUTPUT_DELIMITER));
    assertEquals("2", conf.get(HIHOConf.NUMBER_MAPPERS));
    assertEquals("table", conf.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY));
    assertEquals("field1,field2", conf.get(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY));
    assertEquals("field1", conf.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY));
    assertEquals("pig", conf.get(HIHOConf.INPUT_OUTPUT_LOADTO));
    assertEquals("/tmp", conf.get(HIHOConf.INPUT_OUTPUT_LOADTO_PATH));
    assertEquals("org.apache.hadoop.hive.jdbc.HiveDriver", conf.get(HIHOConf.HIVE_DRIVER));
    assertEquals("jdbc:hive://localhost:10000/", conf.get(HIHOConf.HIVE_URL));
    assertEquals("hive", conf.get(HIHOConf.HIVE_USR_NAME));
    assertEquals("hive", conf.get(HIHOConf.HIVE_PASSWORD));
    assertEquals("country:string:india", conf.get(HIHOConf.HIVE_PARTITION_BY));
    assertEquals("true", conf.get(HIHOConf.HIVE_TABLE_OVERWRITE));
    assertEquals("table", conf.get(HIHOConf.HIVE_TABLE_NAME));
    assertEquals("id", conf.get(HIHOConf.HIVE_SORTED_BY));
    assertEquals("country:2", conf.get(HIHOConf.HIVE_CLUSTERED_BY));
    assertEquals("select * from student", conf.get(DBConfiguration.INPUT_QUERY));
    assertEquals("select min(id), max(id) from student", conf.get(DBConfiguration.INPUT_BOUNDING_QUERY));
}
From source file:co.nubetech.hiho.job.TestExportToFTPServer.java
License:Apache License
@Test
public void testPopulateConfiguration() {
    String[] args = new String[] { "-inputPath", "input", "-outputPath", "output",
            "-ftpUserName", "sgoyal", "-ftpAddress", "192.168.128.3",
            "-ftpPortNumper", "21", "-ftpPassword", "sonalgoyal123" };
    ExportToFTPServer exportToFTPServer = new ExportToFTPServer();
    Configuration conf = new Configuration();
    exportToFTPServer.populateConfiguration(args, conf);
    assertEquals("sgoyal", conf.get(HIHOConf.FTP_USER));
    assertEquals("192.168.128.3", conf.get(HIHOConf.FTP_ADDRESS));
    assertEquals("21", conf.get(HIHOConf.FTP_PORT));
    assertEquals("sonalgoyal123", conf.get(HIHOConf.FTP_PASSWORD));
}
From source file:co.nubetech.hiho.job.TestExportToMySQLDB.java
License:Apache License
@Test
public void testPopulateConfiguration() {
    String[] args = new String[] { "-inputPath", "input", "-url", "jdbc:mysql://localhost:3306/hiho",
            "-userName", "root", "-password", "newpwd",
            "-querySuffix", "mrTest fields terminated by ','" };
    ExportToMySQLDB exportToMySQLDB = new ExportToMySQLDB();
    Configuration conf = new Configuration();
    exportToMySQLDB.populateConfiguration(args, conf);
    assertEquals("jdbc:mysql://localhost:3306/hiho", conf.get(DBConfiguration.URL_PROPERTY));
    assertEquals("root", conf.get(DBConfiguration.USERNAME_PROPERTY));
    assertEquals("newpwd", conf.get(DBConfiguration.PASSWORD_PROPERTY));
    assertEquals("mrTest fields terminated by ','", conf.get(HIHOConf.LOAD_QUERY_SUFFIX));
}
From source file:co.nubetech.hiho.job.TestExportToOracleDb.java
License:Apache License
@Test
public void testAlterTableDMl() throws HIHOException, IOException {
    Configuration conf = mock(Configuration.class);
    Path path = mock(Path.class);
    FileStatus status1 = mock(FileStatus.class);
    Path path1 = mock(Path.class);
    when(path1.getName()).thenReturn("part-xxxxx");
    when(status1.getPath()).thenReturn(path1);
    FileStatus status2 = mock(FileStatus.class);
    Path path2 = mock(Path.class);
    when(path2.getName()).thenReturn("part-yyyyy");
    when(status2.getPath()).thenReturn(path2);
    FileSystem fs = mock(FileSystem.class);
    when(fs.listStatus(path)).thenReturn(new FileStatus[] { status1, status2 });
    when(path.getFileSystem(conf)).thenReturn(fs);
    when(conf.get(HIHOConf.EXTERNAL_TABLE_DML)).thenReturn(
            "create table age( i Number, n Varchar(20), a Number)organization external ( type oracle_loader default directory ext_dir access parameters (records delimited by newlinefields terminated by ','missing field values are null )location (/home/nube/:file.txt) reject' limit unlimited;");
    String dml = ExportToOracleDb.getAlterTableDML(path, conf);
    assertEquals(" ALTER TABLE age LOCATION ('part-xxxxx','part-yyyyy')", dml);
}
From source file:co.nubetech.hiho.job.TestExportToOracleDb.java
License:Apache License
@Test
public void testPopulateConfiguration() {
    String[] args = new String[] { "-inputPath", "input", "-oracleFtpAddress", "192.168.128.2",
            "-oracleFtpPortNumber", "21", "-oracleFtpUserName", "nube", "-oracleFtpPassword", "nube123",
            "-oracleExternalTableDirectory", "home/nube/age", "-driver", "oracle.jdbc.driver.OracleDriver",
            "-url", "jdbc:oracle:thin:@192.168.128.2:1521:nube", "-userName", "system",
            "-password", "nube", "-externalTable", createExternalTable };
    ExportToOracleDb exportToOracleDb = new ExportToOracleDb();
    Configuration conf = new Configuration();
    exportToOracleDb.populateConfiguration(args, conf);
    assertEquals("192.168.128.2", conf.get(HIHOConf.ORACLE_FTP_ADDRESS));
    assertEquals("21", conf.get(HIHOConf.ORACLE_FTP_PORT));
    assertEquals("nube", conf.get(HIHOConf.ORACLE_FTP_USER));
    assertEquals("nube123", conf.get(HIHOConf.ORACLE_FTP_PASSWORD));
    assertEquals("home/nube/age", conf.get(HIHOConf.ORACLE_EXTERNAL_TABLE_DIR));
    assertEquals("oracle.jdbc.driver.OracleDriver", conf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
    assertEquals("jdbc:oracle:thin:@192.168.128.2:1521:nube", conf.get(DBConfiguration.URL_PROPERTY));
    assertEquals("system", conf.get(DBConfiguration.USERNAME_PROPERTY));
    assertEquals("nube", conf.get(DBConfiguration.PASSWORD_PROPERTY));
    assertEquals(createExternalTable, conf.get(HIHOConf.EXTERNAL_TABLE_DML));
}
From source file:co.nubetech.hiho.mapreduce.lib.db.DBQueryInputFormat.java
License:Apache License
@Override
protected RecordReader<LongWritable, GenericDBWritable> createDBRecordReader(DBInputSplit split,
        Configuration conf) throws IOException {
    DBConfiguration dbConf = getDBConf();
    @SuppressWarnings("unchecked")
    // Class<T> inputClass = (Class<T>) (dbConf.getInputClass());
    String dbProductName = getDBProductName();
    logger.debug("Creating db record reader for db product: " + dbProductName);
    ArrayList params = null;
    try {
        if (conf.get(HIHOConf.QUERY_PARAMS) != null) {
            logger.debug("creating stringifier in DBQueryInputFormat");
            DefaultStringifier<ArrayList> stringifier = new DefaultStringifier<ArrayList>(conf,
                    ArrayList.class);
            logger.debug("created stringifier");
            params = stringifier.fromString(conf.get(HIHOConf.QUERY_PARAMS));
            logger.debug("created params");
        }
        // use database product name to determine appropriate record reader.
        if (dbProductName.startsWith("MYSQL")) {
            // use MySQL-specific db reader.
            return new MySQLQueryRecordReader(split, conf, getConnection(), dbConf,
                    dbConf.getInputConditions(), dbConf.getInputFieldNames(), dbConf.getInputTableName(),
                    params);
        } else {
            // Generic reader.
            return new DBQueryRecordReader(split, conf, getConnection(), dbConf, dbConf.getInputConditions(),
                    dbConf.getInputFieldNames(), dbConf.getInputTableName(), dbProductName, params);
        }
    } catch (SQLException ex) {
        throw new IOException(ex.getMessage());
    }
}
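The reader above pulls query parameters out of the configuration with DefaultStringifier.fromString, which only works if something serialized them under that key first. Below is a minimal sketch of that write side. It assumes Java serialization is enabled for ArrayList, and uses a literal placeholder key standing in for the HIHOConf.QUERY_PARAMS constant from the example; both are assumptions, not the project's actual wiring.

import java.util.ArrayList;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;

public class QueryParamsSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // DefaultStringifier needs a registered serialization for ArrayList;
        // Java serialization is assumed here, with WritableSerialization kept for Writables.
        conf.set("io.serializations",
                "org.apache.hadoop.io.serializer.JavaSerialization,"
                        + "org.apache.hadoop.io.serializer.WritableSerialization");

        ArrayList<String> params = new ArrayList<String>();
        params.add("2012-01-01");

        DefaultStringifier<ArrayList> stringifier = new DefaultStringifier<ArrayList>(conf, ArrayList.class);
        // "hiho.query.params" is a placeholder for the HIHOConf.QUERY_PARAMS key read above.
        conf.set("hiho.query.params", stringifier.toString(params));
    }
}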
From source file:co.nubetech.hiho.mapreduce.lib.output.FTPTextOutputFormat.java
License:Apache License
@Override
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext job) throws IOException, InterruptedException {
    Configuration conf = job.getConfiguration();
    String ip = conf.get(HIHOConf.FTP_ADDRESS);
    String portno = conf.get(HIHOConf.FTP_PORT);
    String usr = conf.get(HIHOConf.FTP_USER);
    String pwd = conf.get(HIHOConf.FTP_PASSWORD);
    String dir = getOutputPath(job).toString();
    System.out.println("\n\ninside ftpoutputformat" + ip + " " + portno + " " + usr + " " + pwd + " " + dir);
    String keyValueSeparator = conf.get("mapred.textoutputformat.separator", "\t");
    FTPClient f = new FTPClient();
    f.connect(ip, Integer.parseInt(portno));
    f.login(usr, pwd);
    f.changeWorkingDirectory(dir);
    f.setFileType(FTP.BINARY_FILE_TYPE);
    boolean isCompressed = getCompressOutput(job);
    CompressionCodec codec = null;
    String extension = "";
    if (isCompressed) {
        Class<? extends CompressionCodec> codecClass = getOutputCompressorClass(job, GzipCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
        extension = codec.getDefaultExtension();
    }
    Path file = getDefaultWorkFile(job, extension);
    FileSystem fs = file.getFileSystem(conf);
    String filename = file.getName();
    if (!isCompressed) {
        // FSDataOutputStream fileOut = fs.create(file, false);
        OutputStream os = f.appendFileStream(filename);
        DataOutputStream fileOut = new DataOutputStream(os);
        return new FTPLineRecordWriter<K, V>(fileOut, new String(keyValueSeparator), f);
    } else {
        // FSDataOutputStream fileOut = fs.create(file, false);
        OutputStream os = f.appendFileStream(filename);
        DataOutputStream fileOut = new DataOutputStream(os);
        return new FTPLineRecordWriter<K, V>(new DataOutputStream(codec.createOutputStream(fileOut)),
                keyValueSeparator, f);
    }
}
From source file:co.nubetech.hiho.merge.MergeKeyMapper.java
License:Apache License
@Override
protected void setup(Mapper.Context context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    InputSplit is = context.getInputSplit();
    FileSplit fs = (FileSplit) is;
    Path splitPath = fs.getPath();
    if (splitPath.toString().contains(conf.get(HIHOConf.MERGE_OLD_PATH))) {
        isOld = true;
    } else if (splitPath.toString().contains(conf.get(HIHOConf.MERGE_NEW_PATH))) {
        isOld = false;
    } else {
        throw new IOException("File " + splitPath + " is not under new path" + conf.get(HIHOConf.MERGE_NEW_PATH)
                + " and old path" + conf.get(HIHOConf.MERGE_OLD_PATH));
    }
}
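For completeness, here is a sketch of the driver side this setup() relies on: the old and new directories are both registered as job inputs, and their paths are stored in the configuration so the mapper can classify each split. The literal key names and paths are assumptions standing in for HIHOConf.MERGE_OLD_PATH and HIHOConf.MERGE_NEW_PATH; only the conf.set/conf.get pairing is the point.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

public class MergeDriverSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder keys standing in for HIHOConf.MERGE_OLD_PATH / HIHOConf.MERGE_NEW_PATH.
        conf.set("hiho.merge.old.path", "/data/merge/old");
        conf.set("hiho.merge.new.path", "/data/merge/new");

        Job job = Job.getInstance(conf, "merge sketch");
        // Both directories are added as input, so every split matches exactly one of the two paths.
        FileInputFormat.addInputPath(job, new Path("/data/merge/old"));
        FileInputFormat.addInputPath(job, new Path("/data/merge/new"));
        // Mapper, reducer and output settings are omitted; see the HIHO merge job for the full wiring.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}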