List of usage examples for org.apache.hadoop.conf Configuration get
public String get(String name)
Returns the value of the name property, or null if no such property exists.
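Every example below follows the same pattern: read a key with conf.get(name) and branch on the null that comes back when the key was never set. A minimal sketch of that contract (the property names here are hypothetical, chosen only for illustration):

import org.apache.hadoop.conf.Configuration;

public class ConfigurationGetDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("demo.table.name", "employee"); // hypothetical key, set for illustration

        String table = conf.get("demo.table.name");  // returns "employee"
        String missing = conf.get("demo.unset.key"); // returns null: key was never set

        if (missing == null) {
            System.out.println("demo.unset.key is not configured");
        }
        System.out.println("table = " + table);
    }
}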
From source file:co.nubetech.hiho.hive.HiveUtility.java
License:Apache License
public static String getLoadQuery(Configuration conf, String hdfsDir, GenericDBWritable writable)
        throws HIHOException {
    StringBuilder builder = new StringBuilder();
    builder.append("LOAD DATA INPATH '");
    builder.append(hdfsDir);
    builder.append("' OVERWRITE INTO TABLE `");
    builder.append(getTableName(conf));
    // get() returns null when no partition was configured, so the closing
    // backtick is appended only for unpartitioned tables
    if (conf.get(HIHOConf.HIVE_PARTITION_BY) == null) {
        builder.append("`");
    }
    return builder.toString();
}
From source file:co.nubetech.hiho.hive.HiveUtility.java
License:Apache License
public static void runQuery(String createQuery, String loadQuery, boolean createTable, Configuration conf)
        throws HIHOException {
    try {
        Class.forName(conf.get(HIHOConf.HIVE_DRIVER));
        Connection con = DriverManager.getConnection(conf.get(HIHOConf.HIVE_URL),
                conf.get(HIHOConf.HIVE_USR_NAME), conf.get(HIHOConf.HIVE_PASSWORD));
        Statement stmt = con.createStatement();
        if (createTable) {
            stmt.executeQuery("drop table " + getTableName(conf));
            stmt.executeQuery(createQuery);
        }
        stmt.executeQuery(loadQuery);
        stmt.close();
        con.close();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    } catch (SQLException e) {
        e.printStackTrace();
    }
}
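runQuery hands the results of conf.get straight to Class.forName and DriverManager.getConnection, so an unset Hive key surfaces as a NullPointerException or SQLException rather than a readable message. A defensive sketch, not part of HiveUtility: requireKey is a hypothetical helper that would sit beside runQuery, where Configuration and HIHOException are already in scope.

// Hypothetical guard: fail fast with the key name instead of an obscure NPE.
private static String requireKey(Configuration conf, String key) throws HIHOException {
    String value = conf.get(key);
    if (value == null) {
        throw new HIHOException("Required configuration key is not set: " + key);
    }
    return value;
}

// Possible usage before opening the connection:
// Class.forName(requireKey(conf, HIHOConf.HIVE_DRIVER));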
From source file:co.nubetech.hiho.hive.HiveUtility.java
License:Apache License
public static void runQuery(String tmpCreateQuery, String insertQuery, String createQuery, String loadQuery,
        Configuration conf, boolean createTable) throws HIHOException {
    try {
        Class.forName(conf.get(HIHOConf.HIVE_DRIVER));
        Connection con = DriverManager.getConnection(conf.get(HIHOConf.HIVE_URL),
                conf.get(HIHOConf.HIVE_USR_NAME), conf.get(HIHOConf.HIVE_PASSWORD));
        Statement stmt = con.createStatement();
        stmt.executeQuery("drop table " + getTableName(conf) + "tmp");
        stmt.executeQuery("drop table " + getTableName(conf));
        stmt.executeQuery(tmpCreateQuery);
        stmt.executeQuery(loadQuery);
        if (createTable) {
            stmt.executeQuery(createQuery);
        }
        stmt.executeQuery(insertQuery);
        stmt.executeQuery("drop table " + getTableName(conf) + "tmp");
        stmt.close();
        con.close();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    } catch (SQLException e) {
        e.printStackTrace();
    }
}
From source file:co.nubetech.hiho.hive.TestHiveUtility.java
License:Apache License
@Test
public void testGetInsertQuery() throws HIHOException {
    ColumnInfo intColumn = new ColumnInfo(0, Types.INTEGER, "intColumn");
    ColumnInfo stringColumn = new ColumnInfo(1, Types.VARCHAR, "stringColumn");
    ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();
    columns.add(intColumn);
    columns.add(stringColumn);
    GenericDBWritable writable = new GenericDBWritable(columns, null);
    Configuration conf = new Configuration();
    conf.set(HIHOConf.HIVE_PARTITION_BY, "country:string:us,name:string");
    conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, "employee");
    assertEquals(
            "FROM `employeetmp` tmp INSERT OVERWRITE TABLE `employee` PARTITION ( country='us',name) SELECT `tmp`.`intColumn`,`tmp`.`stringColumn`",
            HiveUtility.getInsertQueryFromTmpToMain(conf, writable, conf.get(HIHOConf.HIVE_PARTITION_BY)));
}
From source file:co.nubetech.hiho.hive.TestHiveUtility.java
License:Apache License
@Test
public void testGetLoadQuery() throws HIHOException {
    ColumnInfo intColumn = new ColumnInfo(0, Types.INTEGER, "intColumn");
    ColumnInfo stringColumn = new ColumnInfo(1, Types.VARCHAR, "stringColumn");
    ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();
    columns.add(intColumn);
    columns.add(stringColumn);
    GenericDBWritable writable = new GenericDBWritable(columns, null);

    // Partitioned table: HIVE_PARTITION_BY is set, so a PARTITION clause is emitted
    Configuration config = new Configuration();
    String partitionBy1 = "country:string:us";
    config.set(HIHOConf.INPUT_OUTPUT_PATH, "/user/nube/tableForHiho");
    config.set(HIHOConf.HIVE_TABLE_NAME, "employee");
    config.set(HIHOConf.HIVE_PARTITION_BY, "country:string:us");
    assertEquals(
            "LOAD DATA INPATH '/user/nube/tableForHiho' OVERWRITE INTO TABLE `employee` PARTITION ( country='us')",
            HiveUtility.getLoadQuery(config, config.get(HIHOConf.INPUT_OUTPUT_PATH), writable, partitionBy1));

    // Unpartitioned table: HIVE_PARTITION_BY is never set, so conf.get returns null
    // and the query ends at the table name
    Configuration config1 = new Configuration();
    config1.set(HIHOConf.INPUT_OUTPUT_PATH, "/user/nube/tableForHiho");
    config1.set(HIHOConf.HIVE_TABLE_NAME, "employee");
    assertEquals("LOAD DATA INPATH '/user/nube/tableForHiho' OVERWRITE INTO TABLE `employee`",
            HiveUtility.getLoadQuery(config1, config.get(HIHOConf.INPUT_OUTPUT_PATH), writable));
}
From source file:co.nubetech.hiho.job.DBQueryInputJob.java
License:Apache License
public void checkMandatoryConfs(Configuration conf) throws HIHOException {
    if (conf.get(DBConfiguration.DRIVER_CLASS_PROPERTY) == null) {
        throw new HIHOException("JDBC driver configuration is not specified, please specify JDBC driver class");
    }
    if (conf.get(DBConfiguration.URL_PROPERTY) == null) {
        throw new HIHOException("JDBC url path configuration is empty, please specify JDBC url path");
    }
    if (!conf.get(DBConfiguration.DRIVER_CLASS_PROPERTY).contains("hsqldb")) {
        if (conf.get(DBConfiguration.USERNAME_PROPERTY) == null) {
            throw new HIHOException("JDBC user name configuration is empty, please specify JDBC user name");
        }
        if (conf.get(DBConfiguration.PASSWORD_PROPERTY) == null) {
            throw new HIHOException("JDBC password configuration is empty, please specify JDBC password");
        }
    }
    if (conf.get(HIHOConf.INPUT_OUTPUT_PATH) == null) {
        throw new HIHOException("Output path is not specified, please specify output path");
    }
    if (conf.get(HIHOConf.INPUT_OUTPUT_STRATEGY) != null
            && conf.get(HIHOConf.INPUT_OUTPUT_STRATEGY).equals("DELIMITED")) {
        if (conf.get(HIHOConf.INPUT_OUTPUT_DELIMITER) == null) {
            throw new HIHOException("Delimiter is not specified, please specify delimiter");
        }
    }
    if (conf.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY) == null
            && conf.get(DBConfiguration.INPUT_QUERY) == null) {
        throw new HIHOException(
                "Input table name and input query configurations are both empty, please specify either of them");
    }
    if (conf.get(DBConfiguration.INPUT_QUERY) != null
            && conf.get(DBConfiguration.INPUT_BOUNDING_QUERY) == null) {
        throw new HIHOException(
                "Please specify an input bounding query, as it is mandatory when an input query is defined");
    }
    if (conf.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY) != null
            && conf.get(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY) == null) {
        conf.set(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY, "*");
    }
    if (conf.get(HIHOConf.INPUT_OUTPUT_LOADTO) != null && conf.get(HIHOConf.INPUT_OUTPUT_LOADTO_PATH) == null) {
        throw new HIHOException(
                "Load to path configuration is empty, please specify the path to the load script in the loadTOPath configuration");
    }
    if (conf.get(HIHOConf.INPUT_OUTPUT_LOADTO) != null
            && conf.get(HIHOConf.INPUT_OUTPUT_LOADTO).equals("hive")) {
        if (conf.get(HIHOConf.HIVE_URL) == null) {
            throw new HIHOException("The Hive url is not defined, please specify the Hive url");
        }
        if (conf.get(HIHOConf.HIVE_DRIVER) == null) {
            throw new HIHOException("The Hive driver is not defined, please specify the Hive driver");
        }
        if (checkForMultiplePartition(conf.get(HIHOConf.HIVE_PARTITION_BY))
                && conf.get(HIHOConf.HIVE_TABLE_NAME) == null) {
            throw new HIHOException("Please specify the Hive table name");
        }
    }
}
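Read in reverse, checkMandatoryConfs describes the smallest Configuration that passes validation. A sketch of one such setup for a table-based import with no loadTo step (the connection values are made up; the DBConfiguration and HIHOConf constants are the same ones used above):

Configuration conf = new Configuration();
conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, "com.mysql.jdbc.Driver"); // placeholder driver
conf.set(DBConfiguration.URL_PROPERTY, "jdbc:mysql://localhost/hihodb");  // placeholder url
conf.set(DBConfiguration.USERNAME_PROPERTY, "hiho");     // only checked for non-hsqldb drivers
conf.set(DBConfiguration.PASSWORD_PROPERTY, "hihopass"); // likewise
conf.set(HIHOConf.INPUT_OUTPUT_PATH, "/user/nube/tableForHiho");
conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, "employee");
// INPUT_FIELD_NAMES_PROPERTY can stay unset: the check above defaults it to "*"
new DBQueryInputJob().checkMandatoryConfs(conf); // returns without throwing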
From source file:co.nubetech.hiho.job.DBQueryInputJob.java
License:Apache License
@Override
public int run(String[] args) throws IOException {
    Configuration conf = getConf();
    populateConfiguration(args, conf);
    boolean isMultiplePartition = false;
    if (conf.get(HIHOConf.INPUT_OUTPUT_LOADTO) != null) {
        if (conf.get(HIHOConf.INPUT_OUTPUT_LOADTO).equals("hive")) {
            conf.set("hadoop.job.history.user.location", "none");
            if (conf.get(HIHOConf.HIVE_PARTITION_BY) != null) {
                try {
                    isMultiplePartition = checkForMultiplePartition(conf.get(HIHOConf.HIVE_PARTITION_BY));
                } catch (HIHOException e) {
                    e.printStackTrace();
                }
            }
        }
        if (isMultiplePartition && conf.get(HIHOConf.INPUT_OUTPUT_LOADTO).equals("hive")) {
            populateHiveConfigurationForMultiplePartition(conf);
        } else {
            runJobs(conf, 0);
        }
    } else {
        runJobs(conf, 0);
    }
    return 0;
}
From source file:co.nubetech.hiho.job.DBQueryInputJob.java
License:Apache License
private void generatePigScript(Configuration conf, Job job) throws HIHOException {
    // see whether the import target is Pig
    if (conf.get(HIHOConf.INPUT_OUTPUT_LOADTO).equals("pig")) {
        try {
            String pigScript = PigUtility.getLoadScript(conf.get(HIHOConf.INPUT_OUTPUT_PATH),
                    getDBWritable(conf));
            File file = new File(new File(conf.get(HIHOConf.INPUT_OUTPUT_LOADTO_PATH)),
                    "pigScript" + ".txt");
            FileOutputStream fos = new FileOutputStream(file);
            BufferedWriter w = new BufferedWriter(new OutputStreamWriter(fos));
            w.write(pigScript);
            w.close();
            fos.close();
        } catch (Exception h) {
            throw new HIHOException("Unable to generate Pig script", h);
        }
    }
}
From source file:co.nubetech.hiho.job.DBQueryInputJob.java
License:Apache License
private void generateHiveScript(Configuration conf, Job job, int jobCounter) throws HIHOException {
    // see whether the import target is Hive
    if (conf.get(HIHOConf.INPUT_OUTPUT_LOADTO).equals("hive")) {
        try {
            HiveUtility.createTable(conf, job, getDBWritable(conf), jobCounter);
        } catch (Exception h) {
            throw new HIHOException("Unable to generate Hive script", h);
        }
    }
}
From source file:co.nubetech.hiho.job.DBQueryInputJob.java
License:Apache License
public static GenericDBWritable getDBWritable(Configuration conf) throws HIHOException {
    try {
        String driverName = conf.get(DBConfiguration.DRIVER_CLASS_PROPERTY);
        String url = conf.get(DBConfiguration.URL_PROPERTY);
        String user = conf.get(DBConfiguration.USERNAME_PROPERTY);
        String password = conf.get(DBConfiguration.PASSWORD_PROPERTY);
        Class.forName(driverName).newInstance();
        Connection conn = DriverManager.getConnection(url, user, password);
        DatabaseMetaData dbMeta = conn.getMetaData();
        String dbProductName = dbMeta.getDatabaseProductName().toUpperCase();
        String query = getSelectQuery(conf, dbProductName);
        PreparedStatement st = conn.prepareStatement(query);
        logger.warn("\n Query for GenericDBWritable is " + query);
        GenericDBWritable writable = new GenericDBWritable(
                GenericDBWritable.populateColumnInfo(st.getMetaData()), null);
        return writable;
    } catch (Exception e) {
        e.printStackTrace();
        throw new HIHOException("Unable to get metadata for the query", e);
    }
}
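getDBWritable is the consumer of the JDBC keys that checkMandatoryConfs validates: each conf.get here returns whatever was stored under the same DBConfiguration constant. With a Configuration populated as in the checkMandatoryConfs sketch above, and assuming the driver can actually reach the database, the call reduces to:

GenericDBWritable writable = DBQueryInputJob.getDBWritable(conf);
// writable now carries the ColumnInfo metadata of the generated SELECT query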