List of usage examples for java.sql Connection setTransactionIsolation
void setTransactionIsolation(int level) throws SQLException;
Attempts to change the transaction isolation level for this Connection object to the one given.
From source file: org.apache.hadoop.hive.metastore.MyXid.java
@Override public int grantUserGroup(String groupName, String namelist, String user) throws MetaException { Connection con = null; Statement ps = null;//from ww w .j av a 2 s . c o m boolean success = false; groupName = groupName.toLowerCase(); namelist = namelist.toLowerCase(); user = user.toLowerCase(); try { con = getGlobalConnection(); } catch (MetaStoreConnectException e1) { LOG.error("grant user group error, groupName=" + groupName + ", namelist=" + namelist + ", user=" + user + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } catch (SQLException e1) { LOG.error("grant user group error, groupName=" + groupName + ", namelist=" + namelist + ", user=" + user + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } try { con.setAutoCommit(false); con.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ); ps = con.createStatement(); String sql = "select group_name from tdwuser where user_name='" + namelist + "'"; boolean isUserFind = false; String oldGroupName = null; ResultSet groupNameSet = ps.executeQuery(sql); while (groupNameSet.next()) { isUserFind = true; oldGroupName = groupNameSet.getString(1); break; } groupNameSet.close(); if (!isUserFind) { LOG.error("Can not find user group:" + groupName); return 1; } if (groupName.equalsIgnoreCase(oldGroupName)) { LOG.error("grant user group error, groupName=" + groupName + ", namelist=" + namelist + ", user=" + user + " group name is same to old group name"); return 2; } sql = "select creator from usergroup where group_name='" + groupName + "'"; boolean isNewGroupFind = false; String newGroupCreator = null; ResultSet groupCreatorSet = ps.executeQuery(sql); while (groupCreatorSet.next()) { isNewGroupFind = true; newGroupCreator = groupCreatorSet.getString(1); break; } groupCreatorSet.close(); if (!isNewGroupFind) { LOG.error("revoke user group error, groupName=" + groupName + ", namelist=" + namelist + ", user=" + user); return 4; } if 
(!newGroupCreator.equalsIgnoreCase(user) && !user.equalsIgnoreCase("root")) { LOG.error("revoke user group error, groupName=" + groupName + ", namelist=" + namelist + ", user=" + user); return 5; } sql = "update tdwuser set group_name='" + groupName.toLowerCase() + "' where user_name='" + namelist.toLowerCase() + "'"; ps.executeUpdate(sql); con.commit(); success = true; } catch (SQLException sqlex) { LOG.error("grant user group error, groupName=" + groupName + ", namelist=" + namelist + ", user=" + user + ", msg=" + sqlex.getMessage()); sqlex.printStackTrace(); throw new MetaException(sqlex.getMessage()); } finally { if (!success) { try { con.rollback(); } catch (SQLException e) { } } closeStatement(ps); closeConnection(con); } if (success) { return 0; } else { return 6; } }
From source file:org.apache.hadoop.hive.metastore.MyXid.java
/**
 * Removes the user named by {@code namelist} from group {@code groupName}, putting the
 * user back into the default group. The caller must be the group's creator or "root".
 *
 * @param groupName group the user is being removed from (lower-cased)
 * @param namelist  name of the user to remove (lower-cased)
 * @param user      caller performing the revoke (lower-cased)
 * @return 0 on success, 1 if the user does not exist, 2 if the user is not currently in
 *         {@code groupName}, 3 if the group does not exist, 5 if the caller is not
 *         authorized, 6 on any other failure
 * @throws MetaException if the connection cannot be obtained or a SQL error occurs
 */
@Override
public int revokeUserGroup(String groupName, String namelist, String user) throws MetaException {
    Connection con = null;
    PreparedStatement ps = null;
    boolean success = false;

    groupName = groupName.toLowerCase();
    namelist = namelist.toLowerCase();
    user = user.toLowerCase();

    try {
        con = getGlobalConnection();
    } catch (MetaStoreConnectException e1) {
        LOG.error("revoke user group error, groupName=" + groupName + ", namelist=" + namelist
            + ", user=" + user + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error("revoke user group error, groupName=" + groupName + ", namelist=" + namelist
            + ", user=" + user + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }

    try {
        con.setAutoCommit(false);
        con.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);

        // Look up the user's current group. Parameterized to prevent SQL injection
        // (original code concatenated caller-supplied names into the statement).
        ps = con.prepareStatement("select group_name from tdwuser where user_name=?");
        ps.setString(1, namelist);
        boolean isUserFind = false;
        String oldGroupName = null;
        ResultSet groupNameSet = ps.executeQuery();
        if (groupNameSet.next()) {
            isUserFind = true;
            oldGroupName = groupNameSet.getString(1);
        }
        groupNameSet.close();
        ps.close();

        if (!isUserFind) {
            LOG.error("revoke user group error, groupName=" + groupName + ", namelist=" + namelist
                + ", user=" + user);
            return 1;
        }
        // The user must actually be in the group being revoked.
        if (!groupName.equalsIgnoreCase(oldGroupName)) {
            LOG.error("revoke user group error, groupName=" + groupName + ", namelist=" + namelist
                + ", user=" + user);
            return 2;
        }

        // The group must exist; remember its creator for the permission check.
        ps = con.prepareStatement("select creator from usergroup where group_name=?");
        ps.setString(1, groupName);
        boolean isNewGroupFind = false;
        String newGroupCreator = null;
        ResultSet groupCreatorSet = ps.executeQuery();
        if (groupCreatorSet.next()) {
            isNewGroupFind = true;
            newGroupCreator = groupCreatorSet.getString(1);
        }
        groupCreatorSet.close();
        ps.close();

        if (!isNewGroupFind) {
            LOG.error("revoke user group error, groupName=" + groupName + ", namelist=" + namelist
                + ", user=" + user);
            return 3;
        }
        // Only the group's creator or the superuser may revoke membership.
        if (!newGroupCreator.equalsIgnoreCase(user) && !user.equalsIgnoreCase("root")) {
            LOG.error("revoke user group error, groupName=" + groupName + ", namelist=" + namelist
                + ", user=" + user);
            return 5;
        }

        // Revoking sends the user back to the default group.
        ps = con.prepareStatement("update tdwuser set group_name=? where user_name=?");
        ps.setString(1, HiveMetaStore.DEFAULT);
        ps.setString(2, namelist);
        ps.executeUpdate();
        con.commit();
        success = true;
    } catch (SQLException sqlex) {
        LOG.error("revoke user group error, groupName=" + groupName + ", namelist=" + namelist
            + ", user=" + user + ", msg=" + sqlex.getMessage(), sqlex);
        throw new MetaException(sqlex.getMessage());
    } finally {
        if (!success) {
            try {
                con.rollback();
            } catch (SQLException ignored) {
                // best-effort rollback; connection is closed below regardless
            }
        }
        closeStatement(ps);
        closeConnection(con);
    }
    return success ? 0 : 6;
}
From source file:org.apache.hadoop.hive.metastore.MyXid.java
/**
 * Appends new columns to an existing table's schema, updating the table's
 * last-modified metadata in the same transaction.
 *
 * Behavior depends on the table's serde: for the legacy columnsetSerDe the entire
 * column list is REPLACED by {@code newCols} and the serde is switched to
 * LazySimpleSerDe; for every other serde the new columns are appended after the
 * current highest column index, rejecting duplicates.
 *
 * @param dbName     database holding the table (lower-cased)
 * @param tblName    table to alter (lower-cased)
 * @param modifyUser user recorded as "last_modified_by"
 * @param newCols    columns to add (or, for columnsetSerDe, the replacement schema)
 * @throws InvalidObjectException    if a column name fails validation
 * @throws InvalidOperationException if the table type/format forbids adding columns
 * @throws MetaException             on connection or SQL failure, missing table, or
 *                                   a column-name conflict
 */
@Override
public void addCols(String dbName, String tblName, String modifyUser, List<FieldSchema> newCols)
        throws InvalidOperationException, MetaException, InvalidObjectException {
    if (!MetaStoreUtils.validateColNames(newCols)) {
        throw new InvalidObjectException("new add columns name is not valid object");
    }
    Connection con = null;
    PreparedStatement ps = null;
    boolean success = false;
    dbName = dbName.toLowerCase();
    tblName = tblName.toLowerCase();
    try {
        con = getSegmentConnection(dbName);
    } catch (MetaStoreConnectException e1) {
        LOG.error("add column error, db=" + dbName + ", tbl=" + tblName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error("add column error, db=" + dbName + ", tbl=" + tblName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }
    try {
        con.setAutoCommit(false);
        con.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);

        // Fetch the table's id, type, storage format and serde in one shot.
        ps = con.prepareStatement("select tbl_id, tbl_type, tbl_format, serde_lib"
                + " from tbls where tbls.db_name=? and tbls.tbl_name=? ");
        ps.setString(1, dbName);
        ps.setString(2, tblName);
        String tblType = null;
        String serdeLib = null;
        String tblFormat = null;
        boolean isTblFind = false;
        long tblID = 0;
        ResultSet tblSet = ps.executeQuery();
        while (tblSet.next()) {
            isTblFind = true;
            tblID = tblSet.getLong(1);
            tblType = tblSet.getString(2);
            tblFormat = tblSet.getString(3);
            serdeLib = tblSet.getString(4);
        }
        tblSet.close();
        ps.close();
        if (!isTblFind) {
            LOG.error("add column error, db=" + dbName + ", tbl=" + tblName);
            throw new MetaException("can not find table " + dbName + ":" + tblName);
        }
        // Missing format means the default text format.
        if (tblFormat == null || tblFormat.isEmpty()) {
            tblFormat = "text";
        }
        // NOTE(review): "VITURAL_VIEW" is a misspelling preserved from the original;
        // presumably the stored tbl_type uses the same spelling — verify against writers.
        if (tblType.equalsIgnoreCase("VITURAL_VIEW")) {
            LOG.error("add column error, db=" + dbName + ", tbl=" + tblName);
            throw new InvalidOperationException("view can not add cloumns");
        }
        // External and text tables are only alterable when the corresponding
        // config switches are enabled.
        if (tblType.equalsIgnoreCase("EXTERNAL_TABLE")
                && !HiveConf.getBoolVar(hiveConf, ConfVars.ALTERSCHEMAACTIVATEEXTTABLE)) {
            LOG.error("add column error, db=" + dbName + ", tbl=" + tblName);
            throw new InvalidOperationException("can not add columns for a extenal table ");
        }
        if (!tblType.equalsIgnoreCase("EXTERNAL_TABLE") && tblFormat.equalsIgnoreCase("text")
                && (!HiveConf.getBoolVar(hiveConf, ConfVars.ALTERSCHEMAACTIVATETXTTABLE))) {
            LOG.error("add column error, db=" + dbName + ", tbl=" + tblName);
            throw new InvalidOperationException("can not add columns for a text format table ");
        }
        // Protobuf-backed tables have their schema fixed by the generated class.
        if (serdeLib != null && serdeLib.equals(ProtobufSerDe.class.getName())) {
            LOG.error("add column error, db=" + dbName + ", tbl=" + tblName);
            throw new InvalidOperationException("can not add columns for a pb table ");
        }
        // Columnar/format storage cannot represent booleans.
        if (tblFormat != null
                && (tblFormat.equalsIgnoreCase("column") || tblFormat.equalsIgnoreCase("format"))) {
            for (FieldSchema field : newCols) {
                if (field.getType().equals(Constants.BOOLEAN_TYPE_NAME)) {
                    LOG.error("add column error, db=" + dbName + ", tbl=" + tblName);
                    throw new InvalidOperationException(
                            "format file or column file not support boolean type rightnow");
                }
            }
        }

        // Load existing TBL-scoped params to decide whether the last-modified
        // markers must be inserted or updated.
        Map<String, String> tblParamMap = new HashMap<String, String>();
        ps = con.prepareStatement(
                "select param_key, param_value from table_params where tbl_id=? and param_type='TBL'");
        ps.setLong(1, tblID);
        ResultSet paramSet = ps.executeQuery();
        while (paramSet.next()) {
            tblParamMap.put(paramSet.getString(1), paramSet.getString(2));
        }
        paramSet.close();
        ps.close();
        boolean containTime = false;
        boolean contailUser = false;
        if (tblParamMap.containsKey("last_modified_time"))
            containTime = true;
        if (tblParamMap.containsKey("last_modified_by"))
            contailUser = true;
        if (containTime && contailUser) {
            // Both markers exist: update both in one batch.
            ps = con.prepareStatement(
                    "update table_params set param_value=? where tbl_id=? and param_type='TBL' and param_key=?");
            ps.setString(1, String.valueOf(System.currentTimeMillis() / 1000));
            ps.setLong(2, tblID);
            ps.setString(3, "last_modified_time");
            ps.addBatch();
            ps.setString(1, modifyUser);
            ps.setLong(2, tblID);
            ps.setString(3, "last_modified_by");
            ps.addBatch();
            ps.executeBatch();
            ps.close();
        } else if (!containTime && !contailUser) {
            // Neither exists: insert both.
            ps = con.prepareStatement("insert into table_params(tbl_id, param_type, param_key, param_value) "
                    + " values(?,?,?,?)");
            ps.setLong(1, tblID);
            ps.setString(2, "TBL");
            ps.setString(3, "last_modified_time");
            ps.setString(4, String.valueOf(System.currentTimeMillis() / 1000));
            ps.addBatch();
            ps.setLong(1, tblID);
            ps.setString(2, "TBL");
            ps.setString(3, "last_modified_by");
            ps.setString(4, modifyUser);
            ps.addBatch();
            ps.executeBatch();
            ps.close();
        } else if (containTime && !contailUser) {
            // Only the timestamp exists: update it, insert the user marker.
            ps = con.prepareStatement(
                    "update table_params set param_value=? where tbl_id=? and param_type='TBL' and param_key=?");
            ps.setString(1, String.valueOf(System.currentTimeMillis() / 1000));
            ps.setLong(2, tblID);
            ps.setString(3, "last_modified_time");
            ps.addBatch();
            ps.executeBatch();
            ps.close();
            ps = con.prepareStatement("insert into table_params(tbl_id, param_type, param_key, param_value) "
                    + " values(?,?,?,?)");
            ps.setLong(1, tblID);
            ps.setString(2, "TBL");
            ps.setString(3, "last_modified_by");
            ps.setString(4, modifyUser);
            ps.addBatch();
            ps.executeBatch();
            ps.close();
        } else {
            // Only the user marker exists: update it, insert the timestamp.
            ps = con.prepareStatement(
                    "update table_params set param_value=? where tbl_id=? and param_type='TBL' and param_key=?");
            ps.setString(1, modifyUser);
            ps.setLong(2, tblID);
            ps.setString(3, "last_modified_by");
            ps.addBatch();
            ps.executeBatch();
            ps.close();
            ps = con.prepareStatement("insert into table_params(tbl_id, param_type, param_key, param_value) "
                    + " values(?,?,?,?)");
            ps.setLong(1, tblID);
            ps.setString(2, "TBL");
            ps.setString(3, "last_modified_time");
            ps.setString(4, String.valueOf(System.currentTimeMillis() / 1000));
            ps.addBatch();
            ps.executeBatch();
            ps.close();
        }

        if (serdeLib != null && serdeLib.equals("org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
            // Legacy columnset serde: replace the whole column list with newCols,
            // renumbering from 0, and migrate the table to LazySimpleSerDe.
            ps = con.prepareStatement("delete from columns where tbl_id=?");
            ps.setLong(1, tblID);
            ps.executeUpdate();
            ps = con.prepareStatement(
                    "insert into columns(column_index, tbl_id, column_name, type_name, comment)"
                            + " values(?,?,?,?,?)");
            long index = 0;
            for (FieldSchema field : newCols) {
                ps.setLong(1, index);
                ps.setLong(2, tblID);
                ps.setString(3, field.getName().toLowerCase());
                ps.setString(4, field.getType());
                ps.setString(5, field.getComment());
                ps.addBatch();
                index++;
            }
            ps.executeBatch();
            ps.close();
            ps = con.prepareStatement("update tbls set serde_lib=? where tbl_id=?");
            ps.setString(1, LazySimpleSerDe.class.getName());
            ps.setLong(2, tblID);
            ps.executeUpdate();
            ps.close();
        } else {
            // Normal case: append after the current highest column index,
            // rejecting any name that already exists.
            Map<String, Long> colNameMap = new HashMap<String, Long>();
            long maxColIndex = 0;
            ps = con.prepareStatement("select column_name, column_index from "
                    + "columns where tbl_id=? order by column_index asc");
            ps.setLong(1, tblID);
            ResultSet colSet = ps.executeQuery();
            while (colSet.next()) {
                maxColIndex = colSet.getLong(2);
                colNameMap.put(colSet.getString(1), maxColIndex);
            }
            colSet.close();
            ps.close();
            ps = con.prepareStatement(
                    "insert into columns(column_index, tbl_id, column_name, type_name, comment)"
                            + " values(?,?,?,?,?)");
            for (FieldSchema field : newCols) {
                if (colNameMap.containsKey(field.getName())) {
                    LOG.error("add column error, db=" + dbName + ", tbl=" + tblName);
                    throw new MetaException("column name conflict, conflict column name is " + field.getName());
                }
                ps.setLong(1, maxColIndex + 1);
                ps.setLong(2, tblID);
                ps.setString(3, field.getName().toLowerCase());
                ps.setString(4, field.getType());
                ps.setString(5, field.getComment());
                maxColIndex++;
                ps.addBatch();
            }
            ps.executeBatch();
            ps.close();
        }
        con.commit();
        success = true;
    } catch (SQLException ex) {
        throw new MetaException(ex.getMessage());
    } finally {
        // Roll back the whole transaction on any failure path.
        if (!success) {
            try {
                con.rollback();
            } catch (SQLException e) {
            }
        }
        closeStatement(ps);
        closeConnection(con);
    }
    return;
}
From source file:org.apache.hadoop.hive.metastore.MyXid.java
@Override public boolean updatePBInfo(String dbName, String tableName, String modifiedTime) throws InvalidOperationException, MetaException { Connection con = null; PreparedStatement ps = null;/* w w w.j a v a 2 s .c o m*/ boolean success = false; dbName = dbName.toLowerCase(); tableName = tableName.toLowerCase(); String jarName = "./auxlib/" + dbName + "_" + tableName + "_" + modifiedTime + ".jar"; String className = dbName + "_" + tableName + "_" + modifiedTime; try { con = getSegmentConnection(dbName); } catch (MetaStoreConnectException e1) { LOG.error("updatePBInfo, db=" + dbName + ", table=" + tableName + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } catch (SQLException e1) { LOG.error("replace column error, db=" + dbName + ", table=" + tableName + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } try { con.setAutoCommit(false); con.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ); ps = con.prepareStatement("select tbl_id, serde_lib" + " from tbls where db_name=? and tbl_name=?"); ps.setString(1, dbName); ps.setString(2, tableName); String serdeLib = null; boolean isTblFind = false; long tblID = 0; ResultSet tblSet = ps.executeQuery(); while (tblSet.next()) { isTblFind = true; tblID = tblSet.getLong(1); serdeLib = tblSet.getString(2); } tblSet.close(); ps.close(); if (!isTblFind) { throw new MetaException("can not find table " + dbName + ":" + tableName); } if (!serdeLib.equals(ProtobufSerDe.class.getName())) { throw new MetaException("sorry, can only update jar info for a pb table "); } Map<String, String> tblParamMap = new HashMap<String, String>(); ps = con.prepareStatement( "select param_key, param_value from table_params where tbl_id=? 
and param_type='TBL'"); ps.setLong(1, tblID); ResultSet paramSet = ps.executeQuery(); while (paramSet.next()) { tblParamMap.put(paramSet.getString(1), paramSet.getString(2)); } paramSet.close(); ps.close(); boolean containJar = false; boolean containClass = false; if (tblParamMap.containsKey("pb.jar")) containJar = true; if (tblParamMap.containsKey("pb.outer.class.name")) containClass = true; if (containJar && containClass) { ps = con.prepareStatement( "update table_params set param_value=? where tbl_id=? and param_type='TBL' and param_key=?"); ps.setString(1, jarName); ps.setLong(2, tblID); ps.setString(3, "pb.jar"); ps.addBatch(); ps.setString(1, className); ps.setLong(2, tblID); ps.setString(3, "pb.outer.class.name"); ps.addBatch(); ps.executeBatch(); ps.close(); } con.commit(); success = true; } catch (SQLException ex) { ex.printStackTrace(); LOG.error("updatePBInfo, db=" + dbName + ", tbl=" + tableName + ", msg=" + ex.getMessage()); throw new MetaException(ex.getMessage()); } finally { if (!success) { try { con.rollback(); } catch (SQLException e) { } } closeStatement(ps); closeConnection(con); } return true; }
From source file:org.apache.hadoop.hive.metastore.MyXid.java
/**
 * Loads a complete Table object (storage descriptor, columns, params, buckets,
 * sort columns, partitions, and view text) from the segment database.
 *
 * @param dbName    database holding the table (lower-cased)
 * @param tableName table to load (lower-cased)
 * @return the fully populated Table (never null on the success path; the trailing
 *         "return null" is unreachable because failures throw)
 * @throws NoSuchObjectException if no row matches in TBLS
 * @throws MetaException         on connection or SQL failure
 */
@Override
public Table getTable(String dbName, String tableName) throws MetaException, NoSuchObjectException {
    boolean success = false;
    Connection con;
    Statement ps = null;
    Table tbl = new Table();
    dbName = dbName.toLowerCase();
    tableName = tableName.toLowerCase();
    try {
        con = getSegmentConnectionForRead(dbName);
    } catch (MetaStoreConnectException e1) {
        LOG.error("get table error, db=" + dbName + ", tbl=" + tableName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error("get table error, db=" + dbName + ", tbl=" + tableName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }
    try {
        con.setAutoCommit(false);
        con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
        ps = con.createStatement();
        // NOTE(review): dbName/tableName are concatenated into SQL here; they come
        // from callers, so this is injection-prone — prefer PreparedStatement.
        String sql = "SELECT tbl_id, create_time"
                + ", is_compressed, retention, tbl_type, db_name, tbl_name, tbl_owner "
                + ", tbl_format, pri_part_type, sub_part_type, pri_part_key, sub_part_key "
                + ", input_format, output_format, serde_name, serde_lib, tbl_location, tbl_comment "
                + " from TBLS where db_name='" + dbName + "' and tbl_name='" + tableName + "'";
        ResultSet tblSet = ps.executeQuery(sql);
        boolean isTblFind = false;
        StorageDescriptor sd = null;
        SerDeInfo sdInfo = null;
        String priPartKey = null;
        String subPartKey = null;
        Partition priPart = null;
        Partition subPart = null;
        long tblID = 0;
        String comment = null;
        String format = null;
        Timestamp createTime = null;
        String tblType = null;
        boolean hasPriPart = false;
        boolean hasSubPart = false;
        // Decode the single TBLS row; only the first row is used (break at the end).
        while (tblSet.next()) {
            isTblFind = true;
            tblID = tblSet.getLong(1);
            createTime = tblSet.getTimestamp(2);
            if (createTime != null) {
                // Stored as a timestamp; the Thrift API wants epoch seconds.
                tbl.setCreateTime((int) (createTime.getTime() / 1000));
            }
            sd = new StorageDescriptor();
            sdInfo = new SerDeInfo();
            sd.setCompressed(tblSet.getBoolean(3));
            tbl.setRetention((int) tblSet.getLong(4));
            tblType = tblSet.getString(5);
            tbl.setTableType(tblType);
            tbl.setDbName(tblSet.getString(6));
            tbl.setTableName(tblSet.getString(7));
            tbl.setOwner(tblSet.getString(8));
            format = tblSet.getString(9);
            priPartKey = tblSet.getString(12);
            subPartKey = tblSet.getString(13);
            // A non-empty primary/sub partition key means the corresponding
            // partition level exists and its spaces are loaded later.
            if (priPartKey != null && !priPartKey.isEmpty()) {
                hasPriPart = true;
                priPart = new Partition();
                priPart.setLevel(0);
                priPart.setDbName(tblSet.getString(6));
                priPart.setTableName(tblSet.getString(7));
                priPart.setParType(tblSet.getString(10));
            }
            if (subPartKey != null && !subPartKey.isEmpty()) {
                hasSubPart = true;
                subPart = new Partition();
                subPart.setLevel(1);
                subPart.setDbName(tblSet.getString(6));
                subPart.setTableName(tblSet.getString(7));
                subPart.setParType(tblSet.getString(11));
            }
            sd.setInputFormat(tblSet.getString(14));
            sd.setOutputFormat(tblSet.getString(15));
            sdInfo.setName(tblSet.getString(16));
            sdInfo.setSerializationLib(tblSet.getString(17));
            sd.setLocation(tblSet.getString(18));
            comment = tblSet.getString(19);
            break;
        }
        tblSet.close();
        if (!isTblFind) {
            LOG.error(dbName + "." + tableName + " table not found");
            throw new NoSuchObjectException(dbName + "." + tableName + " table not found");
        }

        // Columns, ordered by their stored index; fieldMap also serves the
        // partition-key lookup below.
        List<FieldSchema> fieldList = new ArrayList<FieldSchema>();
        Map<String, FieldSchema> fieldMap = new LinkedHashMap<String, FieldSchema>();
        sql = "SELECT column_name, type_name, comment from columns where tbl_id=" + tblID
                + " order by column_index asc";
        ResultSet colSet = ps.executeQuery(sql);
        while (colSet.next()) {
            FieldSchema field = new FieldSchema();
            field.setName(colSet.getString(1));
            field.setType(colSet.getString(2));
            field.setComment(colSet.getString(3));
            fieldList.add(field);
            fieldMap.put(colSet.getString(1), field);
        }
        colSet.close();
        sd.setCols(fieldList);

        // Table params are split by param_type into tbl/sd/serde buckets;
        // unknown types fall back into the tbl bucket.
        sql = "SELECT param_type, param_key, param_value from table_params where tbl_id=" + tblID;
        ResultSet paramSet = ps.executeQuery(sql);
        Map<String, String> tblParamMap = new HashMap<String, String>();
        Map<String, String> sdParamMap = new HashMap<String, String>();
        Map<String, String> serdeParam = new HashMap<String, String>();
        while (paramSet.next()) {
            String type = paramSet.getString(1);
            if (type == null)
                continue;
            if (type.equalsIgnoreCase("sd")) {
                sdParamMap.put(paramSet.getString(2), paramSet.getString(3));
            } else if (type.equalsIgnoreCase("serde")) {
                serdeParam.put(paramSet.getString(2), paramSet.getString(3));
            } else if (type.equalsIgnoreCase("tbl")) {
                tblParamMap.put(paramSet.getString(2), paramSet.getString(3));
            } else {
                tblParamMap.put(paramSet.getString(2), paramSet.getString(3));
            }
        }
        paramSet.close();
        // Surface the dedicated comment/format columns as ordinary params.
        if (comment != null && !comment.isEmpty()) {
            tblParamMap.put("comment", comment);
        }
        if (format != null && !format.isEmpty()) {
            tblParamMap.put("type", format);
        }
        tbl.setParameters(tblParamMap);
        sd.setParameters(sdParamMap);
        sdInfo.setParameters(serdeParam);

        // Bucket columns; NUM_BUCKETS is consumed from the sd params and removed.
        List<String> bucketCols = new ArrayList<String>();
        sql = "select bucket_col_name from bucket_cols where tbl_id=" + tblID + " order by col_index asc";
        ResultSet bucketSet = ps.executeQuery(sql);
        while (bucketSet.next()) {
            bucketCols.add(bucketSet.getString(1));
        }
        bucketSet.close();
        if (bucketCols.size() > 0) {
            sd.setBucketCols(bucketCols);
            String numBucketStr = sd.getParameters().get("NUM_BUCKETS");
            if (numBucketStr == null) {
                sd.setNumBuckets(-1);
            } else {
                sd.setNumBuckets(Integer.valueOf(numBucketStr));
            }
        } else {
            sd.setBucketCols(bucketCols);
            sd.setNumBuckets(-1);
        }
        sd.getParameters().remove("NUM_BUCKETS");

        // Sort columns, in stored order.
        List<Order> sortCols = new ArrayList<Order>();
        sql = "select sort_column_name, sort_order from sort_cols where tbl_id=" + tblID
                + " order by col_index asc";
        ResultSet sortSet = ps.executeQuery(sql);
        while (sortSet.next()) {
            Order o = new Order();
            o.setCol(sortSet.getString(1));
            o.setOrder(sortSet.getInt(2));
            sortCols.add(o);
        }
        sortSet.close();
        sd.setSortCols(sortCols);
        sd.setSerdeInfo(sdInfo);
        tbl.setSd(sd);

        if (hasPriPart) {
            // Load partition spaces for both levels in a single scan; the
            // part_values column is a SQL ARRAY (value at index 2 of its result set).
            sql = "SELECT level, part_name, part_values from PARTITIONS where tbl_id=" + tblID;
            ResultSet partSet = ps.executeQuery(sql);
            Map<String, List<String>> priPartSpace = new LinkedHashMap<String, List<String>>();
            Map<String, List<String>> subPartSpace = new LinkedHashMap<String, List<String>>();
            while (partSet.next()) {
                int level = partSet.getInt(1);
                switch (level) {
                case 0:
                    String priName = partSet.getString(2);
                    List<String> priValueList = new ArrayList<String>();
                    Array priSpaceArray = partSet.getArray(3);
                    if (priSpaceArray != null) {
                        ResultSet priValueSet = priSpaceArray.getResultSet();
                        while (priValueSet.next()) {
                            priValueList.add(priValueSet.getString(2));
                        }
                    }
                    priPartSpace.put(priName, priValueList);
                    break;
                case 1:
                    String subName = partSet.getString(2);
                    List<String> subValueList = new ArrayList<String>();
                    Array subSpaceArray = partSet.getArray(3);
                    if (subSpaceArray != null) {
                        ResultSet subValueSet = subSpaceArray.getResultSet();
                        while (subValueSet.next()) {
                            subValueList.add(subValueSet.getString(2));
                        }
                    }
                    subPartSpace.put(subName, subValueList);
                    break;
                default:
                    break;
                }
            }
            partSet.close();
            priPart.setParSpaces(priPartSpace);
            // Partition key resolves to the FieldSchema loaded from columns above.
            priPart.setParKey(fieldMap.get(priPartKey.toLowerCase()));
            if (hasSubPart) {
                subPart.setParSpaces(subPartSpace);
                subPart.setParKey(fieldMap.get(subPartKey.toLowerCase()));
            }
        }
        tbl.setPriPartition(priPart);
        tbl.setSubPartition(subPart);

        // Views additionally carry their original/expanded SQL text.
        if (tblType.equalsIgnoreCase("VIRTUAL_VIEW")) {
            sql = "select view_original_text, view_expanded_text, vtables from " + " tdwview where tbl_id="
                    + tblID;
            ResultSet viewSet = ps.executeQuery(sql);
            while (viewSet.next()) {
                tbl.setViewOriginalText(viewSet.getString(1));
                tbl.setViewExpandedText(viewSet.getString(2));
                tbl.setVtables(viewSet.getString(3));
                break;
            }
        }
        con.commit();
        success = true;
    } catch (SQLException sqlex) {
        sqlex.printStackTrace();
        LOG.error("get table error, db=" + dbName + ", tbl=" + tableName + ", msg=" + sqlex.getMessage());
        throw new MetaException(sqlex.getMessage());
    } finally {
        if (!success) {
            try {
                con.rollback();
            } catch (SQLException e) {
            }
        }
        closeStatement(ps);
        closeConnection(con);
    }
    if (success)
        return tbl;
    else
        return null;
}
From source file:org.apache.hadoop.hive.metastore.MyXid.java
@Override public Map<String, String> getTblsByPriv(String user, String passwd, String db) throws NoSuchObjectException, MetaException { Connection con = null; ;//w w w . ja v a 2 s. c o m Statement ps = null; Set<String> tempSet = new HashSet<String>(); List<String> dbs = new ArrayList<String>(); user = user.toLowerCase(); db = db.toLowerCase(); boolean isDbPriv = false; Map<String, String> tblMap = getTableWithType(db, "*"); Map<String, String> tblMapPriv = new TreeMap<String, String>(); if (tblMap.isEmpty()) { return new TreeMap<String, String>(); } try { con = getGlobalConnection(); } catch (MetaStoreConnectException e1) { LOG.error("get tbls by priv error, msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } catch (SQLException e1) { LOG.error("get tbls by priv error, msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } try { con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); ps = con.createStatement(); String sql = "select db_name from dbpriv where user_name='" + user + "' and select_priv=true and db_name='" + db + "'"; ResultSet dbSet = ps.executeQuery(sql); while (dbSet.next()) { isDbPriv = true; } dbSet.close(); if (!isDbPriv) { sql = "select db_name from tdwuserrole, dbpriv where tdwuserrole.user_name='" + user + "' and tdwuserrole.role_name=dbpriv.user_name and dbpriv.select_priv=true and db_name='" + db + "'"; dbSet = ps.executeQuery(sql); while (dbSet.next()) { isDbPriv = true; } dbSet.close(); } if (isDbPriv) { return tblMap; } else { sql = "select tbl_name from tblpriv where user_name='" + user + "' and select_priv=true and db_name='" + db + "'"; ResultSet tblSet = ps.executeQuery(sql); String tblName = null; String type = null; while (tblSet.next()) { tblName = tblSet.getString(1); type = tblMap.get(tblName); if (type != null) { tblMapPriv.put(tblName, type); } } tblSet.close(); sql = "select a.tbl_name from tblpriv a, tdwuserrole b " + "where a.user_name=b.role_name and b.user_name='" + user + "' 
and a.select_priv=true and a.db_name='" + db + "'"; tblSet = ps.executeQuery(sql); tblName = null; type = null; while (tblSet.next()) { tblName = tblSet.getString(1); type = tblMap.get(tblName); if (type != null) { tblMapPriv.put(tblName, type); } } tblSet.close(); return tblMapPriv; } } catch (SQLException sqlex) { LOG.error("get db by priv error, msg=" + sqlex.getMessage()); sqlex.printStackTrace(); throw new MetaException(sqlex.getMessage()); } finally { closeStatement(ps); closeConnection(con); } }
From source file:org.apache.hadoop.hive.metastore.MyXid.java
/**
 * Loads one partition level (0 = primary, 1 = sub) of a table: its type, key field,
 * and the full name → value-list partition space.
 *
 * @param dbName    database holding the table (lower-cased)
 * @param tableName table whose partition is requested (lower-cased)
 * @param level     0 for the primary partition, 1 for the sub partition
 * @return the populated Partition, or null if the table has no partition at
 *         {@code level}
 * @throws MetaException if the table is missing or a SQL/connection error occurs
 */
@Override
public Partition getPartition(String dbName, String tableName, int level) throws MetaException {
    boolean success = false;
    Connection con = null;
    Statement ps = null;
    Partition part = null;
    dbName = dbName.toLowerCase();
    tableName = tableName.toLowerCase();
    Map<String, List<String>> partNameMap = new LinkedHashMap<String, List<String>>();
    try {
        con = getSegmentConnection(dbName);
    } catch (MetaStoreConnectException e1) {
        LOG.error("get partition error, db=" + dbName + ", tbl=" + tableName + ", level=" + level + ", msg="
                + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error("get partition error, db=" + dbName + ", tbl=" + tableName + ", level=" + level + ", msg="
                + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }
    try {
        con.setAutoCommit(false);
        con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
        ps = con.createStatement();
        long tblID = 0;
        boolean isTblFind = false;
        String priPartType = null;
        String subPartType = null;
        boolean hasPriPart = false;
        boolean hasSubPart = false;
        String priPartKey = null;
        String subPartKey = null;
        String partKey = null;
        // NOTE(review): dbName/tableName are concatenated into SQL here; they come
        // from callers, so this is injection-prone — prefer PreparedStatement.
        String sql = "SELECT tbl_id, pri_part_type, pri_part_key, sub_part_type, sub_part_key from TBLS where db_name='"
                + dbName + "' and tbl_name='" + tableName + "'";
        ResultSet tblSet = ps.executeQuery(sql);
        // At most one row is consumed: we either break out with a Partition shell
        // for the requested level, or commit and return null when the table has no
        // partition at that level.
        while (tblSet.next()) {
            isTblFind = true;
            tblID = tblSet.getLong(1);
            priPartType = tblSet.getString(2);
            priPartKey = tblSet.getString(3);
            subPartType = tblSet.getString(4);
            subPartKey = tblSet.getString(5);
            if (priPartType != null && !priPartType.isEmpty()) {
                hasPriPart = true;
            }
            if (subPartType != null && !subPartType.isEmpty()) {
                hasSubPart = true;
            }
            if (hasPriPart && level == 0) {
                part = new Partition();
                part.setParType(priPartType);
                partKey = priPartKey;
                break;
            }
            if (hasSubPart && level == 1) {
                part = new Partition();
                part.setParType(subPartType);
                partKey = subPartKey;
                break;
            }
            // Table exists but has no partition at the requested level.
            con.commit();
            return null;
        }
        tblSet.close();
        if (!isTblFind) {
            throw new MetaException("can not find table " + dbName + ":" + tableName);
        }

        // Resolve the partition key column into a FieldSchema.
        FieldSchema field = null;
        sql = "select type_name, comment from columns where tbl_id=" + tblID + " and column_name='" + partKey
                + "'";
        ResultSet colSet = ps.executeQuery(sql);
        while (colSet.next()) {
            field = new FieldSchema();
            field.setType(colSet.getString(1));
            field.setComment(colSet.getString(2));
            field.setName(partKey);
            break;
        }
        colSet.close();

        // Collect the partition space: part_values is a SQL ARRAY whose values sit
        // at index 2 of the array's result set.
        sql = "select part_name, part_values from partitions where tbl_id=" + tblID + " and level=" + level;
        ResultSet partSet = ps.executeQuery(sql);
        while (partSet.next()) {
            String partName = partSet.getString(1);
            List<String> valueList = new ArrayList<String>();
            Array spaceArray = partSet.getArray(2);
            ResultSet priValueSet = spaceArray.getResultSet();
            if (priValueSet != null) {
                while (priValueSet.next()) {
                    valueList.add(priValueSet.getString(2));
                }
            }
            partNameMap.put(partName, valueList);
        }
        partSet.close();
        part.setParSpaces(partNameMap);
        part.setDbName(dbName);
        part.setTableName(tableName);
        part.setLevel(level);
        part.setParKey(field);
        con.commit();
        success = true;
    } catch (SQLException sqlex) {
        sqlex.printStackTrace();
        LOG.error("get partition error, db=" + dbName + ", tbl=" + tableName + ", level=" + level + ", msg="
                + sqlex.getMessage());
        throw new MetaException(sqlex.getMessage());
    } finally {
        if (!success) {
            try {
                con.rollback();
            } catch (SQLException e) {
            }
        }
        closeStatement(ps);
        closeConnection(con);
    }
    if (success)
        return part;
    else
        return null;
}
From source file:org.apache.hadoop.hive.metastore.MyXid.java
@Override public void addSerde(String dbName, String tblName, AddSerdeDesc addSerdeDesc) throws InvalidOperationException, MetaException { Connection con; PreparedStatement ps = null;//from w ww . j a va2s. c o m boolean success = false; dbName = dbName.toLowerCase(); tblName = tblName.toLowerCase(); try { con = getSegmentConnection(dbName); } catch (MetaStoreConnectException e1) { LOG.error("add serde error, db=" + dbName + ", tbl=" + tblName + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } catch (SQLException e1) { LOG.error("add serde error, db=" + dbName + ", tbl=" + tblName + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } try { con.setAutoCommit(false); con.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ); ps = con.prepareStatement( "select tbl_id, is_compressed, input_format, output_format, serde_lib, tbl_location " + " from tbls where db_name=? and tbl_name=?"); ps.setString(1, dbName); ps.setString(2, tblName); boolean isTblFind = false; long tblID = 0; String serdeLib = null; String inputFormat = null; String location = null; String outputFormat = null; boolean isCompressed = false; Properties schema = new Properties(); ResultSet tblSet = ps.executeQuery(); while (tblSet.next()) { isTblFind = true; tblID = tblSet.getLong(1); isCompressed = tblSet.getBoolean(2); inputFormat = tblSet.getString(3); outputFormat = tblSet.getString(4); location = tblSet.getString(6); break; } serdeLib = addSerdeDesc.getSerdeName(); tblSet.close(); ps.close(); if (!isTblFind) { throw new MetaException("can not find table " + dbName + ":" + tblName); } if (inputFormat == null || inputFormat.length() == 0) { inputFormat = org.apache.hadoop.mapred.SequenceFileInputFormat.class.getName(); } schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.FILE_INPUT_FORMAT, inputFormat); if (outputFormat == null || outputFormat.length() == 0) { outputFormat = 
org.apache.hadoop.mapred.SequenceFileOutputFormat.class.getName(); } schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.FILE_OUTPUT_FORMAT, outputFormat); schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, tblName); if (location != null) { schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_LOCATION, location); } schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.BUCKET_COUNT, "0"); if (isCompressed) { schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.COMPRESS, "true"); } else { } if (serdeLib == null) { throw new MetaException("serde lib for the table " + dbName + ":" + tblName + " is null"); } if (serdeLib != null) { schema.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB, serdeLib); } String modifyUser = addSerdeDesc.getUser(); Map<String, String> tblParamMap = new HashMap<String, String>(); ps = con.prepareStatement( "select param_key, param_value from table_params where tbl_id=? and param_type='TBL'"); ps.setLong(1, tblID); ResultSet paramSet = ps.executeQuery(); while (paramSet.next()) { tblParamMap.put(paramSet.getString(1), paramSet.getString(2)); } paramSet.close(); ps.close(); boolean containTime = false; boolean contailUser = false; if (tblParamMap.containsKey("last_modified_time")) containTime = true; if (tblParamMap.containsKey("last_modified_by")) contailUser = true; if (containTime && contailUser) { ps = con.prepareStatement( "update table_params set param_value=? where tbl_id=? 
and param_type='TBL' and param_key=?"); ps.setString(1, String.valueOf(System.currentTimeMillis() / 1000)); ps.setLong(2, tblID); ps.setString(3, "last_modified_time"); ps.addBatch(); ps.setString(1, modifyUser); ps.setLong(2, tblID); ps.setString(3, "last_modified_by"); ps.addBatch(); ps.executeBatch(); ps.close(); } else if (!containTime && !contailUser) { ps = con.prepareStatement("insert into table_params(tbl_id, param_type, param_key, param_value) " + " values(?,?,?,?)"); ps.setLong(1, tblID); ps.setString(2, "TBL"); ps.setString(3, "last_modified_time"); ps.setString(4, String.valueOf(System.currentTimeMillis() / 1000)); ps.addBatch(); ps.setLong(1, tblID); ps.setString(2, "TBL"); ps.setString(3, "last_modified_by"); ps.setString(4, modifyUser); ps.addBatch(); ps.executeBatch(); ps.close(); } else if (containTime && !contailUser) { ps = con.prepareStatement( "update table_params set param_value=? where tbl_id=? and param_type='TBL' and param_key=?"); ps.setString(1, String.valueOf(System.currentTimeMillis() / 1000)); ps.setLong(2, tblID); ps.setString(3, "last_modified_time"); ps.addBatch(); ps.executeBatch(); ps.close(); ps = con.prepareStatement("insert into table_params(tbl_id, param_type, param_key, param_value) " + " values(?,?,?,?)"); ps.setLong(1, tblID); ps.setString(2, "TBL"); ps.setString(3, "last_modified_by"); ps.setString(4, modifyUser); ps.addBatch(); ps.executeBatch(); ps.close(); } else { ps = con.prepareStatement( "update table_params set param_value=? where tbl_id=? 
and param_type='TBL' and param_key=?"); ps.setString(1, modifyUser); ps.setLong(2, tblID); ps.setString(3, "last_modified_by"); ps.addBatch(); ps.executeBatch(); ps.close(); ps = con.prepareStatement("insert into table_params(tbl_id, param_type, param_key, param_value) " + " values(?,?,?,?)"); ps.setLong(1, tblID); ps.setString(2, "TBL"); ps.setString(3, "last_modified_time"); ps.setString(4, String.valueOf(System.currentTimeMillis() / 1000)); ps.addBatch(); ps.executeBatch(); ps.close(); } ps = con.prepareStatement( "select param_key, param_value from table_params where tbl_id=? and (param_type='SERDE' or param_type='TBL')"); ps.setLong(1, tblID); ResultSet sdParamSet = ps.executeQuery(); while (sdParamSet.next()) { schema.setProperty(sdParamSet.getString(1), sdParamSet.getString(2)); } sdParamSet.close(); ps.close(); ps = con.prepareStatement( "select column_name, type_name, comment from columns where tbl_id=? order by column_index asc"); ps.setLong(1, tblID); StringBuilder colNameBuf = new StringBuilder(); StringBuilder colTypeBuf = new StringBuilder(); List<FieldSchema> colList = new ArrayList<FieldSchema>(); ResultSet colSet = ps.executeQuery(); boolean first = true; while (colSet.next()) { String name = colSet.getString(1); String type = colSet.getString(2); String comment = colSet.getString(3); FieldSchema field = new FieldSchema(); field.setName(name); field.setType(type); field.setComment(comment); colList.add(field); if (!first) { colNameBuf.append(","); colTypeBuf.append(":"); } colNameBuf.append(colSet.getString(1)); colTypeBuf.append(colSet.getString(2)); first = false; } colSet.close(); ps.close(); String colNames = colNameBuf.toString(); String colTypes = colTypeBuf.toString(); schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMNS, colNames); schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMN_TYPES, colTypes); schema.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_DDL, 
getDDLFromFieldSchema(tblName, colList)); Deserializer deserializer = SerDeUtils.lookupDeserializer(serdeLib); deserializer.initialize(hiveConf, schema); List<FieldSchema> newColList = null; try { newColList = MetaStoreUtils.getFieldsFromDeserializer(tblName, deserializer); } catch (SerDeException e) { throw new MetaException("Error in getting fields from serde. " + e.getMessage()); } catch (MetaException e) { throw new MetaException("Error in getting fields from serde." + e.getMessage()); } ps = con.prepareStatement("delete from columns where tbl_id=?"); ps.setLong(1, tblID); ps.executeUpdate(); ps.close(); ps = con.prepareStatement("insert into columns(column_index, tbl_id, column_name " + ",type_name, comment) values(?,?,?,?,?)"); long index = 0; for (FieldSchema field : newColList) { ps.setLong(1, index); ps.setLong(2, tblID); ps.setString(3, field.getName()); ps.setString(4, field.getType()); ps.setString(5, field.getComment()); ps.addBatch(); index++; } ps.executeBatch(); ps.close(); if ((addSerdeDesc.getProps() != null) && (addSerdeDesc.getProps().size() > 0)) { ps = con.prepareStatement("select param_key, param_value from table_params where tbl_id=? 
and " + "param_type='SERDE'"); ps.setLong(1, tblID); ResultSet oldParamSet = ps.executeQuery(); Map<String, String> needUpdateMap = new HashMap<String, String>(); Map<String, String> needAddMap = new HashMap<String, String>(); Map<String, String> oldParamMap = new HashMap<String, String>(); while (oldParamSet.next()) { oldParamMap.put(oldParamSet.getString(1), oldParamSet.getString(2)); } oldParamSet.close(); ps.close(); for (Map.Entry<String, String> entry : addSerdeDesc.getProps().entrySet()) { if (oldParamMap.containsKey(entry.getKey())) { needUpdateMap.put(entry.getKey(), entry.getValue()); } else { needAddMap.put(entry.getKey(), entry.getValue()); } } if (!needAddMap.isEmpty()) { ps = con.prepareStatement( "insert into table_params(tbl_id, param_key, param_value, param_type) values(?,?,?,?)"); for (Map.Entry<String, String> entry : needAddMap.entrySet()) { ps.setLong(1, tblID); ps.setString(2, entry.getKey()); ps.setString(3, entry.getValue()); ps.setString(4, "SERDE"); ps.addBatch(); } ps.executeBatch(); ps.close(); } if (!needUpdateMap.isEmpty()) { ps = con.prepareStatement( "update table_params set param_value=? where tbl_id=? and param_type='SERDE' and param_key=?"); for (Map.Entry<String, String> entry : needUpdateMap.entrySet()) { ps.setString(1, entry.getValue()); ps.setLong(2, tblID); ps.setString(3, entry.getKey()); ps.addBatch(); } ps.executeBatch(); ps.close(); } } ps = con.prepareStatement("update tbls set serde_lib=? where tbl_id=?"); ps.setString(1, addSerdeDesc.getSerdeName()); ps.setLong(2, tblID); ps.executeUpdate(); ps.close(); con.commit(); success = true; } catch (Exception ex) { LOG.error("add serde error, db=" + dbName + ", tbl=" + tblName + ", msg=" + ex.getMessage()); ex.printStackTrace(); throw new MetaException(ex.getMessage()); } finally { if (!success) { try { con.rollback(); } catch (SQLException e) { } } closeStatement(ps); closeConnection(con); } return; }
From source file:org.apache.hadoop.hive.metastore.MyXid.java
/**
 * Returns the partition names of {@code dbName.tableName} as a list of lists:
 * index 0 holds primary-partition names, index 1 holds sub-partition names
 * (an empty list when the table has no sub-partitions). Hash-partitioned
 * levels are rendered as the single pseudo-name {@code hash(N)} where N comes
 * from {@code hive.hashPartition.num} (default 500); a "default" partition is
 * moved to the end of its list.
 *
 * @throws MetaException on connection/SQL failure, missing table, or when the
 *                       table is not partitioned at all
 */
@Override
public List<List<String>> getPartitionNames(String dbName, String tableName) throws MetaException {
    boolean success = false;
    Connection con = null;
    PreparedStatement ps = null;
    List<List<String>> ret = new ArrayList<List<String>>();
    dbName = dbName.toLowerCase();
    tableName = tableName.toLowerCase();
    List<String> priPartNames = new ArrayList<String>();
    List<String> subPartNames = new ArrayList<String>();

    try {
        con = getSegmentConnection(dbName);
    } catch (MetaStoreConnectException e1) {
        LOG.error(
                "get partition names error, db=" + dbName + ", tbl=" + tableName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error(
                "get partition names error, db=" + dbName + ", tbl=" + tableName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }

    try {
        con.setAutoCommit(false);
        con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);

        long tblID = 0;
        boolean isTblFind = false;
        String priPartType = null;
        String subPartType = null;
        boolean hasPriPart = false;
        boolean hasSubPart = false;

        // Parameterized instead of string-concatenated SQL.
        ps = con.prepareStatement(
                "SELECT tbl_id, pri_part_type, sub_part_type from TBLS where db_name=? and tbl_name=?");
        ps.setString(1, dbName);
        ps.setString(2, tableName);
        ResultSet tblSet = ps.executeQuery();
        while (tblSet.next()) {
            isTblFind = true;
            tblID = tblSet.getLong(1);
            priPartType = tblSet.getString(2);
            subPartType = tblSet.getString(3);
            if (priPartType != null && !priPartType.isEmpty()) {
                hasPriPart = true;
            }
            if (subPartType != null && !subPartType.isEmpty()) {
                hasSubPart = true;
            }
            if (!hasPriPart) {
                // A table without a primary partition type is not partitioned.
                throw new MetaException("get partition names error, db=" + dbName + ", tbl=" + tableName
                        + ", msg=table is not a partition table");
            }
        }
        tblSet.close();
        ps.close();

        if (!isTblFind) {
            LOG.error("get partition names error, db=" + dbName + ", tbl=" + tableName);
            throw new MetaException("can not find table " + dbName + ":" + tableName);
        }

        // Split partition names by level: 0 = primary, 1 = sub.
        ps = con.prepareStatement("select part_name, level from PARTITIONS where tbl_id=?");
        ps.setLong(1, tblID);
        ResultSet partSet = ps.executeQuery();
        while (partSet.next()) {
            String partName = partSet.getString(1);
            int level = partSet.getInt(2);
            if (level == 0) {
                priPartNames.add(partName);
            } else if (level == 1) {
                subPartNames.add(partName);
            }
        }
        partSet.close(); // BUGFIX: this ResultSet was never closed in the original

        if (hasPriPart) {
            if (priPartType.equalsIgnoreCase("hash")) {
                int numOfHashPar = hiveConf.getInt("hive.hashPartition.num", 500);
                ret.add(new ArrayList<String>()); // typed list; original used a raw ArrayList
                ret.get(0).add("hash(" + numOfHashPar + ")");
            } else {
                // Push "default" to the end so it lists last.
                if (priPartNames.contains("default")) {
                    priPartNames.remove("default");
                    priPartNames.add("default");
                }
                ret.add(priPartNames);
            }
        }
        if (hasSubPart) {
            if (subPartType.equalsIgnoreCase("hash")) {
                int numOfHashPar = hiveConf.getInt("hive.hashPartition.num", 500);
                ret.add(new ArrayList<String>());
                ret.get(1).add("hash(" + numOfHashPar + ")");
            } else {
                if (subPartNames.contains("default")) {
                    subPartNames.remove("default");
                    subPartNames.add("default");
                }
                ret.add(subPartNames);
            }
        } else {
            // Keep the two-slot shape: empty sub-partition list.
            ret.add(subPartNames);
        }

        con.commit();
        success = true;
    } catch (SQLException sqlex) {
        LOG.error("get partition names error, db=" + dbName + ", tbl=" + tableName + ", msg="
                + sqlex.getMessage());
        sqlex.printStackTrace();
        throw new MetaException(sqlex.getMessage());
    } finally {
        if (!success) {
            try {
                con.rollback();
            } catch (SQLException e) {
            }
        }
        closeStatement(ps);
        closeConnection(con);
    }

    if (success) {
        return ret;
    }
    return null;
}
From source file:org.apache.hadoop.hive.metastore.MyXid.java
@Override public List<String> getPartitionNames(String dbName, String tableName, int level) throws MetaException { boolean success = false; Connection con = null; Statement ps = null;// w w w .j a v a 2s . co m Partition part = null; dbName = dbName.toLowerCase(); tableName = tableName.toLowerCase(); List<String> ret = new ArrayList<String>(); Map<String, List<String>> partNameMap = new LinkedHashMap<String, List<String>>(); try { con = getSegmentConnection(dbName); } catch (MetaStoreConnectException e1) { LOG.error("get partition name error, db=" + dbName + ", tbl=" + tableName + ", level=" + level + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } catch (SQLException e1) { LOG.error("get partition name error, db=" + dbName + ", tbl=" + tableName + ", level=" + level + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } try { con.setAutoCommit(false); con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); ps = con.createStatement(); long tblID = 0; boolean isTblFind = false; String priPartType = null; String subPartType = null; boolean hasPriPart = false; boolean hasSubPart = false; String sql = "SELECT tbl_id, pri_part_type, sub_part_type from TBLS where db_name='" + dbName + "' and tbl_name='" + tableName + "'"; ResultSet tblSet = ps.executeQuery(sql); while (tblSet.next()) { isTblFind = true; tblID = tblSet.getLong(1); priPartType = tblSet.getString(2); subPartType = tblSet.getString(3); if (priPartType != null && !priPartType.isEmpty()) { hasPriPart = true; } if (subPartType != null && !subPartType.isEmpty()) { hasSubPart = true; } if (hasPriPart && level == 0) { part = new Partition(); part.setParType(tblSet.getString(4)); break; } if (hasSubPart && level == 1) { part = new Partition(); part.setParType(tblSet.getString(5)); break; } throw new MetaException( "can not find partition of level " + level + " for table " + dbName + ":" + tableName); } tblSet.close(); if (!isTblFind) { throw new 
MetaException("can not find table " + dbName + ":" + tableName); } sql = "select part_name from PARTITIONS where tbl_id=" + tblID + " and level=" + level; ResultSet partSet = ps.executeQuery(sql); while (partSet.next()) { String partName = partSet.getString(1); ret.add(partName); } part.setParSpaces(partNameMap); part.setDbName(dbName); part.setTableName(tableName); part.setLevel(level); con.commit(); success = true; } catch (SQLException sqlex) { LOG.error("get partition error, db=" + dbName + ", tbl=" + tableName + ", level=" + level + ", msg=" + sqlex.getMessage()); sqlex.printStackTrace(); throw new MetaException(sqlex.getMessage()); } finally { if (!success) { try { con.rollback(); } catch (SQLException e) { } } closeStatement(ps); closeConnection(con); } if (success) return ret; else return null; }