List of usage examples for java.sql Connection TRANSACTION_READ_COMMITTED
int TRANSACTION_READ_COMMITTED
To view the source code for java.sql Connection TRANSACTION_READ_COMMITTED, click the Source Link.
From source file:org.apache.hadoop.hive.metastore.MyXid.java
public List<TblPriv> getAuthOnAllTblsNoDistributeTransaction() throws MetaException { Connection con = null;//from w w w. jav a 2 s .co m Statement ps = null; boolean success = false; List<TblPriv> tblPrivs = new ArrayList<TblPriv>(); try { con = getGlobalConnection(); } catch (MetaStoreConnectException e1) { LOG.error("get user auth on all tbls error, msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } catch (SQLException e1) { LOG.error("get user auth on all tbls error, msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } try { con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); ps = con.createStatement(); String sql = "select alter_priv, create_priv, delete_priv " + ",drop_priv, index_priv, insert_priv, select_priv, update_priv, user_name, db_name, tbl_name" + " from tblpriv"; ResultSet tblPrivSet = ps.executeQuery(sql); while (tblPrivSet.next()) { TblPriv tblPriv = new TblPriv(); tblPriv.setAlterPriv(tblPrivSet.getBoolean(1)); tblPriv.setCreatePriv(tblPrivSet.getBoolean(2)); tblPriv.setDeletePriv(tblPrivSet.getBoolean(3)); tblPriv.setDropPriv(tblPrivSet.getBoolean(4)); tblPriv.setIndexPriv(tblPrivSet.getBoolean(5)); tblPriv.setInsertPriv(tblPrivSet.getBoolean(6)); tblPriv.setSelectPriv(tblPrivSet.getBoolean(7)); tblPriv.setUpdatePriv(tblPrivSet.getBoolean(8)); tblPriv.setUser(tblPrivSet.getString(9)); tblPriv.setDb(tblPrivSet.getString(10)); tblPriv.setTbl(tblPrivSet.getString(11)); tblPrivs.add(tblPriv); } success = true; } catch (SQLException sqlex) { LOG.error("get user auth on all tbls error, msg=" + sqlex.getMessage()); sqlex.printStackTrace(); throw new MetaException(sqlex.getMessage()); } finally { closeStatement(ps); closeConnection(con); } if (success) { return tblPrivs; } else { return null; } }
From source file:org.apache.hadoop.hive.metastore.MyXid.java
/**
 * Returns the partition-key columns (primary and/or sub partition key) of
 * the given table, read from the segment metastore owning {@code dbName}.
 *
 * Fix: the original built both queries by concatenating {@code dbName},
 * {@code tableName} and the key names into the SQL string (SQL injection);
 * they are now bound as {@link PreparedStatement} parameters.
 *
 * @param dbName    database name (lower-cased before use)
 * @param tableName table name (lower-cased before use)
 * @return the matching column definitions; empty if the table is unknown
 * @throws MetaException on connection or SQL failure
 */
@Override
public List<FieldSchema> getPartFieldsJdbc(String dbName, String tableName) throws MetaException {
    Connection con = null;
    PreparedStatement ps = null;
    boolean success = false;
    List<FieldSchema> columnInfo = new ArrayList<FieldSchema>();
    dbName = dbName.toLowerCase();
    tableName = tableName.toLowerCase();

    try {
        con = getSegmentConnection(dbName);
    } catch (MetaStoreConnectException e1) {
        LOG.error(
                "get partition field error, db=" + dbName + ", tbl=" + tableName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error(
                "get partition field error, db=" + dbName + ", tbl=" + tableName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }

    try {
        con.setAutoCommit(false);
        con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);

        // Parameterized to avoid SQL injection through db/table names.
        ps = con.prepareStatement(
                "select tbl_id, pri_part_key, sub_part_key from tbls where db_name=? and tbl_name=?");
        ps.setString(1, dbName);
        ps.setString(2, tableName);

        String priPartKey = null;
        String subPartKey = null;
        long tblID = 0;
        boolean isTblFind = false;

        ResultSet tblSet = ps.executeQuery();
        while (tblSet.next()) {
            isTblFind = true;
            tblID = tblSet.getLong(1);
            priPartKey = tblSet.getString(2);
            subPartKey = tblSet.getString(3);
        }
        tblSet.close();
        closeStatement(ps);
        ps = null;

        if (isTblFind) {
            ps = con.prepareStatement("select column_name, type_name, comment, column_len from "
                    + " columns where tbl_id=? and ( column_name=? or column_name=?)");
            ps.setLong(1, tblID);
            ps.setString(2, priPartKey);
            ps.setString(3, subPartKey);
            ResultSet colSet = ps.executeQuery();
            while (colSet.next()) {
                FieldSchema field = new FieldSchema();
                field.setName(colSet.getString(1));
                field.setType(colSet.getString(2));
                field.setComment(colSet.getString(3));
                columnInfo.add(field);
            }
            colSet.close();
        }

        con.commit();
        success = true;
    } catch (SQLException sqlex) {
        LOG.error("get partition field error, db=" + dbName + ", tbl=" + tableName + ", msg="
                + sqlex.getMessage());
        sqlex.printStackTrace();
        throw new MetaException(sqlex.getMessage());
    } finally {
        if (!success) {
            try {
                con.rollback();
            } catch (SQLException e) {
                // best-effort rollback; the connection is closed below anyway
            }
        }
        closeStatement(ps);
        closeConnection(con);
    }
    return columnInfo;
}
From source file:org.apache.hadoop.hive.metastore.MyXid.java
/**
 * Returns all column definitions of the given table, ordered by column
 * index, read from the segment metastore owning {@code dbName}.
 *
 * Fix: the original concatenated {@code dbName}/{@code tableName} into the
 * SQL text (SQL injection); both are now bound as parameters.
 *
 * @param dbName    database name (lower-cased before use)
 * @param tableName table name (lower-cased before use)
 * @return the table's columns in declaration order; empty if unknown
 * @throws MetaException on connection or SQL failure
 */
@Override
public List<FieldSchema> getFieldsJdbc(String dbName, String tableName) throws MetaException {
    Connection con = null;
    PreparedStatement ps = null;
    List<FieldSchema> columnInfo = new ArrayList<FieldSchema>();
    dbName = dbName.toLowerCase();
    tableName = tableName.toLowerCase();

    try {
        con = getSegmentConnection(dbName);
    } catch (MetaStoreConnectException e1) {
        LOG.error("get field error, db=" + dbName + ", tbl=" + tableName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error("get field error, db=" + dbName + ", tbl=" + tableName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }

    try {
        con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);

        // Parameterized to avoid SQL injection through db/table names.
        ps = con.prepareStatement(
                "select columns.column_name, columns.type_name, columns.comment, columns.column_len from "
                        + " tbls, columns where tbls.tbl_id=columns.tbl_id and tbls.db_name=? "
                        + " and tbls.tbl_name=? order by column_index asc");
        ps.setString(1, dbName);
        ps.setString(2, tableName);

        ResultSet colSet = ps.executeQuery();
        while (colSet.next()) {
            FieldSchema field = new FieldSchema();
            field.setName(colSet.getString(1));
            field.setType(colSet.getString(2));
            field.setComment(colSet.getString(3));
            columnInfo.add(field);
        }
        colSet.close();
    } catch (SQLException sqlex) {
        LOG.error("get field error, db=" + dbName + ", tbl=" + tableName + ", msg=" + sqlex.getMessage());
        sqlex.printStackTrace();
        throw new MetaException(sqlex.getMessage());
    } finally {
        closeStatement(ps);
        closeConnection(con);
    }
    return columnInfo;
}
From source file:org.apache.hadoop.hive.metastore.MyXid.java
@Override public List<String> getJdbcTables(String dbName, String pattern) throws MetaException { Connection con;//w w w . ja va2 s .com Statement ps = null; List<String> tableList = new ArrayList<String>(); dbName = dbName.toLowerCase(); pattern = pattern.toLowerCase(); try { con = getSegmentConnection(dbName); } catch (MetaStoreConnectException e1) { LOG.error("get table error, db=" + dbName + ", pattern=" + pattern + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } catch (SQLException e1) { LOG.error("get table error, db=" + dbName + ", pattern=" + pattern + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } try { con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); ps = con.createStatement(); String sql = null; if (pattern == null || pattern.isEmpty() || pattern.equals(".*") || pattern.equals("*")) { sql = "select tbl_name, tbl_type from tbls where db_name='" + dbName + "'"; } else { pattern = pattern.replace('*', '%'); sql = "select tbl_name, tbl_type from tbls where db_name='" + dbName + "'" + " and tbl_name like '" + pattern + "'"; } ResultSet tblSet = ps.executeQuery(sql); while (tblSet.next()) { String item = tblSet.getString(1) + ":" + tblSet.getString(2); tableList.add(item); } } catch (SQLException sqlex) { LOG.error("get table error, db=" + dbName + ", pattern=" + pattern + ", msg=" + sqlex.getMessage()); sqlex.printStackTrace(); throw new MetaException(sqlex.getMessage()); } finally { closeStatement(ps); closeConnection(con); } return tableList; }
From source file:tds.dll.mysql.StudentDLL.java
/**
 * Initializes the accommodation records for a test opportunity: inserts the
 * default accommodations inside a READ_COMMITTED transaction, then either
 * stores the formatted accommodation string or delegates to
 * _UpdateOpportunityAccommodations_SP (with one retry on failure).
 *
 * @param connection open SQL connection wrapper
 * @param oppkey     test-opportunity key
 * @param accoms     accommodation string supplied by the caller; if null it
 *                   is looked up from the RTS "--ACCOMMODATIONS--" attribute
 * @return always null on success paths; an error result set via
 *         _ReturnError_SP when both update attempts fail
 * @throws ReturnStatusException on database errors (original cause wrapped)
 */
public SingleDataResultSet _InitOpportunityAccommodations_SP(SQLConnection connection, UUID oppkey, String accoms)
        throws ReturnStatusException {
    Date starttime = _dateUtil.getDateWRetStatus(connection);
    Long testee = null;
    String testId = null, test = null, clientname = null;

    // Resolve the opportunity's test id, admin subject, testee and client.
    final String SQL_QUERY1 = "select _efk_TestID as testID, _efk_AdminSubject as test, _efk_Testee as testee, clientname "
            + " from testopportunity where _key = ${oppkey}";
    SqlParametersMaps parms1 = (new SqlParametersMaps()).put("oppkey", oppkey);
    SingleDataResultSet result = executeStatement(connection, SQL_QUERY1, parms1, false).getResultSets().next();
    DbResultRecord record = (result.getCount() > 0 ? result.getRecords().next() : null);
    if (record != null) {
        testee = record.<Long>get("testee");
        testId = record.<String>get("testID");
        test = record.<String>get("test");
        clientname = record.<String>get("clientname");
    }

    // Remember the connection's isolation/auto-commit so they can be
    // restored in the finally block, regardless of success or failure.
    Integer transactionIsolation = null;
    Boolean preexistingAutoCommitMode = null;
    DataBaseTable tbl = null;
    try {
        transactionIsolation = connection.getTransactionIsolation();
        preexistingAutoCommitMode = connection.getAutoCommit();
        connection.setAutoCommit(false);
        connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);

        // Temporary table holding the accommodations defined for this test key.
        tbl = _commonDll.TestKeyAccommodations_FN(connection, test);

        // Insert each default accommodation that the opportunity does not
        // already have; recordUsage is derived from client_toolusage config.
        final String SQL_QUERY2 = "insert into testeeaccommodations (_fk_TestOpportunity, segment, AccType, AccCode, AccValue, allowChange, "
                + " testeeControl, isSelectable, IsApproved, valueCount, recordUsage, _date) "
                + " select ${oppkey}, Segment, AccType, AccCode, AccValue, allowChange, studentControl, IsSelectable, case valcount when 1 then 1 else 0 end, "
                + " valCount, "
                + " (select count(*) from ${ConfigDB}.client_toolusage "
                + " where clientname = ${clientname} and testID = ${testID} and tooltype = AccType and (recordUsage = 1 or reportUsage = 1) limit 1), now(3) "
                + " from ${tblName} A "
                + " where IsDefault = 1 and DependsOnToolType is null "
                + " and not exists (select * from testeeaccommodations ACC where ACC._fk_TestOpportunity = ${oppkey} and ACC.AccCode = A.AccCode)";
        // NOTE(review): the query references ${testID} but the parameter is
        // registered under the key "testId" — confirm the placeholder lookup
        // in SqlParametersMaps is case-insensitive.
        SqlParametersMaps parms2 = (new SqlParametersMaps()).put("oppkey", oppkey).put("clientname", clientname)
                .put("testId", testId);
        Map<String, String> unquotedParms2 = new HashMap<>();
        unquotedParms2.put("tblName", tbl.getTableName());
        final String query2 = fixDataBaseNames(SQL_QUERY2);
        executeStatement(connection, fixDataBaseNames(query2, unquotedParms2), parms2, false).getUpdateCount();
        connection.dropTemporaryTable(tbl);
        tbl = null; // prevents double-drop in the catch block below
        connection.commit();
    } catch (ReturnStatusException re) {
        try {
            connection.rollback();
        } catch (SQLException e) {
            _logger.error(String.format("Problem rolling back transaction: %s", e.getMessage()));
        }
        String errmsg = re.getMessage();
        if (errmsg == null)
            errmsg = "no error message logged";
        _commonDll._LogDBError_SP(connection, "_InitOpportunityAccommodations", errmsg, null, null, null, oppkey);
        if (tbl != null)
            connection.dropTemporaryTable(tbl);
        throw new ReturnStatusException(re);
        //return null;
    } catch (SQLException se) {
        throw new ReturnStatusException(se);
    } finally {
        // Restore the connection's pre-existing transaction settings.
        try {
            if (preexistingAutoCommitMode != null) {
                connection.setAutoCommit(preexistingAutoCommitMode);
            }
            if (transactionIsolation != null) {
                connection.setTransactionIsolation(transactionIsolation);
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    if (DbComparator.greaterThan(testee, 0)) {
        if (accoms == null) {
            // No caller-supplied accommodations: look them up from RTS.
            _Ref<String> attValueRef = new _Ref<>();
            _rtsDll._GetRTSAttribute_SP(connection, clientname, testee, "--ACCOMMODATIONS--", attValueRef);
            accoms = attValueRef.get();
            if (accoms == null || accoms.length() < 1) {
                // Still none: persist the formatted default string and exit.
                String accomodationsString = _commonDll.P_FormatAccommodations_FN(connection, oppkey);
                final String SQL_QUERY3 = "update testopportunity_readonly set AccommodationString = ${accomsStr} where _fk_TestOpportunity = ${oppkey}";
                SqlParametersMaps parms3 = (new SqlParametersMaps()).put("accomsStr", accomodationsString)
                        .put("oppkey", oppkey);
                executeStatement(connection, SQL_QUERY3, parms3, false).getUpdateCount();
                _commonDll._LogDBLatency_SP(connection, "_InitOpportunityAccommodations", starttime, testee, true,
                        null, oppkey, null, clientname, null);
                return null;
            }
            // -- else fall through to _Update proc
        }
        _Ref<String> errorRef = new _Ref<>();
        _commonDll._UpdateOpportunityAccommodations_SP(connection, oppkey, 0, accoms, 0, false, false, errorRef);
        if (errorRef.get() != null) {
            // -- we are having trouble with deadlocks on _Update, try one more time
            String error = String.format("Accommodations update failed. Making second attempt.%s", errorRef.get());
            _commonDll._LogDBError_SP(connection, "_InitOpportunityAccommodations", error, null, null, null, oppkey);
            errorRef.set(null);
            _commonDll._UpdateOpportunityAccommodations_SP(connection, oppkey, 0, accoms, 0, false, false,
                    errorRef);
            if (errorRef.get() != null) {
                _commonDll._LogDBError_SP(connection, "_InitOpportunityAccommodations", errorRef.get(), null, null,
                        null, oppkey);
                return _commonDll._ReturnError_SP(connection, clientname, "_InitOpportunityAccommodations",
                        "Accommodations update failed", null, oppkey, null);
            }
        }
    }
    _commonDll._LogDBLatency_SP(connection, "_InitOpportunityAccommodations", starttime, testee, true, null, oppkey,
            null, clientname, null);
    return null;
}
From source file:org.apache.hadoop.hive.metastore.MyXid.java
/**
 * Checks whether a table exists in the segment metastore.
 *
 * Fixes:
 * - the original SQL was "select tbl_id tbls where ..." — the missing
 *   {@code from} keyword made the query invalid, so this method always
 *   threw MetaException instead of answering;
 * - db/table names were concatenated into the SQL (injection); they are
 *   now bound as parameters.
 *
 * @param dbName  database name (lower-cased before use)
 * @param tblName table name (lower-cased before use)
 * @return true if a matching row exists in {@code tbls}
 * @throws MetaException on connection or SQL failure
 */
@Override
public boolean isTableExit(String dbName, String tblName) throws MetaException {
    Connection con = null;
    PreparedStatement ps = null;
    boolean ret = false;
    dbName = dbName.toLowerCase();
    tblName = tblName.toLowerCase();

    try {
        con = getSegmentConnection(dbName);
    } catch (MetaStoreConnectException e1) {
        LOG.error("check table exist error, db=" + dbName + ", tbl=" + tblName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error("check table exist error, db=" + dbName + ", tbl=" + tblName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }

    try {
        con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
        ps = con.prepareStatement("select tbl_id from tbls where db_name=? and tbl_name=?");
        ps.setString(1, dbName);
        ps.setString(2, tblName);

        ResultSet tblSet = ps.executeQuery();
        if (tblSet.next()) {
            ret = true;
        }
        tblSet.close();
    } catch (SQLException sqlex) {
        LOG.error("check table exist error, db=" + dbName + ", tbl=" + tblName + ", msg=" + sqlex.getMessage());
        sqlex.printStackTrace();
        throw new MetaException(sqlex.getMessage());
    } finally {
        closeStatement(ps);
        closeConnection(con);
    }
    return ret;
}
From source file:org.apache.hadoop.hive.metastore.MyXid.java
/**
 * Looks up the group a user belongs to in the global metastore.
 *
 * Fix: {@code userName} was concatenated into the SQL text (injection);
 * it is now bound as a {@link PreparedStatement} parameter.
 *
 * @param userName user name (lower-cased before use)
 * @return the user's group name, or null if the user is unknown
 * @throws MetaException on connection or SQL failure
 */
@Override
public String getGroupname(String userName) throws MetaException {
    Connection con = null;
    PreparedStatement ps = null;
    String groupName = null;
    userName = userName.toLowerCase();

    try {
        con = getGlobalConnection();
    } catch (MetaStoreConnectException e1) {
        LOG.error("get user group error, user=" + userName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error("get user group error, user=" + userName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }

    try {
        con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
        ps = con.prepareStatement("select group_name from tdwuser where user_name=?");
        ps.setString(1, userName);

        ResultSet groupNameSet = ps.executeQuery();
        // user_name is expected to be unique; take the first row only.
        if (groupNameSet.next()) {
            groupName = groupNameSet.getString(1);
        }
        groupNameSet.close();
    } catch (SQLException sqlex) {
        LOG.error("get user group error, user=" + userName + ", msg=" + sqlex.getMessage());
        sqlex.printStackTrace();
        throw new MetaException(sqlex.getMessage());
    } finally {
        closeStatement(ps);
        closeConnection(con);
    }
    return groupName;
}
From source file:org.apache.hadoop.hive.metastore.MyXid.java
@Override public List<group> getGroups(String pattern) throws MetaException { Connection con = null;//from w ww . j a v a 2 s. c om Statement ps = null; List<group> groups = new ArrayList<group>(); pattern = pattern.toLowerCase(); try { con = getGlobalConnection(); } catch (MetaStoreConnectException e1) { LOG.error("get user group error, groupName=" + pattern + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } catch (SQLException e1) { LOG.error("get user group error, groupName=" + pattern + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } try { con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); ps = con.createStatement(); String sql = null; if (pattern == null || pattern.isEmpty() || pattern.equals(".*") || pattern.equals("*")) { sql = "select usergroup.group_name, usergroup.creator, string_agg(tdwuser.user_name, ',') namelist, count(*) usercount " + " from usergroup left join (select user_name, group_name from tdwuser order by user_name asc) tdwuser on(tdwuser.group_name=usergroup.group_name) " + " group by usergroup.group_name, usergroup.creator"; } else { pattern = pattern.replace('*', '%'); sql = "select usergroup.group_name, usergroup.creator, string_agg(tdwuser.user_name, ',') namelist, count(*) usercount " + " from usergroup left join (select user_name, group_name from tdwuser order by user_name asc) tdwuser on(tdwuser.group_name=usergroup.group_name) " + " where usergroup.group_name like '" + pattern + "'" + " group by usergroup.group_name, usergroup.creator"; } ResultSet groupSet = ps.executeQuery(sql); while (groupSet.next()) { group g = new group(); g.setGroupName(groupSet.getString(1)); g.setCreator(groupSet.getString(2)); String userList = groupSet.getString(3); if (userList == null) { g.setUserList(""); g.setUserNum(0); } else { g.setUserList(groupSet.getString(3)); g.setUserNum((int) groupSet.getLong(4)); } groups.add(g); } groupSet.close(); } catch (SQLException sqlex) { LOG.error("get 
user group error, groupName=" + pattern + ", msg=" + sqlex.getMessage()); sqlex.printStackTrace(); throw new MetaException(sqlex.getMessage()); } finally { closeStatement(ps); closeConnection(con); } return groups; }
From source file:org.apache.hadoop.hive.metastore.MyXid.java
/**
 * Returns the partition names of a table as up to two lists: index 0 holds
 * primary-partition names, index 1 sub-partition names. For hash-partitioned
 * levels a single synthetic entry "hash(N)" is returned instead, where N is
 * hive.hashPartition.num (default 500). A "default" partition, if present,
 * is moved to the end of its list.
 *
 * @param dbName    database name (lower-cased before use)
 * @param tableName table name (lower-cased before use); must be partitioned
 * @return list of partition-name lists per level
 * @throws MetaException if the table is missing, is not partitioned, or a
 *         connection/SQL error occurs
 */
@Override
public List<List<String>> getPartitionNames(String dbName, String tableName) throws MetaException {
    boolean success = false;
    Connection con = null;
    Statement ps = null;
    List<List<String>> ret = new ArrayList<List<String>>();
    dbName = dbName.toLowerCase();
    tableName = tableName.toLowerCase();
    List<String> priPartNames = new ArrayList<String>();
    List<String> subPartNames = new ArrayList<String>();

    try {
        con = getSegmentConnection(dbName);
    } catch (MetaStoreConnectException e1) {
        LOG.error(
                "get partition names error, db=" + dbName + ", tbl=" + tableName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error(
                "get partition names error, db=" + dbName + ", tbl=" + tableName + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }

    try {
        con.setAutoCommit(false);
        con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
        ps = con.createStatement();

        long tblID = 0;
        boolean isTblFind = false;
        String priPartType = null;
        String subPartType = null;
        boolean hasPriPart = false;
        boolean hasSubPart = false;

        // NOTE(review): db/table names are concatenated into the SQL text;
        // callers must not pass untrusted input here.
        String sql = "SELECT tbl_id, pri_part_type, sub_part_type from TBLS where db_name='" + dbName
                + "' and tbl_name='" + tableName + "'";
        ResultSet tblSet = ps.executeQuery(sql);
        while (tblSet.next()) {
            isTblFind = true;
            tblID = tblSet.getLong(1);
            priPartType = tblSet.getString(2);
            subPartType = tblSet.getString(3);
            if (priPartType != null && !priPartType.isEmpty()) {
                hasPriPart = true;
            }
            if (subPartType != null && !subPartType.isEmpty()) {
                hasSubPart = true;
            }
            // A table without a primary partition type is not partitioned.
            if (!hasPriPart) {
                throw new MetaException("get partition names error, db=" + dbName + ", tbl=" + tableName
                        + ", msg=table is not a partition table");
            }
        }
        tblSet.close();
        if (!isTblFind) {
            LOG.error("get partition names error, db=" + dbName + ", tbl=" + tableName);
            throw new MetaException("can not find table " + dbName + ":" + tableName);
        }

        // Collect partition names split by level (0 = primary, 1 = sub).
        sql = "select part_name, level from PARTITIONS where tbl_id=" + tblID;
        ResultSet partSet = ps.executeQuery(sql);
        while (partSet.next()) {
            String partName = partSet.getString(1);
            int level = partSet.getInt(2);
            if (level == 0) {
                priPartNames.add(partName);
            } else if (level == 1) {
                subPartNames.add(partName);
            }
        }

        if (hasPriPart) {
            if (priPartType.equalsIgnoreCase("hash")) {
                // Hash partitions are synthetic: report only the bucket count.
                int numOfHashPar = hiveConf.getInt("hive.hashPartition.num", 500);
                ret.add(new ArrayList());
                ret.get(0).add("hash(" + numOfHashPar + ")");
            } else {
                // Move "default" to the end of the list if present.
                if (priPartNames.contains("default")) {
                    priPartNames.remove("default");
                    priPartNames.add("default");
                }
                ret.add(priPartNames);
            }
        }
        if (hasSubPart) {
            if (subPartType.equalsIgnoreCase("hash")) {
                int numOfHashPar = hiveConf.getInt("hive.hashPartition.num", 500);
                ret.add(new ArrayList());
                ret.get(1).add("hash(" + numOfHashPar + ")");
            } else {
                if (subPartNames.contains("default")) {
                    subPartNames.remove("default");
                    subPartNames.add("default");
                }
                ret.add(subPartNames);
            }
        } else {
            // No sub partitions: still append the (empty) list for level 1.
            ret.add(subPartNames);
        }

        con.commit();
        success = true;
    } catch (SQLException sqlex) {
        LOG.error("get partition names error, db=" + dbName + ", tbl=" + tableName + ", msg="
                + sqlex.getMessage());
        sqlex.printStackTrace();
        throw new MetaException(sqlex.getMessage());
    } finally {
        if (!success) {
            try {
                con.rollback();
            } catch (SQLException e) {
            }
        }
        closeStatement(ps);
        closeConnection(con);
    }
    if (success)
        return ret;
    else
        return null;
}
From source file:org.apache.hadoop.hive.metastore.MyXid.java
@Override public List<String> getPartitionNames(String dbName, String tableName, int level) throws MetaException { boolean success = false; Connection con = null;//from ww w . j a v a 2 s.co m Statement ps = null; Partition part = null; dbName = dbName.toLowerCase(); tableName = tableName.toLowerCase(); List<String> ret = new ArrayList<String>(); Map<String, List<String>> partNameMap = new LinkedHashMap<String, List<String>>(); try { con = getSegmentConnection(dbName); } catch (MetaStoreConnectException e1) { LOG.error("get partition name error, db=" + dbName + ", tbl=" + tableName + ", level=" + level + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } catch (SQLException e1) { LOG.error("get partition name error, db=" + dbName + ", tbl=" + tableName + ", level=" + level + ", msg=" + e1.getMessage()); throw new MetaException(e1.getMessage()); } try { con.setAutoCommit(false); con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); ps = con.createStatement(); long tblID = 0; boolean isTblFind = false; String priPartType = null; String subPartType = null; boolean hasPriPart = false; boolean hasSubPart = false; String sql = "SELECT tbl_id, pri_part_type, sub_part_type from TBLS where db_name='" + dbName + "' and tbl_name='" + tableName + "'"; ResultSet tblSet = ps.executeQuery(sql); while (tblSet.next()) { isTblFind = true; tblID = tblSet.getLong(1); priPartType = tblSet.getString(2); subPartType = tblSet.getString(3); if (priPartType != null && !priPartType.isEmpty()) { hasPriPart = true; } if (subPartType != null && !subPartType.isEmpty()) { hasSubPart = true; } if (hasPriPart && level == 0) { part = new Partition(); part.setParType(tblSet.getString(4)); break; } if (hasSubPart && level == 1) { part = new Partition(); part.setParType(tblSet.getString(5)); break; } throw new MetaException( "can not find partition of level " + level + " for table " + dbName + ":" + tableName); } tblSet.close(); if (!isTblFind) { throw new 
MetaException("can not find table " + dbName + ":" + tableName); } sql = "select part_name from PARTITIONS where tbl_id=" + tblID + " and level=" + level; ResultSet partSet = ps.executeQuery(sql); while (partSet.next()) { String partName = partSet.getString(1); ret.add(partName); } part.setParSpaces(partNameMap); part.setDbName(dbName); part.setTableName(tableName); part.setLevel(level); con.commit(); success = true; } catch (SQLException sqlex) { LOG.error("get partition error, db=" + dbName + ", tbl=" + tableName + ", level=" + level + ", msg=" + sqlex.getMessage()); sqlex.printStackTrace(); throw new MetaException(sqlex.getMessage()); } finally { if (!success) { try { con.rollback(); } catch (SQLException e) { } } closeStatement(ps); closeConnection(con); } if (success) return ret; else return null; }