List of usage examples for java.util.LinkedHashMap.entrySet()
public Set<Map.Entry<K, V>> entrySet()
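Before the full project examples below, here is a minimal, self-contained sketch of the basic pattern they both rely on: iterating entrySet() of a LinkedHashMap, which returns entries in insertion order. The map contents and class name here are made up for illustration; they do not come from either source file.

import java.util.LinkedHashMap;
import java.util.Map;

public class EntrySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> stock = new LinkedHashMap<String, Integer>();
        stock.put("bolts", 120);
        stock.put("nuts", 80);
        stock.put("washers", 200);

        // entrySet() returns a view backed by the map; iteration order is
        // the order in which the keys were inserted.
        for (Map.Entry<String, Integer> entry : stock.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }

        // Entry.setValue writes through to the underlying map.
        for (Map.Entry<String, Integer> entry : stock.entrySet()) {
            entry.setValue(entry.getValue() + 10);
        }
    }
}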
From source file:com.gtwm.pb.model.manageData.DataManagement.java
/**
 * Used by both the public saveRecord and globalEdit methods
 */
private void saveRecord(HttpServletRequest request, TableInfo table,
        LinkedHashMap<BaseField, BaseValue> dataToSave, boolean newRecord, Set<Integer> rowIds,
        SessionDataInfo sessionData, List<FileItem> multipartItems)
        throws InputRecordException, ObjectNotFoundException, SQLException, CantDoThatException,
        CodingErrorException, DisallowedException, MissingParametersException {
    if ((dataToSave.size() == 0) && (!newRecord)) {
        // Note: this does actually happen quite a lot, from two particular
        // users, therefore I've commented out the log warning.
        // Haven't tracked down the cause but it doesn't seem to be creating
        // a problem.
        // logger.warn("Call to saveRecord with no data to save. User = "
        // + request.getRemoteUser() + ", table = " + table + ", rowIds = "
        // + rowIds);
        return;
    }
    this.setHiddenFieldValues(request, table, dataToSave, newRecord);
    boolean globalEdit = false;
    int rowId = -1;
    if (rowIds.size() > 1) {
        globalEdit = true;
    } else if (rowIds.size() == 1) {
        rowId = (new LinkedList<Integer>(rowIds)).getFirst();
    } else {
        throw new ObjectNotFoundException("Row ID list " + rowIds + " is invalid");
    }
    StringBuilder SQLCodeBuilder = new StringBuilder();
    // Generate CSV of fields and placeholders to use in update/insert SQL
    // string
    StringBuilder fieldsCsvBuilder = new StringBuilder();
    StringBuilder fieldsAndPlaceholdersCsvBuilder = new StringBuilder();
    StringBuilder valuePlaceholdersCsvBuilder = new StringBuilder();
    for (BaseField field : dataToSave.keySet()) {
        fieldsCsvBuilder.append(field.getInternalFieldName());
        fieldsCsvBuilder.append(", ");
        valuePlaceholdersCsvBuilder.append("?, ");
        fieldsAndPlaceholdersCsvBuilder.append(field.getInternalFieldName());
        fieldsAndPlaceholdersCsvBuilder.append("=?, ");
    }
    // Used if doing an INSERT
    String fieldsCsv = fieldsCsvBuilder.toString();
    String valuePlaceholdersCsv = valuePlaceholdersCsvBuilder.toString();
    // Used if doing an UPDATE
    String fieldsAndPlaceholdersCsv = fieldsAndPlaceholdersCsvBuilder.toString();
    if (!fieldsCsv.equals("")) {
        fieldsCsv = fieldsCsv.substring(0, fieldsCsv.length() - 2);
        valuePlaceholdersCsv = valuePlaceholdersCsv.substring(0, valuePlaceholdersCsv.length() - 2);
        fieldsAndPlaceholdersCsv = fieldsAndPlaceholdersCsv.substring(0,
                fieldsAndPlaceholdersCsv.length() - 2);
    }
    if (newRecord) {
        SQLCodeBuilder.append("INSERT INTO " + table.getInternalTableName());
        if (fieldsCsv.equals("")) {
            SQLCodeBuilder.append(" VALUES(default)");
        } else {
            SQLCodeBuilder.append("(" + fieldsCsv + ") VALUES (" + valuePlaceholdersCsv + ")");
        }
    } else {
        SQLCodeBuilder.append("UPDATE " + table.getInternalTableName() + " SET " + fieldsAndPlaceholdersCsv);
        if (globalEdit) {
            // add filter for various row ids
            SQLCodeBuilder.append(" WHERE " + table.getPrimaryKey().getInternalFieldName() + " in (?");
            for (int i = 1; i < rowIds.size(); i++) {
                SQLCodeBuilder.append(",?");
            }
            SQLCodeBuilder.append(")");
        } else {
            // add filter for single row id
            SQLCodeBuilder.append(" WHERE " + table.getPrimaryKey().getInternalFieldName() + "=?");
        }
    }
    Connection conn = null;
    int fieldNumber = 0;
    // Will be set if we're inserting a record
    int newRowId = -1;
    TableDataInfo tableData = new TableData(table);
    try {
        conn = this.dataSource.getConnection();
        conn.setAutoCommit(false);
        PreparedStatement statement = conn.prepareStatement(SQLCodeBuilder.toString());
        for (BaseField field : dataToSave.keySet()) {
            // If an exception is raised, currentField will be the field
            // which caused it
            currentField = field;
            fieldNumber++;
            BaseValue fieldValue = dataToSave.get(field);
            if (field instanceof FileField) {
                if (fieldValue.isNull() || fieldValue.toString().equals("")) {
                    throw new InputRecordException("No file specified for the upload", field);
                }
            }
            if (fieldValue.isNull()) {
                statement.setNull(fieldNumber, Types.NULL);
            } else {
                if (fieldValue instanceof TextValue) {
                    String textValue = ((TextValue) fieldValue).toXmlString();
                    statement.setString(fieldNumber, textValue);
                } else if (fieldValue instanceof IntegerValue) {
                    // if no related value, set relation field to null
                    if (field instanceof RelationField
                            && (((IntegerValue) fieldValue).getValueInteger() == -1)
                            || (fieldValue.isNull())) {
                        statement.setNull(fieldNumber, Types.NULL);
                    } else {
                        statement.setInt(fieldNumber, ((IntegerValue) fieldValue).getValueInteger());
                    }
                } else if (fieldValue instanceof DurationValue) {
                    statement.setString(fieldNumber, ((DurationValue) fieldValue).getSqlFormatInterval());
                } else if (fieldValue instanceof DecimalValue) {
                    statement.setDouble(fieldNumber, ((DecimalValue) fieldValue).getValueFloat());
                } else if (fieldValue instanceof DateValue) {
                    if (((DateValue) fieldValue).getValueDate() != null) {
                        java.util.Date javaDateValue = ((DateValue) fieldValue).getValueDate().getTime();
                        java.sql.Timestamp sqlTimestampValue = new java.sql.Timestamp(javaDateValue.getTime());
                        statement.setTimestamp(fieldNumber, sqlTimestampValue);
                    } else {
                        statement.setTimestamp(fieldNumber, null);
                    }
                } else if (fieldValue instanceof CheckboxValue) {
                    statement.setBoolean(fieldNumber, ((CheckboxValue) fieldValue).getValueBoolean());
                } else if (fieldValue instanceof FileValue) {
                    statement.setString(fieldNumber, ((FileValue) fieldValue).toString());
                } else {
                    throw new CodingErrorException("Field value " + fieldValue + " is of unknown type "
                            + fieldValue.getClass().getSimpleName());
                }
            }
        }
        // We've finished setting individual fields, if an SQL error occurs
        // after here we won't know which
        // field caused it without looking for it by other means
        // currentField = null;
        if (!newRecord) {
            if (globalEdit) {
                // Fill in the 'WHERE [row id field] in (?,..,?)' for use in
                // the UPDATE statement
                for (Integer aRowId : rowIds) {
                    if (tableData.isRecordLocked(conn, sessionData, aRowId)) {
                        throw new CantDoThatException("Record " + aRowId + " from table " + table
                                + " is locked to prevent editing");
                    }
                    statement.setInt(++fieldNumber, aRowId);
                }
            } else {
                // Fill in the 'WHERE [row id field]=?' for use in the
                // UPDATE statement
                if (tableData.isRecordLocked(conn, sessionData, rowId)) {
                    throw new CantDoThatException("Record " + rowId + " from table " + table
                            + " is locked to prevent editing");
                }
                statement.setInt(fieldNumber + 1, rowId);
            }
        }
        int numRowsAffected = statement.executeUpdate();
        statement.close();
        if ((numRowsAffected != 1) && (!globalEdit)) {
            conn.rollback();
            if (numRowsAffected > 0) {
                throw new ObjectNotFoundException(String.valueOf(numRowsAffected)
                        + " records would be altered during a single record save");
            } else {
                throw new ObjectNotFoundException(
                        "The current record can't be found to edit - perhaps someone else has deleted it");
            }
        }
        if (newRecord) {
            // Find the newly inserted Row ID
            // postgres-specific code, not database independent
            String SQLCode = "SELECT currval('" + table.getInternalTableName() + "_"
                    + table.getPrimaryKey().getInternalFieldName() + "_seq')";
            statement = conn.prepareStatement(SQLCode);
            ResultSet results = statement.executeQuery();
            if (results.next()) {
                newRowId = results.getInt(1);
            } else {
                results.close();
                statement.close();
                throw new SQLException("Row ID not found for the newly inserted record. '" + SQLCodeBuilder
                        + "' didn't work");
            }
            results.close();
            statement.close();
        }
        conn.commit();
    } catch (SQLException sqlex) {
        // Find out which field caused the error by looking for internal
        // field names in the error message
        String errorMessage = sqlex.getMessage();
        for (BaseField possibleCauseField : dataToSave.keySet()) {
            if (errorMessage.contains(possibleCauseField.getInternalFieldName())) {
                if (errorMessage.contains("check constraint")) {
                    errorMessage = "The value " + dataToSave.get(possibleCauseField)
                            + " falls outside the allowed range";
                } else if (errorMessage.contains("not-null constraint")) {
                    errorMessage = "No value entered";
                } else if (errorMessage.contains("unique constraint")) {
                    errorMessage = "Value " + dataToSave.get(possibleCauseField)
                            + " is already in the database and cannot be entered again";
                } else if (errorMessage.contains("foreign key constraint")
                        && possibleCauseField instanceof RelationField) {
                    errorMessage = "Please select a valid "
                            + ((RelationField) possibleCauseField).getRelatedTable() + " record first";
                } else {
                    errorMessage = "Value " + dataToSave.get(possibleCauseField) + " not allowed ("
                            + Helpers.replaceInternalNames(errorMessage, table.getDefaultReport()) + ")";
                }
                throw new InputRecordException(errorMessage, possibleCauseField, sqlex);
            }
        }
        // Not able to find field
        errorMessage = Helpers.replaceInternalNames(errorMessage, table.getDefaultReport());
        throw new InputRecordException(errorMessage, null, sqlex);
    } finally {
        if (conn != null) {
            conn.close();
        }
    }
    // If any fields were files to upload, do the actual uploads.
    // Do this after the commit in case the uploads take a long time and
    // time out the SQL connection.
    for (BaseField field : dataToSave.keySet()) {
        if (field instanceof FileField) {
            try {
                if (newRecord) {
                    this.uploadFile(request, (FileField) field, (FileValue) dataToSave.get(field), newRowId,
                            multipartItems);
                } else {
                    this.uploadFile(request, (FileField) field, (FileValue) dataToSave.get(field), rowId,
                            multipartItems);
                }
            } catch (CantDoThatException cdtex) {
                throw new InputRecordException("Error uploading file: " + cdtex.getMessage(), field, cdtex);
            } catch (FileUploadException fuex) {
                throw new InputRecordException("Error uploading file: " + fuex.getMessage(), field, fuex);
            }
        }
    }
    if (newRecord) {
        sessionData.setRowId(table, newRowId);
    }
    this.logLastDataChangeTime(request);
    logLastTableDataChangeTime(table);
    UsageLogger usageLogger = new UsageLogger(this.dataSource);
    AppUserInfo user = null;
    if (request.getRemoteUser() == null) {
        user = ServletUtilMethods.getPublicUserForRequest(request, this.authManager.getAuthenticator());
    } else {
        user = this.authManager.getUserByUserName(request, request.getRemoteUser());
    }
    // Send websocket notification
    // UsageLogger.sendNotification(user, table, sessionData.getReport(),
    // rowId, "edit", "Record saved: " + dataToSave);
    // Log everything apart from hidden (auto set) fields
    Map<BaseField, BaseValue> dataToLog = new LinkedHashMap<BaseField, BaseValue>();
    for (Map.Entry<BaseField, BaseValue> entrySet : dataToSave.entrySet()) {
        BaseField field = entrySet.getKey();
        if (!field.getHidden()) {
            BaseValue value = entrySet.getValue();
            dataToLog.put(field, value);
        }
    }
    if (newRecord) {
        usageLogger.logDataChange(user, table, null, AppAction.SAVE_NEW_RECORD, newRowId, dataToLog.toString());
    } else if (globalEdit) {
        // TODO: need better logging of global edits
        usageLogger.logDataChange(user, table, null, AppAction.GLOBAL_EDIT, rowId, dataToLog.toString());
    } else {
        BaseField fieldUpdated = null;
        Set<BaseField> fieldSet = new TreeSet<BaseField>();
        for (BaseField field : dataToSave.keySet()) {
            if (!field.getHidden()) {
                fieldSet.add(field);
            }
        }
        if (fieldSet.size() == 1) {
            fieldUpdated = new LinkedList<BaseField>(fieldSet).getFirst();
        }
        usageLogger.logDataChange(user, table, fieldUpdated, AppAction.UPDATE_RECORD, rowId, dataToLog.toString());
    }
    UsageLogger.startLoggingThread(usageLogger);
}
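The entrySet() call in this example appears near the end of saveRecord: the method copies every non-hidden field/value pair from dataToSave into a second LinkedHashMap (dataToLog), so the usage log lists the fields in the order they were submitted. The stripped-down sketch below shows just that filter-and-copy pattern; the String keys and the "hidden_" prefix test are stand-ins for the project's BaseField/getHidden() API, not part of it.

import java.util.LinkedHashMap;
import java.util.Map;

class VisibleFieldFilter {
    // Copy only the "visible" entries into a new map, keeping insertion order,
    // mirroring the dataToSave -> dataToLog copy in saveRecord above.
    static Map<String, String> visibleOnly(Map<String, String> data) {
        Map<String, String> log = new LinkedHashMap<String, String>();
        for (Map.Entry<String, String> entry : data.entrySet()) {
            if (!entry.getKey().startsWith("hidden_")) { // stand-in for field.getHidden()
                log.put(entry.getKey(), entry.getValue());
            }
        }
        return log;
    }
}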
From source file:org.apache.hadoop.hive.metastore.MyXid.java
public void addPartition(String dbName, String tblName, AddPartitionDesc addPartitionDesc)
        throws InvalidObjectException, MetaException {
    boolean success = false;
    Connection con = null;
    PreparedStatement ps = null;
    Statement stmt = null;
    dbName = dbName.toLowerCase();
    tblName = tblName.toLowerCase();
    boolean isPathMaked = false;
    ArrayList<Path> pathToMake = new ArrayList<Path>();
    Warehouse wh = new Warehouse(hiveConf);
    long tblID = 0;
    try {
        con = getSegmentConnection(dbName);
    } catch (MetaStoreConnectException e1) {
        LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                + addPartitionDesc.getLevel() + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                + addPartitionDesc.getLevel() + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }
    try {
        con.setAutoCommit(false);
        con.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);
        stmt = con.createStatement();
        String tblType = null;
        boolean hasPriPart = false;
        boolean hasSubPart = false;
        String priPartKey = null;
        String subPartKey = null;
        String priPartType = null;
        String subPartType = null;
        String priKeyType = null;
        String subKeyType = null;
        ResultSet tblSet = null;
        boolean isTblFind = false;
        boolean isColFind = false;
        String tblFormat = null;
        String tblLocation = null;
        PrimitiveTypeInfo pti = null;
        ObjectInspector StringIO = null;
        ObjectInspector ValueIO = null;
        ObjectInspectorConverters.Converter converter1 = null;
        ObjectInspectorConverters.Converter converter2 = null;
        ArrayList<String> partToAdd = new ArrayList<String>();
        String sql = null;
        HiveConf hconf = (HiveConf) hiveConf;
        boolean externalPartition = hconf.getBoolVar(HiveConf.ConfVars.HIVESUPPORTEXTERNALPARTITION);
        if (addPartitionDesc.getLevel() == 0) {
            sql = "SELECT tbl_id, tbl_type, pri_part_type, pri_part_key, tbl_format, tbl_location"
                    + " from TBLS where db_name='" + dbName + "' and tbl_name='" + tblName + "'";
            tblSet = stmt.executeQuery(sql);
            isTblFind = false;
            while (tblSet.next()) {
                isTblFind = true;
                tblID = tblSet.getLong(1);
                tblType = tblSet.getString(2);
                priPartKey = tblSet.getString(4);
                priPartType = tblSet.getString(3);
                tblFormat = tblSet.getString(5);
                tblLocation = tblSet.getString(6);
                if (priPartType != null && !priPartType.isEmpty()) {
                    hasPriPart = true;
                }
                break;
            }
            tblSet.close();
            if (!isTblFind) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "can not find table " + dbName + ":" + tblName);
                throw new MetaException("can not find table " + dbName + ":" + tblName);
            }
            if (!tblType.equalsIgnoreCase("MANAGED_TABLE")) {
                if (tblType.equalsIgnoreCase("EXTERNAL_TABLE") && tblFormat != null
                        && tblFormat.equalsIgnoreCase("pgdata")) {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + tblType + ":" + tblFormat
                            + " can not support alter partition");
                    throw new MetaException(tblType + ":" + tblFormat + " can not support alter partition");
                }
                if (externalPartition && tblType.equalsIgnoreCase("EXTERNAL_TABLE")
                        && (tblFormat == null || !tblFormat.equalsIgnoreCase("pgdata"))) {
                } else {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + tblType + " can not support alter partition");
                    throw new MetaException(tblType + " can not support alter partition");
                }
            }
            if (!hasPriPart) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "table " + dbName + ":" + tblName
                        + " is not pri-partitioned");
                throw new MetaException("table " + dbName + ":" + tblName + " is not pri-partitioned");
            }
            sql = "SELECT type_name from COLUMNS where tbl_id=" + tblID + " and column_name='"
                    + priPartKey.toLowerCase() + "'";
            isColFind = false;
            ResultSet colSet = stmt.executeQuery(sql);
            while (colSet.next()) {
                isColFind = true;
                priKeyType = colSet.getString(1);
                break;
            }
            colSet.close();
            if (!isColFind) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "table "
                        + "can not find partition key information " + priPartKey);
                throw new MetaException("can not find partition key information " + priPartKey);
            }
            pti = new PrimitiveTypeInfo();
            pti.setTypeName(priKeyType);
            StringIO = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.STRING);
            ValueIO = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(pti.getPrimitiveCategory());
            converter1 = ObjectInspectorConverters.getConverter(StringIO, ValueIO);
            converter2 = ObjectInspectorConverters.getConverter(StringIO, ValueIO);
            if ((addPartitionDesc.getPartType().equalsIgnoreCase("RANGE_PARTITION")
                    && !priPartType.equalsIgnoreCase("range"))
                    || (addPartitionDesc.getPartType().equalsIgnoreCase("LIST_PARTITION")
                            && !priPartType.equalsIgnoreCase("list"))) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "can not add a " + addPartitionDesc.getPartType()
                        + " partition, but the pri-partition type is " + priPartType);
                throw new MetaException("can not add a " + addPartitionDesc.getPartType()
                        + " partition, but the pri-partition type is " + priPartType);
            }
            LinkedHashMap<String, List<String>> partSpaces = new LinkedHashMap<String, List<String>>();
            Set<String> subPartNameSet = new TreeSet<String>();
            sql = "SELECT level, part_name, part_values from PARTITIONS where" + " tbl_id=" + tblID;// + " order by level asc";
            ResultSet partSet = stmt.executeQuery(sql);
            int partLevel = 0;
            while (partSet.next()) {
                partLevel = partSet.getInt(1);
                if (partLevel == 0) {
                    String partName = partSet.getString(2);
                    List<String> valueList = new ArrayList<String>();
                    Array spaceArray = partSet.getArray(3);
                    ResultSet priValueSet = spaceArray.getResultSet();
                    while (priValueSet.next()) {
                        valueList.add(priValueSet.getString(2));
                    }
                    partSpaces.put(partName, valueList);
                } else if (partLevel == 1) {
                    String partName = partSet.getString(2);
                    subPartNameSet.add(partName);
                }
            }
            partSet.close();
            partToAdd = new ArrayList<String>();
            LinkedHashMap<String, List<String>> addPartSpaces = (LinkedHashMap<String, List<String>>) addPartitionDesc
                    .getParSpaces();
            Iterator<String> itr = addPartSpaces.keySet().iterator();
            while (itr.hasNext()) {
                String key = itr.next().toLowerCase();
                if (partSpaces.containsKey(key)) {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + "table : " + tblName
                            + " have already contain a pri parititon named: " + key);
                    throw new MetaException(
                            "table : " + tblName + " have already contain a pri parititon named: " + key);
                }
                partToAdd.add(key);
            }
            Iterator<List<String>> listItr = addPartSpaces.values().iterator();
            while (listItr.hasNext()) {
                Iterator<String> valueItr = listItr.next().iterator();
                if (valueItr.hasNext()) {
                    String value = valueItr.next();
                    if (converter1.convert(value) == null) {
                        LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                                + addPartitionDesc.getLevel() + ", msg=" + "value : " + value
                                + " should be type of " + priKeyType);
                        throw new MetaException("value : " + value + " should be type of " + priKeyType);
                    }
                    Iterator<List<String>> PartValuesItr = partSpaces.values().iterator();
                    while (PartValuesItr.hasNext()) {
                        if (PartValuesItr.next().contains(value)) {
                            LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                                    + addPartitionDesc.getLevel() + ", msg=" + "table : " + tblName
                                    + " have already contain a pri partition contain value: " + value);
                            throw new MetaException("table : " + tblName
                                    + " have already contain a pri partition contain value: " + value);
                        }
                    }
                }
            }
            ps = con.prepareStatement(
                    "INSERT INTO partitions(level, tbl_id, " + " part_name, part_values) values(?,?,?,?)");
            for (Map.Entry<String, List<String>> entry : addPartSpaces.entrySet()) {
                ps.setInt(1, 0);
                ps.setLong(2, tblID);
                Array spaceArray = con.createArrayOf("varchar", entry.getValue().toArray());
                ps.setArray(4, spaceArray);
                ps.setString(3, entry.getKey());
                ps.addBatch();
            }
            ps.executeBatch();
            if (!tblType.equalsIgnoreCase("EXTERNAL_TABLE")) {
                for (String partName : partToAdd) {
                    if (tblLocation == null || tblLocation.trim().isEmpty()) {
                        pathToMake.addAll(wh.getPriPartitionPaths(dbName, tblName, partName, subPartNameSet));
                    } else {
                        pathToMake.addAll(Warehouse.getPriPartitionPaths(new Path(tblLocation), partName,
                                subPartNameSet));
                    }
                }
            } else {
                for (String partName : partToAdd) {
                    pathToMake.addAll(Warehouse.getPriPartitionPaths(new Path(tblLocation), partName,
                            subPartNameSet));
                }
            }
        } else if (addPartitionDesc.getLevel() == 1) {
            sql = "SELECT tbl_id, tbl_type, sub_part_type, sub_part_key, tbl_format, tbl_location"
                    + " from TBLS where db_name='" + dbName.toLowerCase() + "' and tbl_name='"
                    + tblName.toLowerCase() + "'";
            tblSet = stmt.executeQuery(sql);
            isTblFind = false;
            while (tblSet.next()) {
                isTblFind = true;
                tblID = tblSet.getLong(1);
                tblType = tblSet.getString(2);
                subPartKey = tblSet.getString(4);
                subPartType = tblSet.getString(3);
                tblFormat = tblSet.getString(5);
                tblLocation = tblSet.getString(6);
                if (subPartType != null && !subPartType.isEmpty()) {
                    hasSubPart = true;
                }
                break;
            }
            tblSet.close();
            if (!isTblFind) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "can not find table " + dbName + ":" + tblName);
                throw new MetaException("can not find table " + dbName + ":" + tblName);
            }
            if (!tblType.equalsIgnoreCase("MANAGED_TABLE")) {
                if (tblType.equalsIgnoreCase("EXTERNAL_TABLE") && tblFormat != null
                        && tblFormat.equalsIgnoreCase("pgdata")) {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + tblType + ":" + tblFormat
                            + " can not support alter partition");
                    throw new MetaException(tblType + ":" + tblFormat + " can not support alter partition");
                }
                if (externalPartition && tblType.equalsIgnoreCase("EXTERNAL_TABLE")
                        && (tblFormat == null || !tblFormat.equalsIgnoreCase("pgdata"))) {
                } else {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + tblType + " can not support alter partition");
                    throw new MetaException(tblType + " can not support alter partition");
                }
            }
            if (!hasSubPart) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "table " + dbName + ":" + tblName
                        + " is not sun-partitioned");
                throw new MetaException("table " + dbName + ":" + tblName + " is not sun-partitioned");
            }
            sql = "SELECT type_name from COLUMNS where tbl_id=" + tblID + " and column_name='"
                    + subPartKey.toLowerCase() + "'";
            isColFind = false;
            ResultSet colSet = stmt.executeQuery(sql);
            while (colSet.next()) {
                isColFind = true;
                subKeyType = colSet.getString(1);
                break;
            }
            colSet.close();
            if (!isColFind) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg="
                        + "can not find partition key information " + priPartKey);
                throw new MetaException("can not find partition key information " + priPartKey);
            }
            pti = new PrimitiveTypeInfo();
            pti.setTypeName(subKeyType);
            StringIO = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveCategory.STRING);
            ValueIO = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(pti.getPrimitiveCategory());
            converter1 = ObjectInspectorConverters.getConverter(StringIO, ValueIO);
            converter2 = ObjectInspectorConverters.getConverter(StringIO, ValueIO);
            if ((addPartitionDesc.getPartType().equalsIgnoreCase("RANGE_PARTITION")
                    && !subPartType.equalsIgnoreCase("range"))
                    || (addPartitionDesc.getPartType().equalsIgnoreCase("LIST_PARTITION")
                            && !subPartType.equalsIgnoreCase("list"))) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "you can not add a "
                        + addPartitionDesc.getPartType() + " partition, but the sub-partition type is " + subPartType);
                throw new MetaException("you can not add a " + addPartitionDesc.getPartType()
                        + " partition, but the sub-partition type is " + subPartType);
            }
            LinkedHashMap<String, List<String>> partSpaces = new LinkedHashMap<String, List<String>>();
            Set<String> partNameSet = new TreeSet<String>();
            sql = "SELECT level, part_name, part_values from PARTITIONS where" + " tbl_id=" + tblID;// + " order by level asc";
            ResultSet partSet = stmt.executeQuery(sql);
            int partLevel = 0;
            while (partSet.next()) {
                partLevel = partSet.getInt(1);
                if (partLevel == 1) {
                    String partName = partSet.getString(2);
                    List<String> valueList = new ArrayList<String>();
                    Array spaceArray = partSet.getArray(3);
                    ResultSet priValueSet = spaceArray.getResultSet();
                    while (priValueSet.next()) {
                        valueList.add(priValueSet.getString(2));
                    }
                    partSpaces.put(partName, valueList);
                } else if (partLevel == 0) {
                    String partName = partSet.getString(2);
                    partNameSet.add(partName);
                }
            }
            partToAdd = new ArrayList<String>();
            LinkedHashMap<String, List<String>> addPartSpaces = (LinkedHashMap<String, List<String>>) addPartitionDesc
                    .getParSpaces();
            Iterator<String> itr = addPartSpaces.keySet().iterator();
            while (itr.hasNext()) {
                String key = itr.next().toLowerCase();
                if (partSpaces.containsKey(key)) {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + "table : " + tblName
                            + " have already contain a sub parititon named: " + key);
                    throw new MetaException(
                            "table : " + tblName + " have already contain a sub parititon named: " + key);
                }
                if (key.equalsIgnoreCase("default")) {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg="
                            + "use : 'alter table tblname add default subpartition' to add default subpartition!");
                    throw new MetaException(
                            "use : 'alter table tblname add default subpartition' to add default subpartition!");
                }
                partToAdd.add(key);
            }
            Iterator<List<String>> listItr = addPartSpaces.values().iterator();
            while (listItr.hasNext()) {
                Iterator<String> valueItr = listItr.next().iterator();
                if (valueItr.hasNext()) {
                    String value = valueItr.next();
                    if (converter1.convert(value) == null) {
                        LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                                + addPartitionDesc.getLevel() + ", msg=" + "value : " + value
                                + " should be type of " + priKeyType);
                        throw new MetaException("value : " + value + " should be type of " + priKeyType);
                    }
                    Iterator<List<String>> PartValuesItr = partSpaces.values().iterator();
                    while (PartValuesItr.hasNext()) {
                        if (PartValuesItr.next().contains(value)) {
                            LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                                    + addPartitionDesc.getLevel() + ", msg=" + "table : " + tblName
                                    + " have already contain a sub partition contain value: " + value);
                            throw new MetaException("table : " + tblName
                                    + " have already contain a sub partition contain value: " + value);
                        }
                    }
                }
            }
            ps = con.prepareStatement(
                    "INSERT INTO partitions(level, tbl_id, " + " part_name, part_values) values(?,?,?,?)");
            for (Map.Entry<String, List<String>> entry : addPartSpaces.entrySet()) {
                ps.setInt(1, 1);
                ps.setLong(2, tblID);
                Array spaceArray = con.createArrayOf("varchar", entry.getValue().toArray());
                ps.setArray(4, spaceArray);
                ps.setString(3, entry.getKey());
                ps.addBatch();
            }
            ps.executeBatch();
            if (!tblType.equalsIgnoreCase("EXTERNAL_TABLE")) {
                for (String partName : partToAdd) {
                    if (tblLocation == null || tblLocation.trim().isEmpty()) {
                        pathToMake.addAll(wh.getSubPartitionPaths(dbName, tblName, partNameSet, partName));
                    } else {
                        pathToMake.addAll(Warehouse.getSubPartitionPaths(new Path(tblLocation), partNameSet,
                                partName));
                    }
                }
            } else {
                for (String partName : partToAdd) {
                    pathToMake.addAll(Warehouse.getSubPartitionPaths(new Path(tblLocation), partNameSet,
                            partName));
                }
            }
        }
        con.commit();
        success = true;
    } catch (SQLException ex) {
        ex.printStackTrace();
        LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                + addPartitionDesc.getLevel() + ", msg=" + ex.getMessage());
        throw new MetaException(ex.getMessage());
    } finally {
        if (!success) {
            try {
                con.rollback();
            } catch (SQLException e) {
            }
            if (isPathMaked) {
                for (Path path : pathToMake) {
                    wh.deleteDir(path, false);
                }
            }
        }
        closeStatement(ps);
        closeConnection(con);
    }
    if (success) {
        boolean mkDirOK = false;
        List<Path> createdPath = new ArrayList<Path>();
        try {
            for (Path path : pathToMake) {
                mkDirOK = wh.mkdirs(path);
                if (!mkDirOK) {
                    break;
                }
                createdPath.add(path);
            }
        } catch (Exception x) {
            mkDirOK = false;
        }
        if (!mkDirOK) {
            dropPartitionMeta(dbName, tblID, addPartitionDesc);
            if (!createdPath.isEmpty()) {
                for (Path path : createdPath) {
                    wh.deleteDir(path, true);
                }
            }
            throw new MetaException("can not create hdfs path, add partition failed");
        }
    }
}
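In this metastore example, entrySet() is used to walk addPartSpaces (a LinkedHashMap of partition name to partition values) and bind one batch row per entry, so partitions are inserted in the order they were declared. The sketch below isolates that pattern; the table and column names are illustrative only, not the actual metastore schema, and a real caller would supply its own JDBC Connection.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class PartitionBatchInsert {
    // Batch-insert one row per map entry, in the order the partitions were added.
    static void insertAll(Connection con, long tblId,
            LinkedHashMap<String, List<String>> partSpaces) throws SQLException {
        PreparedStatement ps = con.prepareStatement(
                "INSERT INTO partitions(level, tbl_id, part_name, part_values) VALUES (?, ?, ?, ?)");
        try {
            for (Map.Entry<String, List<String>> entry : partSpaces.entrySet()) {
                ps.setInt(1, 0);
                ps.setLong(2, tblId);
                ps.setString(3, entry.getKey());
                ps.setArray(4, con.createArrayOf("varchar", entry.getValue().toArray()));
                ps.addBatch();
            }
            ps.executeBatch();
        } finally {
            ps.close();
        }
    }
}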