List of usage examples for java.sql ResultSet getMetaData
ResultSetMetaData getMetaData() throws SQLException;
Retrieves the number, types and properties of this ResultSet object's columns.
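Before the project examples, here is a minimal, self-contained sketch of the typical call pattern. It assumes the H2 in-memory JDBC driver on the classpath purely for illustration; the getMetaData() calls are identical with any JDBC driver.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class MetaDataDemo {
    public static void main(String[] args) throws SQLException {
        // "jdbc:h2:mem:demo" is a hypothetical in-memory database for this sketch
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement st = con.createStatement()) {
            st.execute("CREATE TABLE person(id INT PRIMARY KEY, name VARCHAR(64))");
            try (ResultSet rs = st.executeQuery("SELECT * FROM person")) {
                ResultSetMetaData md = rs.getMetaData();
                // JDBC column indexes are 1-based
                for (int i = 1; i <= md.getColumnCount(); i++) {
                    System.out.printf("%d: %s (%s)%n",
                            i, md.getColumnLabel(i), md.getColumnTypeName(i));
                }
            }
        }
    }
}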
From source file: com.hangum.tadpole.engine.sql.util.resultset.ResultSetUtils.java (maps column indexes to column names, optionally prepending a row-number column)
/**
 * Column names of a table, keyed by display index.
 *
 * @param isShowRowNum whether to prepend a "#" row-number column at index 0
 * @param rs the result set to inspect
 * @return a map of display index to column name
 * @throws Exception
 */
public static Map<Integer, String> getColumnName(boolean isShowRowNum, ResultSet rs) throws Exception {
    Map<Integer, String> mapColumnName = new HashMap<Integer, String>();
    int intStartIndex = 0;
    if (isShowRowNum) {
        intStartIndex++;
        mapColumnName.put(0, "#");
    }

    ResultSetMetaData rsm = rs.getMetaData();
    for (int i = 0; i < rsm.getColumnCount(); i++) {
        mapColumnName.put(i + intStartIndex, rsm.getColumnName(i + 1));
    }
    return mapColumnName;
}
From source file: com.hangum.tadpole.engine.sql.util.resultset.ResultSetUtils.java (maps column indexes to column labels, optionally prepending a row-number column)
/**
 * Column label names of a table, keyed by display index.
 *
 * @param isShowRowNum whether to prepend a "#" row-number column at index 0
 * @param rs the result set to inspect
 * @return a map of display index to column label
 * @throws Exception
 */
public static Map<Integer, String> getColumnLabelName(boolean isShowRowNum, ResultSet rs) throws Exception {
    Map<Integer, String> mapColumnName = new HashMap<Integer, String>();
    int intStartIndex = 0;
    if (isShowRowNum) {
        intStartIndex++;
        mapColumnName.put(0, "#");
    }

    ResultSetMetaData rsm = rs.getMetaData();
    for (int i = 0; i < rsm.getColumnCount(); i++) {
        mapColumnName.put(i + intStartIndex, rsm.getColumnLabel(i + 1));
    }
    return mapColumnName;
}
From source file: com.trackplus.ddl.DataReader.java (uses getMetaData() to build INSERT statements, plus separate UPDATE statements for oversized values, when exporting table data)
private static int getTableData(BufferedWriter writer, BufferedWriter writerClean, BufferedWriter writerUpdate,
        BufferedWriter writerUpdateClean, Connection con, String tableName,
        StringValueConverter stringValueConverter) throws DDLException {
    Statement st = MetaDataBL.createStatement(con);
    String sql = "SELECT * FROM " + tableName;
    ResultSet rs;
    try {
        rs = st.executeQuery(sql);
    } catch (SQLException e) {
        throw new DDLException(e.getMessage(), e);
    }
    int idx = 0;
    int idxUpdate = 0;
    try {
        ResultSetMetaData md = rs.getMetaData();
        String primaryKey = MetaDataBL.getPrimaryKey(tableName, con);
        int columnCount = md.getColumnCount();

        // Build the shared "INSERT INTO table(col1, col2, ...) VALUES(" prefix once
        StringBuilder sb = new StringBuilder();
        sb.append("INSERT INTO " + tableName + "(");
        for (int i = 0; i < columnCount; i++) {
            String columnName = md.getColumnName(i + 1);
            sb.append(columnName);
            if (i < columnCount - 1) {
                sb.append(", ");
            }
        }
        sb.append(") VALUES(");

        StringBuilder updateSQL;
        while (rs.next()) {
            StringBuilder line = new StringBuilder();
            line.append(sb);
            String primaryKeyValue = rs.getString(primaryKey);
            for (int i = 0; i < columnCount; i++) {
                String value;
                String columnName = md.getColumnName(i + 1);
                try {
                    value = stringValueConverter.getStringValue(md, i + 1, rs, tableName);
                } catch (DDLException ex) {
                    LOGGER.warn("Error: " + ex.getMessage() + " for column:" + columnName + " in table:"
                            + tableName + " primary key " + primaryKey + "=" + primaryKeyValue
                            + ". Will be set to NULL!");
                    value = null;
                }
                if (value != null && value.length() > MAX_VALUE_STRING) {
                    if ("MAILBODY".equalsIgnoreCase(columnName) || "CLOBVALUE".equalsIgnoreCase(columnName)) {
                        // blob-like columns: keep the long value inline in the INSERT
                    } else {
                        // emit an UPDATE for the oversized value and NULL it in the INSERT
                        updateSQL = new StringBuilder();
                        updateSQL.append("UPDATE ").append(tableName).append(" SET ").append(columnName)
                                .append("=");
                        updateSQL.append(value).append("\n WHERE ").append(primaryKey).append("=")
                                .append(primaryKeyValue);
                        updateSQL.append(";");
                        MetaDataBL.appendLine(writerUpdateClean, updateSQL.toString());
                        updateSQL.append(LINE_SEPARATOR);
                        MetaDataBL.appendLine(writerUpdate, updateSQL.toString());
                        idxUpdate++;
                        value = null;
                    }
                }
                line.append(value);
                if (i < columnCount - 1) {
                    line.append(", ");
                }
            }
            line.append(");");
            MetaDataBL.appendLine(writerClean, line.toString());
            line.append(LINE_SEPARATOR);
            MetaDataBL.appendLine(writer, line.toString());
            idx++;
        }
    } catch (SQLException e) {
        throw new DDLException(e.getMessage(), e);
    }
    try {
        rs.close();
    } catch (SQLException e) {
        throw new DDLException(e.getMessage(), e);
    }
    if (idxUpdate > 0) {
        LOGGER.warn("There was " + idxUpdate + " records with String size>" + MAX_VALUE_STRING
                + " found in table:" + tableName);
    }
    return idx;
}
From source file: br.bookmark.db.util.ResultSetUtils.java (populates a target bean from the first row of a ResultSet)
/**
 * Populate target bean with the first record from a ResultSet.
 *
 * @param resultSet The ResultSet whose parameters are to be used
 *                  to populate bean properties
 * @param target An instance of the bean to populate
 * @exception SQLException if an exception is thrown while setting
 *            property values, populating the bean, or accessing the ResultSet
 * @return True if resultSet contained a next element
 */
public static boolean getElement(Object target, ResultSet resultSet) throws Exception {
    // Check prerequisites
    if ((target == null) || (resultSet == null))
        throw new SQLException("getElement: Null parameter");

    // Acquire resultSet MetaData
    ResultSetMetaData metaData = resultSet.getMetaData();
    int cols = metaData.getColumnCount();

    // Create hashmap, sized to number of columns
    HashMap properties = new HashMap(cols, 1);

    // Scroll to next record and pump into hashmap
    boolean found = false;
    if (resultSet.next()) {
        found = true;
        for (int i = 1; i <= cols; i++) {
            putEntry(properties, metaData, resultSet, i, target.getClass());
        }
        // try {
        BeanUtils.copyProperties(target, properties);
        // analyseRelationships(target);
        // } catch (Throwable t) {
        //     throw new SQLException("ResultSetUtils.getElement: " +
        //             t.getMessage() + " - " + properties.toString());
        // }
    } // end if
    return found;
}
From source file: com.hangum.tadpole.engine.sql.util.resultset.ResultSetUtils.java (resolves the schema-qualified table name for each column)
/**
 * Table name of each column, keyed by display index.
 *
 * @param isShowRowNum whether to prepend a "#" row-number column at index 0
 * @param rs the result set to inspect
 * @return a map of display index to (schema-qualified) table name
 * @throws Exception
 */
public static Map<Integer, String> getColumnTableName(final UserDBDAO userDB, boolean isShowRowNum,
        ResultSet rs) throws Exception {
    Map<Integer, String> mapColumnName = new HashMap<Integer, String>();
    int intStartIndex = 0;
    if (isShowRowNum) {
        intStartIndex++;
        mapColumnName.put(0, "#");
    }

    ResultSetMetaData rsm = rs.getMetaData();
    for (int i = 0; i < rsm.getColumnCount(); i++) {
        // if (userDB.getDBDefine() == DBDefine.POSTGRE_DEFAULT) {
        //     PGResultSetMetaData pgsqlMeta = (PGResultSetMetaData) rsm;
        //     mapColumnName.put(i + intStartIndex, pgsqlMeta.getBaseTableName(i + 1));
        //     // if (logger.isDebugEnabled()) logger.debug("Table name is " + pgsqlMeta.getBaseTableName(i + 1));
        // } else
        if (userDB.getDBDefine() == DBDefine.HIVE_DEFAULT || userDB.getDBDefine() == DBDefine.HIVE2_DEFAULT) {
            mapColumnName.put(i + intStartIndex, "Apache Hive is not support this method.");
        } else {
            if (rsm.getSchemaName(i + 1) == null || "".equals(rsm.getSchemaName(i + 1))) {
                // if (logger.isDebugEnabled()) logger.debug("Table name is " + rsm.getTableName(i + 1) + ", schema name is " + rsm.getSchemaName(i + 1));
                mapColumnName.put(i + intStartIndex, rsm.getTableName(i + 1));
            } else {
                mapColumnName.put(i + intStartIndex, rsm.getSchemaName(i + 1) + "." + rsm.getTableName(i + 1));
            }
        }
    }
    return mapColumnName;
}
From source file: br.bookmark.db.util.ResultSetUtils.java (populates one bean per row and returns them as a Collection)
/**
 * Return an ArrayList of beans populated from a ResultSet.
 *
 * @param resultSet The ResultSet whose parameters are to be used
 *                  to populate bean properties
 * @param target An instance of the bean to populate
 * @exception SQLException if an exception is thrown while setting
 *            property values, populating the bean, or accessing the ResultSet
 */
public static Collection getCollection(Object target, ResultSet resultSet) throws Exception {
    // Check prerequisites
    if ((target == null) || (resultSet == null))
        throw new GenericDAOException("getCollection: Null parameter");

    // Acquire resultSet MetaData
    ResultSetMetaData metaData = resultSet.getMetaData();
    int cols = metaData.getColumnCount();

    // Create hashmap, sized to number of columns
    HashMap properties = new HashMap(cols, 1);

    // Use ArrayList to maintain ResultSet sequence
    ArrayList list = new ArrayList();

    // Acquire target class
    Class factory = target.getClass();

    // Scroll to next record and pump into hashmap
    while (resultSet.next()) {
        for (int i = 1; i <= cols; i++) {
            putEntry(properties, metaData, resultSet, i, target.getClass());
        }
        Object bean = factory.newInstance();
        BeanUtils.copyProperties(bean, properties);
        list.add(bean);
        properties.clear();
    } // end while
    return ((Collection) list);
}
From source file: edu.ucsb.nceas.MCTestCase.java (runs a SELECT and collects each row into a Hashtable keyed by column name)
protected static Vector<Hashtable<String, Object>> dbSelect(String sqlStatement, String methodName)
        throws SQLException {
    Vector<Hashtable<String, Object>> resultVector = new Vector<Hashtable<String, Object>>();

    DBConnectionPool connPool = DBConnectionPool.getInstance();
    DBConnection dbconn = DBConnectionPool.getDBConnection(methodName);
    int serialNumber = dbconn.getCheckOutSerialNumber();

    PreparedStatement pstmt = null;

    debug("Selecting from db: " + sqlStatement);
    pstmt = dbconn.prepareStatement(sqlStatement);
    pstmt.execute();
    ResultSet resultSet = pstmt.getResultSet();
    ResultSetMetaData rsMetaData = resultSet.getMetaData();
    int numColumns = rsMetaData.getColumnCount();
    debug("Number of data columns: " + numColumns);

    while (resultSet.next()) {
        Hashtable<String, Object> hashTable = new Hashtable<String, Object>();
        for (int i = 1; i <= numColumns; i++) {
            if (resultSet.getObject(i) != null) {
                hashTable.put(rsMetaData.getColumnName(i), resultSet.getObject(i));
            }
        }
        debug("adding data row to result vector");
        resultVector.add(hashTable);
    }

    resultSet.close();
    pstmt.close();
    DBConnectionPool.returnDBConnection(dbconn, serialNumber);

    return resultVector;
}
From source file: com.thinkbiganalytics.util.JdbcCommon.java (streams a ResultSet out as Avro records, using the metadata to map SQL types to Avro types)
/**
 * Converts a JDBC result set to an Avro stream.
 *
 * @param rs        The result set of the JDBC query
 * @param outStream The output stream for the Avro formatted records
 * @return the number of rows converted to Avro format
 * @throws SQLException if errors occur while reading data from the database
 * @throws IOException  if unable to convert to Avro format
 */
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream)
        throws SQLException, IOException {
    final Schema schema = createSchema(rs);
    final GenericRecord rec = new GenericData.Record(schema);

    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);

        final ResultSetMetaData meta = rs.getMetaData();
        final int nrOfColumns = meta.getColumnCount();
        long nrOfRows = 0;
        while (rs.next()) {
            for (int i = 1; i <= nrOfColumns; i++) {
                final int javaSqlType = meta.getColumnType(i);
                final Object value = rs.getObject(i);
                if (value == null) {
                    rec.put(i - 1, null);
                } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY
                        || javaSqlType == ARRAY || javaSqlType == BLOB || javaSqlType == CLOB) {
                    // bytes require slightly different handling
                    byte[] bytes = rs.getBytes(i);
                    ByteBuffer bb = ByteBuffer.wrap(bytes);
                    rec.put(i - 1, bb);
                } else if (value instanceof Byte) {
                    // The tinyint(1) type is returned by the JDBC driver as java.sql.Types.TINYINT,
                    // but the value arrives as java.lang.Byte (at least H2 JDBC works this way).
                    // A direct put to the Avro record results in:
                    // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte
                    rec.put(i - 1, ((Byte) value).intValue());
                } else if (value instanceof BigDecimal || value instanceof BigInteger) {
                    // Avro can't handle BigDecimal and BigInteger as numbers - it will throw an
                    // AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38"
                    rec.put(i - 1, value.toString());
                } else if (value instanceof Number || value instanceof Boolean) {
                    rec.put(i - 1, value);
                } else {
                    // The different types that we support are numbers (int, long, double, float),
                    // as well as boolean values and Strings. Since Avro doesn't provide
                    // timestamp types, we want to convert those to Strings. So we will cast anything other
                    // than numbers or booleans to strings by using the toString() method.
                    rec.put(i - 1, value.toString());
                }
            }
            dataFileWriter.append(rec);
            nrOfRows += 1;
        }

        return nrOfRows;
    }
}
From source file: com.viettel.ws.client.JDBCUtil.java (appends the rows of two ResultSets to an XML DOM document)
/**
 * Create a document using the DOM API.
 *
 * @param rs1 the first result set
 * @param rs2 the second result set
 * @param doc an input document to append content to
 * @param rsName name of the appended element
 * @return the document after appending content
 * @throws ParserConfigurationException If an error occurs when parsing the XML string
 * @throws SQLException If an error occurs when reading data from the database
 */
public static Document add2Document1(ResultSet rs1, ResultSet rs2, Document doc, String rsName)
        throws ParserConfigurationException, SQLException {
    if (rs1 == null && rs2 == null) {
        return doc;
    }

    // Get root element
    Element root = doc.getDocumentElement();
    Element rsElement = doc.createElement(rsName);
    root.appendChild(rsElement);

    if (rs1 != null) {
        ResultSetMetaData rsmd = rs1.getMetaData();
        int colCount = rsmd.getColumnCount();
        while (rs1.next()) {
            Element row = doc.createElement("Row");
            rsElement.appendChild(row);
            try {
                for (int i = 1; i <= colCount; i++) {
                    String columnName = rsmd.getColumnName(i);
                    Object value = rs1.getObject(i);
                    if (value == null) {
                        value = "";
                    }
                    Element node = doc.createElement(columnName);
                    node.appendChild(doc.createTextNode(value.toString()));
                    row.appendChild(node);
                }
            } catch (Exception ex) {
                LogUtil.addLog(ex); // binhnt sonar a160901
                // logger.error(e, e);
            }
        }
    }
    if (rs2 != null) {
        ResultSetMetaData rsmd = rs2.getMetaData();
        int colCount = rsmd.getColumnCount();
        while (rs2.next()) {
            Element row = doc.createElement("Row");
            rsElement.appendChild(row);
            try {
                for (int i = 1; i <= colCount; i++) {
                    String columnName = rsmd.getColumnName(i);
                    Object value = rs2.getObject(i);
                    if (value == null) {
                        value = "";
                    }
                    Element node = doc.createElement(columnName);
                    node.appendChild(doc.createTextNode(value.toString()));
                    row.appendChild(node);
                }
            } catch (Exception ex) {
                LogUtil.addLog(ex); // binhnt sonar a160901
                // logger.error(e, e);
            }
        }
    }
    return doc;
}
From source file: com.healthmarketscience.jackcess.util.ImportUtil.java (copies a ResultSet into a Jackcess database table in batches)
/**
 * Copy an existing JDBC ResultSet into a new (or optionally existing) table
 * in this database.
 *
 * @param name Name of the new table to create
 * @param source ResultSet to copy from
 * @param filter valid import filter
 * @param useExistingTable if {@code true} use current table if it already
 *                         exists, otherwise, create new table with unique name
 *
 * @return the name of the imported table
 *
 * @see Builder
 */
public static String importResultSet(ResultSet source, Database db, String name, ImportFilter filter,
        boolean useExistingTable) throws SQLException, IOException {
    ResultSetMetaData md = source.getMetaData();

    name = TableBuilder.escapeIdentifier(name);
    Table table = null;
    if (!useExistingTable || ((table = db.getTable(name)) == null)) {
        List<ColumnBuilder> columns = toColumns(md);
        table = createUniqueTable(db, name, columns, md, filter);
    }

    List<Object[]> rows = new ArrayList<Object[]>(COPY_TABLE_BATCH_SIZE);
    int numColumns = md.getColumnCount();

    while (source.next()) {
        Object[] row = new Object[numColumns];
        for (int i = 0; i < row.length; i++) {
            row[i] = source.getObject(i + 1);
        }
        row = filter.filterRow(row);
        if (row == null) {
            continue;
        }
        rows.add(row);
        if (rows.size() == COPY_TABLE_BATCH_SIZE) {
            table.addRows(rows);
            rows.clear();
        }
    }
    if (rows.size() > 0) {
        table.addRows(rows);
    }

    return table.getName();
}