List of usage examples for java.sql.ResultSetMetaData.getColumnName
String getColumnName(int column) throws SQLException;
From source file: com.datatorrent.contrib.enrich.JDBCLoader.java
protected ArrayList<Object> getDataFrmResult(Object result) throws RuntimeException { try {//www .ja v a2 s. c o m ResultSet resultSet = (ResultSet) result; if (resultSet.next()) { ResultSetMetaData rsdata = resultSet.getMetaData(); // If the includefields is empty, populate it from ResultSetMetaData if (CollectionUtils.isEmpty(includeFieldInfo)) { if (includeFieldInfo == null) { includeFieldInfo = new ArrayList<>(); } for (int i = 1; i <= rsdata.getColumnCount(); i++) { String columnName = rsdata.getColumnName(i); // TODO: Take care of type conversion. includeFieldInfo.add(new FieldInfo(columnName, columnName, FieldInfo.SupportType.OBJECT)); } } ArrayList<Object> res = new ArrayList<Object>(); for (FieldInfo f : includeFieldInfo) { res.add(getConvertedData(resultSet.getObject(f.getColumnName()), f)); } return res; } else { return null; } } catch (SQLException e) { throw new RuntimeException(e); } }
From source file: com.mirth.connect.connectors.jdbc.DatabaseConnectorService.java
/**
 * Plugin entry point. For method "getInformationSchema", connects to the database
 * described by {@code object} (a DatabaseConnectionInfo) and returns a
 * SortedSet&lt;Table&gt; describing every matching table and its columns; any other
 * method name returns null.
 *
 * @param channelId  channel whose replacement context is used when expanding the
 *                   URL/username/password templates
 * @param method     operation selector; only "getInformationSchema" is handled here
 * @param object     expected to be a DatabaseConnectionInfo when method matches
 * @param sessionsId unused by this method
 * @return the sorted set of Table definitions, or null for unknown methods
 * @throws Exception wrapping any failure while retrieving tables and columns
 */
public Object invoke(String channelId, String method, Object object, String sessionsId) throws Exception {
    if (method.equals("getInformationSchema")) {
        // method 'getInformationSchema' will return Set<Table>
        Connection connection = null;
        try {
            DatabaseConnectionInfo databaseConnectionInfo = (DatabaseConnectionInfo) object;
            String driver = databaseConnectionInfo.getDriver();
            // URL/user/password may contain template values resolved per channel.
            String address = replacer.replaceValues(databaseConnectionInfo.getUrl(), channelId);
            String user = replacer.replaceValues(databaseConnectionInfo.getUsername(), channelId);
            String password = replacer.replaceValues(databaseConnectionInfo.getPassword(), channelId);
            // Although these properties are not persisted, they are used by the JdbcConnectorService.
            String tableNamePatternExp = databaseConnectionInfo.getTableNamePatternExpression();
            String selectLimit = databaseConnectionInfo.getSelectLimit();
            String schema = null;
            Class.forName(driver);
            // NOTE(review): DriverManager.setLoginTimeout is JVM-global, so this
            // save/restore is racy if other threads connect concurrently.
            int oldLoginTimeout = DriverManager.getLoginTimeout();
            DriverManager.setLoginTimeout(30);
            connection = DriverManager.getConnection(address, user, password);
            DriverManager.setLoginTimeout(oldLoginTimeout);
            DatabaseMetaData dbMetaData = connection.getMetaData();
            // the sorted set to hold the table information
            SortedSet<Table> tableInfoList = new TreeSet<Table>();
            // Use a schema if the user name matches one of the schemas.
            // Fix for Oracle: MIRTH-1045
            ResultSet schemasResult = null;
            try {
                schemasResult = dbMetaData.getSchemas();
                while (schemasResult.next()) {
                    String schemaResult = schemasResult.getString(1);
                    if (user.equalsIgnoreCase(schemaResult)) {
                        schema = schemaResult;
                    }
                }
            } finally {
                if (schemasResult != null) {
                    schemasResult.close();
                }
            }
            // based on the table name pattern, attempt to retrieve the table information
            List<String> tablePatternList = translateTableNamePatternExpression(tableNamePatternExp);
            List<String> tableNameList = new ArrayList<String>();
            // go through each possible table name pattern and query for the tables
            for (String tableNamePattern : tablePatternList) {
                ResultSet rs = null;
                try {
                    rs = dbMetaData.getTables(null, schema, tableNamePattern, TABLE_TYPES);
                    // store each table name so it can be used later to retrieve
                    // the table's column information
                    while (rs.next()) {
                        tableNameList.add(rs.getString("TABLE_NAME"));
                    }
                } finally {
                    if (rs != null) {
                        rs.close();
                    }
                }
            }
            // for each table, grab its column information
            for (String tableName : tableNameList) {
                ResultSet rs = null;
                ResultSet backupRs = null;
                boolean fallback = false;
                try {
                    // It is much more efficient to use ResultSetMetaData to retrieve
                    // column information, so each driver may define its own SELECT
                    // statement (selectLimit) to query the table columns. If no such
                    // statement is defined we fall back to the generic (and potentially
                    // extremely slow) DatabaseMetaData.getColumns path.
                    List<Column> columnList = new ArrayList<Column>();
                    if (StringUtils.isEmpty(selectLimit)) {
                        logger.debug("No select limit is defined, using generic method");
                        rs = dbMetaData.getColumns(null, null, tableName, null);
                        // retrieve all relevant column information
                        for (int i = 0; rs.next(); i++) {
                            Column column = new Column(rs.getString("COLUMN_NAME"), rs.getString("TYPE_NAME"),
                                    rs.getInt("COLUMN_SIZE"));
                            columnList.add(column);
                        }
                    } else {
                        logger.debug("Select limit is defined, using specific select query : '" + selectLimit + "'");
                        // replace the '?' with the appropriate schema.table name, and use
                        // ResultSetMetaData to retrieve the column information
                        final String schemaTableName = StringUtils.isNotEmpty(schema) ? schema + "." + tableName
                                : tableName;
                        final String queryString = selectLimit.trim().replaceAll("\\?", schemaTableName);
                        Statement statement = connection.createStatement();
                        try {
                            rs = statement.executeQuery(queryString);
                            ResultSetMetaData rsmd = rs.getMetaData();
                            // retrieve all relevant column information (1-based indexes)
                            for (int i = 1; i < rsmd.getColumnCount() + 1; i++) {
                                Column column = new Column(rsmd.getColumnName(i), rsmd.getColumnTypeName(i),
                                        rsmd.getPrecision(i));
                                columnList.add(column);
                            }
                        } catch (SQLException sqle) {
                            // The driver-specific query failed; remember to retry generically.
                            logger.info("Failed to execute '" + queryString
                                    + "', fall back to generic approach to retrieve column information");
                            fallback = true;
                        } finally {
                            if (statement != null) {
                                statement.close();
                            }
                        }
                        // failed to use selectLimit method, so we need to fall back to generic
                        // if this generic approach fails, then there's nothing we can do
                        if (fallback) {
                            // Re-initialize in case some columns were added before failing
                            columnList = new ArrayList<Column>();
                            logger.debug("Using fallback method for retrieving columns");
                            backupRs = dbMetaData.getColumns(null, null, tableName, null);
                            // retrieve all relevant column information
                            for (int i = 0; backupRs.next(); i++) {
                                Column column = new Column(backupRs.getString("COLUMN_NAME"),
                                        backupRs.getString("TYPE_NAME"), backupRs.getInt("COLUMN_SIZE"));
                                columnList.add(column);
                            }
                        }
                    }
                    // create table object and add to the list of table definitions
                    Table table = new Table(tableName, columnList);
                    tableInfoList.add(table);
                } finally {
                    if (rs != null) {
                        rs.close();
                    }
                    if (backupRs != null) {
                        backupRs.close();
                    }
                }
            }
            return tableInfoList;
        } catch (Exception e) {
            throw new Exception("Could not retrieve database tables and columns.", e);
        } finally {
            if (connection != null) {
                connection.close();
            }
        }
    }
    return null;
}
From source file: hmp.HMPClassiferSummary.java
/**
 * Driver for the classifier summary: parses command-line options, opens the
 * biosql and microbiome MySQL connections, discovers the RDP level columns
 * available in rdp_result_data, then summarizes either a single RDP result
 * file or every file in the given directory into summary_all.txt.
 *
 * @param args command line arguments (see getOptions for the supported flags)
 */
private void run(String[] args) throws ClassNotFoundException, FileNotFoundException, SQLException, IOException, ParseException {
    // Get command line options.
    cli = getOptions(args);
    // Determine whether the rdp results path is a single file or a directory.
    File rdpFile = new File(cli.getOptionValue("rdpDir"));
    boolean isDirectory = false;
    if (rdpFile.isDirectory()) {
        isDirectory = true;
    }
    // Set up the database connections: a fixed local biosql connection plus the
    // user-specified microbiome database.
    MySQLConnector connector = new MySQLConnector("localhost", "biosql", "biosql", "biosql");
    conn = connector.getConnection();
    connector = new MySQLConnector("localhost", cli.getOptionValue("db"), cli.getOptionValue("dbUser"),
            cli.getOptionValue("dbPassword"));
    microbiome = connector.getConnection();
    // Create the output directory.
    createOutputDir("summary");
    // Determine which columns are available for rdp levels by sampling the
    // rdp_result_data table and reading its result-set metadata.
    rdpLevelCols = new ArrayList<String>();
    ResultSet rsCol = microbiome.createStatement().executeQuery("select * from rdp_result_data limit 10");
    ResultSetMetaData rsMeta = rsCol.getMetaData();
    int colNumber = rsMeta.getColumnCount();
    // JDBC column indexes are 1-based.
    for (int i = 1; i < colNumber + 1; i++) {
        rdpLevelCols.add(rsMeta.getColumnName(i));
    }
    // Process the file (or every file in the directory) into one summary file.
    // NOTE(review): the writer is not closed if processReads throws; consider try/finally.
    PrintWriter writer = new PrintWriter(new FileWriter("summary_all.txt"));
    writer.println("sampleid|level|taxa|count|len|sd|gc|sd|skew");
    if (isDirectory) {
        File[] files = getFiles(rdpFile);
        int count = 0;
        for (int i = 0; i < files.length; i++) {
            // NOTE(review): "count >= 0" is always true, so every file is processed;
            // this looks like a leftover resume/skip mechanism — confirm intent.
            if (count >= 0) {
                System.out.println("===PROCESSING FILE " + i + " ===");
                processReads(files[i], writer);
                count++;
            }
        }
    } else {
        processReads(rdpFile, writer);
    }
    writer.close();
    System.out.println("Done!");
}
From source file: org.apache.sqoop.connector.jdbc.oracle.util.OracleQueries.java
public static List<Column> getColDataTypes(Connection connection, OracleTable table, List<String> colNames) throws SQLException { List<Column> result = new ArrayList<Column>(); StringBuilder sb = new StringBuilder(); sb.append("SELECT "); for (int idx = 0; idx < colNames.size(); idx++) { if (idx > 0) { sb.append(","); }//from www .j a v a 2s. co m sb.append(colNames.get(idx)); } sb.append(String.format(" FROM %s WHERE 0=1", table.toString())); String sql = sb.toString(); PreparedStatement statement = connection.prepareStatement(sql); try { ResultSetMetaData metadata = statement.getMetaData(); int numCols = metadata.getColumnCount(); for (int i = 1; i < numCols + 1; i++) { String colName = metadata.getColumnName(i); Column oracleColumn = OracleSqlTypesUtils.sqlTypeToSchemaType(metadata.getColumnType(i), colName, metadata.getPrecision(i), metadata.getScale(i)); result.add(oracleColumn); } } finally { statement.close(); } return result; }
From source file: com.vangent.hieos.logbrowser.util.TableModel.java
/**
 * Builds the table model by running {@code sqlRequest} on connection {@code c},
 * capturing the column headers into {@code headerVector} and every row of
 * (formatted) data into {@code dataVector}.
 *
 * Fix: the original leaked the Statement/ResultSet; the Statement is now closed
 * in a finally block (closing a Statement also closes its current ResultSet per
 * the JDBC specification). The caller remains responsible for the Connection.
 *
 * @param sqlRequest       query whose full result populates the model
 * @param fieldsAndFormats per-column formatting rules used by getFormattedData
 * @param c                open connection; not closed by this constructor
 * @throws SQLException if statement creation, execution or column access fails
 */
public TableModel(String sqlRequest, Map fieldsAndFormats, Connection c) throws SQLException {
    this.fieldsAndFormats = fieldsAndFormats;
    ResultSet statementResult;
    log.debug("TABLE_MODEL_SYSLOG: database connection created\n");
    Statement statement = c.createStatement();
    try {
        log.debug("TABLE_MODEL_SYSLOG: statement created\n");
        statementResult = statement.executeQuery(sqlRequest);
        log.debug("TABLE_MODEL_SYSLOG: Query executed\n");
        log.debug("<--" + new GregorianCalendar().getTime() + " TableModel close Database \n");
        ResultSetMetaData metaData = statementResult.getMetaData();
        int columnCount = metaData.getColumnCount();
        dataVector = new Vector<Vector<Object>>();
        headerVector = new Vector<String>();
        log.debug("TABLE_MODEL_SYSLOG: colomn count : " + columnCount + "\n");
        log.debug("TABLE_MODEL_SYSLOG: Table--------------------------------------");
        // Header row: JDBC column indexes are 1-based.
        for (int i = 0; i < columnCount; i++) {
            headerVector.add(metaData.getColumnName((i + 1)));
            log.debug(metaData.getColumnName((i + 1)) + "\t");
        }
        // Data rows: fetch each column by name (via this.getColumnName, which
        // reads the header just built) and apply any configured formatting.
        while (statementResult.next()) {
            Vector<Object> tmp = new Vector<Object>(columnCount);
            for (int j = 0; j < columnCount; j++) {
                String columnName = getColumnName(j);
                Object columnData = statementResult.getObject(columnName);
                columnData = getFormattedData(columnName, columnData);
                tmp.add(columnData);
                log.debug(columnData + "\t");
            }
            log.debug("\n");
            dataVector.add(tmp);
        }
    } finally {
        // Closing the Statement also closes its ResultSet (JDBC spec).
        statement.close();
    }
}
From source file: org.apache.kylin.rest.util.HiveReroute.java
private void extractColumnMetadata(ResultSet resultSet, List<SelectedColumnMeta> columnMetas) throws SQLException { ResultSetMetaData metaData = null; int columnCount = 0; metaData = resultSet.getMetaData();//from w w w .ja v a 2s . c om columnCount = metaData.getColumnCount(); // fill in selected column meta for (int i = 1; i <= columnCount; ++i) { columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i), false, metaData.isCurrency(i), metaData.isNullable(i), false, metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i), metaData.getColumnName(i), null, null, null, metaData.getPrecision(i), metaData.getScale(i), metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData.isReadOnly(i), false, false)); } }
From source file: org.atricore.idbus.idojos.dbidentitystore.IdentityDAO.java
/**
 * Reads every remaining row of {@code rs} and flattens each column into a
 * name/value pair (column name mapped to its string value).
 * Note: the method name's "fecth" is a pre-existing typo kept for compatibility.
 *
 * @param rs result set positioned before its first row
 * @return all pairs, row by row and column by column
 * @throws SQLException if metadata or column access fails
 */
protected SSONameValuePair[] fecthSSONameValuePairsFromCols(ResultSet rs) throws SQLException {
    ResultSetMetaData meta = rs.getMetaData();
    int columnCount = meta.getColumnCount();
    List<SSONameValuePair> pairs = new ArrayList<SSONameValuePair>();
    while (rs.next()) {
        // JDBC column indexes are 1-based.
        for (int col = 1; col <= columnCount; col++) {
            pairs.add(new SSONameValuePair(meta.getColumnName(col), rs.getString(col)));
        }
    }
    return pairs.toArray(new SSONameValuePair[pairs.size()]);
}
From source file: org.apache.syncope.core.persistence.jpa.content.XMLContentExporter.java
/**
 * Streams every row of {@code tableName} as one XML element (named after the
 * table, one attribute per non-null, non-nullified column) through the SAX
 * handler. Rows are ordered by the table's self-referencing foreign-key columns
 * and its primary-key columns so the export can be re-imported in a dependency-
 * friendly order.
 *
 * @param handler     SAX handler receiving one element per row
 * @param dbSchema    schema used when looking up imported keys
 * @param conn        open connection to export from; not closed here
 * @param tableName   table to export
 * @param whereClause optional filter appended verbatim to the SELECT (may be blank)
 * @throws SQLException on any database access error
 * @throws SAXException if the handler rejects an element
 */
private void doExportTable(final TransformerHandler handler, final String dbSchema, final Connection conn,
        final String tableName, final String whereClause) throws SQLException, SAXException {
    LOG.debug("Export table {}", tableName);
    AttributesImpl attrs = new AttributesImpl();
    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        StringBuilder orderBy = new StringBuilder();
        DatabaseMetaData meta = conn.getMetaData();
        // ------------------------------------
        // retrieve foreign keys (linked to the same table) to perform an ordered select
        ResultSet pkeyRS = null;
        try {
            pkeyRS = meta.getImportedKeys(conn.getCatalog(), dbSchema, tableName);
            while (pkeyRS.next()) {
                // Only self-referencing FKs matter for intra-table ordering.
                if (tableName.equals(pkeyRS.getString("PKTABLE_NAME"))) {
                    String columnName = pkeyRS.getString("FKCOLUMN_NAME");
                    if (columnName != null) {
                        if (orderBy.length() > 0) {
                            orderBy.append(",");
                        }
                        orderBy.append(columnName);
                    }
                }
            }
        } finally {
            if (pkeyRS != null) {
                try {
                    pkeyRS.close();
                } catch (SQLException e) {
                    LOG.error("While closing result set", e);
                }
            }
        }
        // retrieve primary keys to perform an ordered select
        // NOTE(review): a column that is both a self-FK and a PK is appended to
        // ORDER BY twice — harmless for most databases, but worth confirming.
        try {
            pkeyRS = meta.getPrimaryKeys(null, null, tableName);
            while (pkeyRS.next()) {
                String columnName = pkeyRS.getString("COLUMN_NAME");
                if (columnName != null) {
                    if (orderBy.length() > 0) {
                        orderBy.append(",");
                    }
                    orderBy.append(columnName);
                }
            }
        } finally {
            if (pkeyRS != null) {
                try {
                    pkeyRS.close();
                } catch (SQLException e) {
                    LOG.error("While closing result set", e);
                }
            }
        }
        // ------------------------------------
        StringBuilder query = new StringBuilder();
        query.append("SELECT * FROM ").append(tableName).append(" a");
        if (StringUtils.isNotBlank(whereClause)) {
            query.append(" WHERE ").append(whereClause);
        }
        if (orderBy.length() > 0) {
            query.append(" ORDER BY ").append(orderBy);
        }
        stmt = conn.prepareStatement(query.toString());
        rs = stmt.executeQuery();
        while (rs.next()) {
            // attrs is reused across rows; clear before filling.
            attrs.clear();
            final ResultSetMetaData rsMeta = rs.getMetaData();
            for (int i = 0; i < rsMeta.getColumnCount(); i++) {
                // JDBC column indexes are 1-based.
                final String columnName = rsMeta.getColumnName(i + 1);
                final Integer columnType = rsMeta.getColumnType(i + 1);
                // Retrieve value taking care of binary values.
                String value = getValues(rs, columnName, columnType);
                // Skip columns explicitly configured to be nullified for this table.
                if (value != null && (!COLUMNS_TO_BE_NULLIFIED.containsKey(tableName)
                        || !COLUMNS_TO_BE_NULLIFIED.get(tableName).contains(columnName))) {
                    attrs.addAttribute("", "", columnName, "CDATA", value);
                }
            }
            handler.startElement("", "", tableName, attrs);
            handler.endElement("", "", tableName);
            LOG.debug("Add record {}", attrs);
        }
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.error("While closing result set", e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                LOG.error("While closing result set", e);
            }
        }
    }
}
From source file: org.apache.zeppelin.postgresql.PostgreSqlInterpreter.java
/**
 * Executes a single SQL statement on the shared JDBC connection and renders the
 * outcome as an InterpreterResult: a TAB/NEWLINE separated table (prefixed with
 * TABLE_MAGIC_TAG) for result sets, or an update-count message otherwise.
 * At most maxResult rows are rendered.
 *
 * NOTE(review): currentStatement is an instance field — presumably so a
 * concurrent cancel can reach the running statement; confirm the intended
 * threading model before refactoring.
 *
 * @param sql the statement to run
 * @return SUCCESS with the rendered output, or ERROR with the failure message
 */
private InterpreterResult executeSql(String sql) {
    try {
        if (exceptionOnConnect != null) {
            // Connection failed earlier; surface that error instead of running SQL.
            return new InterpreterResult(Code.ERROR, exceptionOnConnect.getMessage());
        }
        currentStatement = getJdbcConnection().createStatement();
        currentStatement.setMaxRows(maxResult);
        StringBuilder msg = null;
        boolean isTableType = false;
        // EXPLAIN output is rendered as plain text; everything else as a table.
        if (containsIgnoreCase(sql, EXPLAIN_PREDICATE)) {
            msg = new StringBuilder();
        } else {
            msg = new StringBuilder(TABLE_MAGIC_TAG);
            isTableType = true;
        }
        ResultSet resultSet = null;
        try {
            boolean isResultSetAvailable = currentStatement.execute(sql);
            if (isResultSetAvailable) {
                resultSet = currentStatement.getResultSet();
                ResultSetMetaData md = resultSet.getMetaData();
                // Header row: column names, TAB-separated (1-based JDBC indexes).
                for (int i = 1; i < md.getColumnCount() + 1; i++) {
                    if (i > 1) {
                        msg.append(TAB);
                    }
                    msg.append(replaceReservedChars(isTableType, md.getColumnName(i)));
                }
                msg.append(NEWLINE);
                // Data rows, capped at getMaxResult().
                int displayRowCount = 0;
                while (resultSet.next() && displayRowCount < getMaxResult()) {
                    for (int i = 1; i < md.getColumnCount() + 1; i++) {
                        msg.append(replaceReservedChars(isTableType, resultSet.getString(i)));
                        if (i != md.getColumnCount()) {
                            msg.append(TAB);
                        }
                    }
                    msg.append(NEWLINE);
                    displayRowCount++;
                }
            } else {
                // Response contains either an update count or there are no results.
                int updateCount = currentStatement.getUpdateCount();
                msg.append(UPDATE_COUNT_HEADER).append(NEWLINE);
                msg.append(updateCount).append(NEWLINE);
                // In case of update event (i.e. isResultSetAvailable = false) refresh
                // the completion meta-data, since the schema may have changed.
                if (sqlCompleter != null) {
                    sqlCompleter.updateDataModelMetaData(getJdbcConnection());
                }
            }
        } finally {
            // Always release the statement; the nested finally guarantees the
            // field is nulled even if close() throws.
            try {
                if (resultSet != null) {
                    resultSet.close();
                }
                currentStatement.close();
            } finally {
                currentStatement = null;
            }
        }
        return new InterpreterResult(Code.SUCCESS, msg.toString());
    } catch (SQLException ex) {
        logger.error("Cannot run " + sql, ex);
        return new InterpreterResult(Code.ERROR, ex.getMessage());
    }
}
From source file: com.p5solutions.core.jpa.orm.rowbinder.EntityRowBinder.java
protected int findColumnIndex(ResultSetMetaData metaData, String columnName) throws SQLException { // TODO fix this or cache it somehow, so it doesn't iterate each query for (int i = 1; i <= metaData.getColumnCount(); i++) { String name = metaData.getColumnName(i); if (name.toUpperCase().equals(columnName)) { return i; }//from w w w . j av a 2 s . com } return -1; }