Usage examples for java.sql.ResultSetMetaData#getColumnName(int), which returns the name of the column at the given 1-based index:
String getColumnName(int column) throws SQLException;
From source file:org.onebusaway.nyc.webapp.actions.admin.ReportingAction.java
public String submit() throws Exception { Session session = null;/*from ww w. j a v a 2 s . c om*/ Connection connection = null; Statement statement = null; ResultSet rs = null; try { session = sessionFactory.openSession(); connection = getConnectionFromSession(session); connection.setReadOnly(true); statement = connection.createStatement(); rs = statement.executeQuery(query); } catch (Exception e) { // make sure everything is closed if an exception was thrown try { rs.close(); } catch (Exception ex) { } try { statement.close(); } catch (Exception ex) { } try { connection.close(); } catch (Exception ex) { } try { session.close(); } catch (Exception ex) { } reportError = e.getMessage(); // not really "success", but we'll use the same template with the error displayed return SUCCESS; } // final so the output generator thread can close it final Session finalSession = session; final Connection finalConnection = connection; final Statement finalStatement = statement; final ResultSet finalRS = rs; final PipedInputStream pipedInputStream = new PipedInputStream(); final PipedOutputStream pipedOutputStream = new PipedOutputStream(pipedInputStream); executorService.execute(new Runnable() { @Override public void run() { try { // column labels ResultSetMetaData metaData = finalRS.getMetaData(); int columnCount = metaData.getColumnCount(); for (int i = 0; i < columnCount; i++) { String columnName = metaData.getColumnName(i + 1); byte[] bytes = columnName.getBytes(); if (i > 0) pipedOutputStream.write(columnDelimiter); pipedOutputStream.write(bytes); } pipedOutputStream.write(newline); // column values while (finalRS.next()) { for (int i = 0; i < columnCount; i++) { String value = finalRS.getString(i + 1); if (value == null) value = "null"; else { // remove returns value = value.replaceAll("\n|\r", ""); } byte[] valueBytes = value.getBytes(); if (i > 0) pipedOutputStream.write(columnDelimiter); pipedOutputStream.write(valueBytes); } pipedOutputStream.write(newline); } } catch 
(Exception e) { } finally { try { pipedOutputStream.close(); } catch (IOException e) { } try { finalRS.close(); } catch (SQLException e) { } try { finalStatement.close(); } catch (SQLException e) { } try { finalConnection.close(); } catch (SQLException e) { } try { finalSession.close(); } catch (Exception e) { } } } }); // the input stream will get populated by the piped output stream inputStream = pipedInputStream; return "download"; }
From source file:org.apache.phoenix.hive.mapreduce.PhoenixResultWritable.java
@Override public void readFields(ResultSet resultSet) throws SQLException { ResultSetMetaData rsmd = resultSet.getMetaData(); if (columnCount == -1) { this.columnCount = rsmd.getColumnCount(); }/*from ww w. ja v a2 s .com*/ rowMap.clear(); for (int i = 0; i < columnCount; i++) { Object value = resultSet.getObject(i + 1); String columnName = rsmd.getColumnName(i + 1); String mapName = columnMap.get(columnName); if (mapName != null) { columnName = mapName; } rowMap.put(columnName, value); } // Adding row__id column. if (isTransactional) { rowKeyMap.clear(); for (String pkColumn : primaryKeyColumnList) { rowKeyMap.put(pkColumn, rowMap.get(pkColumn)); } } }
From source file:org.apache.cocoon.components.flow.javascript.ScriptableResult.java
/**
 * Wraps a JDBC result set as a JavaScript-accessible object, exposing
 * "rows" (array of name-keyed row objects), "rowsByIndex" (array of
 * positional arrays), "columnNames", "rowCount", and "isLimitedByMaxRows".
 * Rows before {@code startRow} are skipped; at most {@code maxRows} rows are
 * kept (-1 means unlimited).
 */
ScriptableResult(Scriptable scope, ResultSet rs, int startRow, int maxRows)
        throws SQLException, PropertyException, NotAFunctionException, JavaScriptException {
    Context cx = Context.getCurrentContext();
    Scriptable rowMap = cx.newObject(scope, "Array");
    put("rows", this, rowMap);
    Scriptable rowByIndex = cx.newObject(scope, "Array");
    put("rowsByIndex", this, rowByIndex);
    ResultSetMetaData rsmd = rs.getMetaData();
    int noOfColumns = rsmd.getColumnCount();
    // Create the column name array
    Scriptable columnNames = cx.newObject(scope, "Array", new Object[] { new Integer(noOfColumns) });
    put("columnNames", this, columnNames);
    for (int i = 1; i <= noOfColumns; i++) {
        columnNames.put(i - 1, columnNames, rsmd.getColumnName(i));
    }
    // Throw away all rows up to startRow
    for (int i = 0; i < startRow; i++) {
        rs.next();
    }
    // Process the remaining rows up to maxRows
    int processedRows = 0;
    int index = 0;
    boolean isLimited = false;
    while (rs.next()) {
        if ((maxRows != -1) && (processedRows == maxRows)) {
            isLimited = true;
            break;
        }
        Scriptable columns = cx.newObject(scope, "Array", new Object[] { new Integer(noOfColumns) });
        // Row objects share the enclosing scope but use the standard Object
        // prototype so property access behaves like a plain JS object.
        Scriptable columnMap = new Row();
        columnMap.setParentScope(columns.getParentScope());
        columnMap.setPrototype(getObjectPrototype(scope));
        // JDBC uses 1 as the lowest index!
        for (int i = 1; i <= noOfColumns; i++) {
            Object value = rs.getObject(i);
            // Normalize SQL NULL to JS null rather than a driver-specific sentinel.
            if (rs.wasNull()) {
                value = null;
            }
            columns.put(i - 1, columns, value);
            columnMap.put(rsmd.getColumnName(i), columnMap, value);
        }
        rowMap.put(index, rowMap, columnMap);
        rowByIndex.put(index, rowByIndex, columns);
        processedRows++;
        index++;
    }
    put("rowCount", this, new Integer(index));
    put("isLimitedByMaxRows", this, BooleanUtils.toBooleanObject(isLimited));
}
From source file:com.mvdb.etl.dao.impl.JdbcGenericDAO.java
/**
 * Streams every row of {@code objectName} with update_time at/after
 * {@code modifiedAfter} into a sequence file under {@code snapshotDirectory},
 * then writes and returns a {@link DataHeader} holding the row count.
 *
 * @param snapshotDirectory directory receiving the data-&lt;objectName&gt;.dat file
 * @param modifiedAfter inclusive lower bound on the update_time column
 * @param objectName table to snapshot; also names the output file
 * @param keyName name of the key column for each record
 * @param updateTimeColumnName name of the update-time column for each record
 * @return the populated header (row count), also persisted via writeDataHeader
 */
@Override
public DataHeader fetchAll2(File snapshotDirectory, Timestamp modifiedAfter, String objectName,
        final String keyName, final String updateTimeColumnName) {
    File objectFile = new File(snapshotDirectory, "data-" + objectName + ".dat");
    final GenericConsumer genericConsumer = new SequenceFileConsumer(objectFile);
    final DataHeader dataHeader = new DataHeader();
    // NOTE(review): objectName is concatenated into the SQL; assumed to come
    // from trusted configuration, not user input -- verify at call sites.
    String sql = "SELECT * FROM " + objectName + " o where o.update_time >= ?";
    getJdbcTemplate().query(sql, new Object[] { modifiedAfter }, new RowCallbackHandler() {
        @Override
        public void processRow(ResultSet row) throws SQLException {
            final Map<String, Object> dataMap = new HashMap<String, Object>();
            ResultSetMetaData rsm = row.getMetaData();
            int columnCount = rsm.getColumnCount();
            for (int column = 1; column <= columnCount; column++) {
                // Fetch by ordinal: the original fetched by getColumnLabel(),
                // a redundant name lookup that breaks on duplicate or aliased
                // column names; the index is unambiguous and cheaper.
                dataMap.put(rsm.getColumnName(column), row.getObject(column));
            }
            DataRecord dataRecord = new GenericDataRecord(dataMap, keyName, globalMvdbKeyMaker,
                    updateTimeColumnName, new GlobalMvdbUpdateTimeMaker());
            genericConsumer.consume(dataRecord);
            dataHeader.incrementCount();
        }
    });
    genericConsumer.flushAndClose();
    writeDataHeader(dataHeader, objectName, snapshotDirectory);
    return dataHeader;
}
From source file:org.apache.tika.eval.ComparerBatchTest.java
private void debugShowColumns(String table) throws Exception { Statement st = conn.createStatement(); String sql = "select * from " + table; ResultSet rs = st.executeQuery(sql); ResultSetMetaData m = rs.getMetaData(); for (int i = 1; i <= m.getColumnCount(); i++) { System.out.println(i + " : " + m.getColumnName(i)); }//from w w w. j av a 2 s. c o m st.close(); }
From source file:com.netspective.axiom.sql.ResultSetUtils.java
/**
 * Reads the next row of the given result set into a map keyed by the
 * lower-cased column label or column name.
 *
 * @param rs the result set to read from; advanced by one row
 * @param useLabelAsKey true to key entries by column label, false by column name
 * @return a map of lower-cased column key to value, or null when the result
 *         set has no further rows
 * @throws SQLException on JDBC access errors
 */
public Map getResultSetSingleRowAsMap(ResultSet rs, boolean useLabelAsKey) throws SQLException {
    if (!rs.next())
        return null;
    Map singleRow = new HashMap();
    ResultSetMetaData metaData = rs.getMetaData();
    int columnCount = metaData.getColumnCount();
    for (int col = 1; col <= columnCount; col++) {
        String key = useLabelAsKey ? metaData.getColumnLabel(col) : metaData.getColumnName(col);
        singleRow.put(key.toLowerCase(), rs.getObject(col));
    }
    return singleRow;
}
From source file:org.acmsl.queryj.customsql.handlers.customsqlvalidation.RetrieveResultPropertiesHandler.java
/**
 * Creates a {@link Property} describing the column at the given index of the
 * result-set metadata, using the column name for both id and name, and the
 * driver-reported type name and nullability.
 *
 * @param metadata the result set metadata.
 * @param index the 1-based column index.
 * @return the associated {@link Property}.
 * @throws SQLException if the property information is unavailable.
 */
@NotNull
protected Property<String> createPropertyFrom(@NotNull final ResultSetMetaData metadata, final int index)
        throws SQLException {
    final String columnName = metadata.getColumnName(index);
    final String typeName = metadata.getColumnTypeName(index);
    final boolean nullable = metadata.isNullable(index) == ResultSetMetaData.columnNullable;
    return new PropertyElement<>(columnName, columnName, index, typeName, nullable);
}
From source file:com.novartis.opensource.yada.format.ResultSetResultJSONConverter.java
/**
 * Converts data from a {@link java.sql.ResultSet} into a {@link JSONArray}
 * containing one {@link JSONObject} per row. Column names may be renamed (or
 * dropped, when pruning is on) according to the harmony map, the rownum alias
 * column is always skipped, and SQL NULLs are replaced with
 * {@code NULL_REPLACEMENT}. Converted headers and stringified rows are also
 * recorded on the {@code YADAQueryResult}.
 *
 * @param rs the result set containing the data to convert to JSON
 * @return a json array containing the data
 * @throws SQLException when iteration or access to {@code rs} fails
 */
protected JSONArray getJSONRows(ResultSet rs) throws SQLException {
    JSONArray rows = new JSONArray();
    ResultSetMetaData rsmd = rs.getMetaData();
    // Defensive: some drivers can hand back null metadata; fall back to an
    // empty implementation so the loop below is a no-op rather than an NPE.
    if (rsmd == null)
        rsmd = new RowSetMetaDataImpl();
    List<String> convertedResult = new ArrayList<>();
    while (rs.next()) {
        JSONObject row = new JSONObject();
        String colValue;
        for (int i = 1; i <= rsmd.getColumnCount(); i++) {
            String origColName = rsmd.getColumnName(i);
            // Skip the synthetic rownum alias column added by the adaptor.
            if (!origColName.toLowerCase().equals(JDBCAdaptor.ROWNUM_ALIAS)) {
                boolean harmonize = isHarmonized();
                // prune: when harmonizing, drop columns that have no mapping.
                boolean prune = harmonize
                        ? ((JSONObject) this.harmonyMap).has(Harmonizer.PRUNE)
                                && ((JSONObject) this.harmonyMap).getBoolean(Harmonizer.PRUNE)
                        : false;
                String col = origColName;
                if (harmonize) {
                    if (((JSONObject) this.harmonyMap).has(origColName)) {
                        // Rename the column per the harmony map.
                        col = ((JSONObject) this.harmonyMap).getString(origColName);
                    } else if (prune) {
                        // Empty string marks the column as dropped below.
                        col = "";
                    }
                }
                //TODO handle empty result set more intelligently
                // OLD WAY adds headers to empty object when rs is empty
                if (!"".equals(col)) {
                    // Both a real SQL NULL and the literal NULL sentinel get
                    // replaced with NULL_REPLACEMENT.
                    if (null == rs.getString(origColName) || NULL.equals(rs.getString(origColName))) {
                        colValue = NULL_REPLACEMENT;
                    } else {
                        colValue = rs.getString(origColName);
                    }
                    row.put(col, colValue);
                }
            }
        }
        rows.put(row);
        convertedResult.add(row.toString());
    }
    if (rows.length() > 0) {
        // Headers are taken from the first row's keys (post-harmonization).
        for (String key : JSONObject.getNames(rows.getJSONObject(0))) {
            getYADAQueryResult().addConvertedHeader(key);
        }
        getYADAQueryResult().getConvertedResults().add(convertedResult);
    }
    return rows;
}
From source file:com.thinkbiganalytics.schema.QueryRunner.java
/** * Initializes the query result with the specified metadata. * * @param queryResult the query result to initialize * @param rsMetaData the result set metadata for the query * @throws SQLException if the metadata is not available *//*from w w w .j a v a 2 s. co m*/ private void initQueryResult(@Nonnull final DefaultQueryResult queryResult, @Nonnull final ResultSetMetaData rsMetaData) throws SQLException { final List<QueryResultColumn> columns = new ArrayList<>(); final Map<String, Integer> displayNameMap = new HashMap<>(); for (int i = 1; i <= rsMetaData.getColumnCount(); i++) { final DefaultQueryResultColumn column = new DefaultQueryResultColumn(); column.setField(rsMetaData.getColumnName(i)); String displayName = rsMetaData.getColumnLabel(i); column.setHiveColumnLabel(displayName); //remove the table name if it exists displayName = StringUtils.contains(displayName, ".") ? StringUtils.substringAfterLast(displayName, ".") : displayName; Integer count = 0; if (displayNameMap.containsKey(displayName)) { count = displayNameMap.get(displayName); count++; } displayNameMap.put(displayName, count); column.setDisplayName(displayName + "" + (count > 0 ? count : "")); column.setTableName(StringUtils.substringAfterLast(rsMetaData.getColumnName(i), ".")); column.setDataType(ParserHelper.sqlTypeToHiveType(rsMetaData.getColumnType(i))); column.setNativeDataType(rsMetaData.getColumnTypeName(i)); columns.add(column); } queryResult.setColumns(columns); }
From source file:org.apache.hadoop.hive.jdbc.storagehandler.AtsdDBRecordReader.java
private ResultSet replaceDotsInColumnNames(ResultSet resultSet) throws SQLException { ResultSetMetaData metaData = resultSet.getMetaData(); int columnCount = metaData.getColumnCount(); if (columnCount > 0) { CachedRowSetImpl crs = new CachedRowSetImpl(); crs.populate(resultSet);//from www . j av a 2s .com RowSetMetaDataImpl rwsm = new RowSetMetaDataImpl(); rwsm.setColumnCount(columnCount); for (int i = 1; i <= columnCount; i++) { String columnName = metaData.getColumnName(i); if (columnName.contains(".")) { columnName = columnName.replaceAll("\\.", "\\$"); } rwsm.setColumnName(i, columnName); rwsm.setColumnLabel(i, metaData.getColumnLabel(i)); rwsm.setCatalogName(i, metaData.getCatalogName(i)); rwsm.setColumnType(i, metaData.getColumnType(i)); rwsm.setColumnTypeName(i, metaData.getColumnTypeName(i)); rwsm.setSchemaName(i, metaData.getSchemaName(i)); rwsm.setTableName(i, metaData.getTableName(i)); } crs.setMetaData(rwsm); return crs; } return resultSet; }