List of usage examples for java.sql ResultSetMetaData getColumnCount

int getColumnCount() throws SQLException;

Returns the number of columns in this ResultSet object.

From source file: fll.web.developer.QueryHandler.java
// Developer query endpoint: executes an arbitrary SQL query supplied by the
// request and writes the result (column names, row data, or error text) back
// as a JSON document.
// NOTE(review): the query string is executed verbatim from the request
// parameter — acceptable only because this is an explicit developer tool, as
// the SpotBugs suppression below documents.
@SuppressFBWarnings(value = { "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE" }, justification = "Executing query from user")
@Override
protected void processRequest(final HttpServletRequest request, final HttpServletResponse response,
        final ServletContext application, final HttpSession session) throws IOException, ServletException {
    // Column names (lower-cased), in result-set order.
    final List<String> columnNames = new LinkedList<String>();
    // One map per row: column name -> string value (may be null).
    final List<Map<String, String>> data = new LinkedList<Map<String, String>>();
    String error = null;
    DataSource datasource = ApplicationAttributes.getDataSource(application);
    Statement stmt = null;
    ResultSet rs = null;
    Connection connection = null;
    try {
        connection = datasource.getConnection();
        final String query = request.getParameter(QUERY_PARAMETER);
        stmt = connection.createStatement();
        rs = stmt.executeQuery(query);
        ResultSetMetaData meta = rs.getMetaData();
        // JDBC column indices are 1-based.
        for (int columnNum = 1; columnNum <= meta.getColumnCount(); ++columnNum) {
            columnNames.add(meta.getColumnName(columnNum).toLowerCase());
        }
        while (rs.next()) {
            final Map<String, String> row = new HashMap<String, String>();
            for (final String columnName : columnNames) {
                final String value = rs.getString(columnName);
                row.put(columnName, value);
            }
            data.add(row);
        }
    } catch (final SQLException e) {
        // Report the failure inside the JSON payload rather than as an HTTP
        // error, so the developer UI can display it.
        error = e.getMessage();
        LOGGER.error("Exception doing developer query", e);
    } finally {
        // Close in reverse acquisition order; SQLFunctions.close tolerates null.
        SQLFunctions.close(rs);
        SQLFunctions.close(stmt);
        SQLFunctions.close(connection);
    }
    response.setContentType("application/json");
    response.setCharacterEncoding(Utilities.DEFAULT_CHARSET.name());
    final ResultData result = new ResultData(columnNames, data, error);
    final ObjectMapper jsonMapper = new ObjectMapper();
    final Writer writer = response.getWriter();
    jsonMapper.writeValue(writer, result);
}
From source file:com.taobao.tddl.common.jdbc.MetaDataQueryForMapHandler.java
/** * @param tableName /* w w w . jav a 2s. c o m*/ */ private void initMetaData(String tableName, ResultSetMetaData rsmd) { try { int columnCount = rsmd.getColumnCount(); String[] columnNames = new String[columnCount]; ColumnMetaData[] columns = new ColumnMetaData[columnCount]; for (int i = 1; i <= columnCount; i++) { columnNames[i - 1] = rsmd.getColumnName(i).toLowerCase(); int sqlType = rsmd.getColumnType(i); if (sqlType == java.sql.Types.DATE) { sqlType = java.sql.Types.TIMESTAMP; } int scale = rsmd.getScale(i); String className = rsmd.getColumnClassName(i); columns[i - 1] = new ColumnMetaData(sqlType, scale, className); } TableMetaData tmd = new TableMetaData(columnNames, columns); this.tableMetaDatas.putIfAbsent(tableName, tmd); } catch (SQLException e) { log.warn("Fetch Metadata from resultSet failed.", e); } }
From source file:net.sf.jdmf.data.sources.jdbc.JDBCDataSource.java
/** * @see net.sf.jdmf.data.sources.DataSource#getAttributes() *///from w w w.jav a 2 s . c om public Map<String, List<Comparable>> getAttributes() { Map<String, List<Comparable>> attributes = new LinkedHashMap<String, List<Comparable>>(); try { Connection connection = DriverManager.getConnection(connectionString, userName, password); Statement statement = connection.createStatement(); for (String query : queries) { ResultSet resultSet = statement.executeQuery(query); ResultSetMetaData metaData = resultSet.getMetaData(); int columnCount = metaData.getColumnCount(); for (int i = 1; i <= columnCount; ++i) { String attributeName = metaData.getColumnName(i); List<Comparable> attributeValues = new ArrayList<Comparable>(); attributes.put(attributeName, attributeValues); } while (resultSet.next()) { for (int i = 1; i <= columnCount; ++i) { List<Comparable> attributeValues = attributes.get(metaData.getColumnName(i)); attributeValues.add(getValueAsComparable(resultSet.getObject(i))); } } resultSet.close(); } statement.close(); connection.close(); } catch (SQLException e) { throw new DataSourceException("Could not retrieve data", e); } return attributes; }
From source file:com.jaspersoft.jasperserver.war.CSVServlet.java
private void printCSV(ResultSet rs, PrintWriter out) throws Exception { ResultSetMetaData md = rs.getMetaData(); int numCols = md.getColumnCount(); // print column headers for (int i = 1; i < numCols; i++) { out.write(quoteString(md.getColumnName(i))); out.write(SEP);// ww w . j ava 2s .c om } out.write(quoteString(md.getColumnName(numCols))); out.write(NEWLINE); // print row data while (rs.next()) { for (int i = 1; i < numCols; i++) { out.write(quoteString("" + rs.getObject(i))); out.write(SEP); } out.write(quoteString("" + rs.getObject(numCols))); out.write(NEWLINE); } }
From source file:com.xtesoft.xtecuannet.framework.templater.filler.utils.SQLScanner.java
/**
 * Discovers the name and JDBC type of every column of {@code tableName} by
 * selecting a single row from it.
 *
 * Fix: the prepared statement and result set are now closed via
 * try-with-resources, so they are released even when the query fails (the
 * original leaked both on any exception).
 *
 * NOTE(review): the table name is concatenated into the SQL text — table
 * names cannot be bound as parameters, so callers must pass trusted names
 * only. "select top 1" is SQL Server / Sybase syntax.
 *
 * @param tableName table to inspect
 * @return the discovered fields, or an empty list if the query fails
 */
public List<SQLField> getSQLFields(String tableName) {
    List<SQLField> fields = new ArrayList<SQLField>(0);
    logger.info("Processing table: " + tableName);
    try (PreparedStatement psta = getConnection().prepareStatement("select top 1 * from " + tableName);
            ResultSet rset = psta.executeQuery()) {
        ResultSetMetaData metadata = rset.getMetaData();
        int columnCount = metadata.getColumnCount();
        for (int i = 1; i <= columnCount; i++) {
            fields.add(new SQLField(metadata.getColumnName(i), metadata.getColumnType(i)));
        }
    } catch (Exception e) {
        logger.error("Error getting fields for table: " + tableName, e);
    }
    return fields;
}
From source file:com.opencsv.ResultSetHelperService.java
@Override public String[] getColumnNames(ResultSet rs) throws SQLException { ResultSetMetaData metadata = rs.getMetaData(); String[] nameArray = new String[metadata.getColumnCount()]; for (int i = 0; i < metadata.getColumnCount(); i++) { nameArray[i] = metadata.getColumnLabel(i + 1); }//from ww w. j a v a2s .c o m return nameArray; }
From source file:com.opencsv.ResultSetHelperService.java
@Override public String[] getColumnValues(ResultSet rs, boolean trim, String dateFormatString, String timeFormatString) throws SQLException, IOException { ResultSetMetaData metadata = rs.getMetaData(); String[] valueArray = new String[metadata.getColumnCount()]; for (int i = 1; i <= metadata.getColumnCount(); i++) { valueArray[i - 1] = getColumnValue(rs, metadata.getColumnType(i), i, trim, dateFormatString, timeFormatString);// ww w.j a v a 2 s . com } return valueArray; }
From source file:org.wte4j.impl.service.SimpleDbViewModelService.java
// Builds a template data model by selecting one row from a configured DB view,
// keyed by an integer primary key. Column names become (lower-cased) map keys.
@Override
public WteDataModel createModel(Template<?> template, Object input) {
    // View and primary-key column are read from the template's properties.
    String viewName = template.getProperties().get(VIEW_NAME);
    String pkColumnName = template.getProperties().get(PRIMARY_KEY_COLUMN_NAME);
    Integer pk = (Integer) input;
    Map<String, Object> dataMap = new HashMap<String, Object>();
    //TODO PreparedStatement!
    // NOTE(review): view/column names and pk are concatenated into the SQL
    // text; pk is an Integer so injection surface is limited, but the names
    // come from template properties — confirm they are trusted.
    String query = "select * from " + viewName + " where " + pkColumnName + "=" + pk;
    try {
        Connection connection = null;
        Statement statement = null;
        try {
            connection = ds.getConnection();
            statement = connection.createStatement();
            ResultSet rs = statement.executeQuery(query);
            ResultSetMetaData metaData = rs.getMetaData();
            // If several rows match, later rows overwrite earlier values in dataMap.
            while (rs.next()) {
                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                    String columnName = metaData.getColumnName(i).toLowerCase();
                    dataMap.put(columnName, rs.getObject(i));
                }
            }
        } finally {
            // Closing the statement implicitly closes its result set;
            // connection is closed last.
            if (statement != null) {
                statement.close();
            }
            if (connection != null) {
                connection.close();
            }
        }
    } catch (SQLException e) {
        throw new WteException("error in createModel (" + viewName + ", " + pkColumnName + ", " + pk + ")", e);
    }
    return new WteMapModel(dataMap);
}
From source file:fr.jetoile.hadoopunit.component.HiveServer2BootstrapTest.java
// Integration test: verifies an embedded HiveServer2 is reachable over JDBC by
// creating a database and an ORC table, describing the table, and dropping it.
@Test
public void hiveServer2ShouldStart() throws InterruptedException, ClassNotFoundException, SQLException {
    // assertThat(Utils.available("127.0.0.1", 20103)).isFalse();

    // Load the Hive JDBC driver
    LOGGER.info("HIVE: Loading the Hive JDBC Driver");
    Class.forName("org.apache.hive.jdbc.HiveDriver");

    //
    // Create an ORC table and describe it
    //
    // Get the connection (host/port/database come from the test configuration)
    Connection con = DriverManager.getConnection(
            "jdbc:hive2://" + configuration.getString(HadoopUnitConfig.HIVE_SERVER2_HOSTNAME_KEY) + ":"
                    + configuration.getInt(HadoopUnitConfig.HIVE_SERVER2_PORT_KEY) + "/"
                    + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY),
            "user", "pass");

    // Create the DB
    Statement stmt;
    try {
        String createDbDdl = "CREATE DATABASE IF NOT EXISTS "
                + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY);
        stmt = con.createStatement();
        LOGGER.info("HIVE: Running Create Database Statement: {}", createDbDdl);
        stmt.execute(createDbDdl);
    } catch (Exception e) {
        // Failure here is printed and ignored so the test can continue
        // (e.g. when the database already exists).
        e.printStackTrace();
    }

    // Drop the table incase it still exists
    String dropDdl = "DROP TABLE " + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY) + "."
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY);
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Drop Table Statement: {}", dropDdl);
    stmt.execute(dropDdl);

    // Create the ORC table
    String createDdl = "CREATE TABLE IF NOT EXISTS "
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY) + "."
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY) + " (id INT, msg STRING) "
            + "PARTITIONED BY (dt STRING) " + "CLUSTERED BY (id) INTO 16 BUCKETS "
            + "STORED AS ORC tblproperties(\"orc.compress\"=\"NONE\")";
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Create Table Statement: {}", createDdl);
    stmt.execute(createDdl);

    // Issue a describe on the new table and display the output
    LOGGER.info("HIVE: Validating Table was Created: ");
    ResultSet resultSet = stmt.executeQuery(
            "DESCRIBE FORMATTED " + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY));
    int count = 0;
    while (resultSet.next()) {
        ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
        for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
            System.out.print(resultSet.getString(i));
        }
        System.out.println();
        count++;
    }
    // DESCRIBE FORMATTED for this schema is expected to emit exactly 33 rows.
    assertEquals(33, count);

    // Drop the table
    dropDdl = "DROP TABLE " + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY) + "."
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY);
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Drop Table Statement: {}", dropDdl);
    stmt.execute(dropDdl);
}
From source file:com.chiorichan.account.adapter.sql.SqlAdapter.java
// Loads an account record by matching the given id against any of a set of
// candidate columns (configured accountFields plus "acctId"/"username").
@Override
public AccountMetaData readAccount(String id) throws LoginException {
    try {
        AccountMetaData meta = new AccountMetaData();
        if (id == null || id.isEmpty())
            throw new LoginException(LoginExceptionReason.emptyUsername);
        Set<String> accountFieldSet = new HashSet<String>(accountFields);
        Set<String> accountColumnSet = new HashSet<String>();
        accountFieldSet.add("acctId");
        accountFieldSet.add("username");
        // LIMIT 0 query: fetches no rows, only the table's column metadata.
        ResultSet rs = sql.query("SELECT * FROM `" + table + "` LIMIT 0;");
        ResultSetMetaData rsmd = rs.getMetaData();
        int columnCount = rsmd.getColumnCount();
        // do-while: the body runs once even though LIMIT 0 yields no rows,
        // which is what populates accountColumnSet from the metadata.
        do {
            for (int i = 1; i < columnCount + 1; i++) {
                accountColumnSet.add(rsmd.getColumnName(i));
            }
        } while (rs.next());
        // Build " OR `col` = 'id'" clauses for every configured field that
        // exists as a column (exact match first, else case-insensitive match).
        // NOTE(review): id is concatenated into the SQL text unescaped —
        // confirm upstream sanitization or move to a parameterized query.
        String additionalAccountFields = "";
        for (String f : accountFieldSet) {
            if (!f.isEmpty())
                if (accountColumnSet.contains(f))
                    additionalAccountFields += " OR `" + f + "` = '" + id + "'";
                else
                    for (String c : accountColumnSet) {
                        if (c.equalsIgnoreCase(f)) {
                            additionalAccountFields += " OR `" + c + "` = '" + id + "'";
                            break;
                        }
                    }
        }
        // substring(4) strips the leading " OR " from the first clause.
        rs = sql.query("SELECT * FROM `" + table + "` WHERE " + additionalAccountFields.substring(4) + ";");
        if (rs == null || sql.getRowCount(rs) < 1)
            throw new LoginException(LoginExceptionReason.incorrectLogin);
        meta.setAll(DatabaseEngine.convertRow(rs));
        // displayName: "fname name" when fname is present, otherwise just "name".
        meta.set("displayName", (rs.getString("fname").isEmpty()) ? rs.getString("name")
                : rs.getString("fname") + " " + rs.getString("name"));
        return meta;
    } catch (SQLException e) {
        throw new LoginException(e);
    }
}