List of usage examples for java.sql.ResultSet.getMetaData()
ResultSetMetaData getMetaData() throws SQLException;

Retrieves the number, types and properties of this ResultSet object's columns.

From source file:kr.co.bitnine.octopus.engine.CursorByPass.java
private void execute(int numRows) throws PostgresException {
    if (getState() == State.DONE || getState() == State.FAILED)
        setState(State.READY);

    if (getState() != State.READY)
        return;

    LOG.debug("execute CursorByPass (rows=" + numRows + ")");

    try {
        // NOTE: some JDBC drivers do not ignore setFetchSize(0)
        if (numRows > 0)
            stmt.setFetchSize(numRows);

        checkCancel();
        ResultSet rs = stmt.executeQuery();
        checkCancel();

        ResultSetMetaData rsmd = rs.getMetaData();
        int colCnt = rsmd.getColumnCount();
        PostgresAttribute[] attrs = new PostgresAttribute[colCnt];
        for (int i = 0; i < colCnt; i++) {
            String colName = rsmd.getColumnName(i + 1);
            int colType = rsmd.getColumnType(i + 1);
            LOG.info("JDBC type of column '" + colName + "' is " + colType);
            PostgresType type = TypeInfo.postresTypeOfJdbcType(colType);
            int typeInfo = -1;
            if (type == PostgresType.VARCHAR)
                typeInfo = rsmd.getColumnDisplaySize(i + 1);
            attrs[i] = new PostgresAttribute(colName, type, typeInfo);
        }

        tupDesc = new TupleDesc(attrs, getResultFormats());
        tupSetByPass = new TupleSetByPass(this, rs, tupDesc);

        setState(State.ACTIVE);
    } catch (SQLException e) {
        setState(State.FAILED);
        close();

        PostgresErrorData edata = new PostgresErrorData(PostgresSeverity.ERROR,
                "failed to execute by-pass query: " + e.getMessage());
        throw new PostgresException(edata, e);
    }
}
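Distilled from the example above, here is a minimal, self-contained sketch of the core pattern: obtain the metadata once, then iterate the 1-based column indexes. The class name and the query string are placeholders, not part of the Octopus code.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

final class DescribeColumns {
    // Print name, JDBC type code and display size for every column of an arbitrary query.
    static void describe(Connection conn, String sql) throws SQLException {
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(sql)) {
            ResultSetMetaData md = rs.getMetaData();
            for (int i = 1; i <= md.getColumnCount(); i++) { // JDBC columns are 1-based
                System.out.printf("%d: %s type=%d displaySize=%d%n",
                        i, md.getColumnName(i), md.getColumnType(i), md.getColumnDisplaySize(i));
            }
        }
    }
}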
From source file:com.googlecode.psiprobe.controllers.sql.ExecuteSqlController.java
protected ModelAndView handleContext(String contextName, Context context, HttpServletRequest request,
        HttpServletResponse response) throws Exception {

    String resourceName = ServletRequestUtils.getStringParameter(request, "resource");
    String sql = ServletRequestUtils.getStringParameter(request, "sql", null);

    if (sql == null || sql.equals("") || sql.trim().equals("")) {
        request.setAttribute("errorMessage",
                getMessageSourceAccessor().getMessage("probe.src.dataSourceTest.sql.required"));
        return new ModelAndView(getViewName());
    }

    int maxRows = ServletRequestUtils.getIntParameter(request, "maxRows", 0);
    int rowsPerPage = ServletRequestUtils.getIntParameter(request, "rowsPerPage", 0);
    int historySize = ServletRequestUtils.getIntParameter(request, "historySize", 0);

    // store current option values and query history in a session attribute
    HttpSession sess = request.getSession();
    DataSourceTestInfo sessData = (DataSourceTestInfo) sess.getAttribute(DataSourceTestInfo.DS_TEST_SESS_ATTR);

    synchronized (sess) {
        if (sessData == null) {
            sessData = new DataSourceTestInfo();
            sess.setAttribute(DataSourceTestInfo.DS_TEST_SESS_ATTR, sessData);
        }
        sessData.setMaxRows(maxRows);
        sessData.setRowsPerPage(rowsPerPage);
        sessData.setHistorySize(historySize);
        sessData.addQueryToHistory(sql);
    }

    DataSource dataSource = null;
    try {
        dataSource = getContainerWrapper().getResourceResolver().lookupDataSource(context, resourceName,
                getContainerWrapper());
    } catch (NamingException e) {
        request.setAttribute("errorMessage", getMessageSourceAccessor()
                .getMessage("probe.src.dataSourceTest.resource.lookup.failure", new Object[] { resourceName }));
    }

    if (dataSource == null) {
        request.setAttribute("errorMessage", getMessageSourceAccessor()
                .getMessage("probe.src.dataSourceTest.resource.lookup.failure", new Object[] { resourceName }));
    } else {
        List results = null;
        int rowsAffected = 0;

        try {
            // TODO: use Spring's jdbc template?
            Connection conn = dataSource.getConnection();
            try {
                conn.setAutoCommit(true);
                PreparedStatement stmt = conn.prepareStatement(sql);
                try {
                    boolean hasResultSet = stmt.execute();

                    if (!hasResultSet) {
                        rowsAffected = stmt.getUpdateCount();
                    } else {
                        results = new ArrayList();
                        ResultSet rs = stmt.getResultSet();
                        try {
                            ResultSetMetaData metaData = rs.getMetaData();

                            while (rs.next() && (maxRows < 0 || results.size() < maxRows)) {
                                Map record = new LinkedHashMap();
                                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                                    String value = rs.getString(i);

                                    if (rs.wasNull()) {
                                        value = getMessageSourceAccessor()
                                                .getMessage("probe.src.dataSourceTest.sql.null");
                                    } else {
                                        value = HtmlUtils.htmlEscape(value);
                                    }

                                    // a work around for IE browsers bug of not displaying
                                    // a border around an empty table column
                                    if (value.equals("")) {
                                        value = "&nbsp;";
                                    }

                                    // Pad the keys of columns with existing labels so they are distinct
                                    String key = metaData.getColumnLabel(i);
                                    while (record.containsKey(key)) {
                                        key += " ";
                                    }
                                    record.put(HtmlUtils.htmlEscape(key), value);
                                }
                                results.add(record);
                            }
                        } finally {
                            rs.close();
                        }
                        rowsAffected = results.size();
                    }
                } finally {
                    stmt.close();
                }
            } finally {
                conn.close();
            }

            // store the query results in the session attribute in order
            // to support a result set pagination feature without re-executing the query
            synchronized (sess) {
                sessData.setResults(results);
            }

            ModelAndView mv = new ModelAndView(getViewName(), "results", results);
            mv.addObject("rowsAffected", String.valueOf(rowsAffected));
            mv.addObject("rowsPerPage", String.valueOf(rowsPerPage));
            return mv;
        } catch (SQLException e) {
            String message = getMessageSourceAccessor().getMessage("probe.src.dataSourceTest.sql.failure",
                    new Object[] { e.getMessage() });
            logger.error(message, e);
            request.setAttribute("errorMessage", message);
        }
    }

    return new ModelAndView(getViewName());
}
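Reduced to its metadata essentials, the pattern above reads getColumnCount() once, keys each row map by getColumnLabel() (which, unlike getColumnName(), reflects SQL AS aliases), and pads duplicate labels so the keys stay distinct. The sketch below uses assumed names and plain JDBC, not Probe's controller plumbing.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

final class RowMaps {
    // Collect at most maxRows rows, each as an insertion-ordered map keyed by column label.
    static List<Map<String, Object>> query(Connection conn, String sql, int maxRows) throws SQLException {
        List<Map<String, Object>> rows = new ArrayList<>();
        try (PreparedStatement stmt = conn.prepareStatement(sql);
             ResultSet rs = stmt.executeQuery()) {
            ResultSetMetaData md = rs.getMetaData();
            int cols = md.getColumnCount();
            while (rs.next() && rows.size() < maxRows) {
                Map<String, Object> row = new LinkedHashMap<>();
                for (int i = 1; i <= cols; i++) {
                    String key = md.getColumnLabel(i); // honors "SELECT col AS alias"
                    while (row.containsKey(key)) {     // keep duplicate labels distinct
                        key += " ";
                    }
                    row.put(key, rs.getObject(i));
                }
                rows.add(row);
            }
        }
        return rows;
    }
}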
From source file:com.zimbra.cs.db.JdbcClient.java
private void runSql(Connection conn, String sql) {
    Matcher m = PAT_SELECT.matcher(sql);

    if (m.find()) {
        // Run query and display results
        try {
            Statement stmt = conn.createStatement();
            ResultSet rs = stmt.executeQuery(sql);
            ResultSetMetaData md = rs.getMetaData();
            int colCount = md.getColumnCount();
            List<Object[]> firstRows = new ArrayList<Object[]>();
            int rowCount = 0;

            // Set initial column widths based on column labels
            int[] colWidths = new int[colCount];
            if (mShowColumnNames) {
                for (int i = 0; i < colCount; i++) {
                    String name = md.getColumnLabel(i + 1);
                    if (name.length() > colWidths[i]) {
                        colWidths[i] = name.length();
                    }
                }
            }

            // Read first 1000 rows first to calculate column widths for printing
            while (rowCount < 1000 && rs.next()) {
                Object[] row = getCurrentRow(rs);
                for (int i = 0; i < colCount; i++) {
                    Object o = row[i];
                    int width = (o == null) ? NULL.length() : (o.toString()).length();
                    if (width > colWidths[i]) {
                        colWidths[i] = width;
                    }
                }
                firstRows.add(row);
                rowCount++;
            }

            // Print first rows
            if (!mBatch && mShowColumnNames) {
                // Skip if we're in batch mode.  If not displaying column names, don't
                // print the first divider.
                printDivider(colWidths);
            }
            if (mShowColumnNames) {
                String[] colNames = new String[colCount];
                for (int i = 0; i < colCount; i++) {
                    colNames[i] = md.getColumnLabel(i + 1);
                }
                printRow(colNames, colWidths);
            }
            if (!mBatch) {
                printDivider(colWidths);
            }
            for (Object[] row : firstRows) {
                printRow(row, colWidths);
            }

            // Print any remaining rows
            while (rs.next()) {
                Object[] row = getCurrentRow(rs);
                printRow(row, colWidths);
            }
            if (!mBatch) {
                printDivider(colWidths);
            }

            rs.close();
            stmt.close();
        } catch (SQLException e) {
            e.printStackTrace();
            System.err.println(e.getMessage());
        }
    } else {
        // Run statement
        try {
            Statement stmt = conn.createStatement();
            int numRows = stmt.executeUpdate(sql);
            stmt.close();
            System.out.println("Updated " + numRows + " rows");
        } catch (SQLException e) {
            System.err.println(e.getMessage());
        }
    }
}
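A simpler width heuristic, sketched below with assumed names (not Zimbra code), skips the row pre-read and pads each column to the larger of its label length and the driver-reported display size. Note that some drivers report very large display sizes for text columns, so the value is capped here.

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

final class AlignedPrinter {
    // Print a left-aligned text table, sizing columns from the metadata alone.
    static void print(ResultSet rs) throws SQLException {
        ResultSetMetaData md = rs.getMetaData();
        int cols = md.getColumnCount();
        int[] widths = new int[cols];
        StringBuilder header = new StringBuilder();
        for (int i = 1; i <= cols; i++) {
            // Cap the reported display size at 40 characters to keep the output readable.
            int w = Math.max(md.getColumnLabel(i).length(), Math.min(md.getColumnDisplaySize(i), 40));
            widths[i - 1] = Math.max(1, w);
            header.append(String.format("%-" + widths[i - 1] + "s ", md.getColumnLabel(i)));
        }
        System.out.println(header);
        while (rs.next()) {
            StringBuilder line = new StringBuilder();
            for (int i = 1; i <= cols; i++) {
                line.append(String.format("%-" + widths[i - 1] + "s ", String.valueOf(rs.getObject(i))));
            }
            System.out.println(line);
        }
    }
}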
From source file:com.netspective.axiom.sql.ResultSetUtils.java
public Object[][] getResultSetRowsAsMatrix(ResultSet rs) throws SQLException {
    ArrayList result = new ArrayList();
    while (rs.next()) {
        ResultSetMetaData rsmd = rs.getMetaData();
        int colsCount = rsmd.getColumnCount();
        Object[] row = new Object[colsCount];
        for (int i = 1; i <= colsCount; i++) {
            row[i - 1] = rs.getObject(i);
        }
        result.add(row);
    }

    if (result.size() > 0)
        return (Object[][]) result.toArray(new Object[result.size()][]);
    else
        return null;
}
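One detail worth noting in this example: getMetaData() is called once per row, although the metadata cannot change between rows of the same result set. A variant that hoists the call out of the loop (a sketch using generics, not the netspective code) looks like this:

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

final class MatrixExtractor {
    // Same behaviour as above (null for an empty result), with the metadata fetched once.
    static Object[][] rowsAsMatrix(ResultSet rs) throws SQLException {
        ResultSetMetaData rsmd = rs.getMetaData();
        int cols = rsmd.getColumnCount();
        List<Object[]> result = new ArrayList<>();
        while (rs.next()) {
            Object[] row = new Object[cols];
            for (int i = 1; i <= cols; i++) {
                row[i - 1] = rs.getObject(i);
            }
            result.add(row);
        }
        return result.isEmpty() ? null : result.toArray(new Object[0][]);
    }
}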
From source file:annis.sqlgen.FrequencySqlGenerator.java
@Override
public FrequencyTable extractData(ResultSet rs) throws SQLException, DataAccessException {
    FrequencyTable result = new FrequencyTable();

    ResultSetMetaData meta = rs.getMetaData();

    while (rs.next()) {
        Validate.isTrue(meta.getColumnCount() > 1, "frequency table extractor needs at least 2 columns");
        Validate.isTrue("count".equalsIgnoreCase(meta.getColumnName(meta.getColumnCount())),
                "last column name must be \"count\"");

        long count = rs.getLong("count");
        String[] tupel = new String[meta.getColumnCount() - 1];

        for (int i = 1; i <= tupel.length; i++) {
            String colVal = rs.getString(i);
            if (colVal == null) {
                tupel[i - 1] = "";
            } else {
                String[] splitted = colVal.split(":", 3);
                if (splitted.length > 0) {
                    colVal = splitted[splitted.length - 1];
                }
                tupel[i - 1] = colVal;
            }
        } // end for each column (except last "count" column)

        result.addEntry(new FrequencyTable.Entry(tupel, count));
    } // end for complete result

    return result;
}
From source file:com.jaeksoft.searchlib.crawler.database.DatabaseCrawlSql.java
public String checkSqlSelect() throws InstantiationException, IllegalAccessException, ClassNotFoundException,
        SQLException {
    JDBCConnection jdbcCnx = getNewJdbcConnection();
    Transaction transaction = null;
    StringWriter sw = null;
    PrintWriter pw = null;
    try {
        sw = new StringWriter();
        pw = new PrintWriter(sw);
        transaction = getNewTransaction(jdbcCnx);
        Query query = transaction.prepare(sqlSelect);
        query.getStatement().setFetchSize(getBufferSize());
        ResultSet resultSet = query.getResultSet();
        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();
        pw.print("Found ");
        pw.print(columnCount);
        pw.println(" column(s)");
        for (int i = 1; i <= columnCount; i++) {
            pw.print(i);
            pw.print(": ");
            pw.println(metaData.getColumnLabel(i));
        }
        return sw.toString();
    } finally {
        IOUtils.close(pw, sw);
        if (transaction != null)
            transaction.close();
    }
}
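A related check, sketched here with assumed plain-JDBC plumbing rather than the Jaeksoft Transaction/Query wrappers, reports each column's driver-specific type name alongside its label via getColumnTypeName():

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

final class SelectChecker {
    // Describe the columns a SELECT would return: index, label and the driver's type name.
    static String describeSelect(Connection conn, String sqlSelect) throws SQLException {
        StringBuilder sb = new StringBuilder();
        try (PreparedStatement stmt = conn.prepareStatement(sqlSelect);
             ResultSet rs = stmt.executeQuery()) {
            ResultSetMetaData md = rs.getMetaData();
            sb.append("Found ").append(md.getColumnCount()).append(" column(s)\n");
            for (int i = 1; i <= md.getColumnCount(); i++) {
                sb.append(i).append(": ").append(md.getColumnLabel(i))
                  .append(" (").append(md.getColumnTypeName(i)).append(")\n");
            }
        }
        return sb.toString();
    }
}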
From source file:jp.co.golorp.emarf.model.Models.java
/**
 * Runs the given SQL and returns the result rows as a list of maps.
 *
 * @param sql
 *            sql
 * @param params
 *            params
 * @return List
 */
private static List<Map<String, Object>> getDatas(final String sql, final Object... params) {

    // Build the raw SQL with the parameters substituted in
    String rawSql = getRawSql(sql, params);

    // Return the cached result for this SQL, if present
    List<Map<String, Object>> datas = ModelsCache.get(rawSql);
    if (datas != null) {
        return datas;
    }

    // statementLog(rawSql);

    // Prepare the statement
    PreparedStatement ps = null;
    try {
        ps = Connections.get().prepareStatement(sql);
        for (int i = 0; i < params.length; i++) {
            ps.setString(i + 1, String.valueOf(params[i]));
        }

        // Collect the result rows
        datas = new ArrayList<Map<String, Object>>();

        ResultSet rs = null;
        try {
            // Execute the query
            rs = ps.executeQuery();

            while (rs.next()) {
                // One row
                Map<String, Object> data = new LinkedHashMap<String, Object>();

                // Metadata of the ResultSet
                ResultSetMetaData meta = rs.getMetaData();

                // Column count from the metadata
                int columnCount = meta.getColumnCount();

                for (int i = 1; i <= columnCount; i++) {
                    // Column name
                    String columnName = meta.getColumnName(i);

                    // Property name (camelCase)
                    String propertyName = StringUtil.toCamelCase(columnName);

                    String key = propertyName;
                    if (data.containsKey(propertyName)) {
                        String modelName = StringUtil.toUpperCamelCase(meta.getTableName(i));
                        key = modelName + "." + propertyName;
                    }

                    // Store the column value
                    data.put(key, rs.getObject(columnName));
                }

                // Add the row
                datas.add(data);
            }
        } catch (SQLException e) {
            throw new SystemError(e);
        } finally {
            IOUtil.closeQuietly(rs);
        }
    } catch (SQLException e) {
        throw new SystemError(e);
    } finally {
        IOUtil.closeQuietly(ps);
    }

    // Cache the result for this SQL
    // ModelsCache.set(rawSql, datas);

    return datas;
}
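The duplicate-key handling above relies on ResultSetMetaData.getTableName(), which the JDBC Javadoc allows to be an empty string when not applicable, and which some drivers indeed leave blank. A defensive sketch (illustrative names, not part of emarf) that falls back to the column index in that case:

import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Map;

final class ColumnKeys {
    // Qualify a duplicate column name with the table name when the driver reports one,
    // otherwise fall back to the column index so keys stay unique.
    static String uniqueKey(ResultSetMetaData meta, int i, Map<String, Object> row) throws SQLException {
        String key = meta.getColumnName(i);
        if (!row.containsKey(key)) {
            return key;
        }
        String table = meta.getTableName(i); // may be "" if not applicable
        return (table == null || table.isEmpty()) ? key + "#" + i : table + "." + key;
    }
}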
From source file:teambootje.A5.java
/**
 * Creates new form A5
 */
public A5() {
    initComponents();
    setLocationRelativeTo(null);
    setLayout(new BorderLayout());

    // Create and set up the window.
    setTitle("SS Rotterdam Analyse || Analyse 5");
    ImageIcon icon = new ImageIcon("img/bootje.jpg");
    setIconImage(icon.getImage());

    // back button
    JButton back = new JButton("Back");
    add(back, BorderLayout.NORTH);
    back.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            dispose();
            // throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
        }
    });

    // panel and label
    JPanel ana = new JPanel();
    add(ana, BorderLayout.CENTER);

    // table
    String sql = "SELECT Leeftijd, COUNT(*) AS Aantal FROM persoon GROUP BY Leeftijd";
    List<Object[]> list = new ArrayList<Object[]>();
    ResultSet rs = null;
    try {
        rs = db.runSql(sql);
        while (rs.next()) {
            String age = rs.getString("Leeftijd");
            int aantal = rs.getInt("Aantal");

            String[] row = new String[rs.getMetaData().getColumnCount()];
            for (int i = 1; i <= rs.getMetaData().getColumnCount(); i++) {
                row[i - 1] = rs.getString(i);
            }
            list.add(row);

            // chart
            JButton chart = new JButton("Chart");
            add(chart, BorderLayout.SOUTH);
            chart.addActionListener(new ActionListener() {
                String leeftijd = age;
                int a1 = aantal;

                @Override
                public void actionPerformed(ActionEvent e) {
                    DefaultPieDataset pieDataset = new DefaultPieDataset();
                    pieDataset.setValue("Niet vrijgegeven", a1);
                    JFreeChart chart = ChartFactory.createPieChart3D("Aantal mensen per leeftijd", pieDataset,
                            true, true, true);
                    PiePlot3D p = (PiePlot3D) chart.getPlot();
                    //p.setForegroundAlpha(TOP_ALIGNMENT);
                    ChartFrame pie = new ChartFrame("Aantal mensen per leeftijd", chart);
                    pie.setVisible(true);
                    pie.setSize(500, 500);
                    pie.setLocationRelativeTo(null);
                    // throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
                }
            });
        }
    } catch (SQLException e) {
        JOptionPane.showMessageDialog(null, e);
    }

    Object[][] array = new Object[list.size()][];
    Object columnNames[] = { "Leeftijd", "Aantal" };
    list.toArray(array);

    JTable table = new JTable(array, columnNames);
    JScrollPane scroll = new JScrollPane(table);
    scroll.setPreferredSize(new Dimension(400, 400));
    ana.add(scroll);
}
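Since the query already carries its column labels, the JTable headers could be taken from the metadata instead of being hard-coded; the sketch below shows that idea with standard Swing's DefaultTableModel (class and method names here are illustrative, not part of the project above).

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Vector;
import javax.swing.table.DefaultTableModel;

final class ResultSetTableModels {
    // Build a Swing table model directly from a ResultSet, taking the headers from
    // getColumnLabel() so SQL aliases such as "Aantal" appear without hard-coding.
    static DefaultTableModel toTableModel(ResultSet rs) throws SQLException {
        ResultSetMetaData md = rs.getMetaData();
        int cols = md.getColumnCount();
        Vector<String> headers = new Vector<>();
        for (int i = 1; i <= cols; i++) {
            headers.add(md.getColumnLabel(i));
        }
        Vector<Vector<Object>> rows = new Vector<>();
        while (rs.next()) {
            Vector<Object> row = new Vector<>();
            for (int i = 1; i <= cols; i++) {
                row.add(rs.getObject(i));
            }
            rows.add(row);
        }
        return new DefaultTableModel(rows, headers);
    }
}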
From source file:fr.jetoile.hadoopunit.component.HiveServer2BootstrapTest.java
@Test
public void hiveServer2ShouldStart() throws InterruptedException, ClassNotFoundException, SQLException {

    // assertThat(Utils.available("127.0.0.1", 20103)).isFalse();

    // Load the Hive JDBC driver
    LOGGER.info("HIVE: Loading the Hive JDBC Driver");
    Class.forName("org.apache.hive.jdbc.HiveDriver");

    //
    // Create an ORC table and describe it
    //
    // Get the connection
    Connection con = DriverManager
            .getConnection(
                    "jdbc:hive2://" + configuration.getString(HadoopUnitConfig.HIVE_SERVER2_HOSTNAME_KEY) + ":"
                            + configuration.getInt(HadoopUnitConfig.HIVE_SERVER2_PORT_KEY) + "/"
                            + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY),
                    "user", "pass");

    // Create the DB
    Statement stmt;
    try {
        String createDbDdl = "CREATE DATABASE IF NOT EXISTS "
                + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY);
        stmt = con.createStatement();
        LOGGER.info("HIVE: Running Create Database Statement: {}", createDbDdl);
        stmt.execute(createDbDdl);
    } catch (Exception e) {
        e.printStackTrace();
    }

    // Drop the table incase it still exists
    String dropDdl = "DROP TABLE " + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY) + "."
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY);
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Drop Table Statement: {}", dropDdl);
    stmt.execute(dropDdl);

    // Create the ORC table
    String createDdl = "CREATE TABLE IF NOT EXISTS "
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY) + "."
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY) + " (id INT, msg STRING) "
            + "PARTITIONED BY (dt STRING) " + "CLUSTERED BY (id) INTO 16 BUCKETS "
            + "STORED AS ORC tblproperties(\"orc.compress\"=\"NONE\")";
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Create Table Statement: {}", createDdl);
    stmt.execute(createDdl);

    // Issue a describe on the new table and display the output
    LOGGER.info("HIVE: Validating Table was Created: ");
    ResultSet resultSet = stmt.executeQuery(
            "DESCRIBE FORMATTED " + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY));
    int count = 0;
    while (resultSet.next()) {
        ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
        for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
            System.out.print(resultSet.getString(i));
        }
        System.out.println();
        count++;
    }
    assertEquals(33, count);

    // Drop the table
    dropDdl = "DROP TABLE " + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY) + "."
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY);
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Drop Table Statement: {}", dropDdl);
    stmt.execute(dropDdl);
}
From source file:net.mlw.vlh.adapter.jdbc.dynabean.fix.JDBCDynaClass.java
/**
 * <p>Introspect the metadata associated with our result set, and populate
 * the <code>properties</code> and <code>propertiesMap</code> instance
 * variables.</p>
 *
 * @param resultSet The <code>resultSet</code> whose metadata is to
 *  be introspected
 *
 * @exception SQLException if an error is encountered processing the
 *  result set metadata
 */
protected void introspect(ResultSet resultSet) throws SQLException {

    // Accumulate an ordered list of DynaProperties
    List list = new ArrayList();
    ResultSetMetaData metadata = resultSet.getMetaData();
    int n = metadata.getColumnCount();
    for (int i = 1; i <= n; i++) { // JDBC is one-relative!
        DynaProperty dynaProperty = createDynaProperty(metadata, i);
        if (dynaProperty != null) {
            list.add(dynaProperty);
        }
    }

    // Convert this list into the internal data structures we need
    properties = (DynaProperty[]) list.toArray(new DynaProperty[list.size()]);
    for (int i = 0; i < properties.length; i++) {
        propertiesMap.put(properties[i].getName(), properties[i]);
    }
}
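The createDynaProperty helper is not shown above. One plausible way to derive a name/type pair per column is ResultSetMetaData.getColumnClassName(), which reports the Java class that ResultSet.getObject() would return for that column. The sketch below illustrates that idea with assumed names and an assumed fallback to Object; it is not the commons-beanutils implementation.

import java.sql.ResultSetMetaData;
import java.sql.SQLException;

final class ColumnProperty {
    final String name;
    final Class<?> type;

    ColumnProperty(String name, Class<?> type) {
        this.name = name;
        this.type = type;
    }

    // Derive a property descriptor for column i (1-based) from the metadata.
    // Fall back to Object when the driver's reported class cannot be loaded.
    static ColumnProperty fromMetaData(ResultSetMetaData md, int i) throws SQLException {
        String name = md.getColumnName(i);
        String className = md.getColumnClassName(i);
        Class<?> type = Object.class;
        if (className != null) {
            try {
                type = Class.forName(className);
            } catch (ClassNotFoundException e) {
                // keep Object.class as a safe fallback
            }
        }
        return new ColumnProperty(name, type);
    }
}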