List of usage examples for java.sql.Statement.getResultSet()

ResultSet getResultSet() throws SQLException;

Retrieves the current result of the Statement as a ResultSet object. It returns null if the current result is an update count or there are no more results, so callers should check for null before iterating.
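Before the project examples below, here is a minimal sketch of the typical pattern: call execute(), then getResultSet(), and fall back to getUpdateCount() when no result set was produced. The in-memory H2 URL and the "SELECT 1" statement are placeholders for illustration only.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class GetResultSetExample {
    public static void main(String[] args) throws SQLException {
        // placeholder JDBC URL; replace with the URL of a real database
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement stmt = conn.createStatement()) {
            boolean hasResultSet = stmt.execute("SELECT 1");
            if (hasResultSet) {
                // the current result is a ResultSet
                try (ResultSet rs = stmt.getResultSet()) {
                    while (rs.next()) {
                        System.out.println(rs.getInt(1));
                    }
                }
            } else {
                // the current result is an update count
                System.out.println("Update count: " + stmt.getUpdateCount());
            }
        }
    }
}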
From source file:org.talend.core.model.metadata.builder.database.ExtractMetaDataFromDataBase.java
private static String executeGetCommentStatement(String queryStmt, Connection connection) {
    String comment = null;
    Statement statement = null;
    ResultSet resultSet = null;
    try {
        statement = connection.createStatement();
        statement.execute(queryStmt);
        // get the results
        resultSet = statement.getResultSet();
        if (resultSet != null) {
            while (resultSet.next()) {
                comment = (String) resultSet.getObject(1);
            }
        }
    } catch (SQLException e) {
        // do nothing here
    } finally {
        // -- release resources
        if (resultSet != null) {
            try {
                resultSet.close();
            } catch (SQLException e) {
                log.error(e, e);
            }
        }
        if (statement != null) {
            try {
                statement.close();
            } catch (SQLException e) {
                log.error(e, e);
            }
        }
    }
    return comment;
}
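The same lookup can be written more compactly with try-with-resources (Java 7+). This is a minimal sketch, not the Talend code: it assumes the same queryStmt and connection arguments and that propagating the SQLException to the caller (instead of swallowing it) is acceptable. Note that try-with-resources tolerates a null resource, so the possible null from getResultSet() is safe here.

private static String executeGetCommentStatement(String queryStmt, Connection connection) throws SQLException {
    String comment = null;
    try (Statement statement = connection.createStatement()) {
        statement.execute(queryStmt);
        // getResultSet() may return null if the statement produced an update count
        try (ResultSet resultSet = statement.getResultSet()) {
            if (resultSet != null) {
                while (resultSet.next()) {
                    comment = (String) resultSet.getObject(1);
                }
            }
        }
    }
    return comment;
}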
From source file:org.apache.falcon.regression.core.util.HiveAssert.java
/**
 * Assertion for equality of two tables.
 * @param cluster1 the ColoHelper of first cluster
 * @param table1 the first table (expected values)
 * @param cluster2 the ColoHelper of second cluster
 * @param table2 the second table (actual values)
 * @param softAssert object used for performing assertion
 * @return object used for performing assertion
 * @throws java.io.IOException
 */
public static SoftAssert assertTableEqual(ColoHelper cluster1, HCatTable table1, ColoHelper cluster2,
        HCatTable table2, SoftAssert softAssert, boolean notIgnoreTblTypeAndProps) throws IOException {
    FileSystem cluster1FS = cluster1.getClusterHelper().getHadoopFS();
    FileSystem cluster2FS = cluster2.getClusterHelper().getHadoopFS();
    final String table1FullName = table1.getDbName() + "." + table1.getTableName();
    final String table2FullName = table2.getDbName() + "." + table2.getTableName();
    LOGGER.info("Checking equality of table : " + table1FullName + " & " + table2FullName);
    //table metadata equality
    softAssert.assertEquals(table2.comment(), table1.comment(),
            "Table " + table1FullName + " has different comment from " + table2FullName);
    softAssert.assertEquals(table2.getBucketCols(), table1.getBucketCols(),
            "Table " + table1FullName + " has different bucket columns from " + table2FullName);
    assertColumnListEqual(table1.getCols(), table2.getCols(), softAssert);
    softAssert.assertEquals(table2.getNumBuckets(), table1.getNumBuckets(),
            "Table " + table1FullName + " has different number of buckets from " + table2FullName);
    assertColumnListEqual(table1.getPartCols(), table2.getPartCols(), softAssert);
    softAssert.assertEquals(table2.getSerdeParams(), table1.getSerdeParams(),
            "Table " + table1FullName + " has different serde params from " + table2FullName);
    softAssert.assertEquals(table2.getSortCols(), table1.getSortCols(),
            "Table " + table1FullName + " has different sort columns from " + table2FullName);
    softAssert.assertEquals(table2.getStorageHandler(), table1.getStorageHandler(),
            "Table " + table1FullName + " has different storage handler from " + table2FullName);
    if (notIgnoreTblTypeAndProps) {
        softAssert.assertEquals(table2.getTabletype(), table1.getTabletype(),
                "Table " + table1FullName + " has different Tabletype from " + table2FullName);
    }
    final Map<String, String> tbl1Props = table1.getTblProps();
    final Map<String, String> tbl2Props = table2.getTblProps();
    final String[] ignoreTblProps = { "transient_lastDdlTime", "repl.last.id", "last_modified_by",
        "last_modified_time", "COLUMN_STATS_ACCURATE", };
    for (String ignoreTblProp : ignoreTblProps) {
        tbl1Props.remove(ignoreTblProp);
        tbl2Props.remove(ignoreTblProp);
    }
    final String[] ignoreDefaultProps = { "numRows", "rawDataSize" };
    for (String ignoreProp : ignoreDefaultProps) {
        if ("-1".equals(tbl1Props.get(ignoreProp))) {
            tbl1Props.remove(ignoreProp);
        }
        if ("-1".equals(tbl2Props.get(ignoreProp))) {
            tbl2Props.remove(ignoreProp);
        }
    }
    if (notIgnoreTblTypeAndProps) {
        softAssert.assertEquals(tbl2Props, tbl1Props,
                "Table " + table1FullName + " has different TblProps from " + table2FullName);
    }
    LOGGER.info("Checking equality of table partitions");
    HCatClient hcatClient1 = cluster1.getClusterHelper().getHCatClient();
    HCatClient hcatClient2 = cluster2.getClusterHelper().getHCatClient();
    final List<HCatPartition> table1Partitions =
        hcatClient1.getPartitions(table1.getDbName(), table1.getTableName());
    final List<HCatPartition> table2Partitions =
        hcatClient2.getPartitions(table2.getDbName(), table2.getTableName());
    assertPartitionListEqual(table1Partitions, table2Partitions, softAssert);
    if (notIgnoreTblTypeAndProps) {
        softAssert.assertEquals(cluster2FS.getContentSummary(new Path(table2.getLocation())).getLength(),
                cluster1FS.getContentSummary(new Path(table1.getLocation())).getLength(),
                "Size of content for table1 and table2 are different");
    }
    //table content equality
    LOGGER.info("Checking equality of table contents");
    Statement jdbcStmt1 = null, jdbcStmt2 = null;
    try {
        final boolean execute1;
        final boolean execute2;
        jdbcStmt1 = cluster1.getClusterHelper().getHiveJdbcConnection().createStatement();
        jdbcStmt2 = cluster2.getClusterHelper().getHiveJdbcConnection().createStatement();
        execute1 = jdbcStmt1.execute("select * from " + table1FullName);
        execute2 = jdbcStmt2.execute("select * from " + table2FullName);
        softAssert.assertEquals(execute2, execute1,
                "Table " + table1FullName + " has different result of select * from " + table2FullName);
        if (execute1 && execute2) {
            final ResultSet resultSet1 = jdbcStmt1.getResultSet();
            final ResultSet resultSet2 = jdbcStmt2.getResultSet();
            final List<String> rows1 = HiveUtil.fetchRows(resultSet1);
            final List<String> rows2 = HiveUtil.fetchRows(resultSet2);
            softAssert.assertEquals(rows2, rows1,
                    "Table " + table1FullName + " has different content from " + table2FullName);
        }
    } catch (SQLException e) {
        softAssert.fail("Comparison of content of table " + table1FullName + " with content of table "
                + table2FullName + " failed because of exception\n" + ExceptionUtils.getFullStackTrace(e));
    } finally {
        if (jdbcStmt1 != null) {
            try {
                jdbcStmt1.close();
            } catch (SQLException e) {
                LOGGER.warn("Closing of jdbcStmt1 failed: " + ExceptionUtils.getFullStackTrace(e));
            }
        }
        if (jdbcStmt2 != null) {
            try {
                jdbcStmt2.close();
            } catch (SQLException e) {
                LOGGER.warn("Closing of jdbcStmt2 failed: " + ExceptionUtils.getFullStackTrace(e));
            }
        }
    }
    return softAssert;
}
From source file:org.openconcerto.sql.utils.SQLUtils.java
/**
 * Execute all queries at once if possible.
 *
 * @param sysRoot where to execute.
 * @param queries what to execute.
 * @param handlers how to process the result sets, items can be <code>null</code>.
 * @return the results of the handlers.
 * @throws SQLException if an error occurs.
 * @throws RTInterruptedException if the current thread is interrupted.
 * @see SQLSystem#isMultipleResultSetsSupported()
 */
static public List<?> executeMultiple(final DBSystemRoot sysRoot, final List<String> queries,
        final List<? extends ResultSetHandler> handlers) throws SQLException, RTInterruptedException {
    final int size = handlers.size();
    if (queries.size() != size)
        throw new IllegalArgumentException("Size mismatch " + queries + " / " + handlers);
    final List<Object> results = new ArrayList<Object>(size);
    final SQLSystem system = sysRoot.getServer().getSQLSystem();
    if (system.isMultipleResultSetsSupported()) {
        final long timeMs = System.currentTimeMillis();
        final long time = System.nanoTime();
        final long afterCache = time;
        final StringBuilder sb = new StringBuilder(256 * size);
        for (final String q : queries) {
            sb.append(q);
            if (!q.trim().endsWith(";"))
                sb.append(';');
            sb.append('\n');
        }
        final String query = sb.toString();
        sysRoot.getDataSource().useConnection(new ConnectionHandlerNoSetup<Object, SQLException>() {
            @Override
            public Object handle(SQLDataSource ds) throws SQLException {
                final Connection conn = ds.getConnection();
                if (system == SQLSystem.MYSQL) {
                    final ConnectionProperties connectionProperties =
                        (ConnectionProperties) ((DelegatingConnection) conn).getInnermostDelegate();
                    if (!connectionProperties.getAllowMultiQueries()) {
                        throw new IllegalStateException(
                                "Multi queries not allowed and the setting can only be set before connecting");
                    }
                }
                final long afterQueryInfo = System.nanoTime();
                final long afterExecute, afterHandle;
                final Statement stmt = conn.createStatement();
                try {
                    if (Thread.currentThread().isInterrupted())
                        throw new RTInterruptedException("Interrupted before executing : " + query);
                    stmt.execute(query);
                    afterExecute = System.nanoTime();
                    for (final ResultSetHandler h : handlers) {
                        if (Thread.currentThread().isInterrupted())
                            throw new RTInterruptedException("Interrupted while handling results : " + query);
                        results.add(h == null ? null : h.handle(stmt.getResultSet()));
                        stmt.getMoreResults();
                    }
                    afterHandle = System.nanoTime();
                } finally {
                    stmt.close();
                }
                SQLRequestLog.log(query, "executeMultiple", conn, timeMs, time, afterCache, afterQueryInfo,
                        afterExecute, afterHandle, System.nanoTime());
                return null;
            }
        });
    } else {
        // use the same connection to allow some insert/update followed by a select
        sysRoot.getDataSource().useConnection(new ConnectionHandlerNoSetup<Object, SQLException>() {
            @Override
            public Object handle(SQLDataSource ds) throws SQLException {
                for (int i = 0; i < size; i++) {
                    final ResultSetHandler rsh = handlers.get(i);
                    // since the other if clause cannot support cache and this clause doesn't
                    // have any table to fire, don't use cache
                    results.add(sysRoot.getDataSource().execute(queries.get(i),
                            rsh == null ? null : new IResultSetHandler(rsh, false)));
                }
                return null;
            }
        });
    }
    return results;
}
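The handler loop above assumes each submitted query yields exactly one result set. For the general case, JDBC defines a standard pattern for walking an arbitrary mix of result sets and update counts produced by a single execute() call, alternating getResultSet()/getUpdateCount() with getMoreResults(). The sketch below is a generic illustration of that pattern, not part of the OpenConcerto code; the class name, batchSql argument, and println row handling are placeholders.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class MultiResultExample {
    static void processAllResults(Connection conn, String batchSql) throws SQLException {
        try (Statement stmt = conn.createStatement()) {
            boolean isResultSet = stmt.execute(batchSql);
            while (true) {
                if (isResultSet) {
                    // current result is a ResultSet
                    try (ResultSet rs = stmt.getResultSet()) {
                        while (rs.next()) {
                            System.out.println(rs.getObject(1));
                        }
                    }
                } else {
                    // current result is an update count; -1 means there are no more results
                    int updateCount = stmt.getUpdateCount();
                    if (updateCount == -1) {
                        break;
                    }
                    System.out.println("Updated rows: " + updateCount);
                }
                // advance to the next result; this also closes the current ResultSet
                isResultSet = stmt.getMoreResults();
            }
        }
    }
}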
From source file:io.cloudslang.content.database.services.SQLCommandService.java
public static String executeSqlCommand(final SQLInputs sqlInputs) throws Exception {
    final ConnectionService connectionService = new ConnectionService();
    try (final Connection connection = connectionService.setUpConnection(sqlInputs)) {
        connection.setReadOnly(false);
        final String dbType = sqlInputs.getDbType();
        if (ORACLE_DB_TYPE.equalsIgnoreCase(dbType)
                && sqlInputs.getSqlCommand().toLowerCase().contains(DBMS_OUTPUT)) {
            final PreparedStatement preparedStatement = connection.prepareStatement(sqlInputs.getSqlCommand());
            preparedStatement.setQueryTimeout(sqlInputs.getTimeout());
            OracleDbmsOutput oracleDbmsOutput = new OracleDbmsOutput(connection);
            preparedStatement.executeQuery();
            sqlInputs.setIUpdateCount(preparedStatement.getUpdateCount());
            preparedStatement.close();
            final String output = oracleDbmsOutput.getOutput();
            oracleDbmsOutput.close();
            return output;
        } else {
            final Statement statement = connection.createStatement(sqlInputs.getResultSetType(),
                    sqlInputs.getResultSetConcurrency());
            statement.setQueryTimeout(sqlInputs.getTimeout());
            try {
                statement.execute(sqlInputs.getSqlCommand());
            } catch (SQLException e) {
                if (SYBASE_DB_TYPE.equalsIgnoreCase(dbType)) {
                    //during a dump sybase sends back status as exceptions.
                    if (sqlInputs.getSqlCommand().trim().toLowerCase().startsWith("dump")) {
                        return SQLUtils.processDumpException(e);
                    } else if (sqlInputs.getSqlCommand().trim().toLowerCase().startsWith("load")) {
                        return SQLUtils.processLoadException(e);
                    }
                } else {
                    throw e;
                }
            }
            ResultSet rs = statement.getResultSet();
            if (rs != null) {
                ResultSetMetaData rsMtd = rs.getMetaData();
                if (rsMtd != null) {
                    sqlInputs.getLRows().clear();
                    int colCount = rsMtd.getColumnCount();
                    if (sqlInputs.getSqlCommand().trim().toLowerCase().startsWith("dbcc")) {
                        while (rs.next()) {
                            if (colCount >= 4) {
                                sqlInputs.getLRows().add(rs.getString(4));
                            }
                        }
                    } else {
                        String delimiter = (StringUtils.isNoneEmpty(sqlInputs.getStrDelim()))
                                ? sqlInputs.getStrDelim() : ",";
                        String strRowHolder;
                        while (rs.next()) {
                            strRowHolder = "";
                            for (int i = 1; i <= colCount; i++) {
                                if (i > 1) {
                                    strRowHolder += delimiter;
                                }
                                strRowHolder += rs.getString(i);
                            }
                            sqlInputs.getLRows().add(strRowHolder);
                        }
                    }
                    rs.close();
                }
            }
            //For sybase, when dbcc command is executed, the result is shown in warning message
            else if (dbType.equalsIgnoreCase(SYBASE_DB_TYPE)
                    && sqlInputs.getSqlCommand().trim().toLowerCase().startsWith("dbcc")) {
                SQLWarning warning = statement.getWarnings();
                while (warning != null) {
                    sqlInputs.getLRows().add(warning.getMessage());
                    warning = warning.getNextWarning();
                }
            }
            sqlInputs.setIUpdateCount(statement.getUpdateCount());
        }
    }
    return "Command completed successfully";
}
From source file:wikipedia.sql.Links.java
/** Gets array of ArticleIdAndTitle of pages which point to page with the
 * title 'title_to', number of return array is limited by 'n_limit'.
 *
 * @param increment
 */
public static ArticleIdAndTitle[] getFromByTitleTo(SessionHolder session, String title_to,
        PageNamespace namespace, int n_limit) {
    // SELECT page_id, page_title, page_is_redirect FROM page,pagelinks
    // WHERE page_id=pl_from AND pl_title='' AND pl_namespace = 0 LIMIT 4;
    sb.setLength(0);
    sb.append("SELECT page_id, page_title, page_is_redirect FROM page,pagelinks WHERE page_id=pl_from AND pl_title='");
    sb.append(session.connect.enc.EncodeToDB(StringUtil.spaceToUnderscore(StringUtil.escapeChars(title_to))));
    sb.append("' AND pl_namespace=");
    sb.append(namespace.toInt());
    if (-1 != n_limit) {
        sb.append(" LIMIT ");
        sb.append(n_limit);
    }
    List<ArticleIdAndTitle> aid = new ArrayList<ArticleIdAndTitle>();
    //return getLinksSQL_AsID(session, sb_sql_count_size.toString(), sb_sql.toString(), n_limit);
    try {
        Statement s = session.connect.conn.createStatement();
        s.executeQuery(sb.toString());
        ResultSet rs = s.getResultSet();
        //if (rs.next()) {
        Encodings e = session.connect.enc;
        // gets all id, make permutation, takes first 'min' elements
        while (rs.next()) {
            int page_id = rs.getInt("page_id");
            if (1 == rs.getInt("page_is_redirect")) {
                page_id = -page_id;
            }
            String db_str = Encodings.bytesTo(rs.getBytes("page_title"), e.GetDBEnc());
            String page_title = e.EncodeFromDB(db_str);
            aid.add(new ArticleIdAndTitle(page_id, page_title));
        }
        //}
        rs.close();
        s.close();
    } catch (SQLException ex) {
        System.err.println("SQLException (Links.getFromByTitleTo() ArticleIdAndTitle[]): sql='"
                + sb.toString() + "' " + ex.getMessage());
    }
    return aid.toArray(ArticleIdAndTitle.NULL_ARTICLEIDANDTITLE_ARRAY);
}
From source file:com.espertech.esper.epl.db.TestDatabaseDMConnFactory.java
private void tryAndCloseConnection(Connection connection) throws Exception {
    Statement stmt = connection.createStatement();
    stmt.execute("select 1 from dual");
    ResultSet result = stmt.getResultSet();
    result.next();
    assertEquals(1, result.getInt(1));
    result.close();
    stmt.close();
    connection.close();
}
From source file:wikipedia.sql.Links.java
/** Creates array of ID, takes it from the table pagelinks. Shuffles it,
 * if it is user preference.
 */
public static Integer[] getLinksSQL_AsID(SessionHolder session, String str_sql_count_size, String str_sql,
        int n_limit) {
    Integer[] result = NULL_INTEGER_ARRAY;
    try {
        Statement s = session.connect.conn.createStatement();
        s.executeQuery(str_sql_count_size.toString());
        ResultSet rs = s.getResultSet();
        if (rs.next()) {
            int size = rs.getInt("size");
            if (0 < size) {
                int min;
                if (n_limit <= 0) {
                    min = size; // takes all elements
                } else {
                    min = Math.min(n_limit, size);
                }
                s.executeQuery(str_sql.toString());
                rs = s.getResultSet();
                // gets all id, make permutation, takes first 'min' elements
                int[] id_all = new int[size];
                for (int i = 0; rs.next(); i++) {
                    id_all[i] = rs.getInt("pl_from");
                }
                int[] id_res = id_all;
                if (session.randomPages() && n_limit < size) {
                    id_res = RandShuffle.permuteRandomly(id_all);
                }
                result = new Integer[min];
                int counter = 0;
                for (int id : id_res) {
                    result[counter++] = id;
                    if (counter >= min)
                        break;
                }
            }
        }
        rs.close();
        s.close();
    } catch (SQLException ex) {
        System.err.println("SQLException (Links.java GetLinksSQL()): sql='" + str_sql + "' " + ex.getMessage());
    }
    return result;
}
From source file:wikipedia.sql.Links.java
/** Creates Article[], fills ->id and ->title using tables page, pagelinks.
 *
 * @param n_limit limits the number of returned nodes
 *                It is not considered if n_limit <= 0, i.e. it will return all results.
 * <pre>
 * Parameters' example:
 * str_from = "FROM page WHERE page_namespace=0 AND page_id" + str_in;
 * str_sql_count_size = "SELECT COUNT(page_id) AS size " + str_from;
 * str_sql = "SELECT page_id, page_title " + str_from;</pre>
 */
public static Article[] getLinksSQL(SessionHolder session, String str_sql_count_size, String str_sql,
        int n_limit) {
    Article[] l_from = null;
    int size = 0, min;
    int l_from_counter = 0;
    try {
        Statement s = session.connect.conn.createStatement();
        s.executeQuery(str_sql_count_size);
        ResultSet rs = s.getResultSet();
        if (rs.next()) {
            size = rs.getInt("size");
            if (0 < size) {
                if (n_limit <= 0) {
                    min = size; // takes all elements
                } else {
                    min = Math.min(n_limit, size);
                }
                s.executeQuery(str_sql);
                rs = s.getResultSet();
                // gets all id, make permutation, takes first 'min' elements
                int[] id_all = new int[size];
                for (int i = 0; rs.next(); i++) {
                    id_all[i] = rs.getInt("pl_from");
                }
                int[] id_res = id_all;
                if (session.randomPages() && n_limit < size) {
                    id_res = RandShuffle.permuteRandomly(id_all);
                }
                l_from = new Article[min];
                for (int id : id_res) {
                    String title = PageTable.getTitleByID(session.connect, id);
                    if (null == title || 0 == title.length())
                        continue;
                    Article a = Article.createArticleWithCategories(session, title, id);
                    if (null != a)
                        l_from[l_from_counter++] = a;
                    if (l_from_counter >= min)
                        break;
                }
            }
        }
        rs.close();
        s.close();
    } catch (SQLException ex) {
        System.err.println("SQLException (Links.java GetLinksSQL()): sql='" + str_sql + "' " + ex.getMessage());
    }
    if (null == l_from || 0 == l_from.length) {
        l_from = null;
        return NULL_ARTICLE_ARRAY;
    }
    Article[] l_from_result;
    // cut off last elements of l_from (if they are null)
    if (l_from.length == l_from_counter) {
        l_from_result = l_from;
    } else {
        l_from_result = new Article[l_from_counter];
        System.arraycopy(l_from, 0, l_from_result, 0, l_from_counter);
        l_from = null;
    }
    return l_from_result;
}
From source file:morphy.utils.john.DatabaseConnection.java
public java.sql.ResultSet executeQueryWithRS(String query) {
    try {
        if (LOG.isInfoEnabled()) {
            LOG.info("Executed query: " + query);
        }
        Statement s = getConnection().createStatement();
        s.execute(query);
        return s.getResultSet();
    } catch (SQLException se) {
        Morphy.getInstance().onError(se);
        return null;
    }
}
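Because executeQueryWithRS returns the live ResultSet, the Statement that produced it stays open until the caller cleans up. A hypothetical caller might look like the sketch below; the db variable, the SQL text, and the column name are illustrative and not part of the Morphy code. Closing the Statement obtained via ResultSet.getStatement() also closes the ResultSet.

java.sql.ResultSet rs = db.executeQueryWithRS("SELECT name FROM users");
if (rs != null) {
    try {
        while (rs.next()) {
            System.out.println(rs.getString("name"));
        }
    } finally {
        try {
            // closing the producing Statement also releases the ResultSet
            rs.getStatement().close();
        } catch (java.sql.SQLException e) {
            // ignore close failure
        }
    }
}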
From source file:com.arsmentis.cordova.jdbc.Jdbc.java
private JSONArray execute(String sql) throws SQLException, JSONException {
    if (connection == null) {
        throw new SQLException("Not connected");
    }
    JSONArray results = new JSONArray();
    Statement statement = connection.createStatement();
    if (statement.execute(sql)) {
        ResultSet resultSet = statement.getResultSet();
        ResultSetMetaData columns = resultSet.getMetaData();
        while (resultSet.next()) {
            JSONObject row = new JSONObject();
            for (int i = 1; i <= columns.getColumnCount(); i++) {
                row.put(columns.getColumnName(i), resultSet.getObject(i));
            }
            results.put(row);
        }
        resultSet.close();
    }
    statement.close();
    return results;
}