Example usage for java.sql Statement setFetchSize

Introduction

This page collects example usages of java.sql.Statement.setFetchSize, drawn from several open-source projects.

Prototype

void setFetchSize(int rows) throws SQLException;

Document

Gives the JDBC driver a hint as to the number of rows that should be fetched from the database when more rows are needed for ResultSet objects generated by this Statement.
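
Before the project examples, here is a minimal, self-contained sketch of the basic pattern: set the fetch-size hint on a Statement before executing a query so the driver can retrieve rows in batches rather than materializing the whole result at once. The JDBC URL, credentials, table name, and batch size of 1000 are placeholders, not part of any example below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class FetchSizeExample {
    public static void main(String[] args) throws SQLException {
        // Hypothetical JDBC URL, credentials, and table; substitute your own.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost/testdb", "user", "secret")) {
            // Some drivers (PostgreSQL, for example) honor the hint only with auto-commit off.
            conn.setAutoCommit(false);
            try (Statement stmt = conn.createStatement()) {
                // Hint: fetch about 1000 rows per round trip instead of the driver default.
                stmt.setFetchSize(1000);
                try (ResultSet rs = stmt.executeQuery("SELECT id FROM big_table")) {
                    long count = 0;
                    while (rs.next()) {
                        count++; // process each row as it streams in
                    }
                    System.out.println("Read " + count + " rows");
                }
            }
            conn.commit();
        }
    }
}

The value passed to setFetchSize is only a hint; a driver may ignore it, and passing zero tells the driver to fall back to its own default.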

Usage

From source file:org.apache.zeppelin.hopshive.HopsHiveInterpreter.java

private InterpreterResult executeSql(String propertyKey, String sql, InterpreterContext interpreterContext) {
    Connection connection;
    Statement statement;
    ResultSet resultSet = null;
    String paragraphId = interpreterContext.getParagraphId();
    String user = interpreterContext.getAuthenticationInfo().getUser();

    boolean splitQuery = false;
    String splitQueryProperty = getProperty(String.format("%s.%s", propertyKey, SPLIT_QURIES_KEY));
    if (StringUtils.isNotBlank(splitQueryProperty) && splitQueryProperty.equalsIgnoreCase("true")) {
        splitQuery = true;
    }

    InterpreterResult interpreterResult = new InterpreterResult(InterpreterResult.Code.SUCCESS);
    try {
        connection = getConnection(propertyKey, interpreterContext);
        if (connection == null) {
            return new InterpreterResult(Code.ERROR, "Prefix not found.");
        }

        List<String> sqlArray;
        if (splitQuery) {
            sqlArray = splitSqlQueries(sql);
        } else {
            sqlArray = Arrays.asList(sql);
        }

        for (int i = 0; i < sqlArray.size(); i++) {
            String sqlToExecute = sqlArray.get(i);
            statement = connection.createStatement();
            if (statement == null) {
                return new InterpreterResult(Code.ERROR, "Prefix not found.");
            }

            // fetch n+1 rows in order to indicate there are more rows available (for large selects)
            statement.setFetchSize(getMaxResult());
            statement.setMaxRows(getMaxResult() + 1);

            try {
                getJDBCConfiguration(user).saveStatement(paragraphId, statement);

                boolean isResultSetAvailable = statement.execute(sqlToExecute);
                getJDBCConfiguration(user).setConnectionInDBDriverPoolSuccessful(propertyKey);
                if (isResultSetAvailable) {
                    resultSet = statement.getResultSet();

                    // The command is DDL if it produced neither an update count nor result columns.
                    if (isDDLCommand(statement.getUpdateCount(), resultSet.getMetaData().getColumnCount())) {
                        interpreterResult.add(InterpreterResult.Type.TEXT, "Query executed successfully.");
                    } else {
                        String results = getResults(resultSet,
                                !containsIgnoreCase(sqlToExecute, EXPLAIN_PREDICATE));
                        interpreterResult.add(results);
                        if (resultSet.next()) {
                            interpreterResult.add(ResultMessages.getExceedsLimitRowsMessage(getMaxResult(),
                                    String.format("%s.%s", COMMON_KEY, MAX_LINE_KEY)));
                        }
                    }
                } else {
                    // Response contains either an update count or there are no results.
                    int updateCount = statement.getUpdateCount();
                    interpreterResult.add(InterpreterResult.Type.TEXT,
                            "Query executed successfully. Affected rows : " + updateCount);
                }
            } finally {
                if (resultSet != null) {
                    try {
                        resultSet.close();
                    } catch (SQLException e) {
                        /* ignored */
                    }
                }
                if (statement != null) {
                    try {
                        statement.close();
                    } catch (SQLException e) {
                        /* ignored */
                    }
                }
            }
        }
        // In case the user ran an insert/update/upsert statement
        if (connection != null) {
            try {
                if (!connection.getAutoCommit()) {
                    connection.commit();
                }
            } catch (SQLException e) {
                /* ignored */
            }
        }
        getJDBCConfiguration(user).removeStatement(paragraphId);
    } catch (Throwable e) {
        logger.error("Cannot run " + sql, e);
        String errorMsg = Throwables.getStackTraceAsString(e);
        interpreterResult.add(errorMsg);
        return new InterpreterResult(Code.ERROR, interpreterResult.message());
    }
    return interpreterResult;
}
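
The Zeppelin interpreter above pairs setFetchSize with setMaxRows(getMaxResult() + 1) so it can tell whether a query returned more rows than it is willing to display. Below is a condensed, hypothetical sketch of that truncation check; the class name, method, and single-column rendering are illustrative only and not part of the Zeppelin code.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

final class LimitedQueryRunner {
    // Returns up to maxResult rows of the first column, appending a notice when more rows exist.
    static String runWithLimit(Connection conn, String sql, int maxResult) throws SQLException {
        try (Statement stmt = conn.createStatement()) {
            stmt.setFetchSize(maxResult);   // hint: fetch roughly one display page per round trip
            stmt.setMaxRows(maxResult + 1); // hard cap: the single extra row signals truncation
            try (ResultSet rs = stmt.executeQuery(sql)) {
                StringBuilder out = new StringBuilder();
                int shown = 0;
                while (shown < maxResult && rs.next()) {
                    out.append(rs.getString(1)).append('\n');
                    shown++;
                }
                if (rs.next()) { // row maxResult + 1 exists, so the result set was cut off
                    out.append("... output truncated at ").append(maxResult).append(" rows\n");
                }
                return out.toString();
            }
        }
    }
}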

From source file:org.bml.util.geo.util.geolite.GeoLiteCityBlock.java

public static Map<Integer, GeoLiteCityBlock> readFromDB(ComboPooledDataSource dataSource) {
    Map<Integer, GeoLiteCityBlock> mapOut = new TreeMap<Integer, GeoLiteCityBlock>();
    Connection con = null;
    Statement st = null;
    ResultSet rs = null;
    GeoLiteCityBlock tmp = null;
    int c = 0;
    try {
        con = dataSource.getConnection();

        st = con.createStatement();
        st.setMaxRows(Integer.MAX_VALUE);
        st.setQueryTimeout(600000);
        st.setFetchSize(100000);

        rs = st.executeQuery(GeoLiteCityBlock.PREPARED_SELECT_SQL);
        while (rs.next()) {
            c++;
            mapOut.put(rs.getInt(FIELD.STARTIP.fieldName),
                    new GeoLiteCityBlock(rs.getInt(FIELD.STARTIP.fieldName), rs.getInt(FIELD.ENDIP.fieldName),
                            rs.getInt(FIELD.LOCID.fieldName)));
            if ((c % 100000) == 0) {
                if (LOG.isInfoEnabled()) {
                    LOG.info("Loaded " + c + " IP Block to Location mappings");
                }
            }
        }
    } catch (SQLException ex) {
        if (LOG.isWarnEnabled()) {
            LOG.warn("SQLException caught while loading GeoLiteCityBlock objects ", ex);
        }
    } finally {
        DbUtils.closeQuietly(con, st, rs);
    }
    return mapOut;
}

From source file:org.bml.util.geo.util.geolite.GeoLiteCityLocation.java

public static Map<Integer, GeoLiteCityLocation> readFromDB(ComboPooledDataSource dataSource) {
    Map<Integer, GeoLiteCityLocation> mapOut = new HashMap<Integer, GeoLiteCityLocation>();
    Connection con = null;
    Statement st = null;
    ResultSet rs = null;
    GeoLiteCityBlock tmp = null;
    int c = 0;
    try {
        con = dataSource.getConnection();

        st = con.createStatement();
        st.setMaxRows(Integer.MAX_VALUE);
        st.setQueryTimeout(600000);
        st.setFetchSize(100000);

        rs = st.executeQuery(PREPARED_SELECT_SQL);
        while (rs.next()) {
            c++;
            mapOut.put(rs.getInt(FIELD.LOCID.fieldName),
                    new GeoLiteCityLocation(rs.getInt(FIELD.LOCID.fieldName),
                            rs.getString(FIELD.COUNTRY.fieldName), rs.getString(FIELD.REGION.fieldName),
                            rs.getString(FIELD.CITY.fieldName), rs.getString(FIELD.POSTALCODE.fieldName),
                            rs.getDouble(FIELD.LATITUDE.fieldName), rs.getDouble(FIELD.LONGITUDE.fieldName),
                            rs.getString(FIELD.METROCODE.fieldName), rs.getLong(FIELD.AREACODE.fieldName)));
            if ((c % 100000) == 0) {
                if (LOG.isInfoEnabled()) {
                    LOG.info("Loaded " + c + " Location mappings");
                }
            }
        }
    } catch (SQLException ex) {
        if (LOG.isWarnEnabled()) {
            LOG.warn("SQLException caught while loading GeoLiteCityBlock objects ", ex);
        }
    } finally {
        DbUtils.closeQuietly(con, st, rs);
    }
    return mapOut;
}

From source file:org.diffkit.util.DKSqlUtil.java

public static ResultSet executeQuery(String sql_, Connection connection_, int fetchSize_) throws SQLException {
    LOG.debug("sql_->{}", sql_);
    if ((sql_ == null) || (connection_ == null))
        return null;
    Statement statement = createStatement(connection_);
    if (statement == null)
        return null;

    statement.setFetchSize(fetchSize_);
    return statement.executeQuery(sql_);
}

From source file:org.gbif.ipt.service.manage.impl.SourceManagerImpl.java

private String analyze(SqlSource ss) {
    String problem = null;
    Connection con = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
        con = getDbConnection(ss);
        // test sql
        if (StringUtils.trimToNull(ss.getSql()) != null) {
            stmt = con.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(FETCH_SIZE);
            rs = stmt.executeQuery(ss.getSqlLimited(FETCH_SIZE));
            // get number of columns
            ResultSetMetaData meta = rs.getMetaData();
            ss.setColumns(meta.getColumnCount());
            ss.setReadable(true);
        }
    } catch (SQLException e) {
        log.warn("Cant read sql source " + ss, e);
        problem = e.getMessage();
        ss.setReadable(false);
    } finally {
        // close result set, statement, and connection in that order
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                log.error("ResultSet could not be closed: " + e.getMessage(), e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                log.error("Statement could not be closed: " + e.getMessage(), e);
            }
        }
        if (con != null) {
            try {
                con.close();
            } catch (SQLException e) {
                log.error("Connection could not be closed: " + e.getMessage(), e);
            }
        }
    }
    return problem;
}

From source file:org.gbif.ipt.service.manage.impl.SourceManagerImpl.java

private List<String> columns(SqlSource source) {
    List<String> columns = new ArrayList<String>();
    Connection con = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
        con = getDbConnection(source);
        if (con != null) {
            // test sql
            stmt = con.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(1);
            rs = stmt.executeQuery(source.getSqlLimited(1));
            // get column metadata
            ResultSetMetaData meta = rs.getMetaData();
            int idx = 1;
            int max = meta.getColumnCount();
            while (idx <= max) {
                columns.add(meta.getColumnLabel(idx));
                idx++;
            }
        } else {
            String msg = "Can't read sql source, the connection couldn't be created with the current parameters";
            columns.add(msg);
            log.warn(msg + " " + source);
        }
    } catch (SQLException e) {
        log.warn("Cant read sql source " + source, e);
    } finally {
        // close result set, statement, and connection in that order
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                log.error("ResultSet could not be closed: " + e.getMessage(), e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                log.error("Statement could not be closed: " + e.getMessage(), e);
            }
        }
        if (con != null) {
            try {
                con.close();
            } catch (SQLException e) {
                log.error("Connection could not be closed: " + e.getMessage(), e);
            }
        }
    }
    return columns;
}

From source file:org.gbif.ipt.service.manage.impl.SourceManagerImpl.java

private List<String[]> peek(SqlSource source, int rows) {
    List<String[]> preview = new ArrayList<String[]>();
    Connection con = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
        con = getDbConnection(source);
        if (con != null) {
            // test sql
            stmt = con.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(rows);
            rs = stmt.executeQuery(source.getSqlLimited(rows + 1));
            // loop over result
            while (rows > 0 && rs.next()) {
                rows--;
                String[] row = new String[source.getColumns()];
                for (int idx = 0; idx < source.getColumns(); idx++) {
                    row[idx] = rs.getString(idx + 1);
                }
                preview.add(row);
            }
        }
    } catch (SQLException e) {
        log.warn("Cant read sql source " + source, e);
    } finally {
        // close result set, statement, and connection in that order
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                log.error("ResultSet could not be closed: " + e.getMessage(), e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                log.error("Statement could not be closed: " + e.getMessage(), e);
            }
        }
        if (con != null) {
            try {
                con.close();
            } catch (SQLException e) {
                log.error("Connection could not be closed: " + e.getMessage(), e);
            }
        }
    }
    return preview;
}

From source file:org.glom.SqlUtils.java

public static ResultSet executeQuery(final Connection conn, final String query, int expectedLength)
        throws SQLException {
    // Set up and execute the query. Special care needs to be taken to ensure that the results will be based
    // on a cursor, so that large amounts of memory are not consumed when the query retrieves a large amount of
    // data. Here's the relevant PostgreSQL documentation:
    // http://jdbc.postgresql.org/documentation/83/query.html#query-with-cursor
    conn.setAutoCommit(false);

    //TODO: Change this back to ResultSet.TYPE_FORWARD_ONLY when we can use a sane UI component
    //(not JTable) that doesn't need us to jump around the result set or copy its entire contents.
    final Statement st = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    if (expectedLength > 0) {
        st.setFetchSize(expectedLength);
    }

    return st.executeQuery(query);
}

From source file:org.glom.web.server.SqlUtils.java

public static ResultSet executeQuery(final Connection conn, final String query, int expectedLength)
        throws SQLException {
    // Set up and execute the query. Special care needs to be taken to ensure that the results will be based
    // on a cursor, so that large amounts of memory are not consumed when the query retrieves a large amount of
    // data. Here's the relevant PostgreSQL documentation:
    // http://jdbc.postgresql.org/documentation/83/query.html#query-with-cursor
    conn.setAutoCommit(false);
    final Statement st = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    if (expectedLength > 0) {
        st.setFetchSize(expectedLength);
    }

    return st.executeQuery(query);
}
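
The two SqlUtils examples above rely on the PostgreSQL driver's cursor mode, which the linked documentation ties to three conditions: auto-commit off, a forward-only read-only statement, and a positive fetch size. Here is a hedged, self-contained sketch of that streaming pattern; the JDBC URL, credentials, query, and fetch size of 500 are placeholders.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class CursorStreamingExample {
    public static void main(String[] args) throws SQLException {
        // Hypothetical PostgreSQL URL and credentials; substitute your own.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost/testdb", "user", "secret")) {
            conn.setAutoCommit(false); // cursor-based fetching requires auto-commit to be off
            try (Statement st = conn.createStatement(
                    ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) {
                st.setFetchSize(500); // plus a forward-only statement and a positive fetch size
                try (ResultSet rs = st.executeQuery("SELECT id FROM big_table")) {
                    long count = 0;
                    while (rs.next()) {
                        count++; // rows arrive in batches of roughly 500
                    }
                    System.out.println("Streamed " + count + " rows");
                }
            }
            conn.commit();
        }
    }
}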

From source file:org.jesterj.ingest.scanners.JdbcScanner.java

private Statement createStatement(Connection conn) throws SQLException {
    Statement statement = conn.createStatement(java.sql.ResultSet.TYPE_FORWARD_ONLY,
            java.sql.ResultSet.CONCUR_READ_ONLY);
    if (fetchSize != -1) {
        statement.setFetchSize(fetchSize);
    }
    if (queryTimeout > 0) {
        statement.setQueryTimeout(queryTimeout);
    }
    return statement;
}