Example usage for java.sql PreparedStatement setFetchSize

Introduction

This page collects example usages of java.sql.PreparedStatement.setFetchSize from open-source projects.

Prototype

void setFetchSize(int rows) throws SQLException;

Document

Gives the JDBC driver a hint as to the number of rows that should be fetched from the database when more rows are needed for ResultSet objects generated by this Statement.
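
A minimal sketch of applying the hint (the connection setup, query, and fetch size of 100 are placeholders, not taken from the examples below):

try (Connection conn = DriverManager.getConnection(jdbcUrl, user, password);
        PreparedStatement ps = conn.prepareStatement("SELECT id, name FROM my_table")) {
    ps.setFetchSize(100); // a hint only; the driver may ignore it or fetch rows in batches of 100
    try (ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
            // process each row; the driver refills its row buffer as rows are consumed
        }
    }
}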

Usage

From source file:org.apache.marmotta.kiwi.persistence.KiWiConnection.java

/**
 * Return the prepared statement with the given identifier; first looks in the statement cache and, if it does
 * not exist there, creates a new statement. This method is used for building statements with a variable number
 * of arguments (e.g. in an IN clause).
 *
 * @param key               the id of the statement in statements.properties
 * @param numberOfArguments the number of '?' placeholders to generate
 * @return the cached or newly prepared statement
 * @throws SQLException
 */
public PreparedStatement getPreparedStatement(String key, int numberOfArguments) throws SQLException {
    requireJDBCConnection();

    PreparedStatement statement = statementCache.get(key + numberOfArguments);
    if (statement == null || statement.isClosed()) {
        StringBuilder s = new StringBuilder();
        for (int i = 0; i < numberOfArguments; i++) {
            if (i != 0) {
                s.append(',');
            }
            s.append('?');
        }

        statement = connection.prepareStatement(
                String.format(dialect.getStatement(key), s.toString(), numberOfArguments),
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        statementCache.put(key + numberOfArguments, statement);
    }
    statement.clearParameters();
    if (persistence.getDialect().isCursorSupported()) {
        statement.setFetchSize(persistence.getConfiguration().getCursorSize());
    }
    return statement;
}
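
A hypothetical caller (the statement key "load.nodes_by_ids" is made up for illustration; real keys live in statements.properties):

long[] ids = {1L, 2L, 3L};
PreparedStatement stmt = getPreparedStatement("load.nodes_by_ids", ids.length);
for (int i = 0; i < ids.length; i++) {
    stmt.setLong(i + 1, ids[i]); // JDBC parameter indexes are 1-based
}
ResultSet rs = stmt.executeQuery();
// the statement is owned by the cache, so the caller closes only the ResultSet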

From source file:org.apache.marmotta.kiwi.sparql.persistence.KiWiSparqlConnection.java

/**
 * Evaluate a statement pattern join or filter on the database by translating it into an appropriate SQL
 * statement. Copied and adapted from KiWiReasoningConnection.query().
 *
 * @param join     the expression to evaluate (join, filter, pattern, distinct, slice, or reduced)
 * @param bindings initial variable bindings to apply to the query
 * @param dataset  the dataset restricting the default and named graphs
 * @return a closeable iteration over the binding sets produced by the query
 * @throws SQLException
 * @throws InterruptedException
 */
public CloseableIteration<BindingSet, SQLException> evaluateJoin(TupleExpr join, final BindingSet bindings,
        final Dataset dataset) throws SQLException, InterruptedException {
    Preconditions
            .checkArgument(join instanceof Join || join instanceof Filter || join instanceof StatementPattern
                    || join instanceof Distinct || join instanceof Slice || join instanceof Reduced);

    // some definitions
    String[] positions = new String[] { "subject", "predicate", "object", "context" };

    // collect all patterns in a list, using depth-first search over the join
    List<StatementPattern> patterns = new PatternCollector(join).patterns;

    LimitFinder limitFinder = new LimitFinder(join);
    long offset = limitFinder.offset;
    long limit = limitFinder.limit;

    boolean distinct = new DistinctFinder(join).distinct;

    // associate a name with each pattern; the names are used in the database query to refer to the triple
    // that matched this pattern and in the construction of variable names for the SQL query
    int patternCount = 0;
    final Map<StatementPattern, String> patternNames = new HashMap<StatementPattern, String>();
    for (StatementPattern p : patterns) {
        patternNames.put(p, "P" + (++patternCount));
    }

    // find all variables occurring in the patterns and create a map to map them to
    // field names in the database query; each variable will have one or several field names,
    // one for each pattern it occurs in; field names are constructed automatically by a counter
    // and the pattern name to ensure the name is a valid SQL identifier
    int variableCount = 0;

    // a map for the variable names; will look like { ?x -> "V1", ?y -> "V2", ... }
    final Map<Var, String> variableNames = new HashMap<>();

    // a map for mapping variables to field names; each variable might have one or more field names,
    // depending on the number of patterns it occurs in; will look like
    // { ?x -> ["P1_V1", "P2_V1"], ?y -> ["P2_V2"], ... }
    Map<Var, List<String>> queryVariables = new HashMap<>();
    Map<Var, List<String>> queryVariableIds = new HashMap<>();

    // a map for defining alternative context values for each variable used in the context part of a pattern
    Map<StatementPattern, List<Resource>> variableContexts = new HashMap<>();

    for (StatementPattern p : patterns) {
        // check graph restrictions in datasets (MARMOTTA-340)
        Resource[] contexts;
        Value contextValue = p.getContextVar() != null ? p.getContextVar().getValue() : null;

        Set<URI> graphs = null;
        boolean emptyGraph = false;

        if (dataset != null) {
            if (p.getScope() == StatementPattern.Scope.DEFAULT_CONTEXTS) {
                graphs = dataset.getDefaultGraphs();
                emptyGraph = graphs.isEmpty() && !dataset.getNamedGraphs().isEmpty();
            } else {
                graphs = dataset.getNamedGraphs();
                emptyGraph = graphs.isEmpty() && !dataset.getDefaultGraphs().isEmpty();
            }
        }

        if (emptyGraph) {
            // Search zero contexts
            return new EmptyIteration<BindingSet, SQLException>();
        } else if (graphs == null || graphs.isEmpty()) {
            if (contextValue != null) {
                contexts = new Resource[] { (Resource) contextValue };
            } else {
                contexts = new Resource[0];
            }
        } else if (contextValue != null) {
            if (graphs.contains(contextValue)) {
                contexts = new Resource[] { (Resource) contextValue };
            } else {
                // Statement pattern specifies a context that is not part of
                // the dataset
                return new EmptyIteration<BindingSet, SQLException>();
            }
        } else {
            contexts = new Resource[graphs.size()];
            int i = 0;
            for (URI graph : graphs) {
                URI context = null;
                if (!SESAME.NIL.equals(graph)) {
                    context = graph;
                }
                contexts[i++] = context;
            }
        }

        // build pattern
        Var[] fields = new Var[] { p.getSubjectVar(), p.getPredicateVar(), p.getObjectVar(),
                p.getContextVar() };
        for (int i = 0; i < fields.length; i++) {
            if (fields[i] != null && !fields[i].hasValue()) {
                Var v = fields[i];
                if (variableNames.get(v) == null) {
                    variableNames.put(v, "V" + (++variableCount));
                    queryVariables.put(v, new LinkedList<String>());
                    queryVariableIds.put(v, new LinkedList<String>());
                }
                String pName = patternNames.get(p);
                String vName = variableNames.get(v);
                if (hasNodeCondition(fields[i], join)) {
                    queryVariables.get(v).add(pName + "_" + positions[i] + "_" + vName);
                }
                queryVariableIds.get(v).add(pName + "." + positions[i]);
            }
        }

        // build an OR query for the value of the context variable
        if (contexts.length > 0) {
            variableContexts.put(p, Arrays.asList(contexts));
        }
    }

    // build the select clause by projecting for each query variable the first name
    StringBuilder selectClause = new StringBuilder();

    if (distinct) {
        selectClause.append("DISTINCT ");
    }

    final List<Var> selectVariables = new LinkedList<Var>();
    for (Iterator<Var> it = queryVariableIds.keySet().iterator(); it.hasNext();) {
        Var v = it.next();
        String projectedName = variableNames.get(v);
        String fromName = queryVariableIds.get(v).get(0);
        selectClause.append(fromName);
        selectClause.append(" as ");
        selectClause.append(projectedName);
        if (it.hasNext()) {
            selectClause.append(", ");
        }
        selectVariables.add(v);
    }

    // build the from-clause of the query as follows:
    // 1. for each pattern P, there will be a "triples P" entry in the from clause
    // 2. for each variable V of P that occurs in a node condition, the nodes table is joined in as
    //    "INNER JOIN nodes AS P_<position>_V ON P.<position> = P_<position>_V.id", where <position> is
    //    subject, predicate, object, or context
    StringBuilder fromClause = new StringBuilder();
    for (Iterator<StatementPattern> it = patterns.iterator(); it.hasNext();) {
        StatementPattern p = it.next();
        String pName = patternNames.get(p);
        fromClause.append("triples " + pName);

        Var[] fields = new Var[] { p.getSubjectVar(), p.getPredicateVar(), p.getObjectVar(),
                p.getContextVar() };
        for (int i = 0; i < fields.length; i++) {
            if (fields[i] != null && !fields[i].hasValue() && hasNodeCondition(fields[i], join)) {
                String vName = variableNames.get(fields[i]);
                fromClause.append(" INNER JOIN nodes AS ");
                fromClause.append(pName + "_" + positions[i] + "_" + vName);
                fromClause.append(" ON " + pName + "." + positions[i] + " = ");
                fromClause.append(pName + "_" + positions[i] + "_" + vName + ".id ");
            }
        }

        if (it.hasNext()) {
            fromClause.append(",\n ");
        }
    }

    // build the where clause as follows:
    // 1. iterate over all patterns and, for each resource or literal field in subject,
    //    predicate, object, or context, set a query condition according to the
    //    nodes given in the pattern
    // 2. for each variable that has more than one occurrence, add a join condition
    // 3. for each variable in the initialBindings, add a condition to the where clause

    // list of where conditions that will later be connected by AND
    List<String> whereConditions = new LinkedList<String>();

    // 1. iterate over all patterns and, for each resource or literal field in subject,
    //    predicate, object, or context, set a query condition according to the
    //    nodes given in the pattern
    for (StatementPattern p : patterns) {
        String pName = patternNames.get(p);
        Var[] fields = new Var[] { p.getSubjectVar(), p.getPredicateVar(), p.getObjectVar(),
                p.getContextVar() };
        for (int i = 0; i < fields.length; i++) {
            // find node id of the resource or literal field and use it in the where clause
            // in this way we can avoid setting too many query parameters
            long nodeId = -1;
            if (fields[i] != null && fields[i].hasValue()) {
                Value v = valueFactory.convert(fields[i].getValue());
                if (v instanceof KiWiNode) {
                    nodeId = ((KiWiNode) v).getId();
                } else {
                    throw new IllegalArgumentException(
                            "the values in this query have not been created by the KiWi value factory");
                }

                if (nodeId >= 0) {
                    String condition = pName + "." + positions[i] + " = " + nodeId;
                    whereConditions.add(condition);
                }
            }
        }
    }

    // 2. for each variable that has more than one occurrence, add a join condition
    for (Var v : queryVariableIds.keySet()) {
        List<String> vNames = queryVariableIds.get(v);
        for (int i = 1; i < vNames.size(); i++) {
            String vName1 = vNames.get(i - 1);
            String vName2 = vNames.get(i);
            whereConditions.add(vName1 + " = " + vName2);
        }
    }

    // 3. for each variable in the initialBindings, add a condition to the where clause setting it
    //    to the node given as binding
    if (bindings != null) {
        for (String v : bindings.getBindingNames()) {
            for (Map.Entry<Var, List<String>> entry : queryVariableIds.entrySet()) {
                if (entry.getKey().getName() != null && entry.getKey().getName().equals(v)
                        && entry.getValue() != null && entry.getValue().size() > 0) {
                    List<String> vNames = entry.getValue();
                    String vName = vNames.get(0);
                    Value binding = valueFactory.convert(bindings.getValue(v));
                    if (binding instanceof KiWiNode) {
                        whereConditions.add(vName + " = " + ((KiWiNode) binding).getId());
                    } else {
                        throw new IllegalArgumentException(
                                "the values in this binding have not been created by the KiWi value factory");
                    }
                }
            }
        }
    }

    // 4. for each pattern, ensure that the matched triple is not marked as deleted
    for (StatementPattern p : patterns) {
        String pName = patternNames.get(p);
        whereConditions.add(pName + ".deleted = false");
    }

    // 5. for each filter condition, add a statement to the where clause
    List<ValueExpr> filters = new FilterCollector(join).filters;
    for (ValueExpr expr : filters) {
        whereConditions.add(evaluateExpression(expr, queryVariables, null));
    }

    // 6. for each pattern with a restricted list of contexts, add a condition to the where clause
    //    of the form (P.context = id1 OR P.context = id2 ...)
    for (Map.Entry<StatementPattern, List<Resource>> vctx : variableContexts.entrySet()) {
        // the name of the pattern whose context is restricted
        String varName = patternNames.get(vctx.getKey());

        // the string we are building
        StringBuilder cCond = new StringBuilder();
        cCond.append("(");
        for (Iterator<Resource> it = vctx.getValue().iterator(); it.hasNext();) {
            Value v = valueFactory.convert(it.next());
            if (v instanceof KiWiNode) {
                long nodeId = ((KiWiNode) v).getId();

                cCond.append(varName);
                cCond.append(".context = ");
                cCond.append(nodeId);

                if (it.hasNext()) {
                    cCond.append(" OR ");
                }
            } else {
                throw new IllegalArgumentException(
                        "the values in this query have not been created by the KiWi value factory");
            }

        }
        cCond.append(")");
        whereConditions.add(cCond.toString());
    }

    // construct the where clause
    StringBuilder whereClause = new StringBuilder();
    for (Iterator<String> it = whereConditions.iterator(); it.hasNext();) {
        whereClause.append(it.next());
        whereClause.append("\n ");
        if (it.hasNext()) {
            whereClause.append("AND ");
        }
    }

    // construct limit and offset
    StringBuilder limitClause = new StringBuilder();
    if (limit > 0) {
        limitClause.append("LIMIT ");
        limitClause.append(limit);
        limitClause.append(" ");
    }
    if (offset >= 0) {
        limitClause.append("OFFSET ");
        limitClause.append(offset);
        limitClause.append(" ");
    }

    // build the query string
    String queryString = "SELECT " + selectClause + "\n " + "FROM " + fromClause + "\n " + "WHERE "
            + whereClause + "\n " + limitClause;

    log.debug("original SPARQL syntax tree:\n {}", join);
    log.debug("constructed SQL query string:\n {}", queryString);
    log.debug("SPARQL -> SQL node variable mappings:\n {}", queryVariables);
    log.debug("SPARQL -> SQL ID variable mappings:\n {}", queryVariableIds);

    final PreparedStatement queryStatement = parent.getJDBCConnection().prepareStatement(queryString);
    if (parent.getDialect().isCursorSupported()) {
        queryStatement.setFetchSize(parent.getConfiguration().getCursorSize());
    }

    Future<ResultSet> queryFuture = executorService.submit(new Callable<ResultSet>() {
        @Override
        public ResultSet call() throws Exception {
            try {
                return queryStatement.executeQuery();
            } catch (SQLException ex) {
                if (Thread.interrupted()) {
                    log.info("SQL query execution cancelled; not returning result (Thread={})",
                            Thread.currentThread());
                    throw new InterruptedException("SPARQL query execution cancelled");
                } else {
                    throw ex;
                }
            }
        }
    });

    try {
        ResultSet result = queryFuture.get();

        ResultSetIteration<BindingSet> it = new ResultSetIteration<BindingSet>(result, true,
                new ResultTransformerFunction<BindingSet>() {
                    @Override
                    public BindingSet apply(ResultSet row) throws SQLException {
                        MapBindingSet resultRow = new MapBindingSet();

                        long[] nodeIds = new long[selectVariables.size()];
                        for (int i = 0; i < selectVariables.size(); i++) {
                            nodeIds[i] = row.getLong(variableNames.get(selectVariables.get(i)));
                        }
                        KiWiNode[] nodes = parent.loadNodesByIds(nodeIds);

                        for (int i = 0; i < selectVariables.size(); i++) {
                            Var v = selectVariables.get(i);
                            resultRow.addBinding(v.getName(), nodes[i]);
                        }

                        if (bindings != null) {
                            for (Binding binding : bindings) {
                                resultRow.addBinding(binding);
                            }
                        }
                        return resultRow;
                    }
                });

        // materialize result to avoid having more than one result set open at the same time
        return new CloseableIteratorIteration<BindingSet, SQLException>(Iterations.asList(it).iterator());
    } catch (InterruptedException | CancellationException e) {
        log.info("SPARQL query execution cancelled");
        queryFuture.cancel(true);
        queryStatement.cancel();
        queryStatement.close();

        throw new InterruptedException("SPARQL query execution cancelled");
    } catch (ExecutionException e) {
        log.error("error executing SPARQL query", e.getCause());
        if (e.getCause() instanceof SQLException) {
            throw (SQLException) e.getCause();
        } else if (e.getCause() instanceof InterruptedException) {
            throw (InterruptedException) e.getCause();
        } else {
            throw new SQLException("error executing SPARQL query", e);
        }
    }
}
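
Both Marmotta examples guard setFetchSize with a dialect.isCursorSupported() check. This matters because some drivers only honor the hint under specific conditions; with PostgreSQL, for instance, rows are streamed in fetch-size batches only for a forward-only statement inside a transaction. A driver-specific sketch (the query and batch size are placeholders):

conn.setAutoCommit(false); // PostgreSQL streams only inside a transaction
PreparedStatement ps = conn.prepareStatement(
        "SELECT subject, predicate, object FROM triples", // placeholder query
        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
ps.setFetchSize(1000); // rows now arrive in batches instead of being fully materialized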

From source file:org.apache.openjpa.jdbc.sql.SQLBuffer.java

/**
 * Create and populate the parameters of a prepared statement using the
 * SQL in this buffer and the given fetch configuration.
 */
public PreparedStatement prepareStatement(Connection conn, JDBCFetchConfiguration fetch, int rsType,
        int rsConcur) throws SQLException {
    if (rsType == -1 && fetch == null)
        rsType = ResultSet.TYPE_FORWARD_ONLY;
    else if (rsType == -1)
        rsType = fetch.getResultSetType();
    if (rsConcur == -1)
        rsConcur = ResultSet.CONCUR_READ_ONLY;

    PreparedStatement stmnt;
    if (rsType == ResultSet.TYPE_FORWARD_ONLY && rsConcur == ResultSet.CONCUR_READ_ONLY)
        stmnt = conn.prepareStatement(getSQL());
    else
        stmnt = conn.prepareStatement(getSQL(), rsType, rsConcur);
    try {
        setParameters(stmnt);
        if (fetch != null) {
            if (fetch.getFetchBatchSize() > 0)
                stmnt.setFetchSize(_dict.getBatchFetchSize(fetch.getFetchBatchSize()));
            if (rsType != ResultSet.TYPE_FORWARD_ONLY && fetch.getFetchDirection() != ResultSet.FETCH_FORWARD)
                stmnt.setFetchDirection(fetch.getFetchDirection());
        }
        return stmnt;
    } catch (SQLException se) {
        try {
            stmnt.close();
        } catch (SQLException se2) {
        }
        throw se;
    }
}
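
Note the cleanup pattern above: if setting the parameters or fetch hints fails, the statement is closed before the original SQLException is rethrown, and any secondary exception from close() is deliberately swallowed so it cannot mask the root cause.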

From source file:org.apache.torque.util.BasePeerImpl.java

/**
 * Performs a SQL <code>select</code> using a PreparedStatement.
 *
 * @param criteria A Criteria specifying the records to select, not null.
 * @param mapper The mapper creating the objects from the resultSet,
 *        not null.
 * @param connection the database connection for selecting records,
 *        not null.
 *
 * @return The results of the query, not null.
 *
 * @throws TorqueException Error performing database query.
 */
public <TT> List<TT> doSelect(org.apache.torque.criteria.Criteria criteria, RecordMapper<TT> mapper,
        Connection connection) throws TorqueException {
    correctBooleans(criteria);

    Query query = SqlBuilder.buildQuery(criteria);
    if (query.getFromClause().isEmpty()) {
        String tableName = SqlBuilder.getFullTableName(getTableMap().getFullyQualifiedTableName(),
                criteria.getDbName());
        query.getFromClause().add(new FromElement(tableName));
    }

    PreparedStatement statement = null;
    ResultSet resultSet = null;
    try {
        statement = connection.prepareStatement(query.toString());
        if (query.getFetchSize() != null) {
            statement.setFetchSize(query.getFetchSize());
        }

        List<Object> replacements = setPreparedStatementReplacements(statement,
                query.getPreparedStatementReplacements(), 0);

        long startTime = System.currentTimeMillis();
        log.debug("Executing query " + query + ", parameters = " + replacements);

        resultSet = statement.executeQuery();
        long queryEndTime = System.currentTimeMillis();
        log.trace("query took " + (queryEndTime - startTime) + " milliseconds");

        long offset;
        Database database = Torque.getDatabase(criteria.getDbName());
        if (database.getAdapter().supportsNativeOffset()) {
            offset = 0; //database takes care of offset
        } else {
            offset = criteria.getOffset();
        }

        long limit;
        if (database.getAdapter().supportsNativeLimit()) {
        limit = -1; //database takes care of limit
        } else {
            if (database.getAdapter().supportsNativeOffset()) {
                limit = criteria.getLimit();
            } else {
                if (criteria.getLimit() == -1) {
                    limit = criteria.getLimit();
                } else {
                    limit = offset + criteria.getLimit();
                }
            }
        }

        List<TT> result = new ArrayList<TT>();
        int rowNumber = 0;
        while (resultSet.next()) {
            if (rowNumber < offset) {
                rowNumber++;
                continue;
            }
            if (limit >= 0 && rowNumber >= limit) {
                break;
            }

            TT rowResult = mapper.processRow(resultSet, 0, criteria);
            result.add(rowResult);

            rowNumber++;
        }
        long mappingEndTime = System.currentTimeMillis();
        log.trace("mapping took " + (mappingEndTime - queryEndTime) + " milliseconds");

        if (criteria.isSingleRecord() && result.size() > 1) {
            throw new TooManyRowsException(
                    "Criteria expected single Record and " + "Multiple Records were selected");
        }
        return result;
    } catch (SQLException e) {
        throw ExceptionMapper.getInstance().toTorqueException(e);
    } finally {
        if (resultSet != null) {
            try {
                resultSet.close();
            } catch (SQLException e) {
                log.warn("error closing resultSet", e);
            }
        }
        if (statement != null) {
            try {
                statement.close();
            } catch (SQLException e) {
                log.warn("error closing statement", e);
            }
        }
    }
}
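
When the adapter supports neither native limit nor native offset, the rows before the offset are still transferred from the database and skipped in the mapping loop, which is why the limit is translated to offset + limit: rowNumber counts from the first fetched row, not from the offset. A fetch size, when configured on the query, at least keeps those discarded rows from being materialized all at once.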

From source file:org.bidtime.dbutils.QueryRunnerEx.java

/**
 * Calls query after checking the parameters to ensure nothing is null.
 * @param conn The connection to use for the query call.
 * @param closeConn True if the connection should be closed, false otherwise.
 * @param sql The SQL statement to execute.
 * @param rsh The handler that converts the ResultSet into the result object.
 * @param params An array of query replacement parameters.  Each row in
 * this array is one set of batch replacement values.
 * @return The results of the query.
 * @throws SQLException If there are database or parameter errors.
 */
private <T> T query(Connection conn, boolean closeConn, String sql, ResultSetHandler<T> rsh, Object... params)
        throws SQLException {
    if (conn == null) {
        throw new SQLException("Null connection");
    }

    if (sql == null) {
        if (closeConn) {
            close(conn);
        }
        throw new SQLException("Null SQL statement");
    }

    if (rsh == null) {
        if (closeConn) {
            close(conn);
        }
        throw new SQLException("Null ResultSetHandler");
    }

    PreparedStatement stmt = null;
    ResultSet rs = null;
    T result = null;
    long startTime = System.currentTimeMillis();
    try {
        stmt = conn.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY,
                ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(StmtParams.getInstance().getFetchSize());
        stmt.setFetchDirection(ResultSet.FETCH_REVERSE);
        stmt.setQueryTimeout(StmtParams.getInstance().getStmtQueryTimeOut());
        this.fillStatement(stmt, params);
        rs = this.wrap(stmt.executeQuery());
        result = rsh.handle(rs);
    } catch (SQLException e) {
        this.rethrow(e, sql, params);
    } finally {
        try {
            close(rs);
        } finally {
            close(stmt);
            if (closeConn) {
                close(conn);
            }
        }
        if (LogSelectSql.logInfoOrDebug()) {
            LogSelectSql.logFormatTimeNow(startTime, sql, params);
        }
    }

    return result;
}
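
The FETCH_REVERSE hint is unusual here: like setFetchSize, Statement.setFetchDirection is only a hint, and on a TYPE_FORWARD_ONLY statement most drivers simply ignore it, so the effective tuning in this example comes from the fetch size and the query timeout.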

From source file:org.compass.gps.device.jdbc.AbstractJdbcGpsDevice.java

/**
 * Performs the indexing operation.
 * <p/>
 * Calls the abstract {@link #doGetIndexExecutions(java.sql.Connection)} method with
 * an open connection to get the list of {@link org.compass.gps.device.jdbc.AbstractJdbcGpsDevice.IndexExecution} to perform.
 * <p/>
 * For each {@link org.compass.gps.device.jdbc.AbstractJdbcGpsDevice.IndexExecution}, executes the select query, and calls
 * the {@link #processResultSet(Object, java.sql.ResultSet, org.compass.core.CompassSession)} for the
 * returned <code>ResultSet</code>.
 */
protected void doIndex(CompassSession session) throws CompassGpsException {
    if (log.isInfoEnabled()) {
        log.info("{" + getName() + "}: Indexing the database with fetch size [" + fetchSize + "]");
    }
    Connection connection = JdbcUtils.getConnection(dataSource);
    PreparedStatement ps = null;
    ResultSet rs = null;
    try {
        IndexExecution[] indexExecutions = doGetIndexExecutions(connection);
        for (IndexExecution indexExecution : indexExecutions) {
            if (!isRunning()) {
                return;
            }
            ps = indexExecution.getStatement();
            if (ps == null) {
                if (log.isDebugEnabled()) {
                    log.debug("{" + getName() + "} Executing select query ["
                            + indexExecution.getStatementQuery() + "]");
                }
                ps = connection.prepareStatement(indexExecution.getStatementQuery());
            }
            if (getFetchSize() > 0) {
                ps.setFetchSize(getFetchSize());
            }
            rs = ps.executeQuery();
            processResultSet(indexExecution.getDescription(), rs, session);
        }
    } catch (CompassException e) {
        log.error("Failed to index database", e);
        throw e;
    } catch (Exception e) {
        log.error("Failed to index database", e);
        throw new JdbcGpsDeviceException("Failed to index database", e);
    } finally {
        JdbcUtils.closeResultSet(rs);
        JdbcUtils.closeStatement(ps);
        JdbcUtils.closeConnection(connection);
    }

    if (log.isInfoEnabled()) {
        log.info("{" + getName() + "}: Finished indexing the database");
    }
}

From source file:org.deegree.metadata.persistence.ebrim.eo.EbrimEOMDStore.java

@Override
public MetadataResultSet<RegistryObject> getRecords(MetadataQuery query) throws MetadataStoreException {

    PreparedStatement stmt = null;
    ResultSet rs = null;
    ConnectionProvider prov = workspace.getResource(ConnectionProviderProvider.class, connId);
    Connection conn = getConnection(true);
    try {
        EOPropertyNameMapper propMapper = new EOPropertyNameMapper(query.getQueryTypeNames(),
                useLegacyPredicates);
        if (query.getFilter() instanceof IdFilter) {
            throw new MetadataStoreException("ID filters are currently not supported.");
        }

        AbstractWhereBuilder wb = new PostGISWhereBuilder(null, propMapper, (OperatorFilter) query.getFilter(),
                query.getSorting(), false, useLegacyPredicates);
        AliasedRIMType returnType = propMapper.getReturnType(query.getReturnTypeNames());
        StringBuilder idSelect = new StringBuilder("SELECT DISTINCT(");
        idSelect.append(propMapper.getTableAlias(returnType));
        idSelect.append(".internalId) FROM ");
        idSelect.append(propMapper.getTable(returnType));
        idSelect.append(' ');
        idSelect.append(propMapper.getTableAlias(returnType));
        boolean first = true;
        for (AliasedRIMType queryType : propMapper.getQueryTypes()) {
            if (queryType != returnType) {
                if (first) {
                    idSelect.append(" LEFT OUTER JOIN ");
                } else {
                    idSelect.append(" FULL OUTER JOIN ");
                }
                idSelect.append(propMapper.getTable(queryType).name());
                idSelect.append(' ');
                idSelect.append(propMapper.getTableAlias(queryType));
                idSelect.append(" ON TRUE");
                first = false;
            }
        }
        // cope with rim:RegistryPackage -> rim:RegistryObjectList/* join
        for (Join additionalJoin : propMapper.getAdditionalJoins()) {
            if (first) {
                idSelect.append(" LEFT OUTER JOIN ");
            } else {
                idSelect.append(" FULL OUTER JOIN ");
            }
            idSelect.append(additionalJoin.getToTable());
            idSelect.append(' ');
            idSelect.append(additionalJoin.getToTableAlias());
            idSelect.append(" ON ");
            idSelect.append(additionalJoin.getSQLJoinCondition());
            first = false;
        }

        if (wb.getWhere() != null) {
            idSelect.append(" WHERE ").append(wb.getWhere().getSQL());
        }
        if (wb.getOrderBy() != null) {
            idSelect.append(" ORDER BY ");
            idSelect.append(wb.getOrderBy().getSQL());
        }
        if (query != null && query.getStartPosition() != 1) {
            idSelect.append(" OFFSET ").append(Integer.toString(query.getStartPosition() - 1));
        }
        if (query != null) {
            idSelect.append(" LIMIT ").append(query.getMaxRecords());
        }

        StringBuilder blobSelect = new StringBuilder("SELECT data FROM ");
        blobSelect.append(propMapper.getTable(returnType));
        blobSelect.append(" WHERE internalId IN (");
        blobSelect.append(idSelect);
        blobSelect.append(")");

        stmt = conn.prepareStatement(blobSelect.toString());
        stmt.setFetchSize(DEFAULT_FETCH_SIZE);

        int i = 1;
        if (wb.getWhere() != null) {
            for (SQLArgument argument : wb.getWhere().getArguments()) {
                argument.setArgument(stmt, i++);
            }
        }

        if (wb.getOrderBy() != null) {
            for (SQLArgument argument : wb.getOrderBy().getArguments()) {
                argument.setArgument(stmt, i++);
            }
        }

        LOG.debug("Execute: " + stmt.toString());
        rs = executeQuery(stmt, prov, queryTimeout);
        return new EbrimEOMDResultSet(rs, conn, stmt);
    } catch (Throwable t) {
        JDBCUtils.close(rs, stmt, conn, LOG);
        LOG.debug(t.getMessage(), t);
        throw new MetadataStoreException(t.getMessage(), t);
    }
}

From source file:org.deegree.metadata.persistence.ebrim.eo.EbrimEOMDStore.java

@Override
public MetadataResultSet<RegistryObject> getRecordById(List<String> idList, QName[] recordTypeNames)
        throws MetadataStoreException {

    Table table = Table.idxtb_registrypackage;
    if (recordTypeNames != null && recordTypeNames.length > 0) {
        if (recordTypeNames.length > 1) {
            String msg = "Record by id queries with multiple specified type names are not supported.";
            throw new MetadataStoreException(msg);
        }

        try {
            List<AliasedRIMType> aliasedTypes = AliasedRIMType.valueOf(recordTypeNames[0]);
            if (aliasedTypes.get(0).getType() == AdhocQuery) {
                return getAdhocQueries(idList);
            }
            table = SlotMapper.getTable(aliasedTypes.get(0).getType());
        } catch (Throwable t) {
            String msg = "Specified type name '" + recordTypeNames[0]
                    + "' is not a known ebRIM 3.0 registry object type.";
            throw new MetadataStoreException(msg);
        }
        if (table == null) {
            String msg = "Queries on registry object type '" + recordTypeNames[0] + "' are not supported.";
            throw new MetadataStoreException(msg);
        }
    }

    PreparedStatement stmt = null;
    ResultSet rs = null;
    ConnectionProvider prov = workspace.getResource(ConnectionProviderProvider.class, connId);
    Connection conn = getConnection(true);

    try {
        StringBuilder sql = new StringBuilder();
        sql.append("SELECT data");
        sql.append(" FROM ");
        sql.append(table.name());
        sql.append(" WHERE id IN (");
        sql.append("?");
        for (int i = 1; i < idList.size(); i++) {
            sql.append(",?");
        }
        sql.append(")");

        stmt = conn.prepareStatement(sql.toString());
        stmt.setFetchSize(DEFAULT_FETCH_SIZE);

        int i = 1;
        for (String identifier : idList) {
            stmt.setString(i, identifier);
            i++;
        }

        LOG.debug("Execute: " + stmt.toString());
        rs = executeQuery(stmt, prov, queryTimeout);
        return new EbrimEOMDResultSet(rs, conn, stmt);
    } catch (Throwable t) {
        JDBCUtils.close(rs, stmt, conn, LOG);
        LOG.debug(t.getMessage(), t);
        throw new MetadataStoreException(t.getMessage(), t);
    }
}

From source file:org.fastcatsearch.datasource.reader.DBReader.java

@Override
public SchemaSetting getAutoGeneratedSchemaSetting() {
    Map<String, String> properties = singleSourceConfig.getProperties();
    String jdbcSourceId = properties.get("jdbcSourceId");
    String dataSQL = properties.get("dataSQL");
    IRService service = ServiceManager.getInstance().getService(IRService.class);
    Connection con = null;
    PreparedStatement pst = null;
    ResultSet res = null;
    ResultSetMetaData meta = null;
    try {
        JDBCSourceInfo jdbcInfo = service.getJDBCSourceInfo(jdbcSourceId);
        if (jdbcInfo != null) {
            con = getConnection(jdbcInfo);
        }
        logger.trace("get jdbc connection : {}", con);

        if (con != null) {
            logger.trace("executing sql :{}", dataSQL);
            pst = con.prepareStatement(dataSQL);
            pst.setFetchSize(1);
            pst.setMaxRows(1);
            res = pst.executeQuery();
            res.next();
            meta = res.getMetaData();

            SchemaSetting setting = new SchemaSetting();
            PrimaryKeySetting primaryKeySetting = new PrimaryKeySetting();
            List<FieldSetting> fieldSettingList = new ArrayList<FieldSetting>();
            List<AnalyzerSetting> analyzerSetting = new ArrayList<AnalyzerSetting>();
            List<GroupIndexSetting> groupIndexSetting = new ArrayList<GroupIndexSetting>();
            List<IndexSetting> indexSetting = new ArrayList<IndexSetting>();
            List<FieldIndexSetting> fieldIndexSetting = new ArrayList<FieldIndexSetting>();

            logger.trace("columnCount:{}", meta.getColumnCount());

            String tableName = null;

            for (int inx = 0; inx < meta.getColumnCount(); inx++) {
                if (tableName == null) {
                    tableName = meta.getTableName(inx + 1);
                }
                FieldSetting field = new FieldSetting();
                Type type = null;
                int size = 0;
                switch (meta.getColumnType(inx + 1)) {
                case Types.INTEGER:
                case Types.TINYINT:
                case Types.SMALLINT:
                case Types.NUMERIC:
                    type = Type.INT;
                    break;
                case Types.BIGINT:
                    type = Type.LONG;
                    break;
                case Types.FLOAT:
                    type = Type.FLOAT;
                    break;
                case Types.DOUBLE:
                    type = Type.DOUBLE;
                    break;
                case Types.DATE:
                case Types.TIME:
                case Types.TIMESTAMP:
                    type = Type.DATETIME;
                    break;
                case Types.CHAR:
                case Types.VARCHAR:
                case Types.LONGVARCHAR:
                    type = Type.STRING;
                    break;
                default:
                    type = Type.STRING;
                    break;
                }
                field.setId(meta.getColumnLabel(inx + 1));
                field.setName(field.getId());
                field.setType(type);
                field.setSize(size);
                logger.trace("field add {}", field);
                fieldSettingList.add(field);
            }

            setting.setFieldSettingList(fieldSettingList);
            setting.setPrimaryKeySetting(primaryKeySetting);
            setting.setFieldIndexSettingList(fieldIndexSetting);
            setting.setAnalyzerSettingList(analyzerSetting);
            setting.setGroupIndexSettingList(groupIndexSetting);
            setting.setIndexSettingList(indexSetting);

            return setting;
        }
    } catch (IRException e) {
        logger.error("", e);
    } catch (SQLException e) {
        logger.error("", e);
    } finally {
        if (res != null)
            try {
                res.close();
            } catch (SQLException ignore) {
            }
        if (pst != null)
            try {
                pst.close();
            } catch (SQLException ignore) {
            }
        if (con != null)
            try {
                con.close();
            } catch (SQLException ignore) {
            }
    }
    return null;
}
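
Here setFetchSize(1) is paired with setMaxRows(1): the query is executed only to obtain its ResultSetMetaData, so both hints keep the driver from transferring more than a single row while the column names and types are inspected to build the schema.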

From source file:org.geotools.jdbc.JoiningJDBCFeatureSource.java

/**
 * Generates a 'SELECT p1, p2, ... FROM ... WHERE ...' prepared statement.
 *
 * @param featureType
 *            the feature type that the query must return (may contain less
 *            attributes than the native one)
 * @param attributes
 *            the properties queried, or {@link Query#ALL_NAMES} to gather
 *            all of them
 * @param query
 *            the query to be run. The type name and property will be ignored, as they are
 *            supposed to have been already embedded into the provided feature type
 * @param cx
 *            The database connection to be used to create the prepared
 *            statement
 * @throws FilterToSQLException 
 */
protected PreparedStatement selectSQLPS(SimpleFeatureType featureType, JoiningQuery query, Connection cx)
        throws SQLException, IOException, FilterToSQLException {

    AtomicReference<PreparedFilterToSQL> toSQLref = new AtomicReference<PreparedFilterToSQL>();
    String sql = selectSQL(featureType, query, toSQLref);

    LOGGER.fine(sql);
    PreparedStatement ps = cx.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    ps.setFetchSize(getDataStore().fetchSize);

    if (toSQLref.get() != null) {
        getDataStore().setPreparedFilterValues(ps, toSQLref.get(), 0, cx);
    }

    return ps;
}