Example usage for java.sql PreparedStatement clearParameters

Introduction

This page collects usage examples for the java.sql.PreparedStatement.clearParameters() method.

Prototype

void clearParameters() throws SQLException;

Document

Clears the current parameter values immediately.
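
A minimal sketch of the typical call pattern (the Connection conn and the users table are placeholders, not taken from the examples below): clearing the bindings lets one statement be reused safely for several executions.

PreparedStatement ps = conn.prepareStatement("INSERT INTO users (id, name) VALUES (?, ?)");
try {
    ps.setInt(1, 1);
    ps.setString(2, "alice");
    ps.executeUpdate();

    // drop the previous bindings so no stale value can leak into the next execution
    ps.clearParameters();

    ps.setInt(1, 2);
    ps.setString(2, "bob");
    ps.executeUpdate();
} finally {
    ps.close();
}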

Usage

From source file:org.apache.jmeter.protocol.jdbc.AbstractJDBCwoTimeOutTestElement.java
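
Here clearParameters() is called on every statement fetched from a per-connection LRU cache, so a reused statement never carries bindings from a previous sample.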

private PreparedStatement getPreparedStatement(final Connection conn, final boolean callable)
        throws SQLException {
    Map<String, PreparedStatement> preparedStatementMap = perConnCache.get(conn);
    if (null == preparedStatementMap) {
        @SuppressWarnings("unchecked") // LRUMap is not generic
        final Map<String, PreparedStatement> lruMap = new LRUMap(MAX_OPEN_PREPARED_STATEMENTS) {
            private static final long serialVersionUID = 1L;

            @Override
            protected boolean removeLRU(final LinkEntry entry) {
                final PreparedStatement preparedStatement = (PreparedStatement) entry.getValue();
                close(preparedStatement);
                return true;
            }
        };
        preparedStatementMap = Collections.<String, PreparedStatement>synchronizedMap(lruMap);
        // As a connection is held by only one thread, we cannot already have a 
        // preparedStatementMap put by another thread
        perConnCache.put(conn, preparedStatementMap);
    }
    PreparedStatement pstmt = preparedStatementMap.get(getQuery());
    if (null == pstmt) {
        if (callable) {
            pstmt = conn.prepareCall(getQuery());
        } else {
            pstmt = conn.prepareStatement(getQuery());
        }
        // pstmt.setQueryTimeout(getIntegerQueryTimeout()); // intentionally disabled in this no-timeout variant
        // PreparedStatementMap is associated to one connection so 
        //  2 threads cannot use the same PreparedStatement map at the same time
        preparedStatementMap.put(getQuery(), pstmt);
    } else {
        final int timeoutInS = getIntegerQueryTimeout();
        if (pstmt.getQueryTimeout() != timeoutInS) {
            // pstmt.setQueryTimeout(getIntegerQueryTimeout()); // intentionally disabled in this no-timeout variant
        }
    }
    pstmt.clearParameters();
    return pstmt;
}

From source file:com.archivas.clienttools.arcutils.utils.database.ManagedJobSchema.java
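
This method reuses a named statement obtained from a pooled connection and clears any previous bindings before setting the new life-cycle values.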

public void markFileComplete(ArcProcessFile file) throws DatabaseException {
    synchronized (DatabaseResourceManager.DB_LOCK) {
        PooledDbConnection conn = null;
        try {

            conn = connPool.getConnection();

            PreparedStatement stmt = conn.prepareStatement(UPDATE_LIFE_CYCLE_STMT_NAME, updateLifeCycleSql);
            stmt.clearParameters();
            stmt.setInt(1, FileLifeCycle.COMPLETE.ordinal());
            stmt.setLong(2, file.getDatabaseRecordId());
            stmt.executeUpdate();

        } catch (Exception e) {
            throw new DatabaseException(DBUtils
                    .getErrorMessage("An error occurred updating the life cycle of a file to complete", e), e);
        } finally {
            connPool.returnConnection(conn);
        }
    }
}

From source file:org.sakaiproject.search.indexer.impl.SearchBuilderQueueManager.java
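
Inside the loop, clearParameters() resets the shared update statement before each item's state and id are bound.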

/**
 * @param runtimeToDo
 * @param connection
 * @throws SQLException
 */
private void rollbackPendingAndUnLock(List<SearchBuilderItem> runtimeToDo, Connection connection)
        throws SQLException {
    PreparedStatement unLockPst = null;
    try {
        unLockPst = connection.prepareStatement("update " //$NON-NLS-1$
                + SEARCH_BUILDER_ITEM_T + " set searchstate = ? " //$NON-NLS-1$
                + " where id = ?  and  searchstate = ? "); //$NON-NLS-1$
        for (Iterator<SearchBuilderItem> isbi = runtimeToDo.iterator(); isbi.hasNext();) {

            SearchBuilderItem sbi = isbi.next();
            unLockPst.clearParameters();
            if (SearchBuilderItem.STATE_FAILED.equals(sbi.getSearchstate())) {
                sbi.setSearchstate(SearchBuilderItem.STATE_FAILED);
                unLockPst.setInt(1, SearchBuilderItem.STATE_FAILED.intValue());
            } else {
                sbi.setSearchstate(SearchBuilderItem.STATE_PENDING);
                unLockPst.setInt(1, SearchBuilderItem.STATE_PENDING.intValue());
            }
            unLockPst.setString(2, sbi.getId());
            unLockPst.setInt(3, sbi.getSearchstate());
            // executeUpdate() returns the number of rows changed; anything but 1 means the update missed
            if (unLockPst.executeUpdate() != 1) {
                log.warn("Failed to mark " + sbi.getName() + " as pending");
            }
            connection.commit();
        }
    } finally {
        try {
            if (unLockPst != null) {
                unLockPst.close();
            }
        } catch (Exception ex) {
            log.warn("Error closing statement", ex);
        }
    }
}

From source file:org.sakaiproject.search.component.dao.impl.SearchIndexBuilderWorkerDaoJdbcImpl.java
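
A freshly prepared delete statement is cleared before its single parameter is bound; this is defensive, since a brand-new statement has no bindings yet.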

private void delete(Connection connection, SearchBuilderItem sbi) throws SQLException {
    PreparedStatement pst = null;
    try {
        pst = connection.prepareStatement(" delete from " //$NON-NLS-1$
                + SEARCH_BUILDER_ITEM_T + " where id = ? "); //$NON-NLS-1$
        pst.clearParameters();
        pst.setString(1, sbi.getId());
        pst.execute();
    } catch (SQLException ex) {
        log.warn("Failed ", ex); //$NON-NLS-1$
        throw ex;
    } finally {
        try {
            if (pst != null) {
                pst.close();
            }
        } catch (Exception ex) {
            log.debug(ex);
        }
    }

}

From source file:com.pactera.edg.am.metamanager.extractor.dao.helper.CreateCompositionHelper.java
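
In this batch insert, clearParameters() is called right after addBatch() so the next iteration starts from a clean set of bindings.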

private void doInPreparedStatement(MMMetaModel metaModel, PreparedStatement ps) throws SQLException {
    List<AbstractMetadata> metadatas = metaModel.getMetadatas();
    String parentMetaModelId = metaModel.getParentMetaModel().getCode();
    String metaModelId = metaModel.getCode();
    String relationshipName = metaModel.getCompedRelationCode();
    long sysTime = AdapterExtractorContext.getInstance().getGlobalTime();

    for (int i = 0, size = metadatas.size(); i < size; i++) {

        AbstractMetadata metadata = metadatas.get(i);
        if (metadata.isHasExist()) {
            // the metadata already exists, so skip it
            continue;
        }

        // parent metadata ID
        ps.setString(1, metadata.getParentMetadata().getId());
        // parent metamodel ID
        ps.setString(2, parentMetaModelId);
        // metadata ID
        ps.setString(3, metadata.getId());
        // metamodel ID
        ps.setString(4, metaModelId);
        // composition relationship name
        ps.setString(5, relationshipName);
        // extraction timestamp
        ps.setLong(6, sysTime);

        setPs(ps, 6);

        ps.addBatch();
        ps.clearParameters();

        if (++super.count % super.batchSize == 0) {
            ps.executeBatch();
            ps.clearBatch();
        }
    }

}

From source file:com.archivas.clienttools.arcutils.utils.database.ManagedJobSchema.java
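
Before binding the path for the lookup query, the pooled statement's previous parameters are cleared.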

public boolean isFileAlreadyDiscovered(String path) throws DatabaseException {
    synchronized (DatabaseResourceManager.DB_LOCK) {
        PooledDbConnection conn = null;
        ResultSet rs;
        try {
            conn = connPool.getConnection();

            PreparedStatement stmt = conn.prepareStatement(GET_FILE_IN_TEMP_TABLE_STMT_NAME,
                    getFileInTempTableSql);
            stmt.clearParameters();
            stmt.setString(1, path);
            rs = stmt.executeQuery();

            boolean ret = rs.next();
            rs.close();
            return ret;

        } catch (Exception e) {
            throw new DatabaseException(
                    DBUtils.getErrorMessage("An error occurred selecting from temp table", e), e);
        } finally {
            connPool.returnConnection(conn);
        }
    }
}

From source file:com.pactera.edg.am.metamanager.extractor.dao.helper.DeleteMetadataAlterHelper.java
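
Another batch pattern: parameters are cleared after each addBatch(), and the batch is flushed whenever the configured batch size is reached.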

protected void doInPreparedStatement(PreparedStatement ps, String metaModelCode, boolean hasChildMetaModel,
        List<AbstractMetadata> metadatas) throws SQLException {
    try {
        for (AbstractMetadata metadata : metadatas) {
            // generated sequence ID
            String sequenceId = sequenceDao.getUuid();
            ps.setString(1, sequenceId);
            // alteration flag, fixed to "1" (this helper records deletions)
            ps.setString(2, "1");
            // task instance ID
            ps.setString(3, taskInstanceId);
            // ps.setString(4, metadata.getId());
            // ps.setString(5, metaModelCode);
            // user ID
            ps.setString(4, userId);

            // START_TIME: the metadata's original start time
            ps.setLong(5, metadata.getStartTime());
            // ALTERATION_TIME: when this alteration was recorded
            ps.setLong(6, startTime);

            // OLD_START_TIME is not known here, so store NULL
            ps.setNull(7, java.sql.Types.BIGINT);
            // metadata ID
            ps.setString(8, metadata.getId());

            ps.addBatch();
            ps.clearParameters();

            if (++super.count % super.batchSize == 0) {
                ps.executeBatch();
                ps.clearBatch();
            }

        }
    } catch (SQLException e) {
        // a failure here is logged and swallowed so the rest of the extraction can continue
        log.warn("Failed to batch metadata deletion records", e);
    }

}

From source file:com.archivas.clienttools.arcutils.utils.database.ManagedJobSchema.java
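
The statement obtained from the pooled connection is cleared before the directory record id is bound for the query.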

private void getFilesInDirInProgress(PooledDbConnection conn, long dirRecordId) throws SQLException {
    PreparedStatement filesInDirStmt = conn.prepareStatement(GET_FILES_IN_DIR_IN_PROGRESS_STMT_NAME,
            getFilesInDirInProgressSql);

    filesInDirStmt.clearParameters();
    filesInDirStmt.setLong(1, dirRecordId);

    Set<String> paths = new HashSet<String>();
    ResultSet rs = filesInDirStmt.executeQuery();
    while (rs.next()) {
        String path = rs.getString(ManagedJobFilesTableColumn.SOURCE_PATH.toString());
        paths.add(path);
        if (paths.size() >= HCPMoverProperties.PREPROCESS_FILES_BATCH_SIZE.getAsInt()) {
            insertIntoTempTable(conn, paths);
            paths.clear();
        }
    }
    if (paths.size() > 0) {
        insertIntoTempTable(conn, paths);
    }

    rs.close();
}

From source file:com.archivas.clienttools.arcutils.utils.database.ManagedJobSchema.java
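
Each loop iteration clears the statement, binds one path, and adds it to the batch that is executed and committed at the end.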

private void insertIntoTempTable(PooledDbConnection conn, Set<String> paths) throws SQLException {
    PreparedStatement insertStmt = conn.prepareStatement(INSERT_INTO_TEMP_TABLE_STMT_NAME,
            insertIntoTempTableSql);

    for (String path : paths) {
        insertStmt.clearParameters();
        insertStmt.setString(1, path);
        insertStmt.addBatch();
    }

    insertStmt.executeBatch();
    conn.commit();
}

From source file:com.archivas.clienttools.arcutils.utils.database.ManagedJobSchema.java
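
When processing the first batch, the cached "mark directory listing in progress" statement is cleared before the directory record id is set.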

public void insertDirListingBatch(Collection<ArcProcessFile> dirFiles, ArcProcessFile dir, boolean firstBatch)
        throws DatabaseException {
    synchronized (DatabaseResourceManager.DB_LOCK) {
        PooledDbConnection conn = null;
        try {
            conn = connPool.getConnection();
            conn.setAutoCommit(false);

            if (firstBatch) {
                PreparedStatement stmt = conn.prepareStatement(MARK_DIR_LISTING_IN_PROGRESS_STMT_NAME,
                        markDirListingInProgressSql);
                stmt.clearParameters();
                stmt.setLong(1, dir.getDatabaseRecordId());
                stmt.executeUpdate();
            }

            insertFilesToDiscover(conn, dirFiles, false);

            conn.commit();
        } catch (Exception e) {
            rollback(conn);
            throw new DatabaseException(
                    DBUtils.getErrorMessage("An error occurred inserting rows into the database", e), e);
        } finally {
            connPool.returnConnection(conn);
        }
    }
}