Example usage for java.sql PreparedStatement clearParameters

Introduction

This page lists example usages of java.sql.PreparedStatement.clearParameters().

Prototype

void clearParameters() throws SQLException;

Document

Clears the current parameter values immediately.
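
Under the JDBC contract, a parameter value set on a PreparedStatement stays in force across repeated executions until it is replaced or cleared; clearParameters() releases all current values at once. The sketch below (a hypothetical EVENTS table and column names) reuses one statement across executions and clears between them:

public static void insertAll(Connection con, String[] names, byte[][] payloads) throws SQLException {
    try (PreparedStatement ps = con.prepareStatement("INSERT INTO EVENTS (NAME, PAYLOAD) VALUES (?, ?)")) {
        for (int i = 0; i < names.length; i++) {
            ps.setString(1, names[i]);
            ps.setBytes(2, payloads[i]);
            ps.executeUpdate();
            // Release the (possibly large) payload immediately rather than
            // holding it until the next setBytes call or close().
            ps.clearParameters();
        }
    }
}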

Usage

From source file:netflow.DatabaseProxy.java

public void saveHosts(Map<String, HostTraffic> cache, java.util.Date date) {
    if (cache.size() == 0) {
        log.debug("Host cache empty");
        return;
    }
    log.debug("Saving " + cache.size() + " records for " + date);
    String sql = getQuery("neflow.details.insert");
    try {
        PreparedStatement pstmt = con.prepareStatement(sql);
        Timestamp t = new java.sql.Timestamp(date.getTime());
        for (String key : cache.keySet()) {
            HostTraffic traffic = cache.get(key);
            if (!hasRecord(t, traffic.getHostAddress(), traffic.getNetworkId())) {
                pstmt.setTimestamp(1, t);
                pstmt.setString(2, traffic.getHostAddress());
                pstmt.setInt(3, traffic.getNetworkId());
                pstmt.setLong(4, traffic.getInputBytes());
                pstmt.setLong(5, traffic.getOutputBytes());
                pstmt.addBatch();
            }
        }
        int[] results = pstmt.executeBatch();
        log.info("saveHosts(): saved " + results.length + " records");
        pstmt.clearParameters(); // must come before close(): a closed statement rejects clearParameters()
        pstmt.close();
    } catch (SQLException e) {
        log.error("Saving hosts error: " + e.getMessage());
        SQLException ex = e.getNextException();
        if (ex != null) {
            log.error(ex.getMessage());
        }
        e.printStackTrace(System.err);
    }
}
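
Because the statement in saveHosts is closed only when no exception is thrown, a try-with-resources variant is safer. Below is a minimal sketch of the same loop, assuming the same con, log, and hasRecord members as the original class:

try (PreparedStatement pstmt = con.prepareStatement(sql)) {
    Timestamp t = new Timestamp(date.getTime());
    for (HostTraffic traffic : cache.values()) {
        if (!hasRecord(t, traffic.getHostAddress(), traffic.getNetworkId())) {
            pstmt.setTimestamp(1, t);
            pstmt.setString(2, traffic.getHostAddress());
            pstmt.setInt(3, traffic.getNetworkId());
            pstmt.setLong(4, traffic.getInputBytes());
            pstmt.setLong(5, traffic.getOutputBytes());
            pstmt.addBatch();
            // Values are captured by addBatch(); clear them before the next row.
            pstmt.clearParameters();
        }
    }
    int[] results = pstmt.executeBatch();
    log.info("saveHosts(): saved " + results.length + " records");
} catch (SQLException e) {
    log.error("Saving hosts error: " + e.getMessage(), e);
}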

From source file:org.netkernelroc.gradle.apposite.Package.java

public void uninstall(Connection connection, File nkInstance) throws Exception {
    final String setNotInstalledSql = "UPDATE PACKAGE_VERSIONS SET INSTALLED=FALSE WHERE ID=?";
    final PreparedStatement setNotInstalledPS = connection.prepareStatement(setNotInstalledSql);

    final String addTransactionEventSql = "INSERT INTO PACKAGE_TRANSACTION_EVENTS VALUES (\n" + "    NULL,\n"
            + "    @TRANSACTIONID,\n" + "    2,\n" + "    ?\n" + ");";
    final PreparedStatement addTransactionEventPS = connection.prepareStatement(addTransactionEventSql);

    // Note: deleteModulesPS is prepared here but never executed in this excerpt.
    final String deleteModulesSql = "DELETE FROM MODULES WHERE PACKAGEVID=?;";
    final PreparedStatement deleteModulesPS = connection.prepareStatement(deleteModulesSql);

    // I believe this only happens if the package was uploaded
    final String deletePackageSql = "DELETE\n" + "FROM   PACKAGES\n" + "WHERE  ID=( SELECT PACKAGES.ID\n"
            + "            FROM    PACKAGES,\n" + "                    PACKAGE_VERSIONS\n"
            + "            WHERE   PACKAGES.ID=PACKAGE_VERSIONS.PACKAGEID\n"
            + "            AND     PACKAGES.REPOCOLLECTIONSETID=1\n"
            + "            AND     PACKAGE_VERSIONS.ID=?\n" + "          );";
    final PreparedStatement deletePackagePS = connection.prepareStatement(deletePackageSql);

    setNotInstalledPS.clearParameters();
    setNotInstalledPS.setLong(1, latestInstalledVersion.getId());
    setNotInstalledPS.executeUpdate();

    addTransactionEventPS.clearParameters();
    addTransactionEventPS.setLong(1, id);
    addTransactionEventPS.executeUpdate();

    deletePackagePS.clearParameters();
    deletePackagePS.setLong(1, latestInstalledVersion.getId());
    deletePackagePS.executeUpdate();

    latestInstalledVersion = null;
}

From source file:org.sakaiproject.search.component.dao.impl.SearchIndexBuilderWorkerDaoJdbcImpl.java

public int countPending(Connection connection) {

    PreparedStatement pst = null;
    ResultSet rst = null;
    try {

        pst = connection.prepareStatement("select count(*) from " //$NON-NLS-1$
                + SEARCH_BUILDER_ITEM_T + " where searchstate = ? "); //$NON-NLS-1$
        pst.clearParameters();
        pst.setInt(1, SearchBuilderItem.STATE_PENDING.intValue());
        rst = pst.executeQuery();
        if (rst.next()) {
            return rst.getInt(1);
        }
        return 0;
    } catch (SQLException sqlex) {
        log.warn("countPending failed", sqlex);
        return 0;
    } finally {
        try {
            rst.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            pst.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
    }

}

From source file:com.pactera.edg.am.metamanager.extractor.dao.helper.CreateMetadataAlterHelper.java

protected void doInPreparedStatement(PreparedStatement ps, String metaModelCode, boolean hasChildMetaModel,
        List<AbstractMetadata> metadatas) throws SQLException {
    try {
        for (AbstractMetadata metadata : metadatas) {
            // sequence ID for the new alteration record
            String sequenceId = sequenceDao.getUuid();
            ps.setString(1, sequenceId);

            // task instance ID
            ps.setString(3, taskInstanceId);
            // metadata ID
            ps.setString(4, metadata.getId());
            // metamodel code
            ps.setString(5, metaModelCode);
            // user ID
            ps.setString(7, userId);

            // alteration time: ALTERATION_TIME
            ps.setLong(9, startTime);

            // disabled 2010-05-18 by fbchen:
            //ps.setString(3, genAttrs(metadata));

            setPs(ps, metadata, metaModelCode, hasChildMetaModel);

            String parentId = metadata.getParentMetadata().getId();
            if (parentId == null || parentId.equals("")) {
                parentId = "0";
            }
            ps.setString(11, parentId);
            ps.addBatch();
            ps.clearParameters();

            if (++super.count % super.batchSize == 0) {
                ps.executeBatch();
                ps.clearBatch();
            }

        }
    } catch (SQLException e) {
        // Log and continue; a single failed batch should not abort the extraction.
        log.warn("Failed to save metadata alteration records", e);
    }

}
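
Note that the helper above flushes the batch only every batchSize rows, leaving any trailing partial batch for the caller to execute. A self-contained sketch of the full pattern (a hypothetical two-column insert), with the final flush included:

static void insertInBatches(Connection con, List<String[]> rows) throws SQLException {
    final int batchSize = 500;
    try (PreparedStatement ps = con.prepareStatement(
            "INSERT INTO T_EXAMPLE (COL_A, COL_B) VALUES (?, ?)")) {
        int count = 0;
        for (String[] row : rows) {
            ps.setString(1, row[0]);
            ps.setString(2, row[1]);
            ps.addBatch();
            ps.clearParameters(); // values are captured in the batch; release them now
            if (++count % batchSize == 0) {
                ps.executeBatch();
                ps.clearBatch(); // defensive; executeBatch() resets the batch per the spec
            }
        }
        ps.executeBatch(); // flush any trailing partial batch (no-op if empty)
    }
}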

From source file:com.archivas.clienttools.arcutils.utils.database.ManagedJobSchema.java

/**
 * Mark processing of a directory complete in the database. Everything is done in a single
 * transaction.
 *
 * @param dir
 *            - the directory for which there was a directory listing
 * @param dirFiles
 *            - the last batch of files in the directory listing
 * @param dirFailureException
 *            - if this is non null, it means that the directory should be "failed" with this
 *            exception
 * @param processingRequired
 *            - true if the directory needs processing, false otherwise
 * @param postProcessingRequired
 *            - true if post processing of the dir is required on success (true for delete jobs)
 * @throws DatabaseException
 *             - any error occurred
 */
public void markDirProcessed(ArcProcessFile dir, Collection<ArcProcessFile> dirFiles,
        Exception dirFailureException, boolean processingRequired, boolean postProcessingRequired)
        throws DatabaseException {
    synchronized (DatabaseResourceManager.DB_LOCK) {
        PooledDbConnection conn = null;
        try {

            conn = connPool.getConnection();
            conn.setAutoCommit(false);

            //
            // Insert into the DB all files in the last batch of the dir listing
            //
            if (!dirFiles.isEmpty()) {
                insertFilesToDiscover(conn, dirFiles, false);
            }

            //
            // Update the status of the directory row
            //
            FileLifeCycle lifeCycle;
            FileStatus status;
            int dirListingInProgress;
            if (dirFailureException != null) {
                lifeCycle = FileLifeCycle.FINDING; // keep life cycle at "finding" so it will be
                                                   // retried if we restart the job
                status = FileStatus.FAILED;
                dirListingInProgress = 1; // don't clear this flag or else on rerun of the job
                                          // we'll add duplicate rows
            } else if (processingRequired) {
                lifeCycle = FileLifeCycle.READY_TO_PROCESS;
                status = FileStatus.NONE;
                dirListingInProgress = 0;
            } else if (postProcessingRequired) {
                lifeCycle = FileLifeCycle.READY_TO_POSTPROCESS;
                status = FileStatus.NONE;
                dirListingInProgress = 0;
            } else {
                lifeCycle = FileLifeCycle.COMPLETE;
                status = FileStatus.SUCCEEDED;
                dirListingInProgress = 0;
            }

            int includeInInventory = (postProcessingRequired ? 1 : 0);

            PreparedStatement stmt = conn.prepareStatement(MARK_DIR_PROCESSED_STMT_NAME,
                    markDirectoryProcessedSql);

            stmt.clearParameters();
            stmt.setInt(1, lifeCycle.ordinal());
            stmt.setInt(2, status.ordinal());
            stmt.setInt(3, includeInInventory);
            if (dirFailureException == null) {
                stmt.setNull(4, java.sql.Types.VARCHAR);
            } else {
                stmt.setString(4, dirFailureException.getMessage());
            }
            stmt.setInt(5, dirListingInProgress);
            stmt.setLong(6, dir.getDatabaseRecordId());
            stmt.executeUpdate();

            //
            // Update stats in the managed_jobs table for this job
            //
            // Only for delete jobs do directories count toward the total object
            // count, and therefore toward the failure total as well.
            int totalCntToAdd = (postProcessingRequired ? 1 : 0);
            int failCntToAdd = (postProcessingRequired && dirFailureException != null ? 1 : 0);
            int failDirCntToAdd = (dirFailureException != null ? 1 : 0);
            if (totalCntToAdd + failCntToAdd + failDirCntToAdd > 0) {
                ManagedJobsSchema.getInstance().updateDirStats(conn, jobId, totalCntToAdd, failCntToAdd,
                        failDirCntToAdd);
            }

            conn.commit();

        } catch (Exception e) {
            rollback(conn);
            throw new DatabaseException(DBUtils.getErrorMessage(
                    "An error occurred marking directory processing complete in " + qualifiedFilesTableName, e),
                    e);
        } finally {
            connPool.returnConnection(conn);
        }
    }
}

From source file:org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor.java

public void saveHostAggregateRecords(Map<TimelineMetric, MetricHostAggregate> hostAggregateMap,
        String phoenixTableName) throws SQLException {

    if (hostAggregateMap == null || hostAggregateMap.isEmpty()) {
        LOG.debug("Empty aggregate records.");
        return;
    }

    Connection conn = getConnection();
    PreparedStatement stmt = null;

    long start = System.currentTimeMillis();
    int rowCount = 0;

    try {
        stmt = conn.prepareStatement(String.format(UPSERT_AGGREGATE_RECORD_SQL, phoenixTableName));

        for (Map.Entry<TimelineMetric, MetricHostAggregate> metricAggregate : hostAggregateMap.entrySet()) {

            TimelineMetric metric = metricAggregate.getKey();
            MetricHostAggregate hostAggregate = metricAggregate.getValue();

            rowCount++;
            stmt.clearParameters();
            stmt.setString(1, metric.getMetricName());
            stmt.setString(2, metric.getHostName());
            stmt.setString(3, metric.getAppId());
            stmt.setString(4, metric.getInstanceId());
            stmt.setLong(5, metric.getTimestamp());
            stmt.setString(6, metric.getType());
            stmt.setDouble(7, hostAggregate.getSum());
            stmt.setDouble(8, hostAggregate.getMax());
            stmt.setDouble(9, hostAggregate.getMin());
            stmt.setDouble(10, hostAggregate.getNumberOfSamples());

            try {
                stmt.executeUpdate();
            } catch (SQLException sqle) {
                LOG.error(sqle);
            }

            if (rowCount >= PHOENIX_MAX_MUTATION_STATE_SIZE - 1) {
                conn.commit();
                rowCount = 0;
            }

        }

        conn.commit();

    } finally {
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                // Ignore
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException sqle) {
                // Ignore
            }
        }
    }

    long end = System.currentTimeMillis();

    if ((end - start) > 60000L) {
        LOG.info("Time to save map: " + (end - start) + ", " + "thread = " + Thread.currentThread().getClass());
    }
}

From source file:com.commander4j.db.JDBUserReport.java

public boolean create() {

    boolean result = false;

    if (!isValidUserReport()) {
        logger.debug("create [" + getReportID() + "]");

        try {
            PreparedStatement stmtupdate;
            stmtupdate = Common.hostList.getHost(getHostID()).getConnection(getSessionID()).prepareStatement(
                    Common.hostList.getHost(getHostID()).getSqlstatements().getSQL("JDBUserReport.create"));
            stmtupdate.setString(1, getReportID());
            stmtupdate.execute();
            stmtupdate.clearParameters();
            Common.hostList.getHost(getHostID()).getConnection(getSessionID()).commit();
            stmtupdate.close();
            update();
            result = true;
        } catch (SQLException e) {
            setErrorMessage(e.getMessage());
        }
    }

    return result;
}

From source file:org.sakaiproject.search.component.dao.impl.SearchIndexBuilderWorkerDaoJdbcImpl.java

private List getSiteMasterItems(Connection connection) throws SQLException {
    PreparedStatement pst = null;
    ResultSet rst = null;
    try {
        pst = connection.prepareStatement("select " //$NON-NLS-1$
                + SEARCH_BUILDER_ITEM_FIELDS + " from " //$NON-NLS-1$
                + SEARCH_BUILDER_ITEM_T + " where itemscope = ? "); //$NON-NLS-1$
        pst.clearParameters();
        pst.setInt(1, SearchBuilderItem.ITEM_SITE_MASTER.intValue());
        rst = pst.executeQuery();
        ArrayList<SearchBuilderItemImpl> a = new ArrayList<SearchBuilderItemImpl>();
        while (rst.next()) {
            SearchBuilderItemImpl sbi = new SearchBuilderItemImpl();
            populateSearchBuilderItem(rst, sbi);
            a.add(sbi);
        }
        return a;
    } finally {
        try {
            rst.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            pst.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
    }
}

From source file:org.sakaiproject.search.component.dao.impl.SearchIndexBuilderWorkerDaoJdbcImpl.java

/**
 * Get the global master item, creating a default in-memory instance if none
 * is stored.
 * 
 * @return the global master SearchBuilderItem
 * @throws SQLException
 */
private SearchBuilderItem getMasterItem(Connection connection) throws SQLException {
    log.debug("get Master Items with " + connection); //$NON-NLS-1$

    PreparedStatement pst = null;
    ResultSet rst = null;
    try {
        pst = connection.prepareStatement("select " //$NON-NLS-1$
                + SEARCH_BUILDER_ITEM_FIELDS + " from " //$NON-NLS-1$
                + SEARCH_BUILDER_ITEM_T + " where itemscope = ? "); //$NON-NLS-1$
        pst.clearParameters();
        pst.setInt(1, SearchBuilderItem.ITEM_GLOBAL_MASTER.intValue());
        rst = pst.executeQuery();
        SearchBuilderItemImpl sbi = new SearchBuilderItemImpl();
        if (rst.next()) {
            populateSearchBuilderItem(rst, sbi);
        } else {
            sbi.setName(SearchBuilderItem.INDEX_MASTER);
            sbi.setContext(SearchBuilderItem.GLOBAL_CONTEXT);
            sbi.setSearchaction(SearchBuilderItem.ACTION_UNKNOWN);
            sbi.setSearchstate(SearchBuilderItem.STATE_UNKNOWN);
            sbi.setItemscope(SearchBuilderItem.ITEM_GLOBAL_MASTER);
        }
        return sbi;
    } finally {
        try {
            rst.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            pst.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
    }
}

From source file:org.sakaiproject.search.component.dao.impl.SearchIndexBuilderWorkerDaoJdbcImpl.java

private void save(Connection connection, SearchBuilderItem sbi) throws SQLException {
    PreparedStatement pst = null;
    try {
        pst = connection.prepareStatement(" insert into " //$NON-NLS-1$
                + SEARCH_BUILDER_ITEM_T + " ( " //$NON-NLS-1$
                + SEARCH_BUILDER_ITEM_FIELDS + " ) values ( " //$NON-NLS-1$
                + SEARCH_BUILDER_ITEM_FIELDS_PARAMS + " ) "); //$NON-NLS-1$
        pst.clearParameters();
        populateStatement(pst, sbi);
        pst.executeUpdate();
    } finally {
        try {
            pst.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
    }

}