Example usage for java.sql PreparedStatement setBytes

Introduction

This page collects usage examples for the java.sql PreparedStatement.setBytes method from open source projects.

Prototype

void setBytes(int parameterIndex, byte[] x) throws SQLException;

Document

Sets the designated parameter to the given Java array of bytes. The driver converts this to an SQL VARBINARY or LONGVARBINARY (depending on the argument's size relative to the driver's limits on VARBINARY values) when it sends it to the database.
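
As a quick illustration before the real-world examples, here is a minimal, self-contained sketch. The in-memory H2 URL, table name, and column names are assumptions made for this example; any JDBC driver works the same way:

import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class SetBytesExample {
    public static void main(String[] args) throws SQLException {
        // Hypothetical in-memory H2 database; substitute any JDBC URL.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            conn.createStatement().execute("CREATE TABLE documents (id INT, payload VARBINARY(255))");
            try (PreparedStatement ps = conn.prepareStatement(
                    "INSERT INTO documents (id, payload) VALUES (?, ?)")) {
                ps.setInt(1, 1);
                // Bind the byte array to the second parameter marker.
                ps.setBytes(2, "hello".getBytes(StandardCharsets.UTF_8));
                ps.executeUpdate();
            }
        }
    }
}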

Usage

From source file: uk.nhs.cfh.dsp.srth.query.transform.sql.impl.MySQLReportingQueryExecutionEngineService.java

private String getTableNameForSubtypeHierarchy(TerminologyConstraint includedConstraint,
        Collection<TerminologyConstraint> excludedConstraints, QueryStatisticsCollection collection)
        throws SQLException {

    String id = includedConstraint.getUuid().toString().replace('-', '_');
    String tableName = "Hierarchy_" + id;
    if (logger.isDebugEnabled()) {
        logger.debug("Value of tableName : " + tableName);
    }

    String tableCreateString = "" + "CREATE TABLE " + tableName + " " + "(" + "CONCEPT_ID "
            + conceptIdCoulumnDef + " NOT NULL " + ")";

    // create table with index as needed
    String[] indexCols = { "CONCEPT_ID" };
    createTable(tableName, tableCreateString, false, indexCols, collection);

    String populateString = "INSERT INTO " + tableName + " VALUES (?)";
    PreparedStatement ps = connection.prepareStatement(populateString);
    if (EngineStatus.DEBUG == getStatus()) {
        collection.getCommentedSteps().add("Populating table using statement : ");
        collection.getCommentedSteps().add(populateString);
    }

    // get subtypes for included constraint
    for (UUID subtypeId : subtypeGetter.getSubTypeIdsForConstraint(includedConstraint)) {
        // add subtypeId to table
        ps.setBytes(1, asByteArray(subtypeId));
        ps.executeUpdate();
        if (EngineStatus.DEBUG == getStatus()) {
            collection.getCommentedSteps().add(
                    "Added descendant concept " + subtypeId + " to table " + tableName + " using statement :");
            collection.getCommentedSteps().add("INSERT INTO " + tableName + " VALUES (" + subtypeId + ")");
            collection.getSqlSteps().add("INSERT INTO " + tableName + " VALUES (" + subtypeId + ")");
        }
    }

    // close statement
    ps.close();
    if (logger.isDebugEnabled()) {
        logger.debug("Created table for subtype hierarchy. Table name = " + tableName);
    }

    /*
     * Handle excluded terms recursively, since excluded terms might have
     * their own subsumption strategy specified.
     */

    for (TerminologyConstraint excludedConstraint : excludedConstraints) {
        // recursive call to get table name
        String excludedTermsTable = getTableNameForSubtypeHierarchy(excludedConstraint,
                Collections.<TerminologyConstraint>emptySet(), collection);
        // go through contents of table and remove entries
        PreparedStatement deleteStm = connection
                .prepareStatement("DELETE FROM " + tableName + " WHERE CONCEPT_ID = ?");
        ResultSet terms = connection.createStatement()
                .executeQuery("SELECT CONCEPT_ID FROM " + excludedTermsTable);
        while (terms.next()) {
            String exclTermId = terms.getString(1);
            if (logger.isDebugEnabled()) {
                logger.debug("Deleting excluded term : " + exclTermId);
            }
            deleteStm.setString(1, exclTermId);
            deleteStm.executeUpdate();
        }

        // close statements
        terms.close();
        deleteStm.close();
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Removed excluded terms from table name : " + tableName);
    }
    // add table to temp tables file list
    updateTablesConfigFile(tableName);

    return tableName;
}
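
The example above relies on a project-local asByteArray helper that is not shown in the excerpt. A plausible sketch (an assumption, not the project's actual code) packs the UUID's two long halves into a 16-byte array:

import java.nio.ByteBuffer;
import java.util.UUID;

// Hypothetical reconstruction of the asByteArray helper used above.
private static byte[] asByteArray(UUID uuid) {
    ByteBuffer buffer = ByteBuffer.allocate(16);
    buffer.putLong(uuid.getMostSignificantBits());
    buffer.putLong(uuid.getLeastSignificantBits());
    return buffer.array();
}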

From source file: org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java

@Override
public void moveConfigOperations() throws ArchivalDAOException {
    Statement stmt = null;
    PreparedStatement stmt2 = null;
    Statement stmt3 = null;
    ResultSet rs = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        String sql = "SELECT * FROM DM_CONFIG_OPERATION WHERE OPERATION_ID IN "
                + "(SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt = this.createMemoryEfficientStatement(conn);
        rs = stmt.executeQuery(sql);

        Connection conn2 = ArchivalDestinationDAOFactory.getConnection();

        sql = "INSERT INTO DM_CONFIG_OPERATION_ARCH VALUES(?, ?, ?, ?)";
        stmt2 = conn2.prepareStatement(sql);

        int count = 0;
        while (rs.next()) {
            stmt2.setInt(1, rs.getInt("OPERATION_ID"));
            stmt2.setBytes(2, rs.getBytes("OPERATION_CONFIG"));
            stmt2.setInt(3, rs.getInt("ENABLED"));
            stmt2.setTimestamp(4, this.currentTimestamp);
            stmt2.addBatch();

            if (++count % batchSize == 0) {
                stmt2.executeBatch();
            }
        }
        stmt2.executeBatch();
        if (log.isDebugEnabled()) {
            log.debug(count + " [CONFIG_OPERATION] Records copied to the archival table. Starting deletion");
        }
        sql = "DELETE FROM DM_CONFIG_OPERATION"
                + "  WHERE OPERATION_ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt3 = conn.createStatement();
        int affected = stmt3.executeUpdate(sql);
        if (log.isDebugEnabled()) {
            log.debug(affected + " Rows deleted");
        }
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while moving config operations", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt, rs);
        ArchivalDAOUtil.cleanupResources(stmt2);
        ArchivalDAOUtil.cleanupResources(stmt3);
    }
}

From source file: org.agnitas.dao.impl.ImportRecipientsDaoImpl.java

@Override
public void updateRecipients(final Map<ProfileRecipientFields, ValidatorResults> recipientBeans,
        Integer adminID, final int type, final ImportProfile profile, int datasource_id,
        CSVColumnState[] columns) {
    if (recipientBeans.isEmpty()) {
        return;
    }
    final JdbcTemplate template = getJdbcTemplateForTemporaryTable();
    final String prefix = "cust_" + adminID + "_tmp_";
    final String tableName = prefix + datasource_id + "_tbl";
    final ProfileRecipientFields[] recipients = recipientBeans.keySet()
            .toArray(new ProfileRecipientFields[recipientBeans.keySet().size()]);
    String keyColumn = profile.getKeyColumn();
    List<String> keyColumns = profile.getKeyColumns();
    String duplicateSql = "";
    if (keyColumns.isEmpty()) {
        duplicateSql += " column_duplicate_check_0=? ";
    } else {
        for (int i = 0; i < keyColumns.size(); i++) {
            duplicateSql += " column_duplicate_check_" + i + "=? ";
            if (i != keyColumns.size() - 1) {
                duplicateSql += ", ";
            }
        }
    }
    final String query = "UPDATE  " + tableName + " SET recipient=?, validator_result=?, status_type=?, "
            + duplicateSql + " WHERE temporary_id=?";
    final List<CSVColumnState> temporaryKeyColumns = new ArrayList<CSVColumnState>();
    for (CSVColumnState column : columns) {
        if (keyColumns.isEmpty()) {
            if (column.getColName().equals(keyColumn) && column.getImportedColumn()) {
                temporaryKeyColumns.add(column);
            }
        } else {
            for (String columnName : keyColumns) {
                if (column.getColName().equals(columnName) && column.getImportedColumn()) {
                    temporaryKeyColumns.add(column);
                    break;
                }
            }
        }
    }
    final BatchPreparedStatementSetter setter = new BatchPreparedStatementSetter() {

        public void setValues(PreparedStatement ps, int i) throws SQLException {
            ps.setBytes(1, ImportUtils.getObjectAsBytes(recipients[i]));
            ps.setBytes(2, ImportUtils.getObjectAsBytes(recipientBeans.get(recipients[i])));
            ps.setInt(3, type);
            for (int j = 0; j < temporaryKeyColumns.size(); j++) {
                setPreparedStatmentForCurrentColumn(ps, 4 + j, temporaryKeyColumns.get(j), recipients[i],
                        profile, recipientBeans.get(recipients[i]));
            }
            ps.setString(4 + temporaryKeyColumns.size(), recipients[i].getTemporaryId());

            if (logger.isInfoEnabled()) {
                logger.info("Import ID: " + profile.getImportId() + " Updating recipient in temp-table: "
                        + Toolkit.getValueFromBean(recipients[i], profile.getKeyColumn()));
            }
        }

        public int getBatchSize() {
            return recipientBeans.size();
        }
    };
    template.batchUpdate(query, setter);
}

From source file: org.agnitas.dao.impl.ImportRecipientsDaoImpl.java

@Override
public void createRecipients(final Map<ProfileRecipientFields, ValidatorResults> recipientBeansMap,
        final Integer adminID, final ImportProfile profile, final Integer type, int datasource_id,
        CSVColumnState[] columns) {
    if (recipientBeansMap.isEmpty()) {
        return;
    }
    final JdbcTemplate template = getJdbcTemplateForTemporaryTable();
    final String prefix = "cust_" + adminID + "_tmp_";
    final String tableName = prefix + datasource_id + "_tbl";
    final ProfileRecipientFields[] recipients = recipientBeansMap.keySet()
            .toArray(new ProfileRecipientFields[recipientBeansMap.keySet().size()]);
    String keyColumn = profile.getKeyColumn();
    List<String> keyColumns = profile.getKeyColumns();
    String duplicateSql = "";
    String duplicateSqlParams = "";
    if (keyColumns.isEmpty()) {
        duplicateSql += " column_duplicate_check_0 ";
        duplicateSqlParams = "?";
    } else {
        for (int i = 0; i < keyColumns.size(); i++) {
            duplicateSql += "column_duplicate_check_" + i;
            duplicateSqlParams += "?";
            if (i != keyColumns.size() - 1) {
                duplicateSql += ",";
                duplicateSqlParams += ",";
            }
        }
    }
    final List<CSVColumnState> temporaryKeyColumns = new ArrayList<CSVColumnState>();
    for (CSVColumnState column : columns) {
        if (keyColumns.isEmpty()) {
            if (column.getColName().equals(keyColumn) && column.getImportedColumn()) {
                temporaryKeyColumns.add(column);
            }
        } else {
            for (String columnName : keyColumns) {
                if (column.getColName().equals(columnName) && column.getImportedColumn()) {
                    temporaryKeyColumns.add(column);
                    break;
                }
            }
        }
    }
    final String query = "INSERT INTO " + tableName
            + " (recipient, validator_result, temporary_id, status_type, " + duplicateSql + ") VALUES (?,?,?,?,"
            + duplicateSqlParams + ")";
    final BatchPreparedStatementSetter setter = new BatchPreparedStatementSetter() {

        public void setValues(PreparedStatement ps, int i) throws SQLException {
            ps.setBytes(1, ImportUtils.getObjectAsBytes(recipients[i]));
            ps.setBytes(2, ImportUtils.getObjectAsBytes(recipientBeansMap.get(recipients[i])));
            ps.setString(3, recipients[i].getTemporaryId());
            ps.setInt(4, type);
            for (int j = 0; j < temporaryKeyColumns.size(); j++) {
                setPreparedStatmentForCurrentColumn(ps, 5 + j, temporaryKeyColumns.get(j), recipients[i],
                        profile, recipientBeansMap.get(recipients[i]));
            }

            if (logger.isInfoEnabled()) {
                logger.info("Import ID: " + profile.getImportId() + " Adding recipient to temp-table: "
                        + Toolkit.getValueFromBean(recipients[i], profile.getKeyColumn()));
            }
        }

        public int getBatchSize() {
            return recipientBeansMap.size();
        }
    };
    template.batchUpdate(query, setter);
}

From source file: org.cerberus.crud.dao.impl.TestCaseCountryPropertiesDAO.java

@Override
public void insertTestCaseCountryProperties(TestCaseCountryProperties testCaseCountryProperties)
        throws CerberusException {
    boolean throwExcep = false;
    StringBuilder query = new StringBuilder();
    query.append(
            "INSERT INTO testcasecountryproperties (`Test`,`TestCase`,`Country`,`Property`,`Description`,`Type`");
    query.append(",`Database`,`Value1`,`Value2`,`Length`,`RowLimit`,`Nature`,`RetryNb`,`RetryPeriod`) ");
    query.append("VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)");

    Connection connection = this.databaseSpring.connect();
    try {
        PreparedStatement preStat = connection.prepareStatement(query.toString());
        try {
            preStat.setString(1, testCaseCountryProperties.getTest());
            preStat.setString(2, testCaseCountryProperties.getTestCase());
            preStat.setString(3, testCaseCountryProperties.getCountry());
            preStat.setBytes(4, testCaseCountryProperties.getProperty().getBytes("UTF-8"));
            preStat.setBytes(5, testCaseCountryProperties.getDescription().getBytes("UTF-8"));
            preStat.setString(6, testCaseCountryProperties.getType());
            preStat.setString(7, testCaseCountryProperties.getDatabase());
            preStat.setBytes(8, testCaseCountryProperties.getValue1().getBytes("UTF-8"));
            preStat.setBytes(9, testCaseCountryProperties.getValue2().getBytes("UTF-8"));
            preStat.setInt(10, testCaseCountryProperties.getLength());
            preStat.setInt(11, testCaseCountryProperties.getRowLimit());
            preStat.setString(12, testCaseCountryProperties.getNature());
            preStat.setInt(13, testCaseCountryProperties.getRetryNb());
            preStat.setInt(14, testCaseCountryProperties.getRetryPeriod());

            preStat.executeUpdate();

        } catch (SQLException exception) {
            LOG.error("Unable to execute query : " + exception.toString());
            throwExcep = true;
        } catch (UnsupportedEncodingException ex) {
            LOG.error(ex.toString());
            throwExcep = true;
        } finally {
            preStat.close();
        }
    } catch (SQLException exception) {
        LOG.error("Unable to execute query : " + exception.toString());
        throwExcep = true;
    } finally {
        try {
            if (connection != null) {
                connection.close();
            }
        } catch (SQLException e) {
            LOG.warn(e.toString());
        }
    }
    if (throwExcep) {
        throw new CerberusException(new MessageGeneral(MessageGeneralEnum.CANNOT_UPDATE_TABLE));
    }
}
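
A side note on the UTF-8 handling above: since Java 7, passing java.nio.charset.StandardCharsets.UTF_8 to String.getBytes avoids the checked UnsupportedEncodingException, so the extra catch block would no longer be needed. A minimal sketch of the equivalent binding, written as a hypothetical helper:

import java.nio.charset.StandardCharsets;
import java.sql.PreparedStatement;
import java.sql.SQLException;

// Hypothetical helper: the same setBytes binding without the checked encoding exception.
static void bindUtf8(PreparedStatement preStat, int index, String value) throws SQLException {
    preStat.setBytes(index, value.getBytes(StandardCharsets.UTF_8));
}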

From source file: com.feedzai.commons.sql.abstraction.engine.impl.H2Engine.java

@Override
protected int entityToPreparedStatement(final DbEntity entity, final PreparedStatement ps,
        final EntityEntry entry, final boolean useAutoInc) throws DatabaseEngineException {

    int i = 1;
    for (DbColumn column : entity.getColumns()) {
        if ((column.isAutoInc() && useAutoInc)) {
            continue;
        }

        try {
            final Object val;
            if (column.isDefaultValueSet() && !entry.containsKey(column.getName())) {
                val = column.getDefaultValue().getConstant();
            } else {
                val = entry.get(column.getName());
            }

            switch (column.getDbColumnType()) {
            case BLOB:
                ps.setBytes(i, objectToArray(val));

                break;
            case CLOB:
                if (val == null) {
                    ps.setNull(i, Types.CLOB);
                    break;
                }

                if (val instanceof String) {
                    StringReader sr = new StringReader((String) val);
                    ps.setCharacterStream(i, sr);
                } else {
                    throw new DatabaseEngineException("Cannot convert " + val.getClass().getSimpleName()
                            + " to String. CLOB columns only accept Strings.");
                }
                break;
            default:
                ps.setObject(i, val);
            }
        } catch (Exception ex) {
            throw new DatabaseEngineException("Error while mapping variables to database", ex);
        }

        i++;
    }

    return i - 1;
}

From source file: com.feedzai.commons.sql.abstraction.engine.impl.PostgreSqlEngine.java

@Override
protected int entityToPreparedStatement(final DbEntity entity, final PreparedStatement ps,
        final EntityEntry entry, final boolean useAutoInc) throws DatabaseEngineException {

    int i = 1;
    for (DbColumn column : entity.getColumns()) {
        if (column.isAutoInc() && useAutoInc) {
            continue;
        }

        try {
            final Object val;
            if (column.isDefaultValueSet() && !entry.containsKey(column.getName())) {
                val = column.getDefaultValue().getConstant();
            } else {
                val = entry.get(column.getName());
            }

            switch (column.getDbColumnType()) {
            case BLOB:
                ps.setBytes(i, objectToArray(val));

                break;
            case CLOB:
                if (val == null) {
                    ps.setNull(i, Types.CLOB);
                    break;
                }

                if (val instanceof String) {
                    // The PostgreSQL JDBC driver does not implement setClob,
                    // so the value is bound as a plain String instead.
                    ps.setString(i, (String) val);
                } else {
                    throw new DatabaseEngineException("Cannot convert " + val.getClass().getSimpleName()
                            + " to String. CLOB columns only accept Strings.");
                }
                break;
            default:
                ps.setObject(i, val);
            }
        } catch (Exception ex) {
            throw new DatabaseEngineException("Error while mapping variables to database", ex);
        }

        i++;
    }

    return i - 1;
}
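
Both Feedzai engine implementations above delegate the BLOB case to an objectToArray helper defined elsewhere in the engine hierarchy. A plausible sketch, assuming plain Java serialization (not necessarily the library's actual implementation):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;

// Hypothetical reconstruction: serialize the value to a byte[] for setBytes.
protected byte[] objectToArray(Object val) throws IOException {
    if (val == null) {
        return null; // most drivers treat a null byte[] as SQL NULL
    }
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
        oos.writeObject(val);
    }
    return bos.toByteArray();
}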

From source file: org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java

@Override
public void moveProfileOperations() throws ArchivalDAOException {
    Statement stmt = null;
    PreparedStatement stmt2 = null;
    Statement stmt3 = null;
    ResultSet rs = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        String sql = "SELECT * FROM DM_PROFILE_OPERATION WHERE OPERATION_ID IN "
                + "(SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt = this.createMemoryEfficientStatement(conn);
        rs = stmt.executeQuery(sql);

        Connection conn2 = ArchivalDestinationDAOFactory.getConnection();

        sql = "INSERT INTO DM_PROFILE_OPERATION_ARCH VALUES(?, ?, ?, ?)";
        stmt2 = conn2.prepareStatement(sql);

        int count = 0;
        while (rs.next()) {
            stmt2.setInt(1, rs.getInt("OPERATION_ID"));
            stmt2.setInt(2, rs.getInt("ENABLED"));
            stmt2.setBytes(3, rs.getBytes("OPERATION_DETAILS"));
            stmt2.setTimestamp(4, this.currentTimestamp);
            stmt2.addBatch();

            if (++count % batchSize == 0) {
                stmt2.executeBatch();
            }
        }
        stmt2.executeBatch();
        if (log.isDebugEnabled()) {
            log.debug(count + " [PROFILE_OPERATION] Records copied to the archival table. Starting deletion");
        }
        sql = "DELETE FROM DM_PROFILE_OPERATION"
                + "  WHERE OPERATION_ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt3 = conn.createStatement();
        int affected = stmt3.executeUpdate(sql);
        if (log.isDebugEnabled()) {
            log.debug(affected + " Rows deleted");
        }
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while moving profile operations", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt, rs);
        ArchivalDAOUtil.cleanupResources(stmt2);
        ArchivalDAOUtil.cleanupResources(stmt3);
    }
}

From source file: dao.ContactUpdateQuery.java

public void run(Connection conn, String fname, String lname, String mname, String dob, String title,
        String industry, String company, String pwebsite, String cwebsite, String blogsite, String city,
        String state, String country, String desc, String zipcode, String gender, String nickname,
        String designation, String bcity, String bstate, String bcountry, String bzipcode, String hphone,
        String cphone, String bphone, String yim, String aim, String msn, String icq, String loginId,
        String fax, String netphone, String relation, String email, String published, String usertag,
        String contactId, String street, String bstreet) throws BaseDaoException {

    /* TODO: use UTC_TIMESTAMP after upgrading MySQL */

    // default date
    if (RegexStrUtil.isNull(dob)) {
        dob = "1970-01-01";
    }

    byte[] mytag = { ' ' };
    if (!RegexStrUtil.isNull(usertag)) {
        mytag = usertag.getBytes();
    }

    byte[] mydesc = { ' ' };
    if (!RegexStrUtil.isNull(desc)) {
        mydesc = desc.getBytes();
    }

    PreparedStatement query = null;
    String stmt = "update hdcontacts set ownerid=" + loginId + ", fname='" + fname + "', lname='" + lname
            + "', mname='" + mname + "', nickname='" + nickname + "', gender=" + gender + ", dob='" + dob
            + "', email='" + email + "', designation='" + designation + "', title='" + title + "', industry='"
            + industry + "', company='" + company + "', pwebsite='" + pwebsite + "', cwebsite='" + cwebsite
            + "', blogsite='" + blogsite + "', street='" + street + "', city='" + city + "', state='" + state
            + "', country='" + country + "', zipcode='" + zipcode + "',description=?, bstreet='" + bstreet
            + "', bcity='" + bcity + "', bstate='" + bstate + "', bcountry='" + bcountry + "', bzipcode='"
            + bzipcode + "', hphone='" + hphone + "', cphone='" + cphone + "', bphone='" + bphone
            + "', netphone='" + netphone + "', fax='" + fax + "', yim='" + yim + "', aim='" + aim + "', msn='"
            + msn + "', icq='" + icq + "', relation='" + relation + "', published='" + published
            + "', usertags=? where contactid=" + contactId + "";

    try {
        query = conn.prepareStatement(stmt);
        query.setBytes(1, mydesc);
        query.setBytes(2, mytag);
        query.executeUpdate();
    } catch (Exception e) {
        throw new BaseDaoException("Error occurred while executing update hdcontacts " + stmt, e);
    }
}
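
Note that the statement above interpolates most values directly into the SQL string and binds only description and usertags as parameters, which leaves the concatenated values open to SQL injection. A safer sketch (hypothetical, showing only a few of the columns) would parameterize everything inside the same method:

// Hypothetical rewrite of a slice of the update: bind every value instead of concatenating.
String sql = "UPDATE hdcontacts SET ownerid = ?, fname = ?, description = ?, usertags = ? WHERE contactid = ?";
try (PreparedStatement update = conn.prepareStatement(sql)) {
    update.setString(1, loginId);
    update.setString(2, fname);
    update.setBytes(3, mydesc);   // description bytes, as in the original
    update.setBytes(4, mytag);    // usertags bytes, as in the original
    update.setString(5, contactId);
    update.executeUpdate();
}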

From source file: org.sakaiproject.content.impl.serialize.impl.conversion.Type1BlobResourcesConversionHandler.java

public boolean convertSource(String id, Object source, PreparedStatement updateRecord) throws SQLException {

    String xml = (String) source;

    SAXSerializableResourceAccess sax = new SAXSerializableResourceAccess();
    SAXSerializableResourceAccess sax2 = new SAXSerializableResourceAccess();
    try {
        sax.parse(xml);
    } catch (Exception e1) {
        log.warn("Failed to parse " + id + "[" + xml + "]", e1);
        return false;
    }

    Type1BaseContentResourceSerializer t1b = new Type1BaseContentResourceSerializer();
    t1b.setTimeService(new ConversionTimeService());
    try {
        byte[] result = t1b.serialize(sax);
        t1b.parse(sax2, result);
        sax.check(sax2);

        Matcher contextMatcher = contextPattern.matcher(sax.getSerializableId());
        String context = null;
        if (contextMatcher.find()) {
            String root = contextMatcher.group(1);
            context = contextMatcher.group(2);
            if (!root.equals("group/")) {
                context = "~" + context;
            }
        }

        updateRecord.setString(1, context);
        updateRecord.setLong(2, sax.getSerializableContentLength());
        updateRecord.setBytes(3, result);
        updateRecord.setString(4, sax.getSerializableResourceType());
        updateRecord.setString(5, id);
        return true;
    } catch (Exception e) {
        log.warn("Failed to process record " + id, e);
    }
    return false;
}
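
The SQL behind updateRecord is prepared by the caller and not included in this excerpt; from the parameter order it must update the context, content length, serialized body, and resource type keyed by the resource id. A hypothetical shape, with table and column names invented purely for illustration:

// Hypothetical: the actual Sakai table and column names are not shown in this excerpt.
PreparedStatement updateRecord = connection.prepareStatement(
        "UPDATE resource_table SET context = ?, content_length = ?, body = ?, resource_type = ? WHERE id = ?");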