Example usage for java.sql PreparedStatement setBinaryStream

Introduction

This page collects example usages of java.sql.PreparedStatement.setBinaryStream from open-source projects.

Prototype

void setBinaryStream(int parameterIndex, java.io.InputStream x, long length) throws SQLException;

Document

Sets the designated parameter to the given input stream, which will have the specified number of bytes.
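
Before the project examples below, here is a minimal, self-contained sketch of the method in use, assuming a table documents(id INT, data BLOB) and an open java.sql.Connection; the table and column names are illustrative only, not taken from any of the projects on this page. JDBC also offers an int-length overload and, since JDBC 4.0, an overload with no length argument at all; most of the examples below use the int overload with a cast from File.length().

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class SetBinaryStreamExample {
    // Streams a file into a BLOB column without loading it fully into memory.
    public static void insertDocument(Connection conn, int id, File file) throws SQLException, IOException {
        String sql = "INSERT INTO documents (id, data) VALUES (?, ?)";
        try (PreparedStatement ps = conn.prepareStatement(sql);
                InputStream in = new FileInputStream(file)) {
            ps.setInt(1, id);
            // long-length overload: no int cast, so files of 2 GiB or more are safe
            ps.setBinaryStream(2, in, file.length());
            ps.executeUpdate();
        }
    }
}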

Usage

From source file:net.sf.infrared.collector.impl.persistence.ApplicationStatisticsDaoImpl.java

private void insertTree(final String appName, final String hostName, final Tree tree) {
    byte[] byteArray = null;
    try {
        ByteArrayOutputStream baos = serializeObject(tree);
        byteArray = baos.toByteArray();
    } catch (IOException e) {
        log.error("IOException : Unable to serialize the Aggregate Operation Tree Object");
    }
    final ByteArrayInputStream bais = new ByteArrayInputStream(byteArray); // note: throws NullPointerException here if serialization failed above
    getJdbcTemplate().update(SQL_INSERT_TREE, new PreparedStatementSetter() {
        public void setValues(PreparedStatement ps) throws SQLException {
            ps.setString(1, appName);
            ps.setString(2, hostName);
            ps.setBinaryStream(3, bais, bais.available()); // for a fresh ByteArrayInputStream, available() is the full byte count
            ps.setTimestamp(4, new Timestamp(System.currentTimeMillis()));
        }
    });
}

From source file:org.opencms.db.generic.CmsSqlManager.java

/**
 * Sets the designated parameter to the given Java array of bytes.<p>
 *
 * The driver converts this to an SQL VARBINARY or LONGVARBINARY (depending on the argument's 
 * size relative to the driver's limits on VARBINARY values) when it sends it to the database. 
 * 
 * @param statement the PreparedStatement where the content is set
 * @param pos the first parameter is 1, the second is 2, ...
 * @param content the parameter value 
 * @throws SQLException if a database access error occurs
 */
public void setBytes(PreparedStatement statement, int pos, byte[] content) throws SQLException {

    if (content.length < 2000) {
        statement.setBytes(pos, content);
    } else {
        statement.setBinaryStream(pos, new ByteArrayInputStream(content), content.length);
    }
}
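
A short usage sketch of this helper (hedged: the cms_contents table and the conn, content, and sqlManager names are illustrative, not OpenCms code). Values under 2000 bytes are bound directly via setBytes(), anything larger is streamed:

// assume 'conn' is an open Connection and 'content' holds the bytes to store
PreparedStatement stmt = conn.prepareStatement("INSERT INTO cms_contents (content) VALUES (?)");
sqlManager.setBytes(stmt, 1, content); // delegates to setBytes or setBinaryStream by size
stmt.executeUpdate();
stmt.close();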

From source file:org.ralasafe.entitle.BackupManagerImpl.java

private void importBackup(Connection conn, Backup backup) throws DBLevelException {
    PreparedStatement pstmt = null;
    try {
        // create tmp zip
        String tempZip = SystemConstant.getRepositoryDir() + new Random().nextLong() + ".zip";
        String xmlDir = SystemConstant.getRepositoryDir() + "ralasafe";
        ZipUtil.zip(tempZip, xmlDir);

        // stream the zip into the content blob
        File zipFile = new File(tempZip);
        InputStream zipIn = new FileInputStream(zipFile);
        pstmt = conn.prepareStatement(UPDATE_CONTENT_SQL);
        pstmt.setBinaryStream(1, zipIn, (int) zipFile.length());
        pstmt.setInt(2, backup.getId());
        pstmt.executeUpdate();
        zipIn.close();

        // delete tmp zip
        zipFile.delete();

    } catch (Exception e) {
        log.error("", e);
        throw new RalasafeException(e);
    } finally {
        DBUtil.close(pstmt);
    }
}
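
One caveat worth noting: zipIn is closed only on the success path above; if executeUpdate() throws, the stream stays open until garbage collection. Wrapping the FileInputStream in try-with-resources (or closing it in the finally block alongside the statement) would cover all paths.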

From source file:org.unitime.commons.hibernate.blob.XmlBlobType.java

public void nullSafeSet(PreparedStatement ps, Object value, int index, SessionImplementor session)
        throws SQLException, HibernateException {
    if (value == null) {
        ps.setNull(index, sqlTypes()[0]);
    } else {
        try {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            XMLWriter writer = new XMLWriter(new GZIPOutputStream(bytes), OutputFormat.createCompactFormat());
            writer.write((Document) value);
            writer.flush();
            writer.close();
            ps.setBinaryStream(index, new ByteArrayInputStream(bytes.toByteArray(), 0, bytes.size()),
                    bytes.size());
        } catch (IOException e) {
            throw new HibernateException(e.getMessage(), e);
        }
    }
}
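
The design choice to note here: the serialized dom4j Document is gzip-compressed before being streamed into the column, so the matching read path has to wrap the column's binary stream in a GZIPInputStream to round-trip the XML.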

From source file:org.wso2.carbon.identity.sts.store.dao.DBStsDAO.java

/**
 * Updates the given token in the database.
 *
 * @param token Token
 */
public void updateToken(Token token) throws TrustException {
    Connection connection = IdentityDatabaseUtil.getDBConnection();
    PreparedStatement prepStmt = null;
    ResultSet rs = null;
    String query = DBQueries.UPDATE_TOKEN;

    try {
        prepStmt = connection.prepareStatement(query);
        byte[] tokenByteContainer = getTokenContent(token);
        InputStream tokenInputStream = new ByteArrayInputStream(tokenByteContainer);
        prepStmt.setBinaryStream(1, tokenInputStream, tokenByteContainer.length);
        prepStmt.setTimestamp(2, new Timestamp(token.getCreated().getTime()));
        prepStmt.setTimestamp(3, new Timestamp(token.getExpires().getTime()));
        prepStmt.setInt(4, token.getState());
        prepStmt.setString(5, token.getId());
        prepStmt.executeUpdate();
        connection.commit();

    } catch (Exception e) {
        IdentityDatabaseUtil.rollBack(connection);
        String msg = "Failed to update token ";
        throw new TrustException(msg, e);
    } finally {
        IdentityDatabaseUtil.closeAllConnections(connection, rs, prepStmt);
    }

}

From source file:org.xerela.server.birt.ReportJob.java

/**
 * Stream the binary intermediate output format file into a BLOB in the database.
 *
 * @param tmpFile intermediate output format file
 * @throws SQLException thrown if there is an error accessing the database
 * @throws IOException thrown if there is an error reading the file
 */
private void persistReport(File tmpFile, int executionId) throws SQLException, IOException {
    Connection connection = BirtActivator.getDataSource().getConnection();

    try {
        PreparedStatement stmt = connection
                .prepareStatement("INSERT INTO birt_report (execution_id, details) VALUES (?,?)"); //$NON-NLS-1$

        BufferedInputStream is = new BufferedInputStream(new FileInputStream(tmpFile));
        stmt.setInt(1, executionId);
        stmt.setBinaryStream(2, is, (int) tmpFile.length());
        int rowsUpdated = stmt.executeUpdate();
        if (rowsUpdated == 0) {
            LOGGER.warn(Messages.ReportJob_reportPersistFailure);
        }

        stmt.close();
        is.close();
    } finally {
        connection.close();
    }
}
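
Note the cast in (int) tmpFile.length(): it silently truncates for files of 2 GiB or more. The long-length overload shown in the prototype at the top of this page (available since JDBC 4.0) takes the file length without a cast.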

From source file:org.sakaiproject.content.impl.db.test.CheckBlobSafety.java

@Test
public void testBlob() {
    try {
        Random r = new Random();
        int blockSize = 4095; // use an odd size to get byte boundaries
        int nblocks = 512;
        int maxSize = blockSize * nblocks;
        byte[] b = new byte[maxSize];
        byte[] bin = new byte[maxSize];
        log.info("Loading Random Data " + maxSize);
        r.nextBytes(b);
        log.info("Loaded Random Data");

        log.info("Got Connection");
        PreparedStatement pstout = null;
        PreparedStatement pstin = null;
        InputStream instream = null;
        ResultSet rs = null;
        try {
            pstout = con.prepareStatement(p.getProperty("insert.statement"));
            pstin = con.prepareStatement(p.getProperty("select.statement"));
            for (int i = 1; i < nblocks; i += 5) {
                int size = blockSize * i;
                pstout.clearParameters();

                pstout.setBinaryStream(1, new ByteArrayInputStream(b), size);
                pstout.setInt(2, i);
                pstout.executeUpdate();
                log.info("Loaded record  " + i + " of size " + (size) + " bytes");
                con.commit();
                i++; // combined with i += 5 in the loop header, record ids step by 6: 1, 7, 13, ...
            }
            for (int i = 1; i < nblocks; i += 5) {
                int size = blockSize * i;
                pstin.clearParameters();
                pstin.setInt(1, i);
                rs = pstin.executeQuery();
                if (rs.next()) {
                    instream = rs.getBinaryStream(1);
                    DataInputStream din = new DataInputStream(instream);
                    din.readFully(bin, 0, size);
                    for (int j = 0; j < size; j++) {
                        Assert.assertEquals("Byte Missmatch record " + i + " offset " + j, b[j], bin[j]);
                    }
                    log.info("Checked Record " + i + " of size " + size + " bytes");
                    din.close();
                    instream.close();
                    rs.close();
                    i++; // same stepping as the write loop
                } else {
                    Assert.assertEquals("Didnt get any record at " + i, true, false);
                }
                con.commit();
            }
        } finally {
            try {
                pstin.close();
            } catch (SQLException e) {
                // ignore close failures
            }
            try {
                pstout.close();
            } catch (SQLException e) {
                // ignore close failures
            }
            try {
                instream.close();
            } catch (Exception ex) {
                // ignore close failures
            }
            try {
                rs.close();
            } catch (Exception ex) {
                // ignore close failures
            }
        }
    } catch (Exception ex) {
        log.error("Failed ", ex);
    }

}

From source file:no.sintef.jarfter.PostgresqlInteractor.java

public int addTransformationEntry(String uri, String name, String metadata, String clojureFileName,
        String jarFileName) throws JarfterException {
    checkConnection();

    File jarFile = null;
    FileInputStream jarFileInputStream = null;
    try {
        jarFile = new File(jarFileName); // if the jar file is missing, the FileInputStream below throws FileNotFoundException
        jarFileInputStream = new FileInputStream(jarFileName);
    } catch (FileNotFoundException nofile) {
        log("addTransformationEntry - Did not find jarFile");
        error(nofile);
        throw new JarfterException(JarfterException.Error.IO_NO_TEMP_JAR);
    }
    int rowsUpdated;
    try {
        PreparedStatement pst = conn.prepareStatement(
                "INSERT INTO transformations (uri, name, metadata, clojure, executable) VALUES (?, ?, ?, ?, ?)");
        pst.setString(1, uri);
        pst.setString(2, name);
        pst.setString(3, metadata);
        pst.setString(4, fileToString(clojureFileName));
        pst.setBinaryStream(5, jarFileInputStream, (int) jarFile.length());
        rowsUpdated = pst.executeUpdate();
        pst.close();
        jarFileInputStream.close();
    } catch (SQLException sqle) {
        log("addTransformationEntry - got SQLException...");
        error(sqle);
        if (sqle.getLocalizedMessage().contains("duplicate key value")) {
            throw new JarfterException(JarfterException.Error.SQL_DUPLICATED_KEY);
        }
        throw new JarfterException(JarfterException.Error.SQL_UNKNOWN_ERROR, sqle.getLocalizedMessage());
    } catch (FileNotFoundException nofile) {
        log("addTransformationEntry - Did not find jarFile");
        error(nofile);
        throw new JarfterException(JarfterException.Error.IO_NO_TEMP_CLJ);
    } catch (IOException ioe) {
        log("addTransformationEntry - got IOException from jarFileInputStream.close()");
        error(ioe);
        throw new JarfterException(JarfterException.Error.UNKNOWN_ERROR, ioe.getLocalizedMessage());
    }
    log("addTransformationEntry - End of method");
    return rowsUpdated;
}
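
A robustness note: detecting the duplicate key by matching the exception message ("duplicate key value") is driver- and locale-dependent; checking SQLException.getSQLState() for 23505 (PostgreSQL's unique_violation) would be sturdier.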

From source file:org.wso2.carbon.identity.sts.store.dao.DBStsDAO.java

/**
 * Adds the given token to the database.
 *
 * @param token Token
 */
public void addToken(Token token) throws TrustException {
    Connection connection = IdentityDatabaseUtil.getDBConnection();
    PreparedStatement prepStmt = null;
    ResultSet rs = null;

    String query = DBQueries.ADD_TOKEN;

    try {
        prepStmt = connection.prepareStatement(query);
        prepStmt.setString(1, token.getId());
        byte[] tokenByteContainer = getTokenContent(token);
        InputStream tokenInputStream = new ByteArrayInputStream(tokenByteContainer);
        prepStmt.setBinaryStream(2, tokenInputStream, tokenByteContainer.length);
        prepStmt.setTimestamp(3, new Timestamp(token.getCreated().getTime()));
        prepStmt.setTimestamp(4, new Timestamp(token.getExpires().getTime()));
        prepStmt.setInt(5, token.getState());
        prepStmt.execute();
        connection.commit();

    } catch (Exception e) {
        IdentityDatabaseUtil.rollBack(connection);
        String msg = "Failed to add token";
        throw new TrustException(msg, e);
    } finally {
        IdentityDatabaseUtil.closeAllConnections(connection, rs, prepStmt);
    }

}

From source file:nl.b3p.catalog.arcgis.ArcSDE9xJDBCHelper.java

@Override
public void saveMetadata(ArcSDEJDBCDataset dataset, String metadata) throws Exception {
    Connection c = getConnection();
    PreparedStatement ps = null;
    try {
        c.setAutoCommit(false);

        // don't use DbUtils here: setBinaryStream() does not work through the
        // setObject() call that DbUtils makes

        String sql = "update " + getTableName(TABLE_USERMETADATA) + " set xml = ? where name = ? and owner = ?";
        sql += databaseNameSQL(dataset);
        ps = c.prepareStatement(sql);
        byte[] xml = metadata.getBytes(ENCODING);
        ps.setBinaryStream(1, new ByteArrayInputStream(xml), xml.length);
        ps.setString(2, dataset.getName());
        ps.setString(3, dataset.getOwner());
        if (dataset.getDatabaseName() != null) {
            ps.setString(4, dataset.getDatabaseName());
        }
        int rowsAffected = ps.executeUpdate();
        ps.close();
        ps = null;

        if (rowsAffected > 1) {
            throw new Exception("Updating metadata should affect maximum one row; got rows affected count of "
                    + rowsAffected);
        }

        if (rowsAffected == 0) {
            // try to insert new row

            QueryRunner runner = new QueryRunner();

            // determine highest id
            Object id = runner.query(c, "select coalesce(max(id)+1,1) from " + getTableName(TABLE_USERMETADATA),
                    new ScalarHandler());

            Integer datasetType = determineDatasetType(c, dataset);

            // setBinaryStream needed again here
            ps = c.prepareStatement("insert into " + getTableName(TABLE_USERMETADATA)
                    + " (id, databasename, owner, name, datasettype, xml) values(?,?,?,?,?,?)");
            ps.setObject(1, id);
            ps.setObject(2, dataset.getDatabaseName());
            ps.setString(3, dataset.getOwner());
            ps.setString(4, dataset.getName());
            ps.setObject(5, datasetType);
            ps.setBinaryStream(6, new ByteArrayInputStream(xml), xml.length);
            ps.executeUpdate();
            ps.close();
            ps = null;
        }

        DbUtils.commitAndClose(c);
    } catch (Exception e) {
        DbUtils.rollbackAndCloseQuietly(c);
        throw e;
    } finally {
        DbUtils.closeQuietly(ps);
    }
}
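
The update-then-insert pattern with select max(id)+1 works for a single writer, but two concurrent writers can compute the same id or both take the insert branch; a database sequence (or an upsert) would close that race.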