Example usage for java.sql PreparedStatement setBinaryStream

List of usage examples for java.sql PreparedStatement setBinaryStream

Introduction

On this page you can find example usage for java.sql PreparedStatement setBinaryStream.

Prototype

void setBinaryStream(int parameterIndex, java.io.InputStream x) throws SQLException;

Document

Sets the designated parameter to the given input stream.
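
The examples below are taken from real projects. As a minimal, self-contained sketch (the documents table, its name and data columns, and the storeFile helper are hypothetical), binding a file to a parameter with the two-argument overload looks like the following; drivers that need the stream length up front can use the setBinaryStream(parameterIndex, stream, length) overloads instead, as several of the examples below do.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

// Hypothetical helper: streams a local file into a BLOB/VARBINARY column.
static void storeFile(Connection conn, String name, String path) throws SQLException, IOException {
    try (InputStream in = new FileInputStream(path);
            PreparedStatement ps = conn
                    .prepareStatement("INSERT INTO documents (name, data) VALUES (?, ?)")) {
        ps.setString(1, name);
        // The driver reads the stream until end-of-data before executing the statement.
        ps.setBinaryStream(2, in);
        ps.executeUpdate();
    }
}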

Usage

From source file:com.splicemachine.derby.impl.sql.execute.operations.InsertOperationIT.java

@Test
public void testInsertBlob() throws Exception {
    InputStream fin = new FileInputStream(getResourceDirectory() + "order_line_500K.csv");
    PreparedStatement ps = methodWatcher.prepareStatement("insert into FILES (name, doc) values (?,?)");
    ps.setString(1, "csv_file");
    ps.setBinaryStream(2, fin);
    ps.execute();
    ResultSet rs = methodWatcher.executeQuery("SELECT doc FROM FILES WHERE name = 'csv_file'");
    byte buff[] = new byte[1024];
    while (rs.next()) {
        Blob ablob = rs.getBlob(1);
        File newFile = new File(getBaseDirectory() + "/target/order_line_500K.csv");
        if (newFile.exists()) {
            newFile.delete();
        }
        newFile.createNewFile();
        InputStream is = ablob.getBinaryStream();
        FileOutputStream fos = new FileOutputStream(newFile);
        for (int b = is.read(buff); b != -1; b = is.read(buff)) {
            fos.write(buff, 0, b);
        }
        is.close();
        fos.close();
    }
    File file1 = new File(getResourceDirectory() + "order_line_500K.csv");
    File file2 = new File(getBaseDirectory() + "/target/order_line_500K.csv");
    Assert.assertTrue("The files contents are not equivalent", FileUtils.contentEquals(file1, file2));
}

From source file:com.demandware.vulnapp.challenge.impl.SQLIChallenge.java

/**
 * inserts mineral data
 * @param holder
 * @throws SQLException
 */
private void setupContents(DBHolder holder) throws SQLException {
    Connection conn = holder.getConnection();
    for (Mineral m : this.minerals) {
        try (InputStream picIn = new FileInputStream(m.getPic())) {
            PreparedStatement pre = conn.prepareStatement(this.mineralInsertStatement);
            pre.setString(1, m.getName());
            pre.setString(2, m.getBlurb());
            pre.setBinaryStream(3, picIn);
            pre.executeUpdate();
            pre.close();
        } catch (SQLException | IOException e) {
            conn.close();
            throw new SetupRuntimeException(
                    "Could not create mineral table for values: " + m.getName() + " " + m.getPic(), e);
        }
    }
    conn.close();
}

From source file:org.artifactory.storage.db.util.JdbcHelper.java

private void setParamsToStmt(PreparedStatement pstmt, Object[] params) throws SQLException {
    int i = 1;
    for (Object param : params) {
        if (param instanceof Iterable) {
            for (Object p : (Iterable) param) {
                pstmt.setObject(i++, p);
            }
        } else if (param instanceof BlobWrapper) {
            BlobWrapper blobWrapper = (BlobWrapper) param;
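            // A negative length signals an unknown stream size, so bind the stream
            // without a length and let the driver read to end-of-stream.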
            if (blobWrapper.getLength() < 0) {
                pstmt.setBinaryStream(i++, blobWrapper.getInputStream());
            } else {
                pstmt.setBinaryStream(i++, blobWrapper.getInputStream(), blobWrapper.getLength());
            }
        } else {
            pstmt.setObject(i++, param);
        }
    }
}

From source file:org.wso2.carbon.analytics.datasource.rdbms.RDBMSAnalyticsFileSystem.java

private void populateStatementWithDataChunkUpdate(PreparedStatement stmt, String path, DataChunk chunk)
        throws SQLException {
    byte[] bytes = chunk.getData();
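    // Some databases require the BLOB length up front; the query configuration records whether this one does.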
    if (!this.rdbmsQueryConfigurationEntry.isBlobLengthRequired()) {
        stmt.setBinaryStream(1, new ByteArrayInputStream(bytes));
    } else {
        stmt.setBinaryStream(1, new ByteArrayInputStream(bytes), bytes.length);
    }
    stmt.setString(2, path);
    stmt.setLong(3, chunk.getChunkNumber());
}

From source file:org.wso2.carbon.analytics.datasource.rdbms.RDBMSAnalyticsFileSystem.java

private void populateStatementWithDataChunk(PreparedStatement stmt, String path, DataChunk chunk)
        throws SQLException {
    stmt.setString(1, path);
    stmt.setLong(2, chunk.getChunkNumber());
    byte[] bytes = chunk.getData();
    if (!this.rdbmsQueryConfigurationEntry.isBlobLengthRequired()) {
        stmt.setBinaryStream(3, new ByteArrayInputStream(bytes));
    } else {
        stmt.setBinaryStream(3, new ByteArrayInputStream(bytes), bytes.length);
    }
}

From source file:com.nextep.datadesigner.vcs.services.VCSFiles.java

private void writeMySQLBlob(Connection conn, IRepositoryFile file, File localFile) throws SQLException {
    file.setFileSizeKB((localFile.length() / 1024) + 1);
    PreparedStatement stmt = null;
    InputStream is = null;
    try {
        /*
         * Columns names in the SET clause cannot be qualified with an alias name because it
         * would fail in Postgres.
         */
        stmt = conn.prepareStatement("UPDATE rep_files rf " //$NON-NLS-1$
                + "  SET file_content = ? " //$NON-NLS-1$
                + "    , filesize = ? " //$NON-NLS-1$
                + "WHERE rf.file_id = ? "); //$NON-NLS-1$
        is = new FileInputStream(localFile);
        stmt.setBinaryStream(1, is);
        stmt.setLong(2, file.getFileSizeKB());
        stmt.setLong(3, file.getUID().rawId());
        stmt.execute();
    } catch (IOException e) {
        throw new ErrorException(e);
    } finally {
        if (stmt != null) {
            stmt.close();
        }
        safeClose(is);
    }
}

From source file:com.nabla.wapp.server.basic.general.ImportService.java

@Override
public String executeAction(final HttpServletRequest request, final List<FileItem> sessionFiles)
        throws UploadActionException {
    final UserSession userSession = UserSession.load(request);
    if (userSession == null) {
        if (log.isTraceEnabled())
            log.trace("missing user session");
        throw new UploadActionException("permission denied");
    }
    Assert.state(sessionFiles.size() == 1);
    try {
        for (FileItem file : sessionFiles) {
            if (file.isFormField())
                continue;
            if (log.isDebugEnabled()) {
                log.debug("field '" + file.getFieldName() + "': uploading " + file.getName());
                log.debug("field: " + file.getFieldName());
                log.debug("filename: " + file.getName());
                log.debug("content_type: " + file.getContentType());
                log.debug("size: " + file.getSize());
            }
            final Connection conn = db.getConnection();
            try {
                final PreparedStatement stmt = conn.prepareStatement(
                        "INSERT INTO import_data (field_name, file_name, content_type, length, content, userSessionId) VALUES(?,?,?,?,?,?);",
                        Statement.RETURN_GENERATED_KEYS);
                try {
                    stmt.setString(1, file.getFieldName());
                    stmt.setString(2, file.getName());
                    stmt.setString(3, file.getContentType());
                    stmt.setLong(4, file.getSize());
                    stmt.setString(6, userSession.getSessionId());
                    final InputStream fs = file.getInputStream();
                    try {
                        stmt.setBinaryStream(5, fs);
                        if (stmt.executeUpdate() != 1) {
                            if (log.isErrorEnabled())
                                log.error("failed to add imported file record");
                            throw new UploadActionException("internal error");
                        }
                        final ResultSet rsKey = stmt.getGeneratedKeys();
                        try {
                            rsKey.next();
                            final Integer id = rsKey.getInt(1);
                            if (log.isDebugEnabled())
                                log.debug(
                                        "uploading " + file.getName() + " successfully completed. id = " + id);
                            return id.toString();
                        } finally {
                            rsKey.close();
                        }
                    } finally {
                        fs.close();
                    }
                } catch (IOException e) {
                    if (log.isErrorEnabled())
                        log.error("error reading file " + file.getName(), e);
                    throw new UploadActionException("internal error");
                } finally {
                    Database.close(stmt);
                }
            } finally {
                // remove any orphan import records i.e. older than 48h (beware of timezone!)
                final Calendar dt = Util.dateToCalendar(new Date());
                dt.add(GregorianCalendar.DATE, -2);
                try {
                    Database.executeUpdate(conn, "DELETE FROM import_data WHERE created < ?;",
                            Util.calendarToSqlDate(dt));
                } catch (final SQLException __) {
                }
                Database.close(conn);
            }
        }
    } catch (SQLException e) {
        if (log.isErrorEnabled())
            log.error("error uploading file", e);
        throw new UploadActionException("internal error");
    } finally {
        super.removeSessionFileItems(request);
    }
    return null;
}

From source file:com.nabla.wapp.report.server.ReportManager.java

public int addReport(final Connection conn, final String reportName, @Nullable final String internalName,
        final InputStream design, final InputStream in) throws SQLException, DispatchException {
    // load and scan report design
    if (log.isDebugEnabled())
        log.debug("scanning report " + reportName);
    ReportDesign report;
    try {
        report = new Persister().read(ReportDesign.class, design);
    } catch (Exception e) {
        if (log.isErrorEnabled())
            log.error("fail to load report design", e);
        throw new InternalErrorException(Util.formatInternalErrorDescription(e));
    }
    // add report record
    final Integer roleId = getRole(conn, report.getRole());
    if (roleId == null) {
        if (log.isErrorEnabled())
            log.error("invalid role '" + report.getRole() + "' defined for report '" + reportName + "'");
        throw new DispatchException(ReportErrors.REPORT_DESIGN_INVALID_ROLE);
    }
    final String category = report.getCategory();
    if (!reportCategoryValidator.isValid(category)) {
        if (log.isErrorEnabled())
            log.error("invalid category '" + category + "' defined for report ' " + reportName + "'");
        throw new DispatchException(ReportErrors.REPORT_DESIGN_INVALID_CATEGORY);
    }
    final PreparedStatement stmt = conn.prepareStatement(
            "INSERT INTO report (name,internal_name,category,role_id,content) VALUES(?,?,?,?,?);",
            Statement.RETURN_GENERATED_KEYS);
    try {
        stmt.setString(1, report.getTitle());
        if (internalName != null)
            stmt.setString(2, internalName);
        else
            stmt.setNull(2, Types.VARCHAR);
        if (category != null)
            stmt.setString(3, category);
        else
            stmt.setNull(3, Types.VARCHAR);
        stmt.setInt(4, roleId);
        stmt.setBinaryStream(5, in);
        if (log.isDebugEnabled())
            log.debug("uploading report " + reportName);
        if (stmt.executeUpdate() != 1) {
            if (log.isErrorEnabled())
                log.error("failed to add internal report '" + reportName + "'");
            throw new InternalErrorException();
        }
        final ResultSet rsKey = stmt.getGeneratedKeys();
        try {
            rsKey.next();
            return rsKey.getInt(1);
        } finally {
            rsKey.close();
        }
    } finally {
        stmt.close();
    }
}

From source file:org.wso2.carbon.das.jobmanager.core.impl.RDBMSServiceImpl.java

public void persistResourcePool(ResourcePool resourcePool) throws ResourceManagerException {
    if (resourcePool != null && resourcePool.getGroupId() != null) {
        Connection connection = null;
        PreparedStatement preparedStatement = null;
        try {
            connection = getConnection();
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream);
            objectOutputStream.writeObject(resourcePool);
            byte[] resourceMappingAsBytes = byteArrayOutputStream.toByteArray();
            // TODO: 10/31/17 Instead of REPLACE, use DELETE and INSERT
            preparedStatement = connection
                    .prepareStatement(ResourceManagerConstants.PS_REPLACE_RESOURCE_MAPPING_ROW);
            preparedStatement.setString(1, resourcePool.getGroupId());
            preparedStatement.setBinaryStream(2, new ByteArrayInputStream(resourceMappingAsBytes));
            preparedStatement.executeUpdate();
            connection.commit();
            if (log.isDebugEnabled()) {
                log.debug(ResourceManagerConstants.TASK_UPSERT_RESOURCE_MAPPING + " "
                        + resourcePool.getGroupId() + " executed successfully");
            }
        } catch (SQLException e) {
            rollback(connection, ResourceManagerConstants.TASK_UPSERT_RESOURCE_MAPPING);
            throw new ResourceManagerException(
                    "Error occurred while " + ResourceManagerConstants.TASK_UPSERT_RESOURCE_MAPPING
                            + ". Group ID" + resourcePool.getGroupId(),
                    e);
        } catch (IOException e) {
            throw new ResourceManagerException(e);
        } finally {
            close(preparedStatement, ResourceManagerConstants.TASK_UPSERT_RESOURCE_MAPPING);
            close(connection, ResourceManagerConstants.TASK_UPSERT_RESOURCE_MAPPING);
        }
    }
}

From source file:au.com.ish.derbydump.derbydump.main.DumpTest.java

@Test
public void theDumpTest() throws Exception {
    // Create table
    StringBuilder createTableBuffer = new StringBuilder();
    createTableBuffer.append("CREATE TABLE ");
    createTableBuffer.append(Configuration.getConfiguration().getSchemaName());
    createTableBuffer.append(".");
    createTableBuffer.append(tableName);
    createTableBuffer.append(" (");

    StringBuilder insertBuffer = new StringBuilder();
    insertBuffer.append("INSERT INTO ");
    insertBuffer.append(RESOURCE_SCHEMA_NAME);
    insertBuffer.append(".");
    insertBuffer.append(tableName);
    insertBuffer.append(" VALUES (");

    for (String col : columns) {
        createTableBuffer.append(col.toUpperCase());
        //String[] c = col.split(" ");
        //insertBuffer.append(c[0].toUpperCase().trim());
        insertBuffer.append("?");
        if (!columns[columns.length - 1].equals(col)) {
            createTableBuffer.append(", ");
            insertBuffer.append(",");
        }
    }

    createTableBuffer.append(")");
    insertBuffer.append(")");

    config.setTableRewriteProperty("testSkip", "--exclude--");
    config.setTableRewriteProperty("testRename", "testRenameNew");
    config.setTruncateTables(truncate);

    File f = new File("./build/outputs/" + tableName + ".sql");
    if (f.exists()) {
        f.delete();
    }
    f.mkdirs();

    config.setOutputFilePath(f.getCanonicalPath());

    Connection connection = db.createNewConnection();
    Statement statement = connection.createStatement();
    PreparedStatement ps = null;

    try {
        statement.execute(createTableBuffer.toString());
        connection.commit();
        //config.setTableRewriteProperty("TABLE2", "--exclude--");

        for (Object o : valuesToInsert) {
            Object[] vals = (Object[]) o;
            if (vals.length > 0) {
                ps = db.getConnection().prepareStatement(insertBuffer.toString());
                for (int i = 0; i < vals.length; i++) {
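                    // Stream values are bound with setBinaryStream; everything else goes through setObject.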
                    if (vals[i] instanceof InputStream) {
                        ps.setBinaryStream(i + 1, (InputStream) vals[i]);
                    } else {
                        ps.setObject(i + 1, vals[i]);
                    }
                }
                ps.execute();
                connection.commit();
            }
        }

        OutputThread output = new OutputThread();
        Thread writer = new Thread(output, "File_Writer");
        writer.start();

        new DatabaseReader(output);
        // Let the writer know that no more data is coming
        writer.interrupt();
        writer.join();

        // Now let's read the output and see what is in it
        List<String> lines = FileUtils.readLines(f);

        assertEquals("Missing foreign key operations", "SET FOREIGN_KEY_CHECKS = 0;", lines.get(0));
        assertEquals("Missing foreign key operations", "SET FOREIGN_KEY_CHECKS = 1;",
                lines.get(lines.size() - 1));

        if (!skipped) {
            assertTrue("LOCK missing", lines.contains("LOCK TABLES `" + outputTableName + "` WRITE;"));
            assertTrue("UNLOCK missing", lines.contains("UNLOCK TABLES;"));

            int index = lines.indexOf("LOCK TABLES `" + outputTableName + "` WRITE;");

            if (truncate) {
                assertTrue("TRUNCATE missing", lines.contains("TRUNCATE TABLE " + outputTableName + ";"));
                assertTrue("INSERT missing, got " + lines.get(index + 2),
                        lines.get(index + 2).startsWith("INSERT INTO " + outputTableName));
            } else {
                assertTrue("INSERT missing, got " + lines.get(index + 1),
                        lines.get(index + 1).startsWith("INSERT INTO " + outputTableName));
            }

            for (String s : validOutputs) {
                assertTrue("VALUES missing :" + s, lines.contains(s));
            }
        } else {
            assertTrue("LOCK missing", !lines.contains("LOCK TABLES `" + outputTableName + "` WRITE;"));
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail("failed to create test data" + e.getMessage());
    } finally {
        if (ps != null) {
            ps.close();
        }
        statement.close();
        connection.close();
    }
}