Example usage for java.sql Types BLOB

List of usage examples for java.sql Types BLOB

Introduction

On this page you can find example usages of the java.sql.Types.BLOB constant.

Prototype

int BLOB

To view the source code for java.sql.Types.BLOB, use the link below.

Click Source Link

Document

The constant in the Java programming language, sometimes referred to as a type code, that identifies the generic SQL type BLOB.

Usage

From source file:org.nuclos.server.dblayer.impl.standard.StandardSqlDBAccess.java

/**
 * Maps a JDBC {@link java.sql.Types} code to the corresponding
 * {@link DbGenericType}.
 *
 * @param sqlType  JDBC type code from {@link java.sql.Types}
 * @param typeName database-specific type name (currently unused)
 * @return the matching generic type, or {@code null} for unmapped codes
 */
protected static DbGenericType getDbGenericType(int sqlType, String typeName) {
    if (sqlType == Types.VARCHAR || sqlType == Types.NVARCHAR
            || sqlType == Types.NCHAR || sqlType == Types.CHAR) {
        return DbGenericType.VARCHAR;
    }
    if (sqlType == Types.NUMERIC || sqlType == Types.DECIMAL) {
        return DbGenericType.NUMERIC;
    }
    if (sqlType == Types.BIT || sqlType == Types.BOOLEAN) {
        return DbGenericType.BOOLEAN;
    }
    if (sqlType == Types.DATE) {
        return DbGenericType.DATE;
    }
    if (sqlType == Types.BLOB || sqlType == Types.VARBINARY
            || sqlType == Types.BINARY || sqlType == Types.LONGVARBINARY) {
        return DbGenericType.BLOB;
    }
    if (sqlType == Types.CLOB || sqlType == Types.LONGVARCHAR) {
        return DbGenericType.CLOB;
    }
    if (sqlType == Types.TIMESTAMP) {
        return DbGenericType.DATETIME;
    }
    // unmapped type code
    return null;
}

From source file:org.cloudgraph.rdb.service.JDBCSupport.java

/**
 * Executes the given DML statement against the supplied connection,
 * binding one parameter per {@link PropertyPair} value.
 *
 * <p>BLOB and VARBINARY values are bound as binary streams; all other
 * values are bound via {@code setObject} with their JDBC type code.
 * Pairs that carry an old value additionally bind it at
 * {@code getOldValueColumn()} (same BLOB/VARBINARY handling).
 *
 * @param type   the type being updated (kept for signature compatibility;
 *               not read directly here)
 * @param sql    prepared-statement SQL text with '?' placeholders
 * @param values column-keyed property/value pairs to bind
 * @param con    open JDBC connection; not closed by this method
 * @throws DataAccessException wrapping any failure; the statement and all
 *         opened streams are always closed
 */
protected void execute(PlasmaType type, StringBuilder sql, Map<String, PropertyPair> values, Connection con) {
    PreparedStatement statement = null;
    List<InputStream> streams = null;
    try {
        if (log.isDebugEnabled()) {
            log.debug("execute: " + sql.toString());
            StringBuilder paramBuf = createParamDebug(values);
            log.debug("params: " + paramBuf.toString());
        }
        statement = con.prepareStatement(sql.toString());
        for (PropertyPair pair : values.values()) {
            PlasmaProperty valueProp = pair.getProp();
            if (pair.getValueProp() != null)
                valueProp = pair.getValueProp();

            int jdbcType = converter.toJDBCDataType(valueProp, pair.getValue());
            Object jdbcValue = converter.toJDBCDataValue(valueProp, pair.getValue());
            if (jdbcType != Types.BLOB && jdbcType != Types.VARBINARY) {
                statement.setObject(pair.getColumn(), jdbcValue, jdbcType);
            } else {
                // binary data: bind as a stream; keep the stream so it can
                // be closed after execution
                byte[] bytes = (byte[]) jdbcValue;
                long len = bytes.length;
                ByteArrayInputStream is = new ByteArrayInputStream(bytes);
                statement.setBinaryStream(pair.getColumn(), is, len);
                if (streams == null)
                    streams = new ArrayList<InputStream>();
                streams.add(is);
            }

            if (pair.getOldValue() != null) {
                Object jdbcOldValue = converter.toJDBCDataValue(valueProp, pair.getOldValue());
                if (jdbcType != Types.BLOB && jdbcType != Types.VARBINARY) {
                    statement.setObject(pair.getOldValueColumn(), jdbcOldValue, jdbcType);
                } else {
                    byte[] bytes = (byte[]) jdbcOldValue;
                    long len = bytes.length;
                    ByteArrayInputStream is = new ByteArrayInputStream(bytes);
                    statement.setBinaryStream(pair.getOldValueColumn(), is, len);
                    if (streams == null)
                        streams = new ArrayList<InputStream>();
                    streams.add(is);
                }
            }
        }
        statement.executeUpdate();
    } catch (Throwable t) {
        throw new DataAccessException(t);
    } finally {
        try {
            if (statement != null)
                statement.close();
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
        }
        // BUGFIX: close each stream in its own try so that one failing
        // close() no longer aborts the loop and leaks the remaining streams
        // (the original wrapped the whole loop in a single try/catch).
        if (streams != null) {
            for (InputStream stream : streams) {
                try {
                    stream.close();
                } catch (IOException e) {
                    log.error(e.getMessage(), e);
                }
            }
        }
    }
}

From source file:org.pentaho.di.jdbc.Support.java

/**
 * Retrieve the fully qualified java class name for the
 * supplied JDBC Types constant./*from w  ww  . j a va2s .c o  m*/
 *
 * @param jdbcType The JDBC Types constant.
 * @return The fully qualified java class name as a <code>String</code>.
 */
static String getClassName(int jdbcType) {
    switch (jdbcType) {
    case java.sql.Types.BOOLEAN:
    case java.sql.Types.BIT:
        return "java.lang.Boolean";

    case java.sql.Types.TINYINT:
    case java.sql.Types.SMALLINT:
    case java.sql.Types.INTEGER:
        return "java.lang.Integer";

    case java.sql.Types.BIGINT:
        return "java.lang.Long";

    case java.sql.Types.NUMERIC:
    case java.sql.Types.DECIMAL:
        return "java.math.BigDecimal";

    case java.sql.Types.REAL:
        return "java.lang.Float";

    case java.sql.Types.FLOAT:
    case java.sql.Types.DOUBLE:
        return "java.lang.Double";

    case java.sql.Types.CHAR:
    case java.sql.Types.VARCHAR:
        return "java.lang.String";

    case java.sql.Types.BINARY:
    case java.sql.Types.VARBINARY:
        return "[B";

    case java.sql.Types.LONGVARBINARY:
    case java.sql.Types.BLOB:
        return "java.sql.Blob";

    case java.sql.Types.LONGVARCHAR:
    case java.sql.Types.CLOB:
        return "java.sql.Clob";

    case java.sql.Types.DATE:
        return "java.sql.Date";

    case java.sql.Types.TIME:
        return "java.sql.Time";

    case java.sql.Types.TIMESTAMP:
        return "java.sql.Timestamp";
    default:
        break;
    }

    return "java.lang.Object";
}

From source file:org.jumpmind.symmetric.db.ase.AseTriggerTemplate.java

/**
 * Declares one Sybase ASE local variable ("@&lt;prefix&gt;pk&lt;i&gt;") per key
 * column, choosing an ASE-compatible SQL type for each column's mapped
 * JDBC type code.
 *
 * @param columns key columns to declare variables for
 * @param prefix  variable-name prefix prepended to "pk&lt;i&gt;"
 * @return concatenated "declare @..." statements, one per line
 * @throws NotImplementedException for an unmapped type code whose JDBC
 *         type name is not "interval"
 */
@Override
protected String buildKeyVariablesDeclare(Column[] columns, String prefix) {
    String text = "";
    for (int i = 0; i < columns.length; i++) {
        text += "declare @" + prefix + "pk" + i + " ";
        switch (columns[i].getMappedTypeCode()) {
        case Types.TINYINT:
        case Types.SMALLINT:
        case Types.INTEGER:
        case Types.BIGINT:
            // ASE does not support bigint
            text += "NUMERIC(18,0)\n";
            break;
        case Types.NUMERIC:
        case Types.DECIMAL:
            // Use same default scale and precision used by Sybase ASA
            // for a decimal with unspecified scale and precision.
            text += "decimal(30,6)\n";
            break;
        case Types.FLOAT:
        case Types.REAL:
        case Types.DOUBLE:
            text += "float\n";
            break;
        case Types.CHAR:
        case Types.VARCHAR:
        case Types.LONGVARCHAR:
            text += "varchar(1000)\n";
            break;
        case Types.DATE:
            text += "date\n";
            break;
        case Types.TIME:
            text += "time\n";
            break;
        case Types.TIMESTAMP:
            text += "datetime\n";
            break;
        case Types.BOOLEAN:
        case Types.BIT:
            text += "bit\n";
            break;
        case Types.CLOB:
            // presumably a bounded varchar stands in for a clob variable type
            text += "varchar(32767)\n";
            break;
        case Types.BLOB:
        case Types.BINARY:
        case Types.VARBINARY:
        case Types.LONGVARBINARY:
        case -10: // SQL-Server ntext binary type
            text += "varbinary(32767)\n";
            break;
        case Types.OTHER:
            text += "varbinary(32767)\n";
            break;
        default: // no direct ASE mapping for this type code
            if (columns[i].getJdbcTypeName() != null
                    && columns[i].getJdbcTypeName().equalsIgnoreCase("interval")) {
                // NOTE(review): unlike every other branch no trailing "\n"
                // is appended here — confirm whether that is intentional.
                text += "interval";
                break;
            }
            throw new NotImplementedException(columns[i] + " is of type " + columns[i].getMappedType());
        }
    }

    return text;
}

From source file:org.cloudgraph.rdb.service.JDBCSupport.java

/**
 * Executes the given INSERT statement against the supplied connection,
 * binding one parameter per {@link PropertyPair} value.
 *
 * <p>BLOB and VARBINARY values are bound as binary streams; all other
 * values are bound via {@code setObject} with their JDBC type code.
 *
 * @param type   the type being inserted (kept for signature compatibility;
 *               not read directly here)
 * @param sql    prepared-statement SQL text with '?' placeholders
 * @param values column-keyed property/value pairs to bind
 * @param con    open JDBC connection; not closed by this method
 * @throws DataAccessException wrapping any failure; the statement and all
 *         opened streams are always closed
 */
protected void executeInsert(PlasmaType type, StringBuilder sql, Map<String, PropertyPair> values,
        Connection con) {
    PreparedStatement statement = null;
    List<InputStream> streams = null;
    try {

        if (log.isDebugEnabled()) {
            log.debug("execute: " + sql.toString());
            StringBuilder paramBuf = createParamDebug(values);
            log.debug("params: " + paramBuf.toString());
        }

        statement = con.prepareStatement(sql.toString());

        for (PropertyPair pair : values.values()) {
            PlasmaProperty valueProp = pair.getProp();
            if (pair.getValueProp() != null)
                valueProp = pair.getValueProp();
            int jdbcType = converter.toJDBCDataType(valueProp, pair.getValue());
            Object jdbcValue = converter.toJDBCDataValue(valueProp, pair.getValue());
            if (jdbcType != Types.BLOB && jdbcType != Types.VARBINARY) {
                statement.setObject(pair.getColumn(), jdbcValue, jdbcType);
            } else {
                // binary data: bind as a stream; keep the stream so it can
                // be closed after execution
                byte[] bytes = (byte[]) jdbcValue;
                long len = bytes.length;
                ByteArrayInputStream is = new ByteArrayInputStream(bytes);
                statement.setBinaryStream(pair.getColumn(), is, len);
                if (streams == null)
                    streams = new ArrayList<InputStream>();
                streams.add(is);
            }
        }

        statement.execute();
    } catch (Throwable t) {
        throw new DataAccessException(t);
    } finally {
        try {
            if (statement != null)
                statement.close();
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
        }
        // BUGFIX: close each stream in its own try so that one failing
        // close() no longer aborts the loop and leaks the remaining streams
        // (the original wrapped the whole loop in a single try/catch).
        if (streams != null) {
            for (InputStream stream : streams) {
                try {
                    stream.close();
                } catch (IOException e) {
                    log.error(e.getMessage(), e);
                }
            }
        }
    }
}

From source file:org.zaproxy.zap.extension.websocket.db.TableWebSocket.java

/**
 * Inserts the given WebSocket message (and, for fuzz messages, its fuzz
 * metadata) into the database, then drains any messages buffered while the
 * connection was previously closed.
 *
 * <p>If the connection is currently closed the message is buffered instead
 * and written on a later call.
 *
 * @param message the message to persist
 * @throws DatabaseException wrapping any {@link SQLException}
 */
public void insertMessage(WebSocketMessageDTO message) throws DatabaseException {
    try {
        // synchronize on whole object to avoid race conditions with insertOrUpdateChannel()
        synchronized (this) {
            if (getConnection().isClosed()) {
                // temporarily buffer messages and write them the next time
                messagesBuffer.offer(message);
                return;
            }

            do {
                if (!channelIds.contains(message.channel.id)) {
                    // maybe channel is buffered
                    if (channelsBuffer.size() > 0) {
                        insertOrUpdateChannel(channelsBuffer.poll());
                    }
                    throw new SQLException("channel not inserted: " + message.channel.id);
                }

                if (logger.isDebugEnabled()) {
                    logger.debug("insert message: " + message.toString());
                }

                psInsertMessage.setInt(1, message.id);
                psInsertMessage.setInt(2, message.channel.id);
                psInsertMessage.setTimestamp(3, new Timestamp(message.timestamp));
                psInsertMessage.setInt(4, message.opcode);

                // write payload: text goes to parameter 5 (CLOB), binary to
                // parameter 6 (BLOB); the unused one is set to SQL NULL
                if (message.payload instanceof String) {
                    psInsertMessage.setClob(5, new JDBCClob((String) message.payload));
                    psInsertMessage.setNull(6, Types.BLOB);
                } else if (message.payload instanceof byte[]) {
                    psInsertMessage.setNull(5, Types.CLOB);
                    psInsertMessage.setBlob(6, new JDBCBlob((byte[]) message.payload));
                } else {
                    throw new SQLException(
                            "Attribute 'payload' of class WebSocketMessageDTO has got wrong type!");
                }

                psInsertMessage.setInt(7, message.payloadLength);
                psInsertMessage.setBoolean(8, message.isOutgoing);
                psInsertMessage.execute();

                // fuzz messages carry extra metadata in a separate table
                if (message instanceof WebSocketFuzzMessageDTO) {
                    WebSocketFuzzMessageDTO fuzzMessage = (WebSocketFuzzMessageDTO) message;
                    psInsertFuzz.setInt(1, fuzzMessage.fuzzId);
                    psInsertFuzz.setInt(2, fuzzMessage.id);
                    psInsertFuzz.setInt(3, fuzzMessage.channel.id);
                    psInsertFuzz.setString(4, fuzzMessage.state.toString());
                    psInsertFuzz.setString(5, fuzzMessage.fuzz);
                    psInsertFuzz.execute();
                }

                // drain any messages buffered while the connection was closed
                message = messagesBuffer.poll();
            } while (message != null);
        }
    } catch (SQLException e) {
        throw new DatabaseException(e);
    }
}

From source file:org.cloudgraph.rdb.service.JDBCSupport.java

/**
 * Executes the given INSERT statement and returns any database-generated
 * key values, mapped to the type's single primary-key property.
 *
 * <p>BLOB and VARBINARY values are bound as binary streams; all other
 * values are bound via {@code setObject} with their JDBC type code.
 *
 * @param type   the type being inserted; must declare exactly one
 *               primary-key property for generated keys to be mapped
 * @param sql    prepared-statement SQL text with '?' placeholders
 * @param values column-keyed property/value pairs to bind
 * @param con    open JDBC connection; not closed by this method
 * @return property/value pairs for the generated key column(s); empty when
 *         the driver reports no generated keys
 * @throws DataAccessException wrapping any failure, or when the type has
 *         zero or multiple primary-key properties
 */
protected List<PropertyPair> executeInsertWithGeneratedKeys(PlasmaType type, StringBuilder sql,
        Map<String, PropertyPair> values, Connection con) {
    List<PropertyPair> resultKeys = new ArrayList<PropertyPair>();
    PreparedStatement statement = null;
    List<InputStream> streams = null;
    ResultSet generatedKeys = null;
    try {

        if (log.isDebugEnabled()) {
            log.debug("execute: " + sql.toString());
            StringBuilder paramBuf = createParamDebug(values);
            log.debug("params: " + paramBuf.toString());
        }

        statement = con.prepareStatement(sql.toString(), PreparedStatement.RETURN_GENERATED_KEYS);

        for (PropertyPair pair : values.values()) {
            PlasmaProperty valueProp = pair.getProp();
            if (pair.getValueProp() != null)
                valueProp = pair.getValueProp();
            int jdbcType = converter.toJDBCDataType(valueProp, pair.getValue());
            Object jdbcValue = converter.toJDBCDataValue(valueProp, pair.getValue());
            if (jdbcType != Types.BLOB && jdbcType != Types.VARBINARY) {
                statement.setObject(pair.getColumn(), jdbcValue, jdbcType);
            } else {
                // binary data: bind as a stream; keep the stream so it can
                // be closed after execution
                byte[] bytes = (byte[]) jdbcValue;
                long len = bytes.length;
                ByteArrayInputStream is = new ByteArrayInputStream(bytes);
                statement.setBinaryStream(pair.getColumn(), is, len);
                if (streams == null)
                    streams = new ArrayList<InputStream>();
                streams.add(is);
            }
        }

        statement.execute();
        generatedKeys = statement.getGeneratedKeys();
        ResultSetMetaData rsMeta = generatedKeys.getMetaData();
        int numcols = rsMeta.getColumnCount();
        if (log.isDebugEnabled())
            log.debug("returned " + numcols + " keys");

        if (generatedKeys.next()) {
            // FIXME; without metadata describing which properties
            // are actually a sequence, there is guess work
            // involved in matching the values returned
            // automatically from PreparedStatment as they
            // are anonymous in terms of the column names
            // making it impossible to match them to a metadata
            // property.
            List<Property> pkPropList = type.findProperties(KeyType.primary);
            if (pkPropList == null || pkPropList.size() == 0)
                throw new DataAccessException("no pri-key properties found for type '" + type.getName() + "'");
            if (pkPropList.size() > 1)
                throw new DataAccessException("multiple pri-key properties found for type '" + type.getName()
                        + "' - cannot map to generated keys");
            PlasmaProperty prop = (PlasmaProperty) pkPropList.get(0);
            // FIXME: need to find properties per column by physical name
            // alias
            // in case where multiple generated pri-keys
            for (int i = 1; i <= numcols; i++) {
                String columnName = rsMeta.getColumnName(i);
                if (log.isDebugEnabled())
                    log.debug("returned key column '" + columnName + "'");
                int columnType = rsMeta.getColumnType(i);
                Object value = converter.fromJDBCDataType(generatedKeys, i, columnType, prop);
                PropertyPair pair = new PropertyPair((PlasmaProperty) prop, value);
                resultKeys.add(pair);
            }
        }
    } catch (Throwable t) {
        throw new DataAccessException(t);
    } finally {
        // BUGFIX: close the generated-keys ResultSet (the original leaked it)
        try {
            if (generatedKeys != null)
                generatedKeys.close();
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
        }
        try {
            if (statement != null)
                statement.close();
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
        }
        // BUGFIX: close each stream in its own try so that one failing
        // close() no longer aborts the loop and leaks the remaining streams
        if (streams != null) {
            for (InputStream stream : streams) {
                try {
                    stream.close();
                } catch (IOException e) {
                    log.error(e.getMessage(), e);
                }
            }
        }
    }

    return resultKeys;
}

From source file:org.apache.jackrabbit.core.persistence.pool.BundleDbPersistenceManager.java

/**
 * Reads and parses a bundle from the BLOB in the given column of the
 * current row of the given result set. This is a helper method to
 * circumvent issues JCR-1039 and JCR-1474.
 *
 * @param id bundle identifier
 * @param rs result set
 * @param column BLOB column
 * @return parsed bundle
 * @throws SQLException if the bundle can not be read or parsed
 */
private NodePropBundle readBundle(NodeId id, ResultSet rs, int column) throws SQLException {
    try {
        // real BLOB columns need an explicit Blob handle; everything
        // else can be streamed directly
        final int columnType = rs.getMetaData().getColumnType(column);
        InputStream in = (columnType == Types.BLOB)
                ? rs.getBlob(column).getBinaryStream()
                : rs.getBinaryStream(column);
        try {
            return binding.readBundle(in, id);
        } finally {
            in.close();
        }
    } catch (IOException e) {
        SQLException exception = new SQLException("Failed to parse bundle " + id);
        exception.initCause(e);
        throw exception;
    }
}

From source file:org.apache.openjpa.jdbc.sql.SelectImpl.java

/**
 * Select the given column after making the given joins.
 */
private boolean select(Column col, PathJoins pj, boolean ident) {
    String alias = getColumnAlias(col, pj);
    // with no joins we can key the selection on the column object itself,
    // avoiding table+alias string building on result lookup; with joins
    // the alias is the key
    Object id = (pj == null || pj.path() == null) ? col : alias;
    if (_selects.contains(id))
        return false;

    // large-object columns force LOB handling on the select
    int colType = col.getType();
    if (colType == Types.BLOB || colType == Types.CLOB)
        setLob(true);
    _selects.setAlias(id, alias, ident);
    return true;
}

From source file:org.apache.openjpa.jdbc.sql.SQLAzureSelectImpl.java

/**
 * Select the given column after making the given joins.
 */
private boolean select(Column col, PathJoins pj, boolean ident) {
    String alias = getColumnAlias(col, pj);
    // key on the column object when there are no joins (cheap result
    // lookup without alias string building); otherwise key on the alias
    final Object id;
    if (pj == null || pj.path() == null) {
        id = col;
    } else {
        id = alias;
    }
    if (_selects.contains(id)) {
        return false;
    }

    // LOB-typed columns switch the select into LOB mode
    final int colType = col.getType();
    if (colType == Types.BLOB || colType == Types.CLOB) {
        setLob(true);
    }
    _selects.setAlias(id, alias, ident);
    return true;
}