Example usage for java.sql Types BLOB

List of usage examples for java.sql Types BLOB

Introduction

On this page you can find usage examples for java.sql Types.BLOB.

Prototype

int BLOB

To view the source code for java.sql Types BLOB, click the Source Link below.

Document

The constant in the Java programming language, sometimes referred to as a type code, that identifies the generic SQL type BLOB.

Usage

From source file:org.apache.ddlutils.platform.mssql.MSSqlPlatform.java

/**
 * Creates a new platform instance and configures the SQL Server specifics:
 * identifier length limit, identity/primary-key requirements, supported
 * cascade actions, JDBC-to-native type mappings and default column sizes.
 */
public MSSqlPlatform() {
    PlatformInfo info = getPlatformInfo();

    // Identifiers are limited to 128 characters.
    info.setMaxIdentifierLength(128);
    // Primary-key columns are automatically marked required; only a single
    // identity column is supported per table.
    info.setPrimaryKeyColumnAutomaticallyRequired(true);
    info.setIdentityColumnAutomaticallyRequired(true);
    info.setMultipleIdentityColumnsSupported(false);
    // Only CASCADE and NONE are supported for ON UPDATE / ON DELETE;
    // RESTRICT is registered as equivalent to NONE.
    info.setSupportedOnUpdateActions(
            new CascadeActionEnum[] { CascadeActionEnum.CASCADE, CascadeActionEnum.NONE });
    info.addEquivalentOnUpdateActions(CascadeActionEnum.NONE, CascadeActionEnum.RESTRICT);
    info.setSupportedOnDeleteActions(
            new CascadeActionEnum[] { CascadeActionEnum.CASCADE, CascadeActionEnum.NONE });
    info.addEquivalentOnDeleteActions(CascadeActionEnum.NONE, CascadeActionEnum.RESTRICT);

    // JDBC types without a direct native equivalent are stored as IMAGE
    // (binary) or TEXT (character) and read back as LONGVARBINARY/LONGVARCHAR.
    info.addNativeTypeMapping(Types.ARRAY, "IMAGE", Types.LONGVARBINARY);
    // BIGINT will be mapped back to BIGINT by the model reader
    info.addNativeTypeMapping(Types.BIGINT, "DECIMAL(19,0)");
    info.addNativeTypeMapping(Types.BLOB, "IMAGE", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.BOOLEAN, "BIT", Types.BIT);
    info.addNativeTypeMapping(Types.CLOB, "TEXT", Types.LONGVARCHAR);
    info.addNativeTypeMapping(Types.DATALINK, "IMAGE", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.DATE, "DATETIME", Types.TIMESTAMP);
    info.addNativeTypeMapping(Types.DISTINCT, "IMAGE", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.DOUBLE, "FLOAT", Types.FLOAT);
    info.addNativeTypeMapping(Types.INTEGER, "INT");
    info.addNativeTypeMapping(Types.JAVA_OBJECT, "IMAGE", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.LONGVARBINARY, "IMAGE");
    info.addNativeTypeMapping(Types.LONGVARCHAR, "TEXT");
    info.addNativeTypeMapping(Types.NULL, "IMAGE", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.OTHER, "IMAGE", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.REF, "IMAGE", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.STRUCT, "IMAGE", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.TIME, "DATETIME", Types.TIMESTAMP);
    info.addNativeTypeMapping(Types.TIMESTAMP, "DATETIME");
    info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);

    // Default sizes for the sized character/binary types.
    info.setDefaultSize(Types.CHAR, 254);
    info.setDefaultSize(Types.VARCHAR, 254);
    info.setDefaultSize(Types.BINARY, 254);
    info.setDefaultSize(Types.VARBINARY, 254);

    setSqlBuilder(new MSSqlBuilder(this));
    setModelReader(new MSSqlModelReader(this));
}

From source file:nl.surfnet.coin.api.oauth.OpenConextOauth2JdbcTokenStore.java

/**
 * Persists an OAuth2 access token together with its serialized
 * authentication and the client's metadata.
 *
 * <p>The registered client must be an {@code OpenConextClientDetails}
 * (sub)class so that its app entity id can be stored alongside the token.
 *
 * @param token          the access token to store
 * @param authentication the authentication the token was granted for
 * @throws RuntimeException if the loaded client details are not an
 *                          OpenConextClientDetails (sub)class
 */
@Override
public void storeAccessToken(OAuth2AccessToken token, OAuth2Authentication authentication) {
    AuthorizationRequest authorizationRequest = authentication.getAuthorizationRequest();
    String clientId = authorizationRequest.getClientId();
    ClientDetails clientDetails = clientDetailsService.loadClientByClientId(clientId);
    if (!(clientDetails instanceof OpenConextClientDetails)) {
        // Message fixed to name the type actually checked above (it previously
        // referenced ExtendedBaseClientDetails).
        throw new RuntimeException("The clientDetails is of the type '"
                + (clientDetails != null ? clientDetails.getClass() : "null")
                + "'. Required is a (sub)class of OpenConextClientDetails");
    }

    ClientMetaData clientMetaData = ((OpenConextClientDetails) clientDetails).getClientMetaData();

    // The refresh token is optional; store its value only when present.
    String refreshToken = null;
    if (token.getRefreshToken() != null) {
        refreshToken = token.getRefreshToken().getValue();
    }

    String value = extractTokenKey(token.getValue());
    // Token and authentication are serialized and bound as BLOBs via SqlLobValue.
    jdbcTemplate.update(ACCESS_TOKEN_INSERT_STATEMENT,
            new Object[] { value, new SqlLobValue(SerializationUtils.serialize(token)),
                    authenticationKeyGenerator.extractKey(authentication),
                    authentication.isClientOnly() ? null : authentication.getName(),
                    authentication.getAuthorizationRequest().getClientId(), clientMetaData.getAppEntityId(),
                    new SqlLobValue(SerializationUtils.serialize(authentication)), refreshToken },
            new int[] { Types.VARCHAR, Types.BLOB, Types.VARCHAR, Types.VARCHAR, Types.VARCHAR, Types.VARCHAR,
                    Types.BLOB, Types.VARCHAR });

}

From source file:com.p5solutions.core.jpa.orm.oracle.ConversionUtilityImpl.java

/**
 * Converts a value into an Oracle temporary BLOB when the target SQL type of
 * the given parameter binder is {@link Types#BLOB}.
 *
 * <p>Supported source values: {@code byte[]}, {@link Blob}, {@link String}
 * and {@link InputStream}. Returns {@code null} when the value is null, the
 * target type is not BLOB, the datasource is missing, or the temporary blob
 * cannot be created.
 *
 * @param pb
 *          the parameter binder describing the target column
 * @param value
 *          the value to convert
 * @return the populated temporary BLOB, or {@code null}
 */
protected Object toSqlBlob(ParameterBinder pb, Object value) {
    if (value == null) {
        return null;
    }

    int type = getSqlType(pb);

    // if the sql target type is blob
    if (Types.BLOB == type) {

        Class<?> clazz = value.getClass();
        // Classify the source value exactly once. The previous code tested
        // isBlob twice for the String case and isStringClass for the
        // InputStream case, so Strings fell into the InputStream branch
        // (ClassCastException) and real InputStreams were silently ignored.
        boolean isByteArray = ReflectionUtility.isByteArray(clazz);
        boolean isBlob = !isByteArray && ReflectionUtility.isBlob(clazz);
        boolean isString = !isByteArray && !isBlob && ReflectionUtility.isStringClass(clazz);
        boolean isInputStream = !isByteArray && !isBlob && !isString && value instanceof InputStream;

        // if the datasource is not set, then throw an error
        if (dataSource == null) {
            logger.error("Required datasource has not been set for " //
                    + getClass() + ", when dealing with Lob values, datasource " //
                    + "is required for creation of lob space in DB.");

            return null;
        }

        // scope variables
        BLOB blob = null;
        OutputStream os = null;

        // get a database connection
        Connection conn = DataSourceUtils.getConnection(dataSource);

        try {
            // activate the connection and create an empty blob pointer
            blob = BLOB.createTemporary(conn, false, BLOB.DURATION_SESSION);
            blob.open(BLOB.MODE_READWRITE);
            os = blob.setBinaryStream(0);
        } catch (Exception e) {
            logger.error("Unable to create temporary blob when accessing entity " + pb.getEntityClass()
                    + " on paramater " + pb.getBindingPath() + " and column " + pb.getColumnName());
            // NOTE(review): the connection obtained above is not released on
            // this failure path — confirm whether DataSourceUtils
            // .releaseConnection should be called here.
            return null;
        }

        InputStream is = null;

        // if the source is of type byte[]
        if (isByteArray) {
            blob.setBytes((byte[]) value);
        } else if (isBlob) {
            Blob sourceBlob = (Blob) value;
            try {
                is = sourceBlob.getBinaryStream();
            } catch (Exception e) {
                logger.error(
                        "Unable to copy input stream to output when accessing entity " + pb.getEntityClass()
                                + " on paramater " + pb.getBindingPath() + " and column " + pb.getColumnName());

                is = null;
            }
        } else if (isString) {
            // NOTE(review): getBytes() uses the platform default charset —
            // confirm the intended encoding.
            String v = (String) value;
            blob.setBytes(v.getBytes());
        } else if (isInputStream) {
            is = (InputStream) value;
        }

        // if an input stream was obtained, copy it into the blob
        if (is != null) {
            try {
                IOUtils.copy(is, os);
            } catch (Exception e) {
                logger.error(
                        "Unable to copy input stream to output when accessing entity " + pb.getEntityClass()
                                + " on paramater " + pb.getBindingPath() + " and column " + pb.getColumnName());
            }
        }

        if (os != null) {
            try {
                os.close();
                blob.close();
            } catch (Exception e) {
                logger.error("Unable to close stream properly when accessing entity " + pb.getEntityClass()
                        + " on paramater " + pb.getBindingPath() + " and column " + pb.getColumnName());
            }
        }

        return blob;
    }

    return null;
}

From source file:org.tsm.concharto.model.geometry.GeometryUserType.java

/**
 * Write an instance of the mapped class to a prepared statement. Implementors
 * should handle possibility of null values. A multi-column type should be
 * written to parameters starting from <tt>index</tt>.
 * /*w w w  .  j  a  v  a 2s.c  om*/
 * @param st a JDBC prepared statement
 * @param value the object to write
 * @param index statement parameter index
 * @throws HibernateException
 * @throws SQLException
 */
@SuppressWarnings("deprecation")
public void nullSafeSet(PreparedStatement st, Object value, int index) throws HibernateException, SQLException {
    if (value == null) {
        st.setNull(index, Types.BLOB);
    } else {
        Geometry geom = (Geometry) value;

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        int SRID = geom.getSRID();

        try {
            byte[] buf = new byte[4];
            ByteOrderValues.putInt(SRID, buf, ByteOrderValues.LITTLE_ENDIAN);
            bos.write(buf);

            WKBWriter writer = new WKBWriter(2, ByteOrderValues.LITTLE_ENDIAN);
            writer.write(geom, new OutputStreamOutStream(bos));
        } catch (IOException e) {
            // should be impossible
            throw new UnhandledException(e);
        }

        st.setBytes(index, bos.toByteArray());
    }
}

From source file:com.adaptris.core.services.jdbc.types.BlobColumnTranslatorTest.java

/**
 * Verifies that {@code BlobColumnTranslator} writes a BLOB column's bytes to
 * the supplied OutputStream when addressed by column name.
 */
@Test
public void testBlobWrite_ColumnName() throws Exception {
    BlobColumnTranslator translator = new BlobColumnTranslator();
    TestBlob blob = new TestBlob();
    // Plain literal; new String("...") was a redundant copy.
    String myData = "SomeData";
    blob.setBytes(0, myData.getBytes());

    JdbcResultRow row = new JdbcResultRow();
    row.setFieldValue("testField", blob, Types.BLOB);
    StringWriter writer = new StringWriter();
    try (OutputStream out = new WriterOutputStream(writer)) {
        translator.write(row, "testField", out);
    }
    String translated = writer.toString();
    assertEquals("SomeData", translated);
}

From source file:madgik.exareme.master.queryProcessor.analyzer.stat.ExternalStat.java

/**
 * Extracts statistics for the configured table ({@code tblName}): row count,
 * primary-key column, and per-column size, min/max and distinct-value
 * information. The resulting {@code Table} is stored in {@code schema} under
 * the table name.
 *
 * @return the {@code schema} map with an entry added for {@code tblName}
 * @throws Exception if JDBC metadata access or a statistics query fails
 */
@Override
public Map<String, Table> extractStats() throws Exception {

    DatabaseMetaData dbmd = con.getMetaData(); // database metadata object

    // listing tables and columns
    String catalog = null;
    String schemaPattern = sch;
    String tableNamePattern = tblName;
    String columnNamePattern = "%";
    if (con.getClass().getName().contains("postgresql")) {
        // tableNamePattern="\""+tableNamePattern+"\"";
        schemaPattern = "public";
    }

    // ResultSet resultTables = dbmd.getTables(catalog, "public",
    // tableNamePattern, types);
    ResultSet resultColumns = dbmd.getColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern);
    // Quote (postgres) or schema-qualify (oracle) the table name for use in
    // the SQL text built below.
    if (con.getClass().getName().contains("postgresql")) {
        tableNamePattern = "\"" + tableNamePattern + "\"";
    } else if (con.getClass().getName().contains("oracle")) {
        tableNamePattern = schemaPattern + "." + tableNamePattern;
    }
    log.debug("Starting extracting stats");
    // while (resultTables.next()) {
    Map<String, Column> columnMap = new HashMap<String, Column>();
    // StringEscapeUtils.escapeJava(resultTables.getString(3));
    log.debug("Analyzing table " + tblName);

    int toupleSize = 0; // in bytes

    // tableNamePattern = tableName;

    // NOTE(review): this is the column count of the getColumns() result set
    // itself (a fixed JDBC value), not the number of columns in the target
    // table — confirm this is the intended value to store on Table.
    int columnCount = resultColumns.getMetaData().getColumnCount();
    // Row count. NOTE(review): the table name is concatenated into the SQL
    // (identifiers cannot be bound as parameters); assumes tblName comes
    // from trusted metadata — confirm.
    Statement st = con.createStatement();
    ResultSet rs = st.executeQuery("select count(*) from " + tableNamePattern);
    int count = 0;
    if (rs.next()) {
        count = rs.getInt(1);
    } else {
        log.error("could not get count for table " + tableNamePattern);
    }
    rs.close();
    st.close();

    // Use the first exported-key column as the key; fall back to a marker.
    ResultSet pkrs = dbmd.getExportedKeys("", "", tblName);
    String pkey = "DEFAULT_KEY";

    while (pkrs.next()) {
        pkey = pkrs.getString("PKCOLUMN_NAME");
        break;
    }
    pkrs.close();
    if (count == 0) {
        log.debug("Empty table");
        Table t = new Table(tblName, columnCount, toupleSize, columnMap, count, pkey);
        schema.put(tblName, t);
        return schema;
    }

    while (resultColumns.next()) {

        // Column 4 of getColumns() is COLUMN_NAME.
        String columnName = StringEscapeUtils.escapeJava(resultColumns.getString(4));
        try {
            String colNamePattern = columnName;
            if (con.getClass().getName().contains("postgresql")) {
                colNamePattern = "\"" + columnName + "\"";
            }
            // Column 5 of getColumns() is DATA_TYPE (a java.sql.Types code).
            int columnType = resultColumns.getInt(5);

            // computing column's size in bytes
            int columnSize = computeColumnSize(colNamePattern, columnType, tableNamePattern);
            toupleSize += columnSize;

            // execute queries for numberOfDiffValues, minVal, maxVal
            // Map<String, Integer> diffValFreqMap = new HashMap<String,
            // Integer>();

            // computing column's min and max values (skipped for BLOB columns)
            String minVal = "0";
            String maxVal = "0";
            if (columnType != Types.BLOB) {
                MinMax mm = computeMinMax(tableNamePattern, colNamePattern);
                minVal = mm.getMin();
                maxVal = mm.getMax();
            }
            Map<String, Integer> diffValFreqMap = new HashMap<String, Integer>();
            //only for equidepth!

            // for (ValFreq k : freqs) {
            // diffValFreqMap.put(k.getVal(), k.getFreq());

            // }

            // /add min max diff vals in the sampling values

            int minOcc = 1;
            int maxOcc = 1;
            int diffVals = 0;
            // NOTE(review): equidepth is hard-coded to false, so the branch
            // below is currently dead code kept for the equidepth histogram
            // variant.
            boolean equidepth = false;
            if (equidepth) {

                //diffValFreqMap is used only in equidepth, do not compute it
                //if we have primitive
                diffValFreqMap = computeDistinctValuesFrequency(tableNamePattern, colNamePattern);

                String minValChar = minVal;
                String maxValChar = maxVal;
                // Quote textual/date values so they can be used as SQL literals.
                if (columnType == Types.VARCHAR || columnType == Types.CHAR || columnType == Types.LONGNVARCHAR
                        || columnType == Types.DATE) {
                    minValChar = "\'" + minVal + "\'";
                    maxValChar = "\'" + maxVal + "\'";
                }
                try {
                    minOcc = computeValOccurences(tableNamePattern, colNamePattern, minValChar);
                } catch (Exception e) {
                    log.error("Could not compute value occurences for column:" + colNamePattern + " and value:"
                            + minValChar);
                }
                if (equidepth && !diffValFreqMap.containsKey(minVal))
                    diffValFreqMap.put(minVal, minOcc);

                try {
                    maxOcc = computeValOccurences(tableNamePattern, colNamePattern, maxValChar);
                } catch (Exception e) {
                    log.error("Could not compute value occurences for column:" + colNamePattern + " and value:"
                            + maxValChar);
                }
                if (diffValFreqMap.containsKey(maxVal))
                    diffValFreqMap.put(maxVal, maxOcc);

                diffVals = diffValFreqMap.size();
            } else {
                diffVals = computeDiffVals(tableNamePattern, colNamePattern, columnType);
            }
            if (diffVals == 0) {
                //all values are null!
                continue;
            }
            Column c = new Column(columnName, columnType, columnSize, diffVals, minVal, maxVal, diffValFreqMap);
            columnMap.put(columnName, c);
        } catch (Exception ex) {
            log.error("could not analyze column " + columnName + ":" + ex.getMessage());
        }

    }

    Table t = new Table(tblName, columnCount, toupleSize, columnMap, count, pkey);
    schema.put(tblName, t);

    // }
    // resultTables.close();
    resultColumns.close();
    return schema;

}

From source file:com.bt.aloha.dao.StateInfoDaoImpl.java

/**
 * Inserts a state-info object into the store under the given collection
 * type. The info object is serialized and stored as a BLOB.
 *
 * @param info               the state info to persist (must have a non-null id)
 * @param collectionTypeName the collection the info belongs to (non-null)
 * @throws IllegalArgumentException if an argument is invalid, the row
 *                                  already exists, or the insert fails
 */
public void add(StateInfoBase<T> info, String collectionTypeName) {
    if (collectionTypeName == null) {
        throw new IllegalArgumentException("Cannot add null collection type to collection.");
    }
    if (info == null) {
        throw new IllegalArgumentException("Cannot add null info object to collection.");
    }
    if (info.getId() == null) {
        throw new IllegalArgumentException("Cannot add info object with null id to collection.");
    }

    try {
        // The liveness flag is stored as 0/1; the serialized info as a BLOB.
        Object[] row = { info.getId(), collectionTypeName, info.getVersionId(), info.getLastUsedTime(),
                info.isDead() ? 1 : 0, new SqlLobValue(new ObjectSerialiser().serialise(info)) };
        int[] sqlTypes = { Types.VARCHAR, Types.VARCHAR, Types.VARCHAR, Types.BIGINT, Types.INTEGER,
                Types.BLOB };
        getJdbcTemplate().update(INSERT_SQL, row, sqlTypes);
    } catch (DataIntegrityViolationException e) {
        throw new IllegalArgumentException(
                String.format("Info %s already exists in database, use replaceDialog instead", info.getId()),
                e);
    } catch (DataAccessException e) {
        throw new IllegalArgumentException(String.format("Cannot add info %s to database", info.getId()), e);
    }
}

From source file:org.apache.sqoop.manager.ConnManager.java

/**
 * Resolve a database-specific type to the Java type that should contain it.
 * @param sqlType     sql type/*from  w  w w.j  a  v a  2s  . c om*/
 * @return the name of a Java type to hold the sql datatype, or null if none.
 */
public String toJavaType(int sqlType) {
    // Mappings taken from:
    // http://java.sun.com/j2se/1.3/docs/guide/jdbc/getstart/mapping.html
    if (sqlType == Types.INTEGER) {
        return "Integer";
    } else if (sqlType == Types.VARCHAR) {
        return "String";
    } else if (sqlType == Types.CHAR) {
        return "String";
    } else if (sqlType == Types.LONGVARCHAR) {
        return "String";
    } else if (sqlType == Types.NVARCHAR) {
        return "String";
    } else if (sqlType == Types.NCHAR) {
        return "String";
    } else if (sqlType == Types.LONGNVARCHAR) {
        return "String";
    } else if (sqlType == Types.NUMERIC) {
        return "java.math.BigDecimal";
    } else if (sqlType == Types.DECIMAL) {
        return "java.math.BigDecimal";
    } else if (sqlType == Types.BIT) {
        return "Boolean";
    } else if (sqlType == Types.BOOLEAN) {
        return "Boolean";
    } else if (sqlType == Types.TINYINT) {
        return "Integer";
    } else if (sqlType == Types.SMALLINT) {
        return "Integer";
    } else if (sqlType == Types.BIGINT) {
        return "Long";
    } else if (sqlType == Types.REAL) {
        return "Float";
    } else if (sqlType == Types.FLOAT) {
        return "Double";
    } else if (sqlType == Types.DOUBLE) {
        return "Double";
    } else if (sqlType == Types.DATE) {
        return "java.sql.Date";
    } else if (sqlType == Types.TIME) {
        return "java.sql.Time";
    } else if (sqlType == Types.TIMESTAMP) {
        return "java.sql.Timestamp";
    } else if (sqlType == Types.BINARY || sqlType == Types.VARBINARY) {
        return BytesWritable.class.getName();
    } else if (sqlType == Types.CLOB) {
        return ClobRef.class.getName();
    } else if (sqlType == Types.BLOB || sqlType == Types.LONGVARBINARY) {
        return BlobRef.class.getName();
    } else {
        // TODO(aaron): Support DISTINCT, ARRAY, STRUCT, REF, JAVA_OBJECT.
        // Return null indicating database-specific manager should return a
        // java data type if it can find one for any nonstandard type.
        return null;
    }
}

From source file:org.apache.openjpa.jdbc.sql.HSQLDictionary.java

/**
 * Returns the preferred JDBC type for the given type code. For database
 * major versions after 1, this defers entirely to the superclass; for 1.x,
 * CLOB is remapped to VARCHAR and BLOB to VARBINARY.
 *
 * @param type the requested {@link java.sql.Types} code
 * @return the type code to actually use
 */
@Override
public int getPreferredType(int type) {
    if (dbMajorVersion > 1) {
        return super.getPreferredType(type);
    }
    if (type == Types.CLOB) {
        return Types.VARCHAR;
    }
    if (type == Types.BLOB) {
        return Types.VARBINARY;
    }
    return super.getPreferredType(type);
}

From source file:org.fao.geonet.arcgis.ArcSDEJdbcConnection.java

/**
 * Retrieves all metadata documents from the ArcSDE metadata table, keyed by
 * UUID. The metadata column may be BLOB, LONGVARBINARY, or a character type
 * (LONGNVARCHAR/LONGVARCHAR/VARCHAR/SQLXML); any other type is rejected.
 *
 * @param cancelMonitor when set, the running statement is cancelled and
 *                      accumulated results are cleared
 * @param arcSDEVersion version string used to resolve table and column names
 * @return map of metadata record UUID to document text
 * @throws Exception on connection or query failure
 */
@Override
public Map<String, String> retrieveMetadata(AtomicBoolean cancelMonitor, String arcSDEVersion)
        throws Exception {
    Map<String, String> results = new HashMap<>();

    ArcSDEVersionFactory arcSDEVersionFactory = new ArcSDEVersionFactory();
    String metadataTable = arcSDEVersionFactory.getTableName(arcSDEVersion);
    String columnName = arcSDEVersionFactory.getMetadataColumnName(arcSDEVersion);

    String sqlQuery = "SELECT " + columnName + ", UUID FROM " + metadataTable;

    getJdbcTemplate().query(sqlQuery, new RowCallbackHandler() {
        @Override
        public void processRow(ResultSet rs) throws SQLException {
            // Cancel processing
            if (cancelMonitor.get()) {
                Log.warning(ARCSDE_LOG_MODULE_NAME,
                        "Cancelling metadata retrieve using " + "ArcSDE connection (via JDBC)");
                rs.getStatement().cancel();
                results.clear();
            }

            String document = "";
            int colId = rs.findColumn(columnName);
            int colIdUuid = rs.findColumn("UUID");
            // very simple type check:
            if (rs.getObject(colId) != null) {
                if (rs.getMetaData().getColumnType(colId) == Types.BLOB) {
                    // NOTE(review): new String(byte[]) here and below decodes
                    // with the platform default charset — confirm the stored
                    // encoding (UTF-8?) and pass it explicitly if so.
                    Blob blob = rs.getBlob(columnName);
                    byte[] bdata = blob.getBytes(1, (int) blob.length());
                    document = new String(bdata);

                } else if (rs.getMetaData().getColumnType(colId) == Types.LONGVARBINARY) {
                    byte[] byteData = rs.getBytes(colId);
                    document = new String(byteData);

                } else if (rs.getMetaData().getColumnType(colId) == Types.LONGNVARCHAR
                        || rs.getMetaData().getColumnType(colId) == Types.LONGVARCHAR
                        || rs.getMetaData().getColumnType(colId) == Types.VARCHAR
                        || rs.getMetaData().getColumnType(colId) == Types.SQLXML) {
                    document = rs.getString(colId);

                } else {
                    throw new SQLException("Trying to harvest from a column with an invalid datatype: "
                            + rs.getMetaData().getColumnTypeName(colId));
                }

                // Stray empty statement after this assignment removed.
                String uuid = rs.getString(colIdUuid);
                results.put(uuid, document);
            }

        }
    });

    Log.info(ARCSDE_LOG_MODULE_NAME,
            "Finished retrieving metadata, found: #" + results.size() + " metadata records");

    return results;
}