Example usage for java.sql Types CHAR

List of usage examples for java.sql Types CHAR

Introduction

This page collects usage examples of the java.sql Types.CHAR constant.

Prototype

int CHAR
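
In the JDK source, the constant is declared roughly as follows (a sketch; the numeric value is fixed by the JDBC specification):

public final static int CHAR = 1;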

Document

The constant in the Java programming language, sometimes referred to as a type code, that identifies the generic SQL type CHAR.
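
As a quick, self-contained illustration (not taken from the projects below; the customers table and its CHAR country_code column are hypothetical), the following sketch checks ResultSet metadata for CHAR columns and uses Types.CHAR to bind a SQL NULL:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;

public class TypesCharExample {

    // Minimal sketch: the "customers" table and its CHAR(2) "country_code"
    // column are hypothetical and only serve to illustrate Types.CHAR.
    public static void inspectAndInsert(Connection conn, String countryCode) throws SQLException {
        // Detect CHAR columns from ResultSet metadata.
        try (PreparedStatement select = conn.prepareStatement("SELECT * FROM customers");
                ResultSet rs = select.executeQuery()) {
            ResultSetMetaData meta = rs.getMetaData();
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                if (meta.getColumnType(i) == Types.CHAR) {
                    System.out.println(meta.getColumnName(i) + " is a CHAR column");
                }
            }
        }

        // Bind a CHAR parameter, passing Types.CHAR when the value is SQL NULL.
        try (PreparedStatement insert = conn
                .prepareStatement("INSERT INTO customers (country_code) VALUES (?)")) {
            if (countryCode == null) {
                insert.setNull(1, Types.CHAR);
            } else {
                insert.setString(1, countryCode);
            }
            insert.executeUpdate();
        }
    }
}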

Usage

From source file: com.splicemachine.derby.utils.SpliceAdmin.java

/**
 * Prints all the information related to the execution plans of the stored prepared statements (metadata queries).
 */

public static void SYSCS_GET_STORED_STATEMENT_PLAN_INFO(ResultSet[] rs) throws SQLException {
    try {
        // Wow...  who knew it was so much work to create a ResultSet?  Ouch!  The following code is annoying.

        LanguageConnectionContext lcc = ConnectionUtil.getCurrentLCC();
        DataDictionary dd = lcc.getDataDictionary();
        List list = dd.getAllSPSDescriptors();
        ArrayList<ExecRow> rows = new ArrayList<>(list.size());

        // Describe the format of the input rows (ExecRow).
        //
        // Columns of "virtual" row:
        //   STMTNAME            VARCHAR
        //   TYPE               CHAR
        //   VALID               BOOLEAN
        //   LASTCOMPILED         TIMESTAMP
        //   INITIALLY_COMPILABLE   BOOLEAN
        //   CONSTANTSTATE         BLOB --> VARCHAR showing existence of plan
        DataValueDescriptor[] dvds = new DataValueDescriptor[] { new SQLVarchar(), new SQLChar(),
                new SQLBoolean(), new SQLTimestamp(), new SQLBoolean(), new SQLVarchar() };
        int numCols = dvds.length;
        ExecRow dataTemplate = new ValueRow(numCols);
        dataTemplate.setRowArray(dvds);

        // Transform the descriptors into the rows.
        for (Object aList : list) {
            SPSDescriptor spsd = (SPSDescriptor) aList;
            ExecPreparedStatement ps = spsd.getPreparedStatement(false);
            dvds[0].setValue(spsd.getName());
            dvds[1].setValue(spsd.getTypeAsString());
            dvds[2].setValue(spsd.isValid());
            dvds[3].setValue(spsd.getCompileTime());
            dvds[4].setValue(spsd.initiallyCompilable());
            dvds[5].setValue(spsd.getPreparedStatement(false) == null ? null : "[object]");
            rows.add(dataTemplate.getClone());
        }

        // Describe the format of the output rows (ResultSet).
        ResultColumnDescriptor[] columnInfo = new ResultColumnDescriptor[numCols];
        columnInfo[0] = new GenericColumnDescriptor("STMTNAME",
                DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR, 60));
        columnInfo[1] = new GenericColumnDescriptor("TYPE",
                DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.CHAR, 4));
        columnInfo[2] = new GenericColumnDescriptor("VALID",
                DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.BOOLEAN));
        columnInfo[3] = new GenericColumnDescriptor("LASTCOMPILED",
                DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.TIMESTAMP));
        columnInfo[4] = new GenericColumnDescriptor("INITIALLY_COMPILABLE",
                DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.BOOLEAN));
        columnInfo[5] = new GenericColumnDescriptor("CONSTANTSTATE",
                DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR, 13));
        EmbedConnection defaultConn = (EmbedConnection) getDefaultConn();
        Activation lastActivation = defaultConn.getLanguageConnection().getLastActivation();
        IteratorNoPutResultSet resultsToWrap = new IteratorNoPutResultSet(rows, columnInfo, lastActivation);
        resultsToWrap.openCore();
        EmbedResultSet ers = new EmbedResultSet40(defaultConn, resultsToWrap, false, null, true);
        rs[0] = ers;
    } catch (StandardException se) {
        throw PublicAPI.wrapStandardException(se);
    }
}

From source file: org.nuclos.server.dblayer.impl.standard.StandardSqlDBAccess.java

protected static DbGenericType getDbGenericType(int sqlType, String typeName) {
    switch (sqlType) {
    case Types.VARCHAR:
    case Types.NVARCHAR:
    case Types.NCHAR:
    case Types.CHAR:
        return DbGenericType.VARCHAR;
    case Types.NUMERIC:
    case Types.DECIMAL:
        return DbGenericType.NUMERIC;
    case Types.BIT:
    case Types.BOOLEAN:
        return DbGenericType.BOOLEAN;
    case Types.DATE:
        return DbGenericType.DATE;
    case Types.BLOB:
    case Types.VARBINARY:
    case Types.BINARY:
    case Types.LONGVARBINARY:
        return DbGenericType.BLOB;
    case Types.CLOB:
    case Types.LONGVARCHAR:
        return DbGenericType.CLOB;
    case Types.TIMESTAMP:
        return DbGenericType.DATETIME;
    default:
        return null;
    }
}

From source file: org.apache.openjpa.jdbc.meta.MappingInfo.java

/**
 * Merge the given columns if possible.
 *
 * @param context the mapping we're retrieving columns for
 * @param prefix localized error message key prefix
 * @param tmplate template for expected column information
 * @param compat whether the existing column type must be compatible
 * with the type of the template column
 * @param given the given column information from mapping info
 * @param table the table for the columns
 * @param adapt whether we can modify the existing mapping or schema
 * @param fill whether to default missing column information
 */
protected static Column mergeColumn(MetaDataContext context, String prefix, Column tmplate, boolean compat,
        Column given, Table table, boolean adapt, boolean fill) {
    assertTable(context, table);

    // if not adapting must provide column name at a minimum
    DBIdentifier colName = (given == null) ? DBIdentifier.NULL : given.getIdentifier();
    if (DBIdentifier.isNull(colName) && !adapt && !fill)
        throw new MetaDataException(_loc.get(prefix + "-no-col-name", context));

    MappingRepository repos = (MappingRepository) context.getRepository();
    DBDictionary dict = repos.getDBDictionary();

    // determine the column name based on given info, or template if none;
    // also make sure that if the user gave a column name, he didn't try
    // to put the column in an unexpected table
    if (DBIdentifier.isNull(colName))
        colName = tmplate.getIdentifier();
    QualifiedDBIdentifier path = QualifiedDBIdentifier.getPath(colName);
    if (path.isUnqualifiedColumn()) {
        colName = path.getIdentifier();
    } else if (!DBIdentifier.isNull(path.getObjectTableName())) {
        findTable(context, path.getObjectTableName(), table, null, null);
        colName = path.getUnqualifiedName();
    }

    // find existing column
    Column col = table.getColumn(colName);
    if (col == null && !adapt) {
        // 
        // See if column name has already been validated in a dynamic table.
        // If so then want to use that validated column name instead. This
        // should seldom if ever occur as long as the database dictionaries
        // are kept up-to-date. 
        // 
        if ((colName.getName().length() > dict.maxColumnNameLength)
                || dict.getInvalidColumnWordSet().contains(DBIdentifier.toUpper(colName).getName())
                        && !(table.getClass().getName().contains("DynamicTable"))) {
            colName = dict.getValidColumnName(colName, new Table());
            col = table.getColumn(colName);
            if (col == null && !adapt) {
                throw new MetaDataException(_loc.get(prefix + "-bad-col-name", context, colName, table));
            }
        } else {
            throw new MetaDataException(_loc.get(prefix + "-bad-col-name", context, colName, table));
        }
    }

    // use information from template column by default, allowing any
    // user-given specifics to override it
    int type = tmplate.getType();
    int size = tmplate.getSize();
    if (type == Types.OTHER) {
        int precis = 0;
        int scale = 0;
        if (given != null) {
            precis = given.getSize();
            scale = given.getDecimalDigits();
        }
        type = dict.getJDBCType(tmplate.getJavaType(), size == -1, precis, scale, tmplate.isXML());
    }

    boolean ttype = true;
    int otype = type;
    String typeName = tmplate.getTypeName();
    Boolean notNull = null;
    if (tmplate.isNotNullExplicit())
        notNull = (tmplate.isNotNull()) ? Boolean.TRUE : Boolean.FALSE;
    int decimals = tmplate.getDecimalDigits();
    String defStr = tmplate.getDefaultString();
    boolean autoAssign = tmplate.isAutoAssigned();
    boolean relationId = tmplate.isRelationId();
    boolean implicitRelation = tmplate.isImplicitRelation();
    String targetField = tmplate.getTargetField();
    if (given != null) {
        // use given type if provided, but warn if it isn't compatible with
        // the expected column type
        if (given.getType() != Types.OTHER) {
            ttype = false;
            if (compat && !given.isCompatible(type, typeName, size, decimals)) {
                Log log = repos.getLog();
                if (log.isWarnEnabled())
                    log.warn(_loc.get(prefix + "-incompat-col", context, colName, Schemas.getJDBCName(type)));
            }
            otype = given.getType();
            type = dict.getPreferredType(otype);
        }
        typeName = given.getTypeName();
        if (given.getSize() > 0)
            size = given.getSize();
        decimals = given.getDecimalDigits();

        // leave this info as the template defaults unless the user
        // explicitly turns it on in the given column
        if (given.isNotNullExplicit())
            notNull = (given.isNotNull()) ? Boolean.TRUE : Boolean.FALSE;
        if (given.getDefaultString() != null)
            defStr = given.getDefaultString();
        if (given.isAutoAssigned())
            autoAssign = true;
        if (given.isRelationId())
            relationId = true;
        if (given.isImplicitRelation())
            implicitRelation = true;
    }

    // default char column size if original type is char (test original
    // type rather than final type because orig might be clob, translated
    // to an unsized varchar, which is supported by some dbs)
    if (size == 0 && (otype == Types.VARCHAR || otype == Types.CHAR))
        size = dict.characterColumnSize;

    // create column, or make sure existing column matches expected type
    if (col == null) {
        col = table.addColumn(colName);
        col.setType(type);
    } else if ((compat || !ttype) && !col.isCompatible(type, typeName, size, decimals)) {
        // if existing column isn't compatible with desired type, die if
        // can't adapt, else warn and change the existing column type
        Message msg = _loc.get(prefix + "-bad-col", context, Schemas.getJDBCName(type), col.getDescription());
        if (!adapt)
            throw new MetaDataException(msg);
        Log log = repos.getLog();
        if (log.isWarnEnabled())
            log.warn(msg);

        col.setType(type);
    } else if (given != null && given.getType() != Types.OTHER) {
        // as long as types are compatible, set column to expected type
        col.setType(type);
    }

    // always set the java type and autoassign to expected values, even on
    // an existing column, since we don't get this from the DB
    if (compat)
        col.setJavaType(tmplate.getJavaType());
    else if (col.getJavaType() == JavaTypes.OBJECT) {
        if (given != null && given.getJavaType() != JavaTypes.OBJECT)
            col.setJavaType(given.getJavaType());
        else
            col.setJavaType(JavaTypes
                    .getTypeCode(Schemas.getJavaType(col.getType(), col.getSize(), col.getDecimalDigits())));
    }
    col.setAutoAssigned(autoAssign);
    col.setRelationId(relationId);
    col.setImplicitRelation(implicitRelation);
    col.setTargetField(targetField);

    // we need this for runtime, and the dynamic schema factory might
    // not know it, so set it even if not adapting
    if (defStr != null)
        col.setDefaultString(defStr);
    if (notNull != null)
        col.setNotNull(notNull.booleanValue());

    // add other details if adapting
    if (adapt) {
        if (typeName != null)
            col.setTypeName(typeName);
        if (size != 0)
            col.setSize(size);
        if (decimals != 0)
            col.setDecimalDigits(decimals);
    }

    if (tmplate.hasComment())
        col.setComment(tmplate.getComment());
    if (tmplate.isXML())
        col.setXML(tmplate.isXML());
    return col;
}

From source file: com.squid.kraken.v4.caching.redis.datastruct.RawMatrix.java

public static String getJavaDatatype(int colType) {

    switch (colType) {
    case Types.CHAR:
    case Types.VARCHAR:
    case Types.LONGVARCHAR:
        return "java.lang.String";

    case Types.NUMERIC:
    case Types.DECIMAL:
        return "java.math.BigDecimal";

    case Types.BIT:
        return "boolean";

    case Types.TINYINT:
        return "byte";

    case Types.SMALLINT:
        return "short";

    case Types.INTEGER:
        return "int";

    case Types.BIGINT:
        return "long";

    case Types.REAL:
        return "float";

    case Types.FLOAT:
    case Types.DOUBLE:
        return "double";

    case Types.BINARY:
    case Types.VARBINARY:
    case Types.LONGVARBINARY:
        return "byte[]";

    case Types.DATE:
        return "java.sql.Date";

    case Types.TIME:
        return "java.sql.Time";

    case Types.TIMESTAMP:
        return "java.sql.Timestamp";

    case Types.OTHER:
        return "java.lang.Object";
    default:
        return null;
    }
}

From source file: org.openiot.gsn.storage.StorageManager.java

public void executeInsert(CharSequence tableName, DataField[] fields, StreamElement streamElement,
        Connection connection) throws SQLException {
    PreparedStatement ps = null;
    String query = getStatementInsert(tableName, fields).toString();
    try {
        ps = connection.prepareStatement(query);
        int counter = 1;
        for (DataField dataField : fields) {
            if (dataField.getName().equalsIgnoreCase("timed"))
                continue;
            Serializable value = streamElement.getData(dataField.getName());

            switch (dataField.getDataTypeID()) {
            case DataTypes.VARCHAR:
                if (value == null)
                    ps.setNull(counter, Types.VARCHAR);
                else
                    ps.setString(counter, value.toString());
                break;
            case DataTypes.CHAR:
                if (value == null)
                    ps.setNull(counter, Types.CHAR);
                else
                    ps.setString(counter, value.toString());
                break;
            case DataTypes.INTEGER:
                if (value == null)
                    ps.setNull(counter, Types.INTEGER);
                else
                    ps.setInt(counter, ((Number) value).intValue());
                break;
            case DataTypes.SMALLINT:
                if (value == null)
                    ps.setNull(counter, Types.SMALLINT);
                else
                    ps.setShort(counter, ((Number) value).shortValue());
                break;
            case DataTypes.TINYINT:
                if (value == null)
                    ps.setNull(counter, Types.TINYINT);
                else
                    ps.setByte(counter, ((Number) value).byteValue());
                break;
            case DataTypes.DOUBLE:
                if (value == null)
                    ps.setNull(counter, Types.DOUBLE);
                else
                    ps.setDouble(counter, ((Number) value).doubleValue());
                break;
            case DataTypes.BIGINT:
                if (value == null)
                    ps.setNull(counter, Types.BIGINT);
                else
                    ps.setLong(counter, ((Number) value).longValue());
                break;
            case DataTypes.BINARY:
                if (value == null)
                    ps.setNull(counter, Types.BINARY);
                else
                    ps.setBytes(counter, (byte[]) value);
                break;
            default:
                logger.error("The type conversion is not supported for : " + dataField.getName() + "("
                        + dataField.getDataTypeID() + ") : ");
            }
            counter++;
        }
        ps.setLong(counter, streamElement.getTimeStamp());
        ps.execute();
    } catch (GSNRuntimeException e) {
        //if (e.getType() == GSNRuntimeException.UNEXPECTED_VIRTUAL_SENSOR_REMOVAL) {
        //    if (logger.isDebugEnabled())
        //        logger.debug("An stream element dropped due to unexpected virtual sensor removal. (Stream element: " + streamElement.toString() + ")+ Query: " + query, e);
        //} else
        logger.warn("Inserting a stream element failed : " + streamElement.toString(), e);
    } catch (SQLException e) {
        if (e.getMessage().toLowerCase().contains("duplicate entry"))
            logger.info("Error occurred on inserting data to the database, an stream element dropped due to: "
                    + e.getMessage() + ". (Stream element: " + streamElement.toString() + ")+ Query: " + query);
        else
            logger.warn("Error occurred on inserting data to the database, an stream element dropped due to: "
                    + e.getMessage() + ". (Stream element: " + streamElement.toString() + ")+ Query: " + query);
        throw e;
    } finally {
        close(ps);
    }
}

From source file: tinygsn.storage.StorageManager.java

public void executeInsert(CharSequence tableName, DataField[] fields, StreamElement streamElement,
        Connection connection) throws SQLException {
    PreparedStatement ps = null;
    String query = getStatementInsert(tableName, fields).toString();
    try {
        ps = connection.prepareStatement(query);
        int counter = 1;
        for (DataField dataField : fields) {
            if (dataField.getName().equalsIgnoreCase("timed"))
                continue;
            Serializable value = streamElement.getData(dataField.getName());

            switch (dataField.getDataTypeID()) {
            case DataTypes.VARCHAR:
                if (value == null)
                    ps.setNull(counter, Types.VARCHAR);
                else
                    ps.setString(counter, value.toString());
                break;
            case DataTypes.CHAR:
                if (value == null)
                    ps.setNull(counter, Types.CHAR);
                else
                    ps.setString(counter, value.toString());
                break;
            case DataTypes.INTEGER:
                if (value == null)
                    ps.setNull(counter, Types.INTEGER);
                else
                    ps.setInt(counter, ((Number) value).intValue());
                break;
            case DataTypes.SMALLINT:
                if (value == null)
                    ps.setNull(counter, Types.SMALLINT);
                else
                    ps.setShort(counter, ((Number) value).shortValue());
                break;
            case DataTypes.TINYINT:
                if (value == null)
                    ps.setNull(counter, Types.TINYINT);
                else
                    ps.setByte(counter, ((Number) value).byteValue());
                break;
            case DataTypes.DOUBLE:
                if (value == null)
                    ps.setNull(counter, Types.DOUBLE);
                else
                    ps.setDouble(counter, ((Number) value).doubleValue());
                break;
            case DataTypes.BIGINT:
                if (value == null)
                    ps.setNull(counter, Types.BIGINT);
                else
                    ps.setLong(counter, ((Number) value).longValue());
                break;
            case DataTypes.BINARY:
                if (value == null)
                    ps.setNull(counter, Types.BINARY);
                else
                    ps.setBytes(counter, (byte[]) value);
                break;
            default:
                // logger.error("The type conversion is not supported for : "
                // + dataField.getName() + "(" + dataField.getDataTypeID() + ") : ");
            }
            counter++;
        }
        ps.setLong(counter, streamElement.getTimeStamp());
        ps.execute();
    } catch (GSNRuntimeException e) {
        // if (e.getType() ==
        // GSNRuntimeException.UNEXPECTED_VIRTUAL_SENSOR_REMOVAL) {
        // if (logger.isDebugEnabled())
        //
        // logger
        // .debug(
        // "An stream element dropped due to unexpected virtual sensor removal. (Stream element: "
        // + streamElement.toString() + ")+ Query: " + query, e);
        // } else
        // logger.warn(
        // "Inserting a stream element failed : " + streamElement.toString(), e);
    } catch (SQLException e) {
        // Note: with the logging commented out, the exception is rethrown only for
        // duplicate-entry errors; other SQLExceptions are silently swallowed here.
        if (e.getMessage().toLowerCase().contains("duplicate entry"))
            // logger
            // .info("Error occurred on inserting data to the database, an stream element dropped due to: "
            // + e.getMessage()
            // + ". (Stream element: "
            // + streamElement.toString() + ")+ Query: " + query);
            // else
            // logger
            // .warn("Error occurred on inserting data to the database, an stream element dropped due to: "
            // + e.getMessage()
            // + ". (Stream element: "
            // + streamElement.toString() + ")+ Query: " + query);
            throw e;
    } finally {
        close(ps);
    }
}

From source file: org.pentaho.di.jdbc.Support.java

/**
 * Retrieve the fully qualified java class name for the
 * supplied JDBC Types constant.
 *
 * @param jdbcType The JDBC Types constant.
 * @return The fully qualified java class name as a <code>String</code>.
 */
static String getClassName(int jdbcType) {
    switch (jdbcType) {
    case java.sql.Types.BOOLEAN:
    case java.sql.Types.BIT:
        return "java.lang.Boolean";

    case java.sql.Types.TINYINT:
    case java.sql.Types.SMALLINT:
    case java.sql.Types.INTEGER:
        return "java.lang.Integer";

    case java.sql.Types.BIGINT:
        return "java.lang.Long";

    case java.sql.Types.NUMERIC:
    case java.sql.Types.DECIMAL:
        return "java.math.BigDecimal";

    case java.sql.Types.REAL:
        return "java.lang.Float";

    case java.sql.Types.FLOAT:
    case java.sql.Types.DOUBLE:
        return "java.lang.Double";

    case java.sql.Types.CHAR:
    case java.sql.Types.VARCHAR:
        return "java.lang.String";

    case java.sql.Types.BINARY:
    case java.sql.Types.VARBINARY:
        return "[B";

    case java.sql.Types.LONGVARBINARY:
    case java.sql.Types.BLOB:
        return "java.sql.Blob";

    case java.sql.Types.LONGVARCHAR:
    case java.sql.Types.CLOB:
        return "java.sql.Clob";

    case java.sql.Types.DATE:
        return "java.sql.Date";

    case java.sql.Types.TIME:
        return "java.sql.Time";

    case java.sql.Types.TIMESTAMP:
        return "java.sql.Timestamp";
    default:
        break;
    }

    return "java.lang.Object";
}

From source file: gsn.storage.StorageManager.java

public void executeInsert(CharSequence tableName, DataField[] fields, StreamElement streamElement,
        Connection connection) throws SQLException {
    PreparedStatement ps = null;
    String query = getStatementInsert(tableName, fields).toString();
    try {
        ps = connection.prepareStatement(query);
        int counter = 1;
        for (DataField dataField : fields) {
            if (dataField.getName().equalsIgnoreCase("timed"))
                continue;
            Serializable value = streamElement.getData(dataField.getName());

            switch (dataField.getDataTypeID()) {
            case DataTypes.VARCHAR:
                if (value == null)
                    ps.setNull(counter, Types.VARCHAR);
                else
                    ps.setString(counter, value.toString());
                break;
            case DataTypes.CHAR:
                if (value == null)
                    ps.setNull(counter, Types.CHAR);
                else
                    ps.setString(counter, value.toString());
                break;
            case DataTypes.INTEGER:
                if (value == null)
                    ps.setNull(counter, Types.INTEGER);
                else
                    ps.setInt(counter, ((Number) value).intValue());
                break;
            case DataTypes.SMALLINT:
                if (value == null)
                    ps.setNull(counter, Types.SMALLINT);
                else
                    ps.setShort(counter, ((Number) value).shortValue());
                break;
            case DataTypes.TINYINT:
                if (value == null)
                    ps.setNull(counter, Types.TINYINT);
                else
                    ps.setByte(counter, ((Number) value).byteValue());
                break;
            case DataTypes.DOUBLE:
                if (value == null)
                    ps.setNull(counter, Types.DOUBLE);
                else
                    ps.setDouble(counter, ((Number) value).doubleValue());
                break;
            case DataTypes.FLOAT:
                if (value == null)
                    ps.setNull(counter, Types.FLOAT);
                else
                    ps.setFloat(counter, ((Number) value).floatValue());
                break;
            case DataTypes.BIGINT:
                if (value == null)
                    ps.setNull(counter, Types.BIGINT);
                else
                    ps.setLong(counter, ((Number) value).longValue());
                break;
            case DataTypes.BINARY:
                if (value == null)
                    ps.setNull(counter, Types.BINARY);
                else
                    ps.setBytes(counter, (byte[]) value);
                break;
            default:
                logger.error("The type conversion is not supported for : " + dataField.getName() + "("
                        + dataField.getDataTypeID() + ") : ");
            }
            counter++;
        }
        ps.setLong(counter, streamElement.getTimeStamp());
        ps.execute();
    } catch (GSNRuntimeException e) {
        //if (e.getType() == GSNRuntimeException.UNEXPECTED_VIRTUAL_SENSOR_REMOVAL) {
        //    if (logger.isDebugEnabled())
        //        logger.debug("An stream element dropped due to unexpected virtual sensor removal. (Stream element: " + streamElement.toString() + ")+ Query: " + query, e);
        //} else
        logger.warn("Inserting a stream element failed : " + streamElement.toString(), e);
    } catch (SQLException e) {
        if (e.getMessage().toLowerCase().contains("duplicate entry"))
            logger.info("Error occurred on inserting data to the database, an stream element dropped due to: "
                    + e.getMessage() + ". (Stream element: " + streamElement.toString() + ")+ Query: " + query);
        else
            logger.warn("Error occurred on inserting data to the database, an stream element dropped due to: "
                    + e.getMessage() + ". (Stream element: " + streamElement.toString() + ")+ Query: " + query);
        throw e;
    } finally {
        close(ps);
    }
}

From source file: org.jumpmind.symmetric.db.ase.AseTriggerTemplate.java

@Override
protected String buildKeyVariablesDeclare(Column[] columns, String prefix) {
    String text = "";
    for (int i = 0; i < columns.length; i++) {
        text += "declare @" + prefix + "pk" + i + " ";
        switch (columns[i].getMappedTypeCode()) {
        case Types.TINYINT:
        case Types.SMALLINT:
        case Types.INTEGER:
        case Types.BIGINT:
            // ASE does not support bigint
            text += "NUMERIC(18,0)\n";
            break;
        case Types.NUMERIC:
        case Types.DECIMAL:
            // Use same default scale and precision used by Sybase ASA
            // for a decimal with unspecified scale and precision.
            text += "decimal(30,6)\n";
            break;
        case Types.FLOAT:
        case Types.REAL:
        case Types.DOUBLE:
            text += "float\n";
            break;
        case Types.CHAR:
        case Types.VARCHAR:
        case Types.LONGVARCHAR:
            text += "varchar(1000)\n";
            break;
        case Types.DATE:
            text += "date\n";
            break;
        case Types.TIME:
            text += "time\n";
            break;
        case Types.TIMESTAMP:
            text += "datetime\n";
            break;
        case Types.BOOLEAN:
        case Types.BIT:
            text += "bit\n";
            break;
        case Types.CLOB:
            text += "varchar(32767)\n";
            break;
        case Types.BLOB:
        case Types.BINARY:
        case Types.VARBINARY:
        case Types.LONGVARBINARY:
        case -10: // SQL-Server ntext binary type
            text += "varbinary(32767)\n";
            break;
        case Types.OTHER:
            text += "varbinary(32767)\n";
            break;
        default:
            if (columns[i].getJdbcTypeName() != null
                    && columns[i].getJdbcTypeName().equalsIgnoreCase("interval")) {
                text += "interval";
                break;
            }
            throw new NotImplementedException(columns[i] + " is of type " + columns[i].getMappedType());
        }
    }

    return text;
}

From source file: com.squid.kraken.v4.caching.redis.datastruct.RawMatrix.java

public static boolean isPrimitiveType(int colType) {

    switch (colType) {

    case Types.BIT:
    case Types.TINYINT:
    case Types.SMALLINT:
    case Types.INTEGER:
    case Types.BIGINT:
    case Types.REAL:
    case Types.FLOAT:
    case Types.DOUBLE:
    case Types.BINARY:
    case Types.VARBINARY:
    case Types.LONGVARBINARY:
        return true;

    case Types.CHAR:
    case Types.VARCHAR:
    case Types.LONGVARCHAR:
    case Types.NUMERIC:
    case Types.DECIMAL:
    case Types.DATE:
    case Types.TIME:
    case Types.TIMESTAMP:
        return false;

    default:
        return false;
    }
}