List of usage examples for java.sql Types CHAR
public static final int CHAR
The constant in the Java programming language, sometimes referred to as a type code, that identifies the generic SQL type CHAR.
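Before the examples from real projects below, here is a minimal, hypothetical sketch of the two most common uses of this type code in plain JDBC: comparing it against ResultSetMetaData.getColumnType to detect CHAR columns, and passing it to PreparedStatement.setNull when binding a null value. The table name "person", the column "gender", and the class name are illustrative only and do not come from any of the source files listed here.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;

public class CharTypeCodeExample {

    // Reports which columns of the query result are declared as SQL CHAR.
    static void printCharColumns(Connection conn) throws SQLException {
        try (PreparedStatement ps = conn.prepareStatement("SELECT * FROM person");
                ResultSet rs = ps.executeQuery()) {
            ResultSetMetaData meta = rs.getMetaData();
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                if (meta.getColumnType(i) == Types.CHAR) {
                    System.out.println(meta.getColumnName(i) + " is a CHAR column");
                }
            }
        }
    }

    // Binds a CHAR parameter, using Types.CHAR as the type code when the value is null.
    static void updateGender(Connection conn, long id, String gender) throws SQLException {
        try (PreparedStatement ps = conn.prepareStatement("UPDATE person SET gender = ? WHERE id = ?")) {
            if (gender == null) {
                ps.setNull(1, Types.CHAR);
            } else {
                ps.setString(1, gender);
            }
            ps.setLong(2, id);
            ps.executeUpdate();
        }
    }
}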
From source file:com.google.visualization.datasource.util.SqlDataSourceHelperTest.java
/**
 * Sets the information of the table columns: labels and types. Creates empty
 * list for the table rows as well.
 *
 * This method is called before a test is executed.
 */
@Override
protected void setUp() {
    labels = Lists.newArrayList("ID", "Fname", "Lname", "Gender", "Salary", "IsMarried",
            "StartDate", "TimeStamp", "Time");
    // Use the JDBC type constants as defined in java.sql.Types.
    types = Lists.newArrayList(Types.INTEGER, Types.VARCHAR, Types.VARCHAR, Types.CHAR,
            Types.INTEGER, Types.BOOLEAN, Types.DATE, Types.TIMESTAMP, Types.TIME);
    rows = Lists.newArrayList();
}
From source file:nl.strohalm.cyclos.utils.hibernate.AmountType.java
public int[] sqlTypes() {
    final int[] columns = { Types.NUMERIC, Types.CHAR };
    return columns;
}
From source file:org.apache.ddlutils.platform.postgresql.PostgreSqlPlatform.java
/**
 * Creates a new platform instance.
 */
public PostgreSqlPlatform() {
    PlatformInfo info = getPlatformInfo();

    info.setPrimaryKeyColumnAutomaticallyRequired(true);
    // this is the default length though it might be changed when building PostgreSQL
    // in file src/include/postgres_ext.h
    info.setMaxIdentifierLength(31);

    info.addNativeTypeMapping(Types.ARRAY, "BYTEA", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.BINARY, "BYTEA", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.BIT, "BOOLEAN");
    info.addNativeTypeMapping(Types.BLOB, "BYTEA", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.BOOLEAN, "BOOLEAN", Types.BIT);
    info.addNativeTypeMapping(Types.CLOB, "TEXT", Types.LONGVARCHAR);
    info.addNativeTypeMapping(Types.DATALINK, "BYTEA", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.DECIMAL, "NUMERIC", Types.NUMERIC);
    info.addNativeTypeMapping(Types.DISTINCT, "BYTEA", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.DOUBLE, "DOUBLE PRECISION");
    info.addNativeTypeMapping(Types.FLOAT, "DOUBLE PRECISION", Types.DOUBLE);
    info.addNativeTypeMapping(Types.JAVA_OBJECT, "BYTEA", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.LONGVARBINARY, "BYTEA");
    info.addNativeTypeMapping(Types.LONGVARCHAR, "TEXT", Types.LONGVARCHAR);
    info.addNativeTypeMapping(Types.NULL, "BYTEA", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.OTHER, "BYTEA", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.REF, "BYTEA", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.STRUCT, "BYTEA", Types.LONGVARBINARY);
    info.addNativeTypeMapping(Types.TINYINT, "SMALLINT", Types.SMALLINT);
    info.addNativeTypeMapping(Types.VARBINARY, "BYTEA", Types.LONGVARBINARY);

    info.setDefaultSize(Types.CHAR, 254);
    info.setDefaultSize(Types.VARCHAR, 254);

    // no support for specifying the size for these types (because they are mapped
    // to BYTEA which back-maps to BLOB)
    info.setHasSize(Types.BINARY, false);
    info.setHasSize(Types.VARBINARY, false);

    setSqlBuilder(new PostgreSqlBuilder(this));
    setModelReader(new PostgreSqlModelReader(this));
}
From source file:com.hangum.tadpole.engine.sql.util.RDBTypeToJavaTypeUtils.java
/**
 * Returns whether the given SQL type code is a character type.
 *
 * @param sqlType the java.sql.Types type code
 * @return true if the type is CHAR or VARCHAR
 */
public static boolean isCharType(int sqlType) {
    switch (sqlType) {
    case Types.CHAR:
    case Types.VARCHAR:
        return true;
    }
    return false;
}
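A hypothetical usage sketch of such a helper (not taken from the Tadpole sources; the class and method names below are illustrative, and the helper is re-implemented locally to keep the example self-contained): the type code from ResultSetMetaData decides whether a value should be rendered quoted, as character data, or unquoted.

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;

public class CharTypeCheck {

    // Minimal local stand-in for the isCharType helper shown above.
    static boolean isCharType(int sqlType) {
        return sqlType == Types.CHAR || sqlType == Types.VARCHAR;
    }

    // Prints each column of the first row, quoting character-typed values.
    static void printFirstRow(ResultSet rs) throws SQLException {
        ResultSetMetaData meta = rs.getMetaData();
        if (rs.next()) {
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                Object value = rs.getObject(i);
                String text = isCharType(meta.getColumnType(i))
                        ? "'" + value + "'"
                        : String.valueOf(value);
                System.out.println(meta.getColumnName(i) + " = " + text);
            }
        }
    }
}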
From source file:org.jamwiki.db.WikiPreparedStatement.java
/**
 *
 */
private void loadStatement() throws Exception {
    for (int i = 0; i < this.paramTypes.length; i++) {
        if (params[i] == null) {
            this.statement.setNull(i + 1, paramTypes[i]);
        } else if (paramTypes[i] == Types.CHAR) {
            char value = ((Character) params[i]).charValue();
            this.statement.setString(i + 1, Character.toString(value));
        } else if (paramTypes[i] == Types.INTEGER) {
            int value = ((Integer) params[i]).intValue();
            this.statement.setInt(i + 1, value);
        } else if (paramTypes[i] == Types.TIMESTAMP) {
            Timestamp value = (Timestamp) params[i];
            this.statement.setTimestamp(i + 1, value);
        } else if (paramTypes[i] == Types.VARCHAR) {
            String value = (String) params[i];
            this.statement.setString(i + 1, value);
        }
    }
}
From source file:org.brucalipto.sqlutil.DB2SQLManager.java
/**
 * Method useful for using STORED PROCEDURE
 * @param spib The {@link SPInputBean} bean containing data to execute the stored procedure
 * @return The {@link SPOutputBean} containing returned values
 */
public SPOutputBean executeSP(final SPInputBean spib) throws SQLException {
    Connection conn = null;
    CallableStatement call = null;
    ResultSet resultSet = null;
    final String procedureName = spib.spName;
    SPParameter[] inputParameters = spib.inputParams;
    int[] outputParameters = spib.outputParams;
    final int inputParametersSize = inputParameters.length;
    final int outputParametersSize = outputParameters.length;
    final StringBuffer spName = new StringBuffer("call ").append(procedureName).append('(');
    int totalParameters = inputParametersSize + outputParametersSize;
    for (int i = 0; i < totalParameters; i++) {
        if (i != totalParameters - 1) {
            spName.append("?,");
        } else {
            spName.append('?');
        }
    }
    spName.append(")");

    try {
        if (this.dataSource != null) {
            conn = this.dataSource.getConnection();
        } else {
            conn = this.connection;
        }
        call = conn.prepareCall(spName.toString());

        for (int i = 0; i < inputParametersSize; i++) {
            final SPParameter inputParam = inputParameters[i];
            final int sqlType = inputParam.sqlType;
            final Object inputParamValue = inputParam.value;
            log.debug((i + 1) + ") Setting input value: " + inputParam);
            if (inputParamValue == null) {
                call.setNull(i + 1, sqlType);
                continue;
            }
            switch (sqlType) {
            case Types.VARCHAR:
                call.setString(i + 1, (String) inputParamValue);
                break;
            case Types.INTEGER:
                if (inputParamValue instanceof Integer) {
                    call.setInt(i + 1, ((Integer) inputParamValue).intValue());
                } else if (inputParamValue instanceof Long) {
                    call.setLong(i + 1, ((Long) inputParamValue).longValue());
                }
                break;
            case Types.DATE:
                call.setDate(i + 1, (Date) inputParamValue);
                break;
            case Types.BOOLEAN:
                call.setBoolean(i + 1, ((Boolean) inputParamValue).booleanValue());
                break;
            case Types.CHAR:
                call.setString(i + 1, ((Character) inputParamValue).toString());
                break;
            case Types.DOUBLE:
                call.setDouble(i + 1, ((Double) inputParamValue).doubleValue());
                break;
            case Types.FLOAT:
                call.setFloat(i + 1, ((Float) inputParamValue).floatValue());
                break;
            case Types.TIMESTAMP:
                call.setTimestamp(i + 1, (Timestamp) inputParamValue);
                break;
            default:
                call.setObject(i + 1, inputParamValue);
                break;
            }
        }

        for (int i = 0; i < outputParametersSize; i++) {
            int sqlType = outputParameters[i];
            log.debug((i + 1) + ") Registering output type 'Types."
                    + SQLUtilTypes.SQL_TYPES.get(Integer.valueOf("" + sqlType)) + "'");
            call.registerOutParameter(inputParametersSize + i + 1, sqlType);
        }

        log.debug("Going to call: '" + procedureName + "'");
        long elapsedTime = System.currentTimeMillis();
        boolean hasResultSet = call.execute();
        log.debug("SP '" + procedureName + "' executed in "
                + (System.currentTimeMillis() - elapsedTime) + "millis");
        if (hasResultSet) {
            log.debug("This SP is going to return also a resultSet");
        }

        final SPOutputBean output = new SPOutputBean();
        for (int i = 0; i < outputParametersSize; i++) {
            int sqlType = outputParameters[i];
            log.debug((i + 1) + ") Getting output type 'Types."
                    + SQLUtilTypes.SQL_TYPES.get(Integer.valueOf("" + sqlType)) + "'");
            final Object spResult = call.getObject(inputParametersSize + i + 1);
            SPParameter outParam = new SPParameter(sqlType, spResult);
            output.addResult(outParam);
        }

        if (hasResultSet) {
            RowSetDynaClass rowSetDynaClass = new RowSetDynaClass(call.getResultSet(), false);
            if (log.isDebugEnabled()) {
                log.debug("Going to return a RowSetDynaClass with following properties:");
                DynaProperty[] properties = rowSetDynaClass.getDynaProperties();
                for (int i = 0; i < properties.length; i++) {
                    log.debug("Name: '" + properties[i].getName() + "'; Type: '"
                            + properties[i].getType().getName() + "'");
                }
            }
            SPParameter outParam = new SPParameter(Types.JAVA_OBJECT, rowSetDynaClass);
            output.addResult(outParam);
        }
        return output;
    } finally {
        closeResources(resultSet, call, conn);
    }
}
From source file:com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat.java
/**
 * @return the DBSplitter implementation to use to divide the table/query
 * into InputSplits.
 */
protected DBSplitter getSplitter(int sqlDataType) {
    switch (sqlDataType) {
    case Types.NUMERIC:
    case Types.DECIMAL:
        return new BigDecimalSplitter();

    case Types.BIT:
    case Types.BOOLEAN:
        return new BooleanSplitter();

    case Types.INTEGER:
    case Types.TINYINT:
    case Types.SMALLINT:
    case Types.BIGINT:
        return new IntegerSplitter();

    case Types.REAL:
    case Types.FLOAT:
    case Types.DOUBLE:
        return new FloatSplitter();

    case Types.CHAR:
    case Types.VARCHAR:
    case Types.LONGVARCHAR:
        return new TextSplitter();

    case Types.DATE:
    case Types.TIME:
    case Types.TIMESTAMP:
        return new DateSplitter();

    default:
        // TODO: Support BINARY, VARBINARY, LONGVARBINARY, DISTINCT, CLOB,
        // BLOB, ARRAY, STRUCT, REF, DATALINK, and JAVA_OBJECT.
        return null;
    }
}
From source file:org.apache.ddlutils.platform.JdbcModelReader.java
/**
 * Creates a new model reader instance.
 *
 * @param platform The platform this builder belongs to
 */
public JdbcModelReader(Platform platform) {
    _platform = platform;

    _defaultSizes.put(new Integer(Types.CHAR), "254");
    _defaultSizes.put(new Integer(Types.VARCHAR), "254");
    _defaultSizes.put(new Integer(Types.LONGVARCHAR), "254");
    _defaultSizes.put(new Integer(Types.BINARY), "254");
    _defaultSizes.put(new Integer(Types.VARBINARY), "254");
    _defaultSizes.put(new Integer(Types.LONGVARBINARY), "254");
    _defaultSizes.put(new Integer(Types.INTEGER), "32");
    _defaultSizes.put(new Integer(Types.BIGINT), "64");
    _defaultSizes.put(new Integer(Types.REAL), "7,0");
    _defaultSizes.put(new Integer(Types.FLOAT), "15,0");
    _defaultSizes.put(new Integer(Types.DOUBLE), "15,0");
    _defaultSizes.put(new Integer(Types.DECIMAL), "15,15");
    _defaultSizes.put(new Integer(Types.NUMERIC), "15,15");

    _columnsForTable = initColumnsForTable();
    _columnsForColumn = initColumnsForColumn();
    _columnsForPK = initColumnsForPK();
    _columnsForFK = initColumnsForFK();
    _columnsForIndex = initColumnsForIndex();
}
From source file:co.nubetech.apache.hadoop.DataDrivenDBInputFormat.java
/**
 * @return the DBSplitter implementation to use to divide the table/query
 * into InputSplits.
 */
protected DBSplitter getSplitter(int sqlDataType) {
    switch (sqlDataType) {
    case Types.NUMERIC:
    case Types.DECIMAL:
        return new BigDecimalSplitter();

    case Types.BIT:
    case Types.BOOLEAN:
        return new BooleanSplitter();

    case Types.INTEGER:
    case Types.TINYINT:
    case Types.SMALLINT:
    case Types.BIGINT:
        return new IntegerSplitter();

    case Types.REAL:
    case Types.FLOAT:
    case Types.DOUBLE:
        return new FloatSplitter();

    case Types.CHAR:
    case Types.VARCHAR:
    case Types.LONGVARCHAR:
        return new TextSplitter();

    case Types.DATE:
    case Types.TIME:
    case Types.TIMESTAMP:
        return new DateSplitter();

    default:
        // TODO: Support BINARY, VARBINARY, LONGVARBINARY, DISTINCT, CLOB,
        // BLOB, ARRAY
        // STRUCT, REF, DATALINK, and JAVA_OBJECT.
        return null;
    }
}