Example usage for java.sql Types TIMESTAMP

List of usage examples for java.sql Types TIMESTAMP

Introduction

On this page you can find example usages of java.sql Types TIMESTAMP.

Prototype

int TIMESTAMP

Document

The constant in the Java programming language, sometimes referred to as a type code, that identifies the generic SQL type TIMESTAMP.
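
Before the collected examples, here is a minimal, self-contained sketch (the class and helper names are illustrative, not taken from any of the projects below) showing the two most common uses of the constant: passing it to PreparedStatement.setNull when binding a nullable timestamp parameter, and comparing it against ResultSetMetaData.getColumnType to detect timestamp columns.

import java.sql.PreparedStatement;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;

public class TimestampTypeExample {

    // Bind a nullable timestamp parameter; the type code tells the driver the SQL type of the NULL.
    static void bindTimestamp(PreparedStatement ps, int parameterIndex, Timestamp value) throws SQLException {
        if (value == null) {
            ps.setNull(parameterIndex, Types.TIMESTAMP);
        } else {
            ps.setTimestamp(parameterIndex, value);
        }
    }

    // Check whether a result set column is reported with the generic TIMESTAMP type code.
    static boolean isTimestampColumn(ResultSetMetaData metaData, int columnIndex) throws SQLException {
        return metaData.getColumnType(columnIndex) == Types.TIMESTAMP;
    }

    public static void main(String[] args) {
        // Types.TIMESTAMP is the JDBC type code (93) for the generic SQL TIMESTAMP type.
        System.out.println("java.sql.Types.TIMESTAMP = " + Types.TIMESTAMP);
    }
}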

Usage

From source file:org.openbel.framework.tools.pkam.KAMImportDAO.java

private void setParameterValue(final int parameterIndex, final String columnName, final int sqlType,
        String value) throws SQLException, EncryptionServiceException {
    if (value == null || value.equals("NULL")) {
        insertPs.setNull(parameterIndex, sqlType);
    } else if (sqlType == Types.INTEGER) {
        if (!StringUtils.isNumeric(value)) {
            throw new IllegalStateException("Column '" + columnName + "' does not have a numeric value");
        }

        try {
            insertPs.setInt(parameterIndex, Integer.parseInt(value));
        } catch (NumberFormatException e) {
            // swallowed since we check if the data is numeric
        }
    } else if (sqlType == Types.BIGINT) {
        insertPs.setLong(parameterIndex, Long.parseLong(value));
    } else if (sqlType == Types.VARCHAR) {
        // encrypt column values for the value columns of the objects tables
        if ((table == KAM_OBJECTS && "varchar_value".equals(columnName))
                || (table == KAM_OBJECTS_TEXT && "text_value".equals(columnName))) {
            value = encryptionService.encrypt(value);
        }

        insertPs.setString(parameterIndex, value);
    } else if (sqlType == Types.TIMESTAMP) {
        if (!StringUtils.isNumeric(value)) {
            throw new IllegalStateException(
                    "Column '" + columnName + "' does not have a numeric value for timestamp");
        }

        try {
            insertPs.setTimestamp(parameterIndex, new Timestamp(Long.parseLong(value)));
        } catch (NumberFormatException e) {
            // swallowed since we check if the data is numeric
        }
    } else if (sqlType == Types.CLOB) {
        // encrypt column values for the value columns of the objects tables
        if ((table == KAM_OBJECTS && "varchar_value".equals(columnName))
                || (table == KAM_OBJECTS_TEXT && "text_value".equals(columnName))) {
            value = encryptionService.encrypt(value);
        }

        final StringReader sr = new StringReader(value);
        insertPs.setClob(parameterIndex, sr);
    } else {
        throw new UnsupportedOperationException("Cannot convert String to SQL Type - " + sqlType);
    }
}

From source file:com.opencsv.ResultSetHelperService.java

private String getColumnValue(ResultSet rs, int colType, int colIndex, boolean trim, String dateFormatString,
        String timestampFormatString) throws SQLException, IOException {

    String value = "";

    switch (colType) {
    case Types.BIT:
    case Types.JAVA_OBJECT:
        // Once Java 7 is the minimum supported version.
        //            value = Objects.toString(rs.getObject(colIndex), "");
        value = ObjectUtils.toString(rs.getObject(colIndex), "");
        break;
    case Types.BOOLEAN:
        // Once Java 7 is the minimum supported version.
        //            value = Objects.toString(rs.getBoolean(colIndex));
        value = ObjectUtils.toString(rs.getBoolean(colIndex));
        break;
    case Types.NCLOB: // todo : use rs.getNClob
    case Types.CLOB:
        Clob c = rs.getClob(colIndex);
        if (c != null) {
            StrBuilder sb = new StrBuilder();
            sb.readFrom(c.getCharacterStream());
            value = sb.toString();
        }
        break;
    case Types.BIGINT:
        // Once Java 7 is the minimum supported version.
        //            value = Objects.toString(rs.getLong(colIndex));
        value = ObjectUtils.toString(rs.getLong(colIndex));
        break;
    case Types.DECIMAL:
    case Types.REAL:
    case Types.NUMERIC:
        // Once Java 7 is the minimum supported version.
        //            value = Objects.toString(rs.getBigDecimal(colIndex), "");
        value = ObjectUtils.toString(rs.getBigDecimal(colIndex), "");
        break;
    case Types.DOUBLE:
        // Once Java 7 is the minimum supported version.
        //            value = Objects.toString(rs.getDouble(colIndex));
        value = ObjectUtils.toString(rs.getDouble(colIndex));
        break;
    case Types.FLOAT:
        // Once Java 7 is the minimum supported version.
        //            value = Objects.toString(rs.getFloat(colIndex));
        value = ObjectUtils.toString(rs.getFloat(colIndex));
        break;
    case Types.INTEGER:
    case Types.TINYINT:
    case Types.SMALLINT:
        // Once Java 7 is the minimum supported version.
        //            value = Objects.toString(rs.getInt(colIndex));
        value = ObjectUtils.toString(rs.getInt(colIndex));
        break;
    case Types.DATE:
        java.sql.Date date = rs.getDate(colIndex);
        if (date != null) {
            SimpleDateFormat df = new SimpleDateFormat(dateFormatString);
            value = df.format(date);
        }
        break;
    case Types.TIME:
        // Once Java 7 is the minimum supported version.
        //            value = Objects.toString(rs.getTime(colIndex), "");
        value = ObjectUtils.toString(rs.getTime(colIndex), "");
        break;
    case Types.TIMESTAMP:
        value = handleTimestamp(rs.getTimestamp(colIndex), timestampFormatString);
        break;
    case Types.NVARCHAR: // todo : use rs.getNString
    case Types.NCHAR: // todo : use rs.getNString
    case Types.LONGNVARCHAR: // todo : use rs.getNString
    case Types.LONGVARCHAR:
    case Types.VARCHAR:
    case Types.CHAR:
        String columnValue = rs.getString(colIndex);
        if (trim && columnValue != null) {
            value = columnValue.trim();
        } else {
            value = columnValue;
        }
        break;
    default:
        value = "";
    }

    if (rs.wasNull() || value == null) {
        value = "";
    }

    return value;
}

From source file:solidbase.core.plugins.DumpJSON.java

public boolean execute(CommandProcessor processor, Command command, boolean skip) throws SQLException {
    if (!triggerPattern.matcher(command.getCommand()).matches())
        return false;

    if (command.isTransient()) {
        /* DUMP JSON DATE_CREATED ON | OFF */

        SQLTokenizer tokenizer = new SQLTokenizer(
                SourceReaders.forString(command.getCommand(), command.getLocation()));

        // TODO Maybe DUMP JSON CONFIG or DUMP JSON SET
        // TODO What about other configuration settings?
        tokenizer.get("DUMP");
        tokenizer.get("JSON");
        tokenizer.get("DATE_CREATED"); // FIXME This should be CREATED_DATE
        Token t = tokenizer.get("ON", "OFF");
        tokenizer.get((String) null);

        // TODO I think we should have a scope that is restricted to the current file and a scope that gets inherited when running or including another file.
        AbstractScope scope = processor.getContext().getScope();
        scope.set("solidbase.dump_json.dateCreated", t.eq("ON")); // TODO Make this a constant

        return true;
    }

    if (skip)
        return true;

    Parsed parsed = parse(command);

    AbstractScope scope = processor.getContext().getScope();
    Object object = scope.get("solidbase.dump_json.dateCreated");
    boolean dateCreated = object == null || object instanceof Boolean && (Boolean) object;

    Resource jsvResource = new FileResource(new File(parsed.fileName)); // Relative to current folder

    try {
        OutputStream out = jsvResource.getOutputStream();
        if (parsed.gzip)
            out = new BufferedOutputStream(new GZIPOutputStream(out, 65536), 65536); // TODO Ctrl-C, close the outputstream?

        JSONWriter jsonWriter = new JSONWriter(out);
        try {
            Statement statement = processor.createStatement();
            try {
                ResultSet result = statement.executeQuery(parsed.query);
                ResultSetMetaData metaData = result.getMetaData();

                // Define locals

                int columns = metaData.getColumnCount();
                int[] types = new int[columns];
                String[] names = new String[columns];
                boolean[] ignore = new boolean[columns];
                FileSpec[] fileSpecs = new FileSpec[columns];
                String schemaNames[] = new String[columns];
                String tableNames[] = new String[columns];

                // Analyze metadata

                for (int i = 0; i < columns; i++) {
                    int col = i + 1;
                    String name = metaData.getColumnName(col).toUpperCase();
                    types[i] = metaData.getColumnType(col);
                    if (types[i] == Types.DATE && parsed.dateAsTimestamp)
                        types[i] = Types.TIMESTAMP;
                    names[i] = name;
                    if (parsed.columns != null) {
                        ColumnSpec columnSpec = parsed.columns.get(name);
                        if (columnSpec != null)
                            if (columnSpec.skip)
                                ignore[i] = true;
                            else
                                fileSpecs[i] = columnSpec.toFile;
                    }
                    if (parsed.coalesce != null && parsed.coalesce.notFirst(name))
                        ignore[i] = true;
                    // TODO STRUCT serialize
                    // TODO This must be optional and not the default
                    else if (types[i] == 2002 || JDBCSupport.toTypeName(types[i]) == null)
                        ignore[i] = true;
                    tableNames[i] = StringUtils
                            .upperCase(StringUtils.defaultIfEmpty(metaData.getTableName(col), null));
                    schemaNames[i] = StringUtils
                            .upperCase(StringUtils.defaultIfEmpty(metaData.getSchemaName(col), null));
                }

                if (parsed.coalesce != null)
                    parsed.coalesce.bind(names);

                // Write header

                JSONObject properties = new JSONObject();
                properties.set("version", "1.0");
                properties.set("format", "record-stream");
                properties.set("description", "SolidBase JSON Data Dump File");
                properties.set("createdBy", new JSONObject("product", "SolidBase", "version", "2.0.0"));

                if (dateCreated) {
                    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    properties.set("createdDate", format.format(new Date()));
                }

                if (parsed.binaryFileName != null) {
                    // TODO FIXME Should be wrapped in a SourceException: solidbase.solidstack.io.FatalURISyntaxException: java.net.URISyntaxException: Illegal character in path at index 1: &{folder}/JIADHOCCH
                    Resource binResource = Resources.getResource(parsed.binaryFileName);
                    Resource resource = Resources.getResource(parsed.fileName);
                    properties.set("binaryFile", binResource.getPathFrom(resource).toString());
                }

                JSONArray fields = new JSONArray();
                properties.set("fields", fields);
                for (int i = 0; i < columns; i++)
                    if (!ignore[i]) {
                        JSONObject field = new JSONObject();
                        field.set("schemaName", schemaNames[i]);
                        field.set("tableName", tableNames[i]);
                        field.set("name", names[i]);
                        field.set("type", JDBCSupport.toTypeName(types[i])); // TODO Better error message when type is not recognized, for example Oracle's 2007 for a user type
                        FileSpec spec = fileSpecs[i];
                        if (spec != null && !spec.generator.isDynamic()) {
                            Resource fileResource = new FileResource(spec.generator.fileName);
                            field.set("file", fileResource.getPathFrom(jsvResource).toString());
                        }
                        fields.add(field);
                    }

                FileSpec binaryFile = parsed.binaryFileName != null
                        ? new FileSpec(true, parsed.binaryFileName, 0)
                        : null;

                jsonWriter.writeFormatted(properties, 120);
                jsonWriter.getWriter().write('\n');

                Counter counter = null;
                if (parsed.logRecords > 0)
                    counter = new FixedCounter(parsed.logRecords);
                else if (parsed.logSeconds > 0)
                    counter = new TimedCounter(parsed.logSeconds);

                try {
                    while (result.next()) {
                        Object[] values = new Object[columns];
                        for (int i = 0; i < values.length; i++)
                            values[i] = JDBCSupport.getValue(result, types, i);

                        if (parsed.coalesce != null)
                            parsed.coalesce.coalesce(values);

                        JSONArray array = new JSONArray();
                        for (int i = 0; i < columns; i++)
                            if (!ignore[i]) {
                                Object value = values[i];
                                if (value == null) {
                                    array.add(null);
                                    continue;
                                }

                                // TODO 2 columns can't be written to the same dynamic filename

                                FileSpec spec = fileSpecs[i];
                                if (spec != null) // The column is redirected to its own file
                                {
                                    String relFileName = null;
                                    int startIndex;
                                    if (spec.binary) {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.out == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                        }
                                        if (value instanceof Blob) {
                                            InputStream in = ((Blob) value).getBinaryStream();
                                            startIndex = spec.index;
                                            byte[] buf = new byte[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.out.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else if (value instanceof byte[]) {
                                            startIndex = spec.index;
                                            spec.out.write((byte[]) value);
                                            spec.index += ((byte[]) value).length;
                                        } else
                                            throw new SourceException(names[i] + " ("
                                                    + value.getClass().getName()
                                                    + ") is not a binary column. Only binary columns like BLOB, RAW, BINARY VARYING can be written to a binary file",
                                                    command.getLocation());
                                        if (spec.generator.isDynamic()) {
                                            spec.out.close();
                                            JSONObject ref = new JSONObject();
                                            ref.set("file", relFileName);
                                            ref.set("size", spec.index - startIndex);
                                            array.add(ref);
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    } else {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new DeferringWriter(spec.threshold, fileResource,
                                                    jsonWriter.getEncoding());
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.writer == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new OutputStreamWriter(fileResource.getOutputStream(),
                                                    jsonWriter.getEncoding());
                                        }
                                        if (value instanceof Blob || value instanceof byte[])
                                            throw new SourceException(names[i]
                                                    + " is a binary column. Binary columns like BLOB, RAW, BINARY VARYING cannot be written to a text file",
                                                    command.getLocation());
                                        if (value instanceof Clob) {
                                            Reader in = ((Clob) value).getCharacterStream();
                                            startIndex = spec.index;
                                            char[] buf = new char[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.writer.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else {
                                            String val = value.toString();
                                            startIndex = spec.index;
                                            spec.writer.write(val);
                                            spec.index += val.length();
                                        }
                                        if (spec.generator.isDynamic()) {
                                            DeferringWriter writer = (DeferringWriter) spec.writer;
                                            if (writer.isBuffered())
                                                array.add(writer.clearBuffer());
                                            else {
                                                JSONObject ref = new JSONObject();
                                                ref.set("file", relFileName);
                                                ref.set("size", spec.index - startIndex);
                                                array.add(ref);
                                            }
                                            writer.close();
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    }
                                } else if (value instanceof Clob)
                                    array.add(((Clob) value).getCharacterStream());
                                else if (binaryFile != null
                                        && (value instanceof Blob || value instanceof byte[])) {
                                    if (binaryFile.out == null) {
                                        String fileName = binaryFile.generator.generateFileName(null);
                                        Resource fileResource = new FileResource(fileName);
                                        binaryFile.out = fileResource.getOutputStream();
                                        if (parsed.binaryGzip)
                                            binaryFile.out = new BufferedOutputStream(
                                                    new GZIPOutputStream(binaryFile.out, 65536), 65536); // TODO Ctrl-C, close the outputstream?
                                    }
                                    int startIndex = binaryFile.index;
                                    if (value instanceof Blob) {
                                        InputStream in = ((Blob) value).getBinaryStream();
                                        byte[] buf = new byte[4096];
                                        for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                            binaryFile.out.write(buf, 0, read);
                                            binaryFile.index += read;
                                        }
                                        in.close();
                                    } else {
                                        binaryFile.out.write((byte[]) value);
                                        binaryFile.index += ((byte[]) value).length;
                                    }
                                    JSONObject ref = new JSONObject();
                                    ref.set("index", startIndex);
                                    ref.set("length", binaryFile.index - startIndex);
                                    array.add(ref);
                                } else
                                    array.add(value);
                            }

                        for (ListIterator<Object> i = array.iterator(); i.hasNext();) {
                            Object value = i.next();
                            if (value instanceof java.sql.Date || value instanceof java.sql.Time
                                    || value instanceof java.sql.Timestamp || value instanceof java.sql.RowId)
                                i.set(value.toString());
                        }
                        jsonWriter.write(array);
                        jsonWriter.getWriter().write('\n');

                        if (counter != null && counter.next())
                            processor.getProgressListener()
                                    .println("Exported " + counter.total() + " records.");
                    }
                    if (counter != null && counter.needFinal())
                        processor.getProgressListener().println("Exported " + counter.total() + " records.");
                } finally {
                    // Close files that have been left open
                    for (FileSpec fileSpec : fileSpecs)
                        if (fileSpec != null) {
                            if (fileSpec.out != null)
                                fileSpec.out.close();
                            if (fileSpec.writer != null)
                                fileSpec.writer.close();
                        }
                    if (binaryFile != null && binaryFile.out != null)
                        binaryFile.out.close();
                }
            } finally {
                processor.closeStatement(statement, true);
            }
        } finally {
            jsonWriter.close();
        }
    } catch (IOException e) {
        throw new SystemException(e);
    }

    return true;
}

From source file:org.netflux.core.FieldMetadata.java

/**
 * Sets the <code>type</code> of the field that this metadata describes. The currently supported types are: string ({@link java.sql.Types#CHAR},
 * {@link java.sql.Types#VARCHAR}), date ({@link java.sql.Types#DATE}, {@link java.sql.Types#TIMESTAMP}), numeric ({@link java.sql.Types#SMALLINT},
 * {@link java.sql.Types#INTEGER}, {@link java.sql.Types#BIGINT}, {@link java.sql.Types#DECIMAL}, {@link java.sql.Types#FLOAT},
 * {@link java.sql.Types#DOUBLE}) and boolean ({@link java.sql.Types#BOOLEAN}). If the supplied <code>type</code> is not one of
 * the above, an <code>IllegalArgumentException</code> will be thrown.
 *
 * @param type the <code>type</code> of the field that this metadata describes.
 * @throws IllegalArgumentException if the supplied <code>type</code> is not included in the supported types.
 */
public void setType(int type) {
    switch (type) {
    case Types.CHAR:
    case Types.VARCHAR:
    case Types.DATE:
    case Types.TIMESTAMP:
    case Types.SMALLINT:
    case Types.INTEGER:
    case Types.BIGINT:
    case Types.DECIMAL:
    case Types.FLOAT:
    case Types.DOUBLE:
    case Types.BOOLEAN:
        this.type = type;
        break;
    default:
        if (FieldMetadata.log.isInfoEnabled()) {
            FieldMetadata.log.info(FieldMetadata.messages.getString("exception.unsupported.type"));
        }
        throw new IllegalArgumentException();
    }
}

From source file:org.executequery.gui.browser.ColumnData.java

/**
 * Returns whether this column is a date data type or an extension of one,
 * i.e. Types.DATE, Types.TIME or Types.TIMESTAMP.
 *
 * @return true | false
 */
public boolean isDateDataType() {
    return sqlType == Types.DATE || sqlType == Types.TIME || sqlType == Types.TIMESTAMP;
}

From source file:alfio.datamapper.QueryType.java

private static SqlParameterSource extractParameters(Method m, Object[] args) {

    Annotation[][] parameterAnnotations = m.getParameterAnnotations();
    if (parameterAnnotations == null || parameterAnnotations.length == 0) {
        return new EmptySqlParameterSource();
    }

    MapSqlParameterSource ps = new MapSqlParameterSource();
    Class<?>[] parameterTypes = m.getParameterTypes();
    for (int i = 0; i < args.length; i++) {
        String name = parameterName(parameterAnnotations[i]);
        if (name != null) {
            if (args[i] != null && ZonedDateTime.class.isAssignableFrom(parameterTypes[i])) {
                ZonedDateTime dateTime = ZonedDateTime.class.cast(args[i]);
                final ZonedDateTime utc = dateTime.withZoneSameInstant(ZoneId.of("UTC"));
                Calendar c = Calendar.getInstance();
                c.setTimeZone(TimeZone.getTimeZone("UTC"));
                c.setTimeInMillis(utc.toInstant().toEpochMilli());
                ps.addValue(name, c, Types.TIMESTAMP);
            } else {
                ps.addValue(name, args[i], StatementCreatorUtils.javaTypeToSqlParameterType(parameterTypes[i]));
            }
        }
    }

    return ps;
}

From source file:org.spring.data.gemfire.app.dao.provider.JdbcUserDao.java

protected PreparedStatement setTimestamp(final PreparedStatement statement, final int parameterIndex,
        final Timestamp value) throws SQLException {
    if (value != null) {
        statement.setTimestamp(parameterIndex, value);
    } else {
        statement.setNull(parameterIndex, Types.TIMESTAMP);
    }

    return statement;
}

From source file:org.jumpmind.db.platform.mssql.MsSqlDdlReader.java

@Override
protected Column readColumn(DatabaseMetaDataWrapper metaData, Map<String, Object> values) throws SQLException {
    Column column = super.readColumn(metaData, values);
    String defaultValue = column.getDefaultValue();

    // Sql Server tends to surround the returned default value with one or
    // two sets of parentheses
    if (defaultValue != null) {
        while (defaultValue.startsWith("(") && defaultValue.endsWith(")")) {
            defaultValue = defaultValue.substring(1, defaultValue.length() - 1);
        }

        if (column.getMappedTypeCode() == Types.TIMESTAMP) {
            // Sql Server maintains the default values for DATE/TIME jdbc
            // types, so we have to
            // migrate the default value to TIMESTAMP
            Matcher matcher = isoDatePattern.matcher(defaultValue);
            Timestamp timestamp = null;

            if (matcher.matches()) {
                timestamp = new Timestamp(Date.valueOf(matcher.group(1)).getTime());
            } else {
                matcher = isoTimePattern.matcher(defaultValue);
                if (matcher.matches()) {
                    timestamp = new Timestamp(Time.valueOf(matcher.group(1)).getTime());
                }
            }
            if (timestamp != null) {
                defaultValue = timestamp.toString();
            }
        } else if (column.getMappedTypeCode() == Types.DECIMAL || column.getMappedTypeCode() == Types.BIGINT) {
            // For some reason, Sql Server 2005 always returns DECIMAL
            // default values with a dot
            // even if the scale is 0, so we remove the dot
            if ((column.getScale() == 0) && defaultValue.endsWith(".")) {
                defaultValue = defaultValue.substring(0, defaultValue.length() - 1);
            }
        } else if (TypeMap.isTextType(column.getMappedTypeCode())) {
            if (defaultValue.startsWith("N'") && defaultValue.endsWith("'")) {
                defaultValue = defaultValue.substring(2, defaultValue.length() - 1);
            }
            defaultValue = unescape(defaultValue, "'", "''");
        }

        column.setDefaultValue(defaultValue);
    }

    if ((column.getMappedTypeCode() == Types.DECIMAL) && (column.getSizeAsInt() == 19)
            && (column.getScale() == 0)) {
        column.setMappedTypeCode(Types.BIGINT);
    }

    // These columns return sizes and/or decimal places in the metadata from MS SQL Server, but
    // the values are not adjustable through CREATE TABLE, so they are omitted
    if (column.getJdbcTypeName() != null && (column.getJdbcTypeName().equals("smallmoney")
            || column.getJdbcTypeName().equals("money") || column.getJdbcTypeName().equals("timestamp")
            || column.getJdbcTypeName().equals("uniqueidentifier") || column.getJdbcTypeName().equals("time")
            || column.getJdbcTypeName().equals("datetime2") || column.getJdbcTypeName().equals("date"))) {
        removePlatformSizeAndDecimal(column);
    }
    return column;
}

From source file:org.apache.sqoop.manager.ConnManager.java

/**
 * Resolve a database-specific type to an Avro data type.
 * @param sqlType     SQL type
 * @return            Avro type
 */
public Type toAvroType(int sqlType) {
    switch (sqlType) {
    case Types.TINYINT:
    case Types.SMALLINT:
    case Types.INTEGER:
        return Type.INT;
    case Types.BIGINT:
        return Type.LONG;
    case Types.BIT:
    case Types.BOOLEAN:
        return Type.BOOLEAN;
    case Types.REAL:
        return Type.FLOAT;
    case Types.FLOAT:
    case Types.DOUBLE:
        return Type.DOUBLE;
    case Types.NUMERIC:
    case Types.DECIMAL:
        return Type.STRING;
    case Types.CHAR:
    case Types.VARCHAR:
    case Types.LONGVARCHAR:
    case Types.LONGNVARCHAR:
    case Types.NVARCHAR:
    case Types.NCHAR:
        return Type.STRING;
    case Types.DATE:
    case Types.TIME:
    case Types.TIMESTAMP:
        return Type.STRING;
    case Types.BLOB:
    case Types.BINARY:
    case Types.VARBINARY:
    case Types.LONGVARBINARY:
        return Type.BYTES;
    default:
        throw new IllegalArgumentException("Cannot convert SQL type " + sqlType);
    }
}

From source file:ar.com.zauber.commons.spring.secrets.SQLSecretMap.java

/** @see ar.com.zauber.commons.secret.SecretsMap#cleanup() */
public final void cleanup() {
    final String sql = "DELETE FROM " + table + " WHERE date < ?";
    template.update(sql, new Object[] { getExpirationDateValidator().getNowInvalid() },
            new int[] { Types.TIMESTAMP });
}