List of usage examples for java.sql.Time.getTime()
public long getTime()
From source file:com.continuent.tungsten.common.mysql.MySQLPacket.java
/** * Puts a jdbc {@link java.sql.Time} in the buffer as:<br> * Send sign (0 for > EPOCH, 1 for < EPOCH)<br> * Then day (always zero for a time)<br> * Finally Hour Minutes and Seconds using * {@link MySQLPacket#putHourMinSec(long)}<br> * Note that MySQL jdbc driver doesn't care about days and millis, so it is * most probably unnecessary to send them... * /*w w w .ja v a2 s. co m*/ * @param t the Time object to write */ public void putTime(Time t) { // guess sign and normalize millis long millis = t.getTime(); if (millis < 0) { putByte((byte) 1); millis = -millis; } else putByte((byte) 0); // Send a fake day putInt32(0); // Send the 3 bytes HHMMSS putHourMinSec(millis); // Don't send millis }
From source file:com.alibaba.wasp.jdbc.TestJdbcResultSet.java
@Test public void testDatetime() throws SQLException { trace("test DATETIME"); ResultSet rs;/*w ww .j a va 2s. c o m*/ Object o; // rs = stat.executeQuery("call date '99999-12-23'"); // rs.next(); // assertEquals("99999-12-23", rs.getString(1)); // rs = stat.executeQuery("call timestamp '99999-12-23 01:02:03.000'"); // rs.next(); // assertEquals("99999-12-23 01:02:03.0", rs.getString(1)); // rs = stat.executeQuery("call date '-99999-12-23'"); // rs.next(); // assertEquals("-99999-12-23", rs.getString(1)); // rs = stat.executeQuery("call timestamp '-99999-12-23 01:02:03.000'"); // rs.next(); // assertEquals("-99999-12-23 01:02:03.0", rs.getString(1)); stat = conn.createStatement(); // stat.execute("CREATE TABLE test(ID INT PRIMARY KEY,VALUE DATETIME)"); stat.execute( "INSERT INTO test (column1,column6,column2,column3) VALUES (1,'2011-11-11 0:0:0', 13, 'testDatetime')"); stat.execute( "INSERT INTO test (column1,column6,column2,column3) VALUES (2,'2002-02-02 02:02:02', 13, 'testDatetime')"); stat.execute( "INSERT INTO test (column1,column6,column2,column3) VALUES (3,'1800-01-01 0:0:0', 13, 'testDatetime')"); stat.execute( "INSERT INTO test (column1,column6,column2,column3) VALUES (4,'9999-12-31 23:59:59', 13, 'testDatetime')"); stat.execute( "INSERT INTO test (column1,column6,column2,column3) VALUES (5,'9999-12-31 23:59:59', 13, 'testDatetime')"); // stat.execute("INSERT INTO test (column1,column6,column2,column3) VALUES(5,NULL)"); rs = stat.executeQuery("SELECT column1,column6 FROM test where column3='testDatetime' ORDER BY column1"); // assertResultSetMeta(rs, 2, new String[] { "ID", "VALUE" }, new int[] { // Types.INTEGER, Types.TIMESTAMP }, new int[] { 10, 23 }, new int[] { 0, // 10 }); // rs = stat.executeQuery("SELECT * FROM test ORDER BY ID"); // assertResultSetMeta(rs, 2, new String[] { "ID", "VALUE" }, new int[] { // Types.INTEGER, Types.TIMESTAMP }, new int[] { 10, 23 }, new int[] { 0, // 10 }); rs.next(); java.sql.Date date; java.sql.Time time; Timestamp 
ts; date = rs.getDate(2); assertTrue(!rs.wasNull()); time = rs.getTime(2); assertTrue(!rs.wasNull()); ts = rs.getTimestamp(2); assertTrue(!rs.wasNull()); trace("Date: " + date.toString() + " Time:" + time.toString() + " Timestamp:" + ts.toString()); trace("Date ms: " + date.getTime() + " Time ms:" + time.getTime() + " Timestamp ms:" + ts.getTime()); trace("1970 ms: " + Timestamp.valueOf("1970-01-01 00:00:00.0").getTime()); assertEquals(Timestamp.valueOf("2011-11-11 00:00:00.0").getTime(), date.getTime()); assertEquals(Timestamp.valueOf("1970-01-01 00:00:00.0").getTime(), time.getTime()); assertEquals(Timestamp.valueOf("2011-11-11 00:00:00.0").getTime(), ts.getTime()); assertTrue(date.equals(java.sql.Date.valueOf("2011-11-11"))); assertTrue(time.equals(java.sql.Time.valueOf("00:00:00"))); assertTrue(ts.equals(Timestamp.valueOf("2011-11-11 00:00:00.0"))); assertFalse(rs.wasNull()); o = rs.getObject(2); trace(o.getClass().getName()); assertTrue(o instanceof Timestamp); assertTrue(((Timestamp) o).equals(Timestamp.valueOf("2011-11-11 00:00:00"))); assertFalse(rs.wasNull()); rs.next(); date = rs.getDate("COLUMN6"); assertTrue(!rs.wasNull()); time = rs.getTime("COLUMN6"); assertTrue(!rs.wasNull()); ts = rs.getTimestamp("COLUMN6"); assertTrue(!rs.wasNull()); trace("Date: " + date.toString() + " Time:" + time.toString() + " Timestamp:" + ts.toString()); assertEquals("2002-02-02", date.toString()); assertEquals("02:02:02", time.toString()); assertEquals("2002-02-02 02:02:02.0", ts.toString()); rs.next(); assertEquals("1800-01-01", rs.getDate("column6").toString()); assertEquals("00:00:00", rs.getTime("column6").toString()); assertEquals("1800-01-01 00:00:00.0", rs.getTimestamp("column6").toString()); rs.next(); assertEquals("9999-12-31", rs.getDate("Column6").toString()); assertEquals("23:59:59", rs.getTime("Column6").toString()); assertEquals("9999-12-31 23:59:59.0", rs.getTimestamp("Column6").toString()); // assertTrue(!rs.next()); }
From source file:org.apache.sqoop.mapreduce.hcat.SqoopHCatImportHelper.java
/**
 * Converts a JDBC temporal value ({@link java.sql.Date}, {@link java.sql.Time}
 * or {@link java.sql.Timestamp}) into the object expected for the target
 * HCatalog field schema.
 *
 * @param val the JDBC value to convert
 * @param hfs the HCatalog field schema describing the target column
 * @return the converted value, or {@code null} when the combination of input
 *         type and target HCat type is not supported
 */
private Object converDateTypes(Object val, HCatFieldSchema hfs) {
    HCatFieldSchema.Type hfsType = hfs.getType();
    if (val instanceof java.sql.Date) {
        Date d = (Date) val;
        if (hfsType == HCatFieldSchema.Type.DATE) {
            return d;
        } else if (hfsType == HCatFieldSchema.Type.TIMESTAMP) {
            return new Timestamp(d.getTime());
        } else if (hfsType == HCatFieldSchema.Type.BIGINT) {
            return d.getTime();
        }
        return toHCatStringType(val, hfs, hfsType);
    } else if (val instanceof java.sql.Time) {
        Time t = (Time) val;
        if (hfsType == HCatFieldSchema.Type.DATE) {
            return new Date(t.getTime());
        } else if (hfsType == HCatFieldSchema.Type.TIMESTAMP) {
            return new Timestamp(t.getTime());
        } else if (hfsType == HCatFieldSchema.Type.BIGINT) {
            // Use the already-cast local rather than re-casting val.
            return t.getTime();
        }
        return toHCatStringType(val, hfs, hfsType);
    } else if (val instanceof java.sql.Timestamp) {
        Timestamp ts = (Timestamp) val;
        if (hfsType == HCatFieldSchema.Type.DATE) {
            return new Date(ts.getTime());
        } else if (hfsType == HCatFieldSchema.Type.TIMESTAMP) {
            return ts;
        } else if (hfsType == HCatFieldSchema.Type.BIGINT) {
            return ts.getTime();
        }
        return toHCatStringType(val, hfs, hfsType);
    }
    // Not a temporal type we know how to convert.
    return null;
}

/**
 * Shared handling for the STRING / VARCHAR / CHAR target types, which was
 * previously duplicated verbatim for each temporal input type.
 *
 * @return the string-typed value, or {@code null} when hfsType is not one of
 *         STRING, VARCHAR or CHAR
 */
private Object toHCatStringType(Object val, HCatFieldSchema hfs, HCatFieldSchema.Type hfsType) {
    if (hfsType == HCatFieldSchema.Type.STRING) {
        return val.toString();
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
        VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
        return new HiveVarchar(val.toString(), vti.getLength());
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
        CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
        return new HiveChar(val.toString(), cti.getLength());
    }
    return null;
}
From source file:kx.c.java
// Serializes a java.sql.Time onto the wire as a 32-bit int: the null
// sentinel (j == nj) is written as the int null value ni; otherwise the
// value is reduced modulo 86400000 (ms per day) to milliseconds-within-day.
// NOTE(review): lg() presumably applies a local-time offset before the
// reduction -- confirm against the rest of kx c.java; the overloaded w(int)
// does the actual buffer write.
void w(Time t) { long j = t.getTime(); w(j == nj ? ni : (int) (lg(j) % 86400000)); }
From source file:fll.db.Queries.java
/**
 * Convert {@link java.sql.Time} to {@link java.util.Date}.
 *
 * @param t the time to convert, may be null
 * @return a java.util.Date carrying the same millisecond value, or null
 *         when t is null
 */
public static Date timeToDate(final Time t) {
    return (null == t) ? null : new Date(t.getTime());
}
From source file:dao.DatasetRowMapper.java
@Override public Dataset mapRow(ResultSet rs, int rowNum) throws SQLException { int id = rs.getInt(DATASET_ID_COLUMN); String name = rs.getString(DATASET_NAME_COLUMN); String urn = rs.getString(DATASET_URN_COLUMN); String source = rs.getString(DATASET_SOURCE_COLUMN); String strOwner = rs.getString(DATASET_OWNER_ID_COLUMN); String strOwnerName = rs.getString(DATASET_OWNER_NAME_COLUMN); String schema = rs.getString(DATASET_SCHEMA_COLUMN); Time created = rs.getTime(DATASET_CREATED_TIME_COLUMN); Time modified = rs.getTime(DATASET_MODIFIED_TIME_COLUMN); Integer schemaHistoryId = rs.getInt(SCHEMA_HISTORY_ID_COLUMN); Long sourceModifiedTime = rs.getLong(DATASET_SOURCE_MODIFIED_TIME_COLUMN); Dataset dataset = new Dataset(); dataset.id = id;//from w w w. java2s . c o m dataset.name = name; dataset.urn = urn; dataset.schema = schema; String[] owners = null; if (StringUtils.isNotBlank(strOwner)) { owners = strOwner.split(","); } String[] ownerNames = null; if (StringUtils.isNotBlank(strOwnerName)) { ownerNames = strOwnerName.split(","); } dataset.owners = new ArrayList<User>(); if (owners != null && ownerNames != null) { if (owners.length == ownerNames.length) { for (int i = 0; i < owners.length; i++) { User user = new User(); user.userName = owners[i]; if (StringUtils.isBlank(ownerNames[i]) || ownerNames[i].equalsIgnoreCase("*")) { user.name = owners[i]; } else { user.name = ownerNames[i]; dataset.owners.add(user); } } } else { Logger.error("DatasetWithUserRowMapper get wrong owner and names. 
Dataset ID: " + Long.toString(dataset.id) + " Owner: " + owners + " Owner names: " + ownerNames); } } if (StringUtils.isNotBlank(dataset.urn)) { if (dataset.urn.substring(0, 4).equalsIgnoreCase(HDFS_PREFIX)) { dataset.hdfsBrowserLink = Play.application().configuration() .getString(DatasetsDAO.HDFS_BROWSER_URL_KEY) + dataset.urn.substring(HDFS_URN_PREFIX_LEN); } } dataset.source = source; if (modified != null && sourceModifiedTime != null && sourceModifiedTime > 0) { dataset.modified = new java.util.Date(modified.getTime()); dataset.formatedModified = dataset.modified.toString(); } if (created != null) { dataset.created = new java.util.Date(created.getTime()); } else if (modified != null) { dataset.created = new java.util.Date(modified.getTime()); } if (schemaHistoryId != null && schemaHistoryId > 0) { dataset.hasSchemaHistory = true; } else { dataset.hasSchemaHistory = false; } return dataset; }
From source file:org.wso2.carbon.dataservices.core.description.query.SQLQuery.java
/**
 * Formats a {@link java.sql.Time} as its Axis2 schema time string.
 *
 * @param sqlTime the time value to format
 * @return the lexical representation produced by the Axis2 Time type
 */
private String convertToTimeString(Time sqlTime) {
    Calendar calendar = Calendar.getInstance();
    calendar.setTimeInMillis(sqlTime.getTime());
    org.apache.axis2.databinding.types.Time axisTime =
            new org.apache.axis2.databinding.types.Time(calendar);
    return axisTime.toString();
}
From source file:dao.DatasetWithUserRowMapper.java
@Override public Dataset mapRow(ResultSet rs, int rowNum) throws SQLException { int id = rs.getInt(DATASET_ID_COLUMN); String name = rs.getString(DATASET_NAME_COLUMN); String urn = rs.getString(DATASET_URN_COLUMN); String source = rs.getString(DATASET_SOURCE_COLUMN); String schema = rs.getString(DATASET_SCHEMA_COLUMN); Time created = rs.getTime(DATASET_CREATED_TIME_COLUMN); Time modified = rs.getTime(DATASET_MODIFIED_TIME_COLUMN); Integer favoriteId = rs.getInt(FAVORITE_DATASET_ID_COLUMN); Integer schemaHistoryId = rs.getInt(SCHEMA_HISTORY_ID_COLUMN); Long watchId = rs.getLong(DATASET_WATCH_ID_COLUMN); Long sourceModifiedTime = rs.getLong(DATASET_SOURCE_MODIFIED_TIME_COLUMN); String strOwner = rs.getString(DATASET_OWNER_ID_COLUMN); String strOwnerName = rs.getString(DATASET_OWNER_NAME_COLUMN); Dataset dataset = new Dataset(); dataset.id = id;// w w w.j a va2 s.co m dataset.name = name; dataset.urn = urn; dataset.schema = schema; String[] owners = null; if (StringUtils.isNotBlank(strOwner)) { owners = strOwner.split(","); } String[] ownerNames = null; if (StringUtils.isNotBlank(strOwnerName)) { ownerNames = strOwnerName.split(","); } dataset.owners = new ArrayList<User>(); if (owners != null && ownerNames != null) { if (owners.length == ownerNames.length) { for (int i = 0; i < owners.length; i++) { User user = new User(); user.userName = owners[i]; if (StringUtils.isBlank(ownerNames[i]) || ownerNames[i].equalsIgnoreCase("*")) { user.name = owners[i]; } else { user.name = ownerNames[i]; dataset.owners.add(user); } } } else { Logger.error("DatasetWithUserRowMapper get wrong owner and names. 
Dataset ID: " + Long.toString(dataset.id) + " Owner: " + owners + " Owner names: " + ownerNames); } } if (StringUtils.isNotBlank(dataset.urn)) { if (dataset.urn.substring(0, 4).equalsIgnoreCase(HDFS_PREFIX)) { dataset.hdfsBrowserLink = Play.application().configuration() .getString(DatasetsDAO.HDFS_BROWSER_URL_KEY) + dataset.urn.substring(DatasetRowMapper.HDFS_URN_PREFIX_LEN); } } dataset.source = source; if (modified != null && sourceModifiedTime != null && sourceModifiedTime > 0) { dataset.modified = new java.util.Date(modified.getTime()); dataset.formatedModified = dataset.modified.toString(); } if (created != null) { dataset.created = new java.util.Date(created.getTime()); } else if (modified != null) { dataset.created = new java.util.Date(modified.getTime()); } if (favoriteId != null && favoriteId > 0) { dataset.isFavorite = true; } else { dataset.isFavorite = false; } if (watchId != null && watchId > 0) { dataset.watchId = watchId; dataset.isWatched = true; } else { dataset.watchId = 0L; dataset.isWatched = false; } if (schemaHistoryId != null && schemaHistoryId > 0) { dataset.hasSchemaHistory = true; } else { dataset.hasSchemaHistory = false; } return dataset; }