List of usage examples for java.sql.Timestamp.from
public static Timestamp from(Instant instant)
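Timestamp.from converts a java.time.Instant to a java.sql.Timestamp, preserving nanosecond precision where the Instant carries it, and throws IllegalArgumentException if the instant is outside the range a Timestamp can represent. Before the real-world examples below, here is a minimal, self-contained sketch of the conversion itself (the class name is illustrative):

import java.sql.Timestamp;
import java.time.Instant;

public class TimestampFromDemo {
    public static void main(String[] args) {
        // Convert the current moment; sub-millisecond precision is preserved.
        Timestamp now = Timestamp.from(Instant.now());
        System.out.println(now);

        // The conversion round-trips cleanly back to an Instant.
        Instant roundTrip = now.toInstant();
        System.out.println(roundTrip);
    }
}

Note that Timestamp.toString renders the value in the JVM's default time zone, while Instant.toString is always UTC, so the two printed lines may differ even though they denote the same moment.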
From source file: org.ulyssis.ipp.snapshot.Snapshot.java (binding a snapshot's Instant to the timestamp parameters of a DELETE statement)
public static void deleteAfter(Connection connection, Snapshot snapshot) throws SQLException {
    String statement = "DELETE FROM \"snapshots\" WHERE \"time\" > ? OR (\"time\" = ? AND \"event\" > ?)";
    try (PreparedStatement stmt = connection.prepareStatement(statement)) {
        Timestamp timestamp = Timestamp.from(snapshot.getSnapshotTime());
        stmt.setTimestamp(1, timestamp);
        stmt.setTimestamp(2, timestamp);
        stmt.setLong(3, snapshot.getEventId().orElse(-1L));
        LOG.debug("Executing query: {}", stmt);
        int affectedRows = stmt.executeUpdate();
        LOG.debug("deleteAfter affected {} rows", affectedRows);
    }
}
From source file: org.ulyssis.ipp.snapshot.Snapshot.java (persisting a snapshot's Instant with PreparedStatement.setTimestamp and retrieving the generated key)
public void save(Connection connection) throws SQLException {
    if (id != -1)
        return;
    try (PreparedStatement statement = connection.prepareStatement(
            "INSERT INTO \"snapshots\" (\"time\",\"data\",\"event\") VALUES (?,?,?)",
            Statement.RETURN_GENERATED_KEYS)) {
        statement.setTimestamp(1, Timestamp.from(snapshotTime));
        String serialized;
        try {
            serialized = Serialization.getJsonMapper().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            assert false; // TODO(Roel): Programming error
            return;
        }
        statement.setString(2, serialized);
        statement.setLong(3, eventId);
        statement.executeUpdate();
        ResultSet keys = statement.getGeneratedKeys();
        keys.next();
        this.id = keys.getLong(1);
    }
}
From source file: org.ulyssis.ipp.snapshot.Event.java (persisting an event's time alongside its serialized JSON form)
public void save(Connection connection) throws SQLException {
    if (id != -1)
        return;
    try (PreparedStatement statement = connection.prepareStatement(
            "INSERT INTO \"events\" (\"time\",\"type\",\"data\",\"removed\") VALUES (?,?,?,?)",
            Statement.RETURN_GENERATED_KEYS)) {
        statement.setTimestamp(1, Timestamp.from(time));
        String serialized;
        try {
            serialized = Serialization.getJsonMapper().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            assert false;
            throw new IllegalStateException(e); // TODO(Roel): is this appropriate?
        }
        statement.setString(2, this.getClass().getSimpleName());
        statement.setString(3, serialized);
        statement.setBoolean(4, removed);
        statement.executeUpdate();
        ResultSet keys = statement.getGeneratedKeys();
        keys.next();
        this.id = keys.getLong(1);
    }
}
From source file: com.streamsets.pipeline.lib.jdbc.multithread.TableContextUtil.java (advancing a TIMESTAMP-typed partition offset by a millisecond adjustment via Instant.plusMillis)
public static String generateNextPartitionOffset(TableContext tableContext, String column, String offset) {
    final String partitionSize = tableContext.getOffsetColumnToPartitionOffsetAdjustments().get(column);
    switch (tableContext.getOffsetColumnToType().get(column)) {
    case Types.TINYINT:
    case Types.SMALLINT:
    case Types.INTEGER:
        final int int1 = Integer.parseInt(offset);
        final int int2 = Integer.parseInt(partitionSize);
        return String.valueOf(int1 + int2);
    case Types.TIMESTAMP:
        final Timestamp timestamp1 = getTimestampForOffsetValue(offset);
        final long timestampAdj = Long.parseLong(partitionSize);
        final Timestamp timestamp2 = Timestamp.from(timestamp1.toInstant().plusMillis(timestampAdj));
        return getOffsetValueForTimestamp(timestamp2);
    case Types.BIGINT:
    // TIME, DATE are represented as long (epoch)
    case Types.TIME:
    case Types.DATE:
        final long long1 = Long.parseLong(offset);
        final long long2 = Long.parseLong(partitionSize);
        return String.valueOf(long1 + long2);
    case Types.FLOAT:
    case Types.REAL:
        final float float1 = Float.parseFloat(offset);
        final float float2 = Float.parseFloat(partitionSize);
        return String.valueOf(float1 + float2);
    case Types.DOUBLE:
        final double double1 = Double.parseDouble(offset);
        final double double2 = Double.parseDouble(partitionSize);
        return String.valueOf(double1 + double2);
    case Types.NUMERIC:
    case Types.DECIMAL:
        final BigDecimal decimal1 = new BigDecimal(offset);
        final BigDecimal decimal2 = new BigDecimal(partitionSize);
        return decimal1.add(decimal2).toString();
    }
    return null;
}
From source file: com.caricah.iotracah.datastore.ignitecache.IgniteDatastore.java (using the current time as a query parameter to find expired sessions)
@Override
public Collection<Session> getActiveSessions() {
    try {
        String query = "isActive = ? AND expiryTimestamp < ? LIMIT ?";
        Object[] params = { true, Timestamp.from(Instant.now()), 100 };
        Observable<IOTClient> clientObservable = clientHandler.getByQuery(IOTClient.class, query, params);
        Set<Session> activeSessions = new HashSet<>();
        clientObservable.toBlocking().forEach(activeSessions::add);
        if (!activeSessions.isEmpty()) {
            log.debug(" getActiveSessions : found {} expired sessions", activeSessions.size());
        }
        return activeSessions;
    } catch (Exception e) {
        log.error(" getActiveSessions : problems with active sessions collector ", e);
        return Collections.emptySet();
    }
}
From source file: com.ikanow.aleph2.management_db.services.DataBucketCrudService.java (computing a deletion time one minute in the future for a retry queue entry)
/** Internal function to delete the bucket, while notifying active users of the bucket
 * @param to_delete
 * @return a management future containing the result
 */
private ManagementFuture<Boolean> deleteBucket(final DataBucketBean to_delete) {
    try {
        // Also delete the file paths (currently, just add ".deleted" to top level path)
        deleteFilePath(to_delete, _storage_service.get());
        // Delete the logging path as well if it exists (it's okay if it fails, should mean it doesn't exist)
        try {
            deleteFilePath(BucketUtils.convertDataBucketBeanToLogging(to_delete), _storage_service.get());
        } catch (Exception ex) {
        }

        // Add to the deletion queue (do it before trying to delete the bucket in case this bucket
        // deletion fails - if so then the delete queue will retry every hour)
        final Date to_delete_date = Timestamp.from(Instant.now().plus(1L, ChronoUnit.MINUTES));
        final CompletableFuture<Supplier<Object>> enqueue_delete = this._bucket_deletion_queue.get()
                .storeObject(new BucketDeletionMessage(to_delete, to_delete_date, false));

        final CompletableFuture<Boolean> delete_reply = enqueue_delete
                .thenCompose(__ -> _underlying_data_bucket_db.get().deleteObjectById(to_delete._id()));

        return FutureUtils.denestManagementFuture(delete_reply.thenCompose(del_reply -> {
            if (!del_reply) {
                // Didn't find an object to delete, just return that information to the user
                return CompletableFuture.completedFuture(Optional.empty());
            } else {
                // Get the status and delete it
                final CompletableFuture<Optional<DataBucketStatusBean>> future_status_bean = _underlying_data_bucket_status_db
                        .get().updateAndReturnObjectBySpec(
                                CrudUtils.allOf(DataBucketStatusBean.class).when(DataBucketStatusBean::_id,
                                        to_delete._id()),
                                Optional.empty(), CrudUtils.update(DataBucketStatusBean.class).deleteObject(),
                                Optional.of(true), Collections.emptyList(), false);
                return future_status_bean;
            }
        }).thenApply(status_bean -> {
            if (!status_bean.isPresent()) {
                return FutureUtils.createManagementFuture(delete_reply);
            } else {
                final BucketActionMessage.DeleteBucketActionMessage delete_message = new BucketActionMessage.DeleteBucketActionMessage(
                        to_delete,
                        new HashSet<String>(Optional
                                .ofNullable(status_bean.isPresent() ? status_bean.get().node_affinity() : null)
                                .orElse(Collections.emptyList())));
                final CompletableFuture<Collection<BasicMessageBean>> management_results = MgmtCrudUtils
                        .applyRetriableManagementOperation(to_delete, _actor_context,
                                _bucket_action_retry_store.get(), delete_message,
                                source -> new BucketActionMessage.DeleteBucketActionMessage(
                                        delete_message.bucket(), new HashSet<String>(Arrays.asList(source))));
                // Convert BucketActionCollectedRepliesMessage into a management side-channel:
                return FutureUtils.createManagementFuture(delete_reply, management_results);
            }
        }));
    } catch (Exception e) {
        // This is a serious enough exception that we'll just leave here
        return FutureUtils.createManagementFuture(FutureUtils.returnError(e));
    }
}
From source file: org.apache.nifi.processors.hive.TestPutHive3Streaming.java (supplying Timestamp.from(Instant.now()) as a test record's TIMESTAMP field)
@Test
public void testDataTypeConversions() throws Exception {
    final String avroSchema = IOUtils.toString(new FileInputStream("src/test/resources/datatype_test.avsc"),
            StandardCharsets.UTF_8);
    schema = new Schema.Parser().parse(avroSchema);

    processor.setFields(Arrays.asList(
            new FieldSchema("uuid", serdeConstants.STRING_TYPE_NAME, "uuid"),
            new FieldSchema("stringc", serdeConstants.STRING_TYPE_NAME, "stringc"),
            new FieldSchema("charc", serdeConstants.CHAR_TYPE_NAME + "(1)", "charc"),
            new FieldSchema("varcharc", serdeConstants.VARCHAR_TYPE_NAME + "(100)", "varcharc"),
            new FieldSchema("intc", serdeConstants.INT_TYPE_NAME, "intc"),
            new FieldSchema("tinyintc", serdeConstants.TINYINT_TYPE_NAME, "tinyintc"),
            new FieldSchema("smallintc", serdeConstants.SMALLINT_TYPE_NAME, "smallintc"),
            new FieldSchema("bigintc", serdeConstants.BIGINT_TYPE_NAME, "bigintc"),
            new FieldSchema("booleanc", serdeConstants.BOOLEAN_TYPE_NAME, "booleanc"),
            new FieldSchema("floatc", serdeConstants.FLOAT_TYPE_NAME, "floatc"),
            new FieldSchema("doublec", serdeConstants.DOUBLE_TYPE_NAME, "doublec"),
            new FieldSchema("bytesc", serdeConstants.BINARY_TYPE_NAME, "bytesc"),
            new FieldSchema("listc",
                    serdeConstants.LIST_TYPE_NAME + "<" + serdeConstants.STRING_TYPE_NAME + ">", "listc"),
            new FieldSchema("structc",
                    serdeConstants.STRUCT_TYPE_NAME + "<sint:" + serdeConstants.INT_TYPE_NAME + ","
                            + "sboolean:" + serdeConstants.BOOLEAN_TYPE_NAME + ","
                            + "sstring:" + serdeConstants.STRING_TYPE_NAME + ">", "structc"),
            new FieldSchema("mapc", serdeConstants.MAP_TYPE_NAME + "<" + serdeConstants.STRING_TYPE_NAME + ","
                    + serdeConstants.INT_TYPE_NAME + ">", "mapc"),
            new FieldSchema("datec", serdeConstants.DATE_TYPE_NAME, "datec"),
            new FieldSchema("timestampc", serdeConstants.TIMESTAMP_TYPE_NAME, "timestampc"),
            new FieldSchema("decimalc", serdeConstants.DECIMAL_TYPE_NAME + "(4,2)", "decimalc"),
            new FieldSchema("enumc", serdeConstants.STRING_TYPE_NAME, "enumc")));

    runner = TestRunners.newTestRunner(processor);
    runner.setProperty(PutHive3Streaming.HIVE_CONFIGURATION_RESOURCES, TEST_CONF_PATH);
    MockRecordParser readerFactory = new MockRecordParser();
    final RecordSchema recordSchema = AvroTypeUtil.createSchema(schema);
    for (final RecordField recordField : recordSchema.getFields()) {
        readerFactory.addSchemaField(recordField.getFieldName(), recordField.getDataType().getFieldType(),
                recordField.isNullable());
    }

    List<String> enumc = Arrays.asList("SPADES", "HEARTS", "DIAMONDS", "CLUBS");
    Random r = new Random();
    for (int index = 0; index < 10; index++) {
        final int i = index;
        Record structRecord = new MapRecord(
                AvroTypeUtil.createSchema(schema.getField("structc").schema().getTypes().get(1)), // Get non-null type in union
                new HashMap<String, Object>() {
                    {
                        put("sint", i + 2); // {"name": "sint", "type": "int"}
                        if (i % 3 == 2) {
                            put("sboolean", null);
                        } else {
                            put("sboolean", i % 3 == 1); // {"name": "sboolean", "type": ["null","boolean"]}
                        }
                        put("sstring", "world"); // {"name": "sstring", "type": "string"}
                    }
                });
        readerFactory.addRecord(
                UUID.randomUUID(), // {"name": "uuid", "type": "string"}
                "hello", // {"name": "stringc", "type": "string"}
                'a', // charc
                "world", // varcharc
                i, // {"name": "intc", "type": "int"}
                i + 1, // {"name": "tinyintc", "type": ["null", "int"]}
                i * 10, // {"name": "smallintc", "type": "int"}
                i * Integer.MAX_VALUE, // {"name": "bigintc", "type": "long"}
                i % 2 == 0, // {"name": "booleanc", "type": "boolean"}
                i * 100.0f, // {"name": "floatc", "type": "float"}
                i * 100.0, // {"name": "doublec", "type": "double"}
                "Hello".getBytes(), // bytesc
                new String[] { "a", "b" }, // {"name": "listc", "type": ["null", {"type": "array", "items": "string"}]}
                structRecord,
                new HashMap<String, Integer>() { // mapc: map of string to int
                    {
                        put("sint1", i + 2);
                        put("sint2", i);
                    }
                },
                new java.sql.Date(Calendar.getInstance().getTimeInMillis()), // datec
                Timestamp.from(Instant.now()), // timestampc
                i * 99.0 / 100, // decimalc
                enumc.get(r.nextInt(4)) // {"name": "enumc", "type": {"type": "enum", "name": "Suit", "symbols": ["SPADES","HEARTS","DIAMONDS","CLUBS"]}}
        );
    }

    runner.addControllerService("mock-reader-factory", readerFactory);
    runner.enableControllerService(readerFactory);
    runner.setProperty(PutHive3Streaming.RECORD_READER, "mock-reader-factory");
    runner.setProperty(PutHive3Streaming.METASTORE_URI, "thrift://localhost:9083");
    runner.setProperty(PutHive3Streaming.DB_NAME, "default");
    runner.setProperty(PutHive3Streaming.TABLE_NAME, "users");
    runner.enqueue(new byte[0]);
    runner.run();

    runner.assertTransferCount(PutHive3Streaming.REL_SUCCESS, 1);
    final MockFlowFile flowFile = runner.getFlowFilesForRelationship(PutHive3Streaming.REL_SUCCESS).get(0);
    assertEquals("10", flowFile.getAttribute(HIVE_STREAMING_RECORD_COUNT_ATTR));
    assertEquals("default.users", flowFile.getAttribute(ATTR_OUTPUT_TABLES));
}
From source file: org.openmailarchive.Entities.Mail.java (defaulting a mail's timestamp to the current time before insert)
public boolean insert(Connection conn) throws SQLException {
    conn.setAutoCommit(false);

    String insertMail = "INSERT INTO mail(mailid, filepath, mailfrom, dt, subject, body, bodyType, spamScore) VALUES(?, ?, ?, ?, ?, ?, ?, ?)";
    String insertRecipient = "INSERT INTO `recipient`(`mailid`, `recipientType`, `recipient`) VALUES(?, ?, ?)";
    String insertAttachment = "INSERT INTO `attachment`(`mailid`, `mimeType`, `filename`) VALUES(?, ?, ?)";

    try {
        PreparedStatement stmtMail = conn.prepareStatement(insertMail);
        stmtMail.setString(1, mailid);
        stmtMail.setString(2, filepath);
        stmtMail.setString(3, mailfrom);
        if (dt == null)
            dt = Timestamp.from(Instant.now());
        stmtMail.setTimestamp(4, dt);
        stmtMail.setString(5, subject);
        stmtMail.setString(6, body);
        stmtMail.setInt(7, bodyType);
        stmtMail.setDouble(8, spamScore);
        stmtMail.executeUpdate();

        PreparedStatement stmtRecip = conn.prepareStatement(insertRecipient);
        for (Recipient r : recipients) {
            stmtRecip.setString(1, mailid);
            stmtRecip.setString(2, r.getType());
            stmtRecip.setString(3, r.getAddress());
            stmtRecip.executeUpdate();
        }

        PreparedStatement stmtAttach = conn.prepareStatement(insertAttachment);
        for (Attachment a : attachments) {
            stmtAttach.setString(1, mailid);
            stmtAttach.setString(2, a.getType());
            stmtAttach.setString(3, a.getFilename());
            stmtAttach.executeUpdate();
        }

        conn.commit();
    } catch (SQLException e) {
        e.printStackTrace();
        conn.rollback();
        return false;
    }
    return true;
}
From source file: org.unitedinternet.cosmo.model.hibernate.HibPasswordRecovery.java (recording a creation time truncated to whole seconds)
public HibPasswordRecovery(User user, String key, long timeout) {
    this.user = user;
    this.key = key;
    this.timeout = timeout;
    this.created = Timestamp.from(Instant.now().truncatedTo(ChronoUnit.SECONDS));
}
From source file: org.wso2.carbon.apimgt.core.dao.impl.AnalyticsDAOImpl.java (converting Instant range bounds to Timestamps for a BETWEEN query)
/**
 * @see AnalyticsDAO#getApplicationCount(Instant, Instant, String)
 */
@Override
@SuppressFBWarnings("SQL_PREPARED_STATEMENT_GENERATED_FROM_NONCONSTANT_STRING")
public List<ApplicationCount> getApplicationCount(Instant fromTimestamp, Instant toTimestamp, String createdBy)
        throws APIMgtDAOException {
    final String query;
    if (StringUtils.isNotEmpty(createdBy)) {
        query = "SELECT COUNT(UUID) AS count, CREATED_TIME AS time "
                + "FROM AM_APPLICATION "
                + "WHERE (CREATED_TIME BETWEEN ? AND ?) "
                + "AND CREATED_BY = ? "
                + "GROUP BY CREATED_TIME "
                + "ORDER BY CREATED_TIME ASC";
    } else {
        query = "SELECT COUNT(UUID) AS count, CREATED_TIME AS time "
                + "FROM AM_APPLICATION "
                + "WHERE (CREATED_TIME BETWEEN ? AND ?) "
                + "GROUP BY CREATED_TIME "
                + "ORDER BY CREATED_TIME ASC";
    }
    List<ApplicationCount> applicationCountList = new ArrayList<>();
    try (Connection connection = DAOUtil.getConnection();
            PreparedStatement statement = connection.prepareStatement(query)) {
        statement.setTimestamp(1, Timestamp.from(fromTimestamp));
        statement.setTimestamp(2, Timestamp.from(toTimestamp));
        if (StringUtils.isNotEmpty(createdBy)) {
            statement.setString(3, createdBy);
        }
        log.debug("Executing query: {} ", query);
        statement.execute();
        try (ResultSet rs = statement.getResultSet()) {
            long count = 0;
            while (rs.next()) {
                ApplicationCount applicationCount = new ApplicationCount();
                count += rs.getLong("count");
                applicationCount.setTimestamp(rs.getTimestamp("time").getTime());
                applicationCount.setCount(count);
                applicationCountList.add(applicationCount);
            }
        }
    } catch (SQLException e) {
        String errorMsg = "Error while creating database connection/prepared-statement";
        throw new APIMgtDAOException(errorMsg, e);
    }
    return applicationCountList;
}