Example usage for java.sql ResultSet getTimestamp

List of usage examples for java.sql ResultSet getTimestamp

Introduction

On this page you can find example usages of java.sql ResultSet getTimestamp.

Prototype

java.sql.Timestamp getTimestamp(String columnLabel) throws SQLException;

Source Link

Document

Retrieves the value of the designated column in the current row of this ResultSet object as a java.sql.Timestamp object in the Java programming language.

Usage

From source file:com.webpagebytes.wpbsample.database.WPBDatabase.java

/**
 * Returns one page of transactions for the given user, no newer than {@code date}.
 *
 * @param user_id  id of the user whose transactions are fetched (bound twice: source and destination)
 * @param date     upper bound timestamp bound to the first statement parameter
 * @param page     1-based page number; values <= 0 are treated as page 1
 * @param pageSize maximum number of rows returned
 * @return the transactions for the requested page (possibly empty)
 * @throws SQLException if the query fails; statement and connection are closed either way
 */
public List<Transaction> getTransactionsForUser(int user_id, Date date, int page, int pageSize)
        throws SQLException {
    Connection connection = getConnection();
    List<Transaction> result = new ArrayList<Transaction>();
    PreparedStatement statement = null;
    try {
        statement = connection.prepareStatement(GET_ALL_TRANSACTIONS_FOR_USER_STATEMENT);
        statement.setTimestamp(1, new Timestamp(date.getTime()));
        statement.setInt(2, user_id);
        statement.setInt(3, user_id);
        if (page <= 0)
            page = 1;
        // FIX: the offset for page N is (N-1)*pageSize. The original used
        // (page - 1) * (pageSize - 1), which skips/duplicates one row per page.
        statement.setInt(4, (page - 1) * pageSize); // the offset
        statement.setInt(5, pageSize); // how many records
        ResultSet rs = null;
        try {
            rs = statement.executeQuery();
            while (rs.next()) {
                // Column order: id, date, source user, destination user, amount, source name, destination name.
                Transaction transaction = new Transaction();
                transaction.setId(rs.getLong(1));
                transaction.setDate(rs.getTimestamp(2));
                transaction.setSource_user_id(rs.getInt(3));
                transaction.setDestination_user_id(rs.getInt(4));
                transaction.setAmount(rs.getLong(5));
                transaction.setSourceUserName(rs.getString(6));
                transaction.setDestinationUserName(rs.getString(7));
                result.add(transaction);
            }
        } finally {
            // Close the ResultSet explicitly; the original leaked it until
            // statement close, and leaked nothing at all on early exceptions.
            if (rs != null) {
                rs.close();
            }
        }
    } finally {
        // The original also had `catch (SQLException e) { throw e; }`, which is
        // a no-op and was removed; the exception still propagates to the caller.
        if (statement != null) {
            statement.close();
        }
        if (connection != null) {
            connection.close();
        }
    }
    return result;
}

From source file:dk.netarkivet.harvester.datamodel.RunningJobsInfoDBDAO.java

/**
 * Retrieve a frontier report from a job id and a given filter class.
 * On SQL failure the error is logged and a report containing only the job id
 * (and no lines) is returned — callers never see the exception.
 *
 * @param jobId the job id
 * @param filterId the id of the filter that produced the report
 * @return a frontier report; empty apart from the job id if the query fails or matches no rows
 */
public InMemoryFrontierReport getFrontierReport(long jobId, String filterId) {

    // NOTE: checkNotNull on a primitive long is always satisfied (autoboxed, never null).
    ArgumentNotValid.checkNotNull(jobId, "jobId");
    ArgumentNotValid.checkNotNull(filterId, "filterId");

    InMemoryFrontierReport report = new InMemoryFrontierReport(Long.toString(jobId));

    Connection c = HarvestDBConnection.get();
    PreparedStatement stm = null;
    try {
        stm = c.prepareStatement("SELECT " + FR_COLUMN.getColumnsInOrder() + " FROM frontierReportMonitor"
                + " WHERE jobId=? AND filterId=?");
        stm.setLong(1, jobId);
        stm.setString(2, filterId);

        ResultSet rs = stm.executeQuery();

        // Process first line to get report timestamp
        if (rs.next()) {
            // NOTE(review): getTimestamp returns null for SQL NULL, so a NULL
            // tstamp column would NPE here — presumably the column is NOT NULL.
            report.setTimestamp(rs.getTimestamp(FR_COLUMN.tstamp.rank()).getTime());
            report.addLine(getLine(rs));

            // Remaining rows only contribute report lines.
            while (rs.next()) {
                report.addLine(getLine(rs));
            }
        }

    } catch (SQLException e) {
        // Deliberate best-effort: log and fall through to return the (partial) report.
        String message = "SQL error fetching report for job ID " + jobId + " and filterId " + filterId + "\n"
                + ExceptionUtils.getSQLExceptionCause(e);
        log.warn(message, e);
    } finally {
        DBUtils.closeStatementIfOpen(stm);
        HarvestDBConnection.release(c);
    }

    return report;
}

From source file:edu.wisc.jmeter.dao.JdbcMonitorDao.java

/**
 * Returns the {@link HostStatus} for {@code hostName}, serving from the in-memory
 * cache when possible, otherwise loading it from (or inserting a fresh row into)
 * MONITOR_HOST_STATUS inside a transaction. If the database call fails with a
 * RuntimeException, a new in-memory-only HostStatus is used so monitoring keeps
 * working. Whatever status is resolved is cached before returning.
 */
@Override
public HostStatus getHostStatus(final String hostName) {
    // Per-host lock so concurrent callers for the same host do one DB round trip.
    final Object lock = this.getHostLock(hostName);

    synchronized (lock) {
        // Fast path: already cached.
        HostStatus hostStatus = this.hostStatusCache.get(hostName);
        if (hostStatus != null) {
            return hostStatus;
        }

        // LinkedHashMap: also reused below as named parameters for the INSERT.
        final Map<String, Object> params = new LinkedHashMap<String, Object>();
        params.put("hostName", hostName);

        try {
            hostStatus = this.transactionTemplate.execute(new TransactionCallback<HostStatus>() {
                @Override
                public HostStatus doInTransaction(TransactionStatus transactionStatus) {
                    final List<HostStatus> results = jdbcTemplate.query(
                            "SELECT STATUS, FAILURE_COUNT, MESSAGE_COUNT, LAST_NOTIFICATION, LAST_UPDATED "
                                    + "FROM MONITOR_HOST_STATUS " + "WHERE HOST_NAME = :hostName",
                            params, new RowMapper<HostStatus>() {
                                @Override
                                public HostStatus mapRow(ResultSet rs, int row) throws SQLException {
                                    final HostStatus hostStatus = new HostStatus();

                                    hostStatus.setHost(hostName);
                                    hostStatus.setStatus(Status.valueOf(rs.getString("STATUS")));
                                    hostStatus.setFailureCount(rs.getInt("FAILURE_COUNT"));
                                    hostStatus.setMessageCount(rs.getInt("MESSAGE_COUNT"));
                                    hostStatus.setLastMessageSent(rs.getTimestamp("LAST_NOTIFICATION"));
                                    hostStatus.setLastUpdated(rs.getTimestamp("LAST_UPDATED"));

                                    return hostStatus;
                                }
                            });

                    // Existing row found: return it as-is.
                    HostStatus hostStatus = DataAccessUtils.singleResult(results);
                    if (hostStatus != null) {
                        return hostStatus;
                    }

                    // No row yet: create a default status and persist it.
                    hostStatus = new HostStatus();
                    hostStatus.setHost(hostName);
                    hostStatus.setLastUpdated(new Date());

                    params.put("status", hostStatus.getStatus().toString());
                    params.put("failureCount", hostStatus.getFailureCount());
                    params.put("messageCount", hostStatus.getMessageCount());
                    params.put("lastNotification", hostStatus.getLastMessageSent());
                    params.put("lastUpdated", hostStatus.getLastUpdated());

                    jdbcTemplate.update(
                            "INSERT INTO MONITOR_HOST_STATUS (HOST_NAME, STATUS, FAILURE_COUNT, MESSAGE_COUNT, LAST_NOTIFICATION, LAST_UPDATED) "
                                    + "VALUES (:hostName, :status, :failureCount, :messageCount, :lastNotification, :lastUpdated)",
                            params);

                    return hostStatus;
                }
            });
        } catch (RuntimeException re) {
            // Want things to still work if the database is broken, so create an
            // empty HostStatus to work with in memory only.
            if (hostStatus == null) {
                hostStatus = new HostStatus();
                hostStatus.setHost(hostName);
                hostStatus.setLastUpdated(new Date());
            }

            log.warn("Failed to retrieve/create HostStatus via database, using memory storage only", re);
        }

        this.hostStatusCache.put(hostName, hostStatus);
        return hostStatus;
    }
}

From source file:net.duckling.falcon.api.orm.DAOUtils.java

/**
 * Reads the column mapped to {@code field} from the current ResultSet row and
 * invokes the corresponding setter on {@code obj} via reflection.
 * Supported field kinds: String, int, date (read as Timestamp) and boolean;
 * anything else is logged and skipped. Reflection and SQL failures are logged
 * at debug level and swallowed (deliberate best-effort mapping).
 *
 * @param field the bean field being populated
 * @param obj   the target bean instance
 * @param rs    result set positioned on the row to read from
 */
private void setValueToObj(Field field, Object obj, ResultSet rs) {
    String setName = getSetMethodName(field.getName());
    // FIX: original prefix was mojibake ("???"); replaced with a readable message.
    String exceptionMsg = "Failed to set field value: ";
    try {
        Method method = objClass.getMethod(setName, field.getType());
        // Hoisted: the column name is the same for every branch below.
        String column = getDBField(field.getName());
        if (isString(field)) {
            method.invoke(obj, new Object[] { rs.getString(column) });
        } else if (isInt(field)) {
            method.invoke(obj, new Object[] { rs.getInt(column) });
        } else if (isDate(field)) {
            method.invoke(obj, new Object[] { rs.getTimestamp(column) });
        } else if (isBoolean(field)) {
            method.invoke(obj, new Object[] { rs.getBoolean(column) });
        } else {
            LOG.debug("Unsupported type");
        }
    } catch (ReflectiveOperationException | SQLException e) {
        // Multi-catch replaces two identical catch blocks from the original.
        LOG.debug(exceptionMsg + obj.getClass() + "@" + field, e);
    }
}

From source file:fr.ericlab.sondy.core.DataManipulation.java

/**
 * Builds a time-sliced message stream for a dataset: creates a slice table in
 * MySQL, partitions all messages of {@code datasetName} into fixed-length
 * intervals, optionally lemmatizes (Stanford CoreNLP) and n-gram-izes (Lucene
 * shingles) each message, bulk-inserts the processed rows, writes one Lucene
 * document per slice to a global index and a mentions-only index, dumps slice
 * text/timestamps to files, and finally creates slice/fulltext indexes.
 *
 * @param datasetName      base name of the dataset (source table {@code <name>_messages})
 * @param intervalDuration slice length in minutes
 * @param ngram            shingle size; 1 means plain tokens
 * @param stemLanguage     analyzer language ("Standard", "Chinese", or a Lucene language name)
 * @param lemmatization    whether to replace tokens with CoreNLP lemmas
 * @param appVariables     configuration (DB credentials, schema, workspace, stop words)
 */
public void prepareStream(String datasetName, int intervalDuration, int ngram, String stemLanguage,
        boolean lemmatization, AppVariables appVariables) {
    try {
        Connection connection;
        Class.forName("com.mysql.jdbc.Driver").newInstance();
        connection = DriverManager.getConnection("jdbc:mysql://" + appVariables.configuration.getHost(),
                appVariables.configuration.getUsername(), appVariables.configuration.getPassword());
        Statement statement = connection.createStatement();
        Statement statement2 = connection.createStatement();

        // Target table name encodes all preprocessing options, e.g. ds_15min_English_lem1_2gram.
        String lemStr = (lemmatization) ? "_lem1" : "_lem0";
        statement.executeUpdate("CREATE TABLE " + appVariables.configuration.getSchema() + "." + datasetName
                + "_" + intervalDuration + "min_" + stemLanguage + lemStr + "_" + ngram
                + "gram ( id INT NOT NULL AUTO_INCREMENT PRIMARY KEY, msg_author VARCHAR(100), msg_post_time TIMESTAMP, msg_text VARCHAR(600), time_slice INT)ENGINE=myisam;");

        // Stream bounds: min/max post time over all messages of the dataset.
        ResultSet rsTMin = statement.executeQuery("select min(msg_post_time) from "
                + appVariables.configuration.getSchema() + "." + datasetName + "_messages;");
        rsTMin.next();
        Timestamp tMin = rsTMin.getTimestamp(1);
        ResultSet rsTMax = statement.executeQuery("select max(msg_post_time) from "
                + appVariables.configuration.getSchema() + "." + datasetName + "_messages;");
        rsTMax.next();
        Timestamp tMax = rsTMax.getTimestamp(1);
        Timestamp tRef = new Timestamp(0);
        // base = epoch millis of the first message; streamDurationMin = total span in minutes.
        long base = (tMin.getTime() - tRef.getTime()) * 1L;
        long streamDuration = (tMax.getTime() - tMin.getTime()) * 1L;
        long streamDurationMin = (streamDuration / 1000) / 60;

        // On-disk layout mirrors the table-name encoding; "-m" suffix = mentions index.
        String path = appVariables.configuration.getWorkspace() + "/datasets/" + datasetName + "/"
                + intervalDuration + "min-" + stemLanguage;
        path += (lemmatization) ? "-lem1" : "-lem0";
        path += "-" + ngram + "gram";
        String pathMention = path + "-m";

        FSDirectory indexGlobal = FSDirectory.open(new File(path));
        FSDirectory indexMention = FSDirectory.open(new File(pathMention));
        Analyzer analyzer;
        // NOTE(review): the CoreNLP pipeline is built even when lemmatization is off —
        // this is expensive; presumably tolerated, confirm before relying on it.
        Properties props = new Properties();
        props.put("annotators", "tokenize,ssplit,parse,lemma");
        StanfordCoreNLP pipeline = new StanfordCoreNLP(props);
        Annotation annotation;
        if (stemLanguage.equalsIgnoreCase("Standard")) {
            analyzer = new StandardAnalyzer(Version.LUCENE_36);
        } else {
            Class cl;
            if (stemLanguage.equals("Chinese")) {
                analyzer = new SmartChineseAnalyzer(Version.LUCENE_36);
            } else {
                // Language analyzers are resolved by reflection from the Lucene
                // package convention, e.g. org.apache.lucene.analysis.en.EnglishAnalyzer.
                String packageName = stemLanguage.substring(0, 2).toLowerCase();
                cl = Class
                        .forName("org.apache.lucene.analysis." + packageName + "." + stemLanguage + "Analyzer");
                Class[] types = new Class[] { Version.class, Set.class };
                Constructor ct = cl.getConstructor(types);
                analyzer = (Analyzer) ct.newInstance(Version.LUCENE_36, appVariables.currentStopWords.getSet());
            }
        }
        IndexWriterConfig configGlobal;
        IndexWriterConfig configMention;
        ShingleAnalyzerWrapper shingleAnalyzer = null;
        if (ngram > 1) {
            // Shingling happens manually per message below, so the index writers
            // get a plain whitespace analyzer to avoid re-tokenizing the shingles.
            shingleAnalyzer = new ShingleAnalyzerWrapper(analyzer, ngram, ngram, " ", false, false);
            WhitespaceAnalyzer whitespaceAnalyzer = new WhitespaceAnalyzer(Version.LUCENE_36);
            configGlobal = new IndexWriterConfig(Version.LUCENE_36, whitespaceAnalyzer);
            configMention = new IndexWriterConfig(Version.LUCENE_36, whitespaceAnalyzer);
        } else {
            configGlobal = new IndexWriterConfig(Version.LUCENE_36, analyzer);
            configMention = new IndexWriterConfig(Version.LUCENE_36, analyzer);
        }
        IndexWriter wGlobal = new IndexWriter(indexGlobal, configGlobal);
        IndexWriter wMention = new IndexWriter(indexMention, configMention);

        int docId = 0;
        // One iteration per time slice; docId doubles as the slice index.
        for (int i = 0; i < streamDurationMin; i += intervalDuration) {
            statement = connection.createStatement();
            long infBound = base + i * 60 * 1000L;
            long supBound = base + (i + intervalDuration) * 60 * 1000L;
            Timestamp infTime = new Timestamp(infBound);
            Timestamp supTime = new Timestamp(supBound);
            // NOTE(review): SQL built by string concatenation throughout this
            // method (identifiers and message text) — injection-prone; the
            // message INSERT below embeds user text directly. Flagging only,
            // as parameterizing would require restructuring the bulk insert.
            ResultSet rs = statement.executeQuery("SELECT msg_text, msg_post_time, msg_author FROM "
                    + appVariables.configuration.getSchema() + "." + datasetName
                    + "_messages WHERE msg_post_time>'" + infTime + "' AND msg_post_time< '" + supTime + "'");
            String globalContent = new String();
            String mentionContent = new String();
            String timestamps = new String();
            NumberFormat formatter = new DecimalFormat("00000000");
            int bulk = 0;
            String bulkString = "";
            boolean mention;
            while (rs.next()) {
                String message = rs.getString(1).toLowerCase();
                // A message "mentions" someone if it contains '@'; such messages
                // additionally feed the mentions-only index.
                mention = message.contains("@");
                if (lemmatization) {
                    // Replace the message with the space-joined lemmas of its tokens.
                    annotation = new Annotation(message);
                    message = "";
                    pipeline.annotate(annotation);
                    List<CoreMap> lem = annotation.get(SentencesAnnotation.class);
                    for (CoreMap l : lem) {
                        for (CoreLabel token : l.get(TokensAnnotation.class)) {
                            message += token.get(LemmaAnnotation.class) + " ";
                        }
                    }
                }
                if (ngram > 1) {
                    // Replace the message with its n-gram shingles; spaces inside a
                    // shingle become '=' so each shingle stays a single token.
                    String processedMessage = "";
                    TokenStream tokenStream = shingleAnalyzer.tokenStream("text", new StringReader(message));
                    CharTermAttribute charTermAttribute = tokenStream.addAttribute(CharTermAttribute.class);
                    while (tokenStream.incrementToken()) {
                        String termToken = charTermAttribute.toString();
                        if (!termToken.contains("_")) {
                            processedMessage += termToken.replace(" ", "=") + " ";
                        }
                    }
                    message = processedMessage;
                }
                bulk++;
                // Rows are buffered into a multi-row VALUES list and flushed every
                // _BULK_SIZE_ rows (and once more after the loop for the remainder).
                if (bulk < _BULK_SIZE_) {
                    bulkString += " (" + docId + ",'" + rs.getString(2) + "',\"" + message + "\",\""
                            + rs.getString(3) + "\"),";
                } else {
                    bulk = 0;
                    bulkString += " (" + docId + ",'" + rs.getString(2) + "',\"" + message + "\",\""
                            + rs.getString(3) + "\");";
                    statement2.executeUpdate("INSERT INTO " + appVariables.configuration.getSchema() + "."
                            + datasetName + "_" + intervalDuration + "min_" + stemLanguage + lemStr + "_"
                            + ngram + "gram (time_slice,msg_post_time,msg_text,msg_author) VALUES"
                            + bulkString);
                    bulkString = "";
                }
                globalContent += message + "\n";
                if (mention) {
                    mentionContent += message + "\n";
                }
                timestamps += rs.getString(2) + "\n";
            }
            if (bulk > 0 && bulkString.length() > 0) {
                // Flush the partial batch; trailing ',' is trimmed and replaced by ';'.
                statement2.executeUpdate("INSERT INTO " + appVariables.configuration.getSchema() + "."
                        + datasetName + "_" + intervalDuration + "min_" + stemLanguage + lemStr + "_" + ngram
                        + "gram (time_slice,msg_post_time,msg_text,msg_author) VALUES"
                        + bulkString.substring(0, bulkString.length() - 1) + ";");
            }
            // One Lucene document per slice in each index, keyed by the slice id.
            Document docGlobal = new Document();
            docGlobal.add(new Field("content", globalContent, Field.Store.YES, Field.Index.ANALYZED,
                    Field.TermVector.YES));
            docGlobal.add(new Field("id", Integer.toString(docId), Field.Store.YES, Field.Index.NOT_ANALYZED));
            wGlobal.addDocument(docGlobal);
            wGlobal.commit();
            Document docMention = new Document();
            docMention.add(new Field("content", mentionContent, Field.Store.YES, Field.Index.ANALYZED,
                    Field.TermVector.YES));
            docMention.add(new Field("id", Integer.toString(docId), Field.Store.YES, Field.Index.NOT_ANALYZED));
            wMention.addDocument(docMention);
            wMention.commit();

            // Slice text and timestamps are also dumped as zero-padded flat files.
            File textFile = new File(path + "/input/" + formatter.format(docId) + ".text");
            FileUtils.writeStringToFile(textFile, globalContent);
            File timeFile = new File(path + "/input/" + formatter.format(docId) + ".time");
            FileUtils.writeStringToFile(timeFile, timestamps);

            docId++;
            statement.close();
        }
        statement2.executeUpdate("CREATE INDEX index_time_slice ON " + appVariables.configuration.getSchema()
                + "." + datasetName + "_" + intervalDuration + "min_" + stemLanguage + lemStr + "_" + ngram
                + "gram (time_slice);");
        statement2.executeUpdate("CREATE FULLTEXT INDEX index_text ON " + appVariables.configuration.getSchema()
                + "." + datasetName + "_" + intervalDuration + "min_" + stemLanguage + lemStr + "_" + ngram
                + "gram (msg_text);");
        statement2.close();
        connection.close();
        wGlobal.close();
        wMention.close();
    } catch (IOException ex) {
        Logger.getLogger(DataManipulation.class.getName()).log(Level.SEVERE, null, ex);
    } catch (SQLException | InstantiationException | IllegalAccessException | ClassNotFoundException ex) {
        Logger.getLogger(DataManipulation.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchMethodException | SecurityException | IllegalArgumentException
            | InvocationTargetException ex) {
        Logger.getLogger(DataManipulation.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:com.pinterest.pinlater.backends.mysql.PinLaterMySQLBackend.java

/**
 * Builds a {@link RowProcessor} that converts one dequeue result row into a
 * {@code Tuple6} of (job descriptor string, attempts_allowed, attempts_remaining,
 * updated_at, created_at, body). The job descriptor is derived from the queue
 * name, shard and priority plus the row's local id.
 *
 * @param queueName name of the queue the row belongs to
 * @param shardName shard the row was read from
 * @param priority  priority bucket of the row
 * @return a row processor producing one tuple per result-set row
 */
private RowProcessor<Tuple6<String, Integer, Integer, Timestamp, Timestamp, ByteBuffer>> constructDequeueRowProcessor(
        final String queueName, final String shardName, final int priority) {
    return new RowProcessor<Tuple6<String, Integer, Integer, Timestamp, Timestamp, ByteBuffer>>() {
        @Override
        public Tuple6<String, Integer, Integer, Timestamp, Timestamp, ByteBuffer> process(ResultSet rs)
                throws IOException, SQLException {
            // Column order: local_id, attempts_allowed, attempts_remaining, updated_at, created_at, body.
            String jobDescriptor =
                    new PinLaterJobDescriptor(queueName, shardName, priority, rs.getLong(1)).toString();
            int attemptsAllowed = rs.getInt(2);
            int attemptsRemaining = rs.getInt(3);
            Timestamp updatedAt = rs.getTimestamp(4);
            Timestamp createdAt = rs.getTimestamp(5);
            ByteBuffer body = ByteBuffer.wrap(rs.getBytes(6));
            return new Tuple6<String, Integer, Integer, Timestamp, Timestamp, ByteBuffer>(
                    jobDescriptor, attemptsAllowed, attemptsRemaining, updatedAt, createdAt, body);
        }
    };
}

From source file:edumsg.core.commands.list.GetListFeedsCommand.java

/**
 * Fetches the tweet feed of the list identified by {@code map["list_id"]} via
 * the {@code get_list_feeds} stored procedure, builds a JSON response, submits
 * it to the caller, caches it, and commits. SQL errors are reported back through
 * {@code CommandsHelp.handleError}; resources are released in the finally block.
 */
@Override
public void execute() {

    ResultSet set = null;
    try {
        dbConn = PostgresConnection.getDataSource().getConnection();
        // Manual commit: the stored procedure returns a cursor, which requires
        // an open transaction to read from.
        dbConn.setAutoCommit(false);
        proc = dbConn.prepareCall("{? = call get_list_feeds(?)}");
        proc.setPoolable(true);
        proc.registerOutParameter(1, Types.OTHER);
        // NOTE(review): parseInt throws NumberFormatException (unchecked) on a
        // malformed list_id — not caught by the SQLException handlers below.
        proc.setInt(2, Integer.parseInt(map.get("list_id")));
        proc.execute();

        // The OUT parameter is a cursor over the feed rows.
        set = (ResultSet) proc.getObject(1);

        ArrayNode tweets = nf.arrayNode();
        root.put("app", map.get("app"));
        root.put("method", map.get("method"));
        root.put("status", "ok");
        root.put("code", "200");

        while (set.next()) {
            // Cursor columns: id, text, image url, created_at, creator name /
            // username / avatar, retweeter name, creator id, retweeter id.
            Integer id = set.getInt(1);
            String tweet = set.getString(2);
            String image_url = set.getString(3);
            Timestamp created_at = set.getTimestamp(4);
            String creator_name = set.getString(5);
            String creator_username = set.getString(6);
            String creator_avatar = set.getString(7);
            String retweeter = set.getString(8);
            Integer creator_id = set.getInt(9);
            Integer retweeter_id = set.getInt(10);

            Tweet t = new Tweet();
            t.setId(id);
            t.setTweetText(tweet);
            t.setImageUrl(image_url);
            t.setCreatedAt(created_at);
            User creator = new User();
            creator.setId(creator_id);
            creator.setName(creator_name);
            creator.setAvatarUrl(creator_avatar);
            creator.setUsername(creator_username);
            t.setCreator(creator);
            // A distinct retweeter name marks the row as a retweet.
            if (!creator_name.equals(retweeter)) {
                User r = new User();
                r.setId(retweeter_id);
                r.setName(retweeter);
                t.setRetweeter(r);
            }

            tweets.addPOJO(t);
        }

        root.set("list_feeds", tweets);
        try {
            // Deliver the response, then cache it keyed by session id.
            CommandsHelp.submit(map.get("app"), mapper.writeValueAsString(root), map.get("correlation_id"),
                    LOGGER);
            JSONObject cacheEntry = new JSONObject(mapper.writeValueAsString(root));
            cacheEntry.put("cacheStatus", "valid");
            ListCache.listCache.set("get_list_feeds:" + map.getOrDefault("session_id", ""),
                    cacheEntry.toString());
        } catch (JsonGenerationException e) {
            LOGGER.log(Level.SEVERE, e.getMessage(), e);
        } catch (JsonMappingException e) {
            LOGGER.log(Level.SEVERE, e.getMessage(), e);
        } catch (IOException e) {
            LOGGER.log(Level.SEVERE, e.getMessage(), e);
        }

        dbConn.commit();
    } catch (PSQLException e) {
        CommandsHelp.handleError(map.get("app"), map.get("method"), e.getMessage(), map.get("correlation_id"),
                LOGGER);
        LOGGER.log(Level.SEVERE, e.getMessage(), e);
    } catch (SQLException e) {
        CommandsHelp.handleError(map.get("app"), map.get("method"), e.getMessage(), map.get("correlation_id"),
                LOGGER);
        LOGGER.log(Level.SEVERE, e.getMessage(), e);
    } finally {
        // Closes result set, callable statement and connection in one call.
        PostgresConnection.disconnect(set, proc, dbConn);
    }
}

From source file:com.agiletec.plugins.jpcrowdsourcing.aps.system.services.idea.IdeaDAO.java

/**
 * Builds an {@link Idea} from the current row of the given ResultSet.
 *
 * @param res result set positioned on an idea row
 * @return the populated idea
 * @throws SQLException if reading any column fails (logged, then rethrown)
 */
protected IIdea buildIdeaFromRes(ResultSet res) throws SQLException {
    try {
        Idea idea = new Idea();
        idea.setId(res.getString("id"));
        idea.setTitle(res.getString("title"));
        idea.setDescr(res.getString("descr"));
        // FIX: getTimestamp returns null for SQL NULL; the original called
        // .getTime() unconditionally and would NPE on a NULL pubdate.
        java.sql.Timestamp pubdate = res.getTimestamp("pubdate");
        if (pubdate != null) {
            idea.setPubDate(new Date(pubdate.getTime()));
        }
        idea.setUsername(res.getString("username"));
        idea.setStatus(res.getInt("status"));
        idea.setVoteNegative(res.getInt("votenegative"));
        idea.setVotePositive(res.getInt("votepositive"));
        idea.setInstanceCode(res.getString("instancecode"));
        return idea;
    } catch (SQLException e) {
        _logger.error("error in buildIdeaFromRes", e);
        throw e;
    }
}

From source file:gda.jython.scriptcontroller.logging.LoggingScriptController.java

/**
 * Returns all stored log results for the current baton holder's visit.
 *
 * @return one {@link ScriptControllerLogResults} per matching row (name, message,
 *         start and end timestamps); an empty array if the query fails
 */
@Override
public ScriptControllerLogResults[] getTable() {
    ResultSet rs = null;
    try {
        // Accumulate in a list: the original grew the array one element at a
        // time with ArrayUtils.add, copying the whole array per row (O(n^2)).
        java.util.List<ScriptControllerLogResults> results = new java.util.ArrayList<ScriptControllerLogResults>();
        String myVisit = InterfaceProvider.getBatonStateProvider().getBatonHolder().getVisitID();

        psSimpleListAll.setString(1, myVisit);
        rs = psSimpleListAll.executeQuery();
        // Plain while(rs.next()) replaces the original boxed-Boolean "atFirst"
        // flag, which duplicated the cursor advance before and inside the loop.
        while (rs.next()) {
            results.add(new ScriptControllerLogResults(rs.getString(1), rs.getString(2),
                    rs.getTimestamp(3), rs.getTimestamp(4)));
        }
        return results.toArray(new ScriptControllerLogResults[results.size()]);
    } catch (SQLException e) {
        logger.error("Exception fetching stored log messages in " + getName(), e);
        return new ScriptControllerLogResults[] {};
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                // Intentionally ignored: results (or the error array) were
                // already produced; a close failure is not actionable here.
            }
        }
    }
}

From source file:iddb.runtime.db.model.dao.impl.mysql.PenaltyDAOImpl.java

/**
 * Fills the given {@link Penalty} bean from the current row of the ResultSet.
 *
 * @param penalty the bean to populate
 * @param rs result set positioned on a penalty row
 * @throws SQLException if any column cannot be read
 */
private void loadPenalty(Penalty penalty, ResultSet rs) throws SQLException {
    // Read every column first, then populate the bean.
    long key = rs.getLong("id");
    long playerId = rs.getLong("playerid");
    long adminId = rs.getLong("adminid");
    int type = rs.getInt("type");
    String reason = rs.getString("reason");
    long duration = rs.getLong("duration");
    boolean synced = rs.getBoolean("synced");
    boolean active = rs.getBoolean("active");
    Timestamp created = rs.getTimestamp("created");
    Timestamp updated = rs.getTimestamp("updated");

    penalty.setKey(key);
    penalty.setPlayer(playerId);
    penalty.setAdmin(adminId);
    penalty.setType(type);
    penalty.setReason(reason);
    penalty.setDuration(duration);
    penalty.setSynced(synced);
    penalty.setActive(active);
    penalty.setCreated(created);
    penalty.setUpdated(updated);
}