Example usage for java.sql ResultSet getTimestamp

List of usage examples for java.sql ResultSet getTimestamp

Introduction

On this page you can find example usages of java.sql ResultSet getTimestamp.

Prototype

java.sql.Timestamp getTimestamp(String columnLabel) throws SQLException;

Document

Retrieves the value of the designated column in the current row of this ResultSet object as a java.sql.Timestamp object in the Java programming language.
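
Before the larger examples below, here is a minimal, self-contained sketch of the call (the in-memory H2 URL and the events table with name and created_at columns are hypothetical, chosen only for illustration). Note that getTimestamp returns null when the designated column holds SQL NULL, so check the result before using it.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;

public class GetTimestampExample {

    public static void main(String[] args) throws SQLException {
        // Hypothetical JDBC URL and table; replace with your own database.
        String url = "jdbc:h2:mem:demo";
        String sql = "SELECT name, created_at FROM events";

        try (Connection conn = DriverManager.getConnection(url);
                PreparedStatement stmt = conn.prepareStatement(sql);
                ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                // Retrieve the column by label; returns null for SQL NULL values.
                Timestamp createdAt = rs.getTimestamp("created_at");
                if (createdAt != null) {
                    System.out.println(rs.getString("name") + " created at " + createdAt);
                }
            }
        }
    }
}

An overload that takes a 1-based column index, getTimestamp(int columnIndex), also exists; the HarvestDefinitionDBDAO example further down uses it as res.getTimestamp(4).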

Usage

From source file:dao.UserSearchQuery.java

/**
 * This method lists all the results for the search text from the user directories.
 * @param conn the connection
 * @param sString the search text
 * @return HashSet the set of user pages that match the search text.
 * @throws BaseDaoException - when an error occurs
 **/
public HashSet run(Connection conn, String sString) throws BaseDaoException {

    if ((RegexStrUtil.isNull(sString) || conn == null)) {
        return null;
    }

    ResultSet rs = null;
    StringBuffer sb = new StringBuffer(
            "select hdlogin.loginid, login, fname, lname, hits, membersince, LEFT(description, 160) as info from hdlogin left join usertab on hdlogin.loginid=usertab.loginid left join yourkeywords on hdlogin.loginid=yourkeywords.loginid left join mykeywords on hdlogin.loginid=mykeywords.loginid where ");

    ArrayList columns = new ArrayList();
    columns.add("login");
    columns.add("email");
    columns.add("description");
    columns.add("fname");
    columns.add("lname");
    columns.add("interests");
    columns.add("yourkeyword");
    columns.add("keyword");
    //columns.add("industry");
    columns.add("city");
    sb.append(sqlSearch.getConstraint(columns, sString));
    sb.append(" group by login order by hits DESC");
    logger.info("search query string" + sb.toString());

    try {
        PreparedStatement stmt = conn.prepareStatement(sb.toString());
        rs = stmt.executeQuery();

        Vector columnNames = null;
        Userpage userpage = null;
        HashSet pendingSet = new HashSet();

        if (rs != null) {
            columnNames = dbutils.getColumnNames(rs);
        } else {
            return null;
        }

        while (rs.next()) {
            userpage = (Userpage) eop.newObject(DbConstants.USER_PAGE);
            for (int j = 0; j < columnNames.size(); j++) {
                if (((String) (columnNames.elementAt(j))).equalsIgnoreCase("membersince")) {
                    try {
                        userpage.setValue(DbConstants.DATE,
                                GlobalConst.dncalendar.getDisplayDate(rs.getTimestamp("membersince")));
                    } catch (ParseException e) {
                        throw new BaseDaoException(
                                "could not parse the date for membersince in UserSearchQuery()"
                                        + rs.getTimestamp("membersince"),
                                e);
                    }
                } else {
                    userpage.setValue((String) columnNames.elementAt(j),
                            (String) rs.getString((String) columnNames.elementAt(j)));
                }
            }
            pendingSet.add(userpage);
        }
        return pendingSet;
    } catch (Exception e) {
        throw new BaseDaoException(
                "Error occured while executing search in userpage run query " + sb.toString(), e);
    }
}

From source file:ManagerQuery.java

private void executeQueryJVRidesAtaf() {
    try {
        Repository repo = buildSparqlRepository();
        repo.initialize();
        RepositoryConnection con = repo.getConnection();
        TupleQuery tupleQueryRitAntAvm = con.prepareTupleQuery(QueryLanguage.SPARQL, this.query);
        TupleQueryResult resultEvent = tupleQueryRitAntAvm.evaluate();

        double percent_inOrario = -1;
        double percent_inAnticipo = -1;
        double percent_inRitardo = -1;
        int num_inOrario = -1;
        int num_inAnticipo = -1;
        int num_inRitardo = -1;
        int num_total = 0;

        if (resultEvent != null) {
            while (resultEvent.hasNext()) {
                BindingSet bindingSetRides = resultEvent.next();
                String state = bindingSetRides.getValue("state").stringValue().trim();
                String sum = bindingSetRides.getValue("sum").stringValue();
                if (state.equals("In orario")) {
                    num_inOrario = Integer.parseInt(sum);
                    num_total = num_total + Integer.parseInt(sum);
                } else if (state.equals("Ritardo")) {
                    num_inRitardo = Integer.parseInt(sum);
                    num_total = num_total + Integer.parseInt(sum);
                } else if (state.equals("Anticipo")) {
                    num_inAnticipo = Integer.parseInt(sum);
                    num_total = num_total + Integer.parseInt(sum);
                }
            }
            if (num_total != 0 && num_inOrario != -1 && num_inRitardo != -1 && num_inAnticipo != -1) {
                percent_inOrario = (num_inOrario * 100.0) / num_total;
                percent_inAnticipo = (num_inAnticipo * 100.0) / num_total;
                percent_inRitardo = (num_inRitardo * 100.0) / num_total;
                DBAccess mysql_access = new DBAccess(this.map_dbAcc.get("AlarmEmail"));
                mysql_access.setConnection(this.map_dbAcc.get("Dashboard"));
                System.out.println("Percentuale in orario : " + percent_inOrario);
                System.out.println("Percentuale in anticipo : " + percent_inAnticipo);
                System.out.println("Percentuale in ritardo : " + percent_inRitardo);
                DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                Date data_attuale = new Date();
                String data_attuale_fixed = df.format(data_attuale);
                String query_insert = "INSERT INTO Dashboard.Data"
                        + "(IdMetric_data, computationDate, value_num, value_perc1, value_perc2, value_perc3, value_text, quant_perc1, quant_perc2, quant_perc3, tot_perc1, tot_perc2, tot_perc3) VALUES"
                        + "(\"" + this.idProc + "\",\"" + data_attuale_fixed + "\", null,\"" + percent_inOrario
                        + "\"  ,\"" + percent_inAnticipo + "\",\"" + percent_inRitardo + "\", \"\",\""
                        + num_inOrario + "\",\"" + num_inAnticipo + "\",\"" + num_inRitardo + "\",\""
                        + num_total + "\",\"" + num_total + "\",\"" + num_total + "\")";
                mysql_access.writeDataBaseData(query_insert);
                mysql_access.close();
            } else {
                DBAccess mysql_access2 = new DBAccess(this.map_dbAcc.get("AlarmEmail"));
                mysql_access2.setConnection(this.map_dbAcc.get("Dashboard"));
                String LastCompDateQuery = "SELECT MAX(computationDate) as lastCompDate, value_num, value_perc1, value_perc2, value_perc3, value_text FROM Dashboard.Data WHERE IdMetric_data=\""
                        + this.idProc + "\" ORDER BY computationDate DESC";
                ResultSet resultSet = mysql_access2.readDataBase(LastCompDateQuery, this);
                Timestamp lastTimestamp = null;
                Double[] lastValues = new Double[(resultSet.getMetaData().getColumnCount()) - 1];
                while (resultSet.next()) {
                    lastTimestamp = resultSet.getTimestamp("lastCompDate");
                    lastValues[0] = Double.parseDouble((resultSet.getString("value_perc1").trim()));
                    lastValues[1] = Double.parseDouble((resultSet.getString("value_perc2").trim()));
                    lastValues[2] = Double.parseDouble((resultSet.getString("value_perc3").trim()));
                }
                mysql_access2.close();
                //this.almMng.updateStatusOnComputingDate(lastTimestamp, lastValues);
            }
        }
    } catch (Exception exp) {
        String msgBody = Utility.exceptionMessage(exp, this.getClass().getName(),
                this.idProc + " - " + this.descrip);
        this.notifyEvent("Import data error", msgBody);
    }
}

From source file:com.alfaariss.oa.authentication.remote.saml2.idp.storage.jdbc.IDPJDBCStorage.java

private SAML2IDP retrieveBySourceID(byte[] baSourceID) throws OAException {
    Connection connection = null;
    PreparedStatement pSelect = null;
    ResultSet resultSet = null;
    SAML2IDP saml2IDP = null;

    IMetadataProviderManager oMPM = MdMgrManager.getInstance().getMetadataProviderManager(_sId);

    try {
        connection = _dataSource.getConnection();

        pSelect = connection.prepareStatement(_sQuerySelectOnSourceID);
        pSelect.setBoolean(1, true);
        pSelect.setBytes(2, baSourceID);
        resultSet = pSelect.executeQuery();
        if (resultSet.next()) {
            boolean bACSIndex = resultSet.getBoolean(COLUMN_ACS_INDEX);

            Boolean boolAllowCreate = null;
            String sAllowCreate = resultSet.getString(COLUMN_ALLOW_CREATE);
            if (sAllowCreate != null) {
                boolean bAllowCreate = resultSet.getBoolean(COLUMN_ALLOW_CREATE);
                boolAllowCreate = Boolean.valueOf(bAllowCreate);
            }

            boolean bScoping = resultSet.getBoolean(COLUMN_SCOPING);
            boolean bNameIDPolicy = resultSet.getBoolean(COLUMN_NAMEIDPOLICY);
            boolean bAvoidSubjectConfirmation = resultSet.getBoolean(COLUMN_AVOID_SUBJCONF);
            boolean bDisableSSOForIDP = resultSet.getBoolean(COLUMN_DISABLE_SSO);

            Date dLastModified = null;
            try {
                dLastModified = resultSet.getTimestamp(COLUMN_DATELASTMODIFIED);
            } catch (Exception e) {
                _oLogger.info("No " + COLUMN_DATELASTMODIFIED + " column found for SAML2IDP with sourceid '"
                        + baSourceID + "'; ignoring.");
            }

            saml2IDP = new SAML2IDP(resultSet.getString(COLUMN_ID), baSourceID,
                    resultSet.getString(COLUMN_FRIENDLYNAME), resultSet.getString(COLUMN_METADATA_FILE),
                    resultSet.getString(COLUMN_METADATA_URL), resultSet.getInt(COLUMN_METADATA_TIMEOUT),
                    bACSIndex, boolAllowCreate, bScoping, bNameIDPolicy,
                    resultSet.getString(COLUMN_NAMEIDFORMAT), bAvoidSubjectConfirmation, bDisableSSOForIDP,
                    dLastModified, oMPM.getId());
        }
    } catch (OAException e) {
        throw e;
    } catch (Exception e) {
        _oLogger.fatal("Internal error during retrieval of organization with SourceID: " + baSourceID, e);
        throw new OAException(SystemErrors.ERROR_INTERNAL);
    } finally {
        try {
            if (pSelect != null)
                pSelect.close();
        } catch (Exception e) {
            _oLogger.error("Could not close select statement", e);
        }

        try {
            if (connection != null)
                connection.close();
        } catch (Exception e) {
            _oLogger.error("Could not close connection", e);
        }
    }
    return saml2IDP;
}

From source file:com.sfs.whichdoctor.dao.ReportDAOImpl.java

/**
 * Load the report bean from the result set.
 *
 * @param rs the result set
 * @return the report bean
 * @throws SQLException if a database access error occurs
 */
private ReportBean loadReport(final ResultSet rs) throws SQLException {

    ReportBean report = new ReportBean();

    report.setId(rs.getInt("ReportId"));
    report.setGUID(rs.getInt("GUID"));
    report.setReferenceGUID(rs.getInt("ReferenceGUID"));
    report.setReportType(rs.getString("ReportType"));
    report.setReportStatus(rs.getString("ReportStatus"));
    report.setReportGrouping(rs.getString("ReportGrouping"));
    report.setReportOrder(rs.getInt("ReportOrder"));
    report.setYear(rs.getInt("ReportYear"));
    report.setMemo(rs.getString("Memo"));

    if (StringUtils.equalsIgnoreCase(rs.getString("ReportPublish"), "yes")) {
        report.setReportPublish(true);
    }

    String authors = rs.getString("Authors");

    if (StringUtils.isNotBlank(authors)) {
        StringTokenizer st = new StringTokenizer(authors, ",");
        while (st.hasMoreTokens()) {
            String strGUID = st.nextToken();
            try {
                int guid = Integer.parseInt(strGUID);
                PersonBean person = personDAO.loadGUID(guid);

                if (person != null) {
                    report.addAuthor(person);
                }
            } catch (Exception e) {
                dataLogger.error("Could not load PersonBean for report");
            }
        }
    }

    report.setActive(rs.getBoolean("Active"));
    try {
        report.setCreatedDate(rs.getTimestamp("CreatedDate"));
    } catch (SQLException sqe) {
        dataLogger.debug("Error reading CreatedDate: " + sqe.getMessage());
    }
    report.setCreatedBy(rs.getString("CreatedBy"));
    try {
        report.setModifiedDate(rs.getTimestamp("ModifiedDate"));
    } catch (SQLException sqe) {
        dataLogger.debug("Error reading ModifiedDate: " + sqe.getMessage());
    }
    report.setModifiedBy(rs.getString("ModifiedBy"));
    try {
        report.setExportedDate(rs.getTimestamp("ExportedDate"));
    } catch (SQLException sqe) {
        dataLogger.debug("Error reading ExportedDate: " + sqe.getMessage());
    }
    report.setExportedBy(rs.getString("ExportedBy"));

    // Load user details from DB
    UserBean user = new UserBean();
    user.setDN(rs.getString("CreatedBy"));
    user.setPreferredName(rs.getString("CreatedFirstName"));
    user.setLastName(rs.getString("CreatedLastName"));
    report.setCreatedUser(user);

    UserBean modified = new UserBean();
    modified.setDN(rs.getString("ModifiedBy"));
    modified.setPreferredName(rs.getString("ModifiedFirstName"));
    modified.setLastName(rs.getString("ModifiedLastName"));
    report.setModifiedUser(modified);

    UserBean export = new UserBean();
    export.setDN(rs.getString("ExportedBy"));
    export.setPreferredName(rs.getString("ExportedFirstName"));
    export.setLastName(rs.getString("ExportedLastName"));
    report.setExportedUser(export);

    return report;
}

From source file:com.commander4j.db.JDBDespatch.java

public Vector<JDBDespatch> getDespatchData(PreparedStatement criteria) {
    ResultSet rs;
    Vector<JDBDespatch> result = new Vector<JDBDespatch>();

    if (Common.hostList.getHost(getHostID()) == null) {
        result.addElement(new JDBDespatch(getHostID(), getSessionID(), "despatch_no", null, "location_id_from",
                "location_id_to", "status", 0, "trailer", "haulier", "load_no", "user_id", "journey_ref"));
    } else {
        try {
            rs = criteria.executeQuery();

            while (rs.next()) {
                result.addElement(new JDBDespatch(getHostID(), getSessionID(), rs.getString("despatch_no"),
                        rs.getTimestamp("despatch_date"), rs.getString("location_id_from"),
                        rs.getString("location_id_to"), rs.getString("status"), rs.getInt("total_pallets"),
                        rs.getString("trailer"), rs.getString("haulier"), rs.getString("load_no"),
                        rs.getString("user_id"), rs.getString("journey_ref")));
            }

            rs.close();
        } catch (Exception e) {
            setErrorMessage(e.getMessage());
        }
    }

    return result;
}

From source file:com.enonic.vertical.engine.handlers.SectionHandler.java

public long getSectionContentTimestamp(int sectionKey) {
    long timestamp = 0;

    Connection con = null;
    PreparedStatement preparedStmt = null;
    ResultSet resultSet = null;

    try {
        con = getConnection();
        StringBuffer sql = XDG.generateSelectSQL(db.tSectionContent2, db.tSectionContent2.sco_dteTimestamp,
                false, db.tSectionContent2.sco_mei_lKey);
        sql.append(" ORDER BY sco_dteTimestamp DESC");
        preparedStmt = con.prepareStatement(sql.toString());
        preparedStmt.setInt(1, sectionKey);

        resultSet = preparedStmt.executeQuery();
        if (resultSet.next()) {
            Timestamp time = resultSet.getTimestamp("sco_dteTimestamp");
            timestamp = time.getTime();
        }
    } catch (SQLException sqle) {
        String message = "Failed to get section content timestamp: %t";
        VerticalEngineLogger.error(this.getClass(), 1, message, sqle);
    } finally {
        close(resultSet);
        close(preparedStmt);
        close(con);
    }

    return timestamp;
}

From source file:dao.UserSearchBizAwareQuery.java

/**
 * This method lists all the results for the search text from the user directories for the given business.
 * @param conn the connection
 * @param sString the search text
 * @param bid the business id
 * @return HashSet the set of user pages that match the search text.
 * @throws BaseDaoException - when an error occurs
 **/
public HashSet run(Connection conn, String sString, String bid) throws BaseDaoException {

    if (RegexStrUtil.isNull(sString) || conn == null || RegexStrUtil.isNull(bid)) {
        return null;
    }

    ResultSet rs = null;
    //StringBuffer sb = new StringBuffer("select distinct c2.login, c2.fname, c2.lname, c1.hits, c1.membersince, LEFT(c1.description, 160) as info from usertab c1 left outer join hdlogin c2 on c1.loginid=c2.loginid where "); 

    StringBuffer sb = new StringBuffer(
            "select login, fname, lname, hits, membersince, LEFT(description, 160) as info, hdlogin.bid, bsearch from business, hdlogin left join usertab on hdlogin.loginid=usertab.loginid left join yourkeywords on hdlogin.loginid=yourkeywords.loginid left join mykeywords on hdlogin.loginid=mykeywords.loginid where business.bid=hdlogin.bid and (");

    ArrayList columns = new ArrayList();
    columns.add("description");
    columns.add("fname");
    columns.add("lname");
    columns.add("interests");
    columns.add("yourkeyword");
    columns.add("keyword");
    //columns.add("industry");
    columns.add("city");
    sb.append(sqlSearch.getConstraint(columns, sString));
    sb.append(") group by login order by hits DESC");
    logger.info("search query string" + sb.toString());

    try {
        PreparedStatement stmt = conn.prepareStatement(sb.toString());
        rs = stmt.executeQuery();

        Vector columnNames = null;
        Userpage userpage = null;
        HashSet pendingSet = new HashSet();

        if (rs != null) {
            columnNames = dbutils.getColumnNames(rs);
        } else {
            return null;
        }

        while (rs.next()) {
            userpage = (Userpage) eop.newObject(DbConstants.USER_PAGE);
            for (int j = 0; j < columnNames.size(); j++) {
                if (((String) (columnNames.elementAt(j))).equalsIgnoreCase("membersince")
                        && (rs.getTimestamp("membersince") != null)) {
                    try {
                        userpage.setValue(DbConstants.DATE,
                                GlobalConst.dncalendar.getDisplayDate(rs.getTimestamp("membersince")));
                    } catch (ParseException e) {
                        throw new BaseDaoException(
                                "could not parse the date for membersince in UserSearchBizAwareQuery()"
                                        + rs.getTimestamp("membersince"),
                                e);
                    }
                } else {
                    userpage.setValue((String) columnNames.elementAt(j),
                            (String) rs.getString((String) columnNames.elementAt(j)));
                }
            }
            pendingSet.add(userpage);
        }
        return pendingSet;
    } catch (Exception e) {
        throw new BaseDaoException(
                "Error occured while executing search in userpage run query " + sb.toString(), e);
    }
}

From source file:dk.netarkivet.harvester.datamodel.HarvestDefinitionDBDAO.java

/**
 * Read the stored harvest definition for the given ID.
 *
 * @see HarvestDefinitionDAO#read(Long)
 * @param c
 *            The used database connection
 * @param harvestDefinitionID
 *            An ID number for a harvest definition
 * @return A harvest definition that has been read from persistent storage.
 * @throws UnknownID
 *             if no entry with that ID exists in the database
 * @throws IOFailure
 *             If a database failure occurs.
 */
private HarvestDefinition read(Connection c, Long harvestDefinitionID) throws UnknownID, IOFailure {

    if (!exists(c, harvestDefinitionID)) {
        String message = "Unknown harvest definition " + harvestDefinitionID;
        log.debug(message);
        throw new UnknownID(message);
    }
    log.debug("Reading harvestdefinition w/ id " + harvestDefinitionID);
    PreparedStatement s = null;
    try {
        s = c.prepareStatement(
                "SELECT name, comments, numevents, submitted, " + "previoushd, maxobjects, maxbytes, "
                        + "maxjobrunningtime, isindexready, isactive, edition, audience "
                        + "FROM harvestdefinitions, fullharvests " + "WHERE harvestdefinitions.harvest_id = ?"
                        + "  AND harvestdefinitions.harvest_id " + " = fullharvests.harvest_id");
        s.setLong(1, harvestDefinitionID);
        ResultSet res = s.executeQuery();
        if (res.next()) {
            // Found full harvest
            log.debug("fullharvest found w/id " + harvestDefinitionID);
            final String name = res.getString(1);
            final String comments = res.getString(2);
            final int numEvents = res.getInt(3);
            final Date submissionDate = new Date(res.getTimestamp(4).getTime());
            final long maxObjects = res.getLong(6);
            final long maxBytes = res.getLong(7);
            final long maxJobRunningtime = res.getLong(8);
            final boolean isIndexReady = res.getBoolean(9);
            FullHarvest fh;
            final long prevhd = res.getLong(5);
            if (!res.wasNull()) {
                fh = new FullHarvest(name, comments, prevhd, maxObjects, maxBytes, maxJobRunningtime,
                        isIndexReady);
            } else {
                fh = new FullHarvest(name, comments, null, maxObjects, maxBytes, maxJobRunningtime,
                        isIndexReady);
            }
            fh.setSubmissionDate(submissionDate);
            fh.setNumEvents(numEvents);
            fh.setActive(res.getBoolean(10));
            fh.setOid(harvestDefinitionID);
            fh.setEdition(res.getLong(11));
            fh.setAudience(res.getString(12));

            readExtendedFieldValues(fh);

            // We found a FullHarvest object, just return it.
            log.debug("Returned FullHarvest object w/ id " + harvestDefinitionID);
            return fh;
        }
        s.close();
        // No full harvest with that ID, try selective harvest
        s = c.prepareStatement("SELECT harvestdefinitions.name," + "       harvestdefinitions.comments,"
                + "       harvestdefinitions.numevents," + "       harvestdefinitions.submitted,"
                + "       harvestdefinitions.isactive," + "       harvestdefinitions.edition,"
                + "       harvestdefinitions.audience," + "       schedules.name,"
                + "       partialharvests.nextdate, " + "       harvestdefinitions.channel_id "
                + "FROM harvestdefinitions, partialharvests, schedules"
                + " WHERE harvestdefinitions.harvest_id = ?" + "   AND harvestdefinitions.harvest_id "
                + "= partialharvests.harvest_id" + "   AND schedules.schedule_id "
                + "= partialharvests.schedule_id");
        s.setLong(1, harvestDefinitionID);
        res = s.executeQuery();
        boolean foundPartialHarvest = res.next();
        if (foundPartialHarvest) {
            log.debug("Partialharvest found w/ id " + harvestDefinitionID);
            // Have to get configs before creating object, so storing data
            // here.
            final String name = res.getString(1);
            final String comments = res.getString(2);
            final int numEvents = res.getInt(3);
            final Date submissionDate = new Date(res.getTimestamp(4).getTime());
            final boolean active = res.getBoolean(5);
            final long edition = res.getLong(6);
            final String audience = res.getString(7);
            final String scheduleName = res.getString(8);
            final Date nextDate = DBUtils.getDateMaybeNull(res, 9);
            final Long channelId = DBUtils.getLongMaybeNull(res, 10);
            s.close();
            // Found partial harvest -- have to find configurations.
            // To avoid holding on to the readlock while getting domains,
            // we grab the strings first, then look up domains and configs.
            final DomainDAO domainDao = DomainDAO.getInstance();
            List<SparseDomainConfiguration> configs = new ArrayList<SparseDomainConfiguration>();
            s = c.prepareStatement("SELECT domains.name, configurations.name "
                    + "FROM domains, configurations, harvest_configs " + "WHERE harvest_id = ?"
                    + "  AND configurations.config_id " + "= harvest_configs.config_id"
                    + "  AND configurations.domain_id = domains.domain_id");
            s.setLong(1, harvestDefinitionID);
            res = s.executeQuery();
            while (res.next()) {
                configs.add(new SparseDomainConfiguration(res.getString(1), res.getString(2)));
            }
            s.close();
            List<DomainConfiguration> configurations = new ArrayList<DomainConfiguration>();
            for (SparseDomainConfiguration domainConfig : configs) {
                configurations.add(domainDao.getDomainConfiguration(domainConfig.getDomainName(),
                        domainConfig.getConfigurationName()));
            }

            Schedule schedule = ScheduleDAO.getInstance().read(scheduleName);

            PartialHarvest ph = new PartialHarvest(configurations, schedule, name, comments, audience);

            ph.setNumEvents(numEvents);
            ph.setSubmissionDate(submissionDate);
            ph.setActive(active);
            ph.setEdition(edition);
            ph.setNextDate(nextDate);
            ph.setOid(harvestDefinitionID);
            if (channelId != null) {
                ph.setChannelId(channelId);
            }

            readExtendedFieldValues(ph);

            return ph;
        } else {
            throw new IllegalState(
                    "No entries in fullharvests or " + "partialharvests found for id " + harvestDefinitionID);
        }
    } catch (SQLException e) {
        throw new IOFailure("SQL Error while reading harvest definition " + harvestDefinitionID + "\n"
                + ExceptionUtils.getSQLExceptionCause(e), e);
    }
}

From source file:dao.PblogSearchQuery.java

/**
 * This method lists all the results for the search text from pblogs
 * @param conn the connection
 * @param sString the search text
 * @return HashSet the set of blog entries that match the search text.
 * @throws BaseDaoException - when an error occurs
 **/
public HashSet run(Connection conn, String sString) throws BaseDaoException {

    if ((RegexStrUtil.isNull(sString) || conn == null)) {
        return null;
    }
    ResultSet rs = null;
    StringBuffer sb = new StringBuffer(
            "select hdlogin.loginid, hdlogin.login, hdlogin.fname, lname, LEFT(message, 160) as info, entrydate, usertags, pblogtopics.tid as ptid, hits from hdlogin left join pblogtopics on hdlogin.loginid=pblogtopics.pblogid left join pblogtags on pblogtopics.tid=pblogtags.tid left join pblog on hdlogin.loginid=pblog.loginid where ");

    // StringBuffer sb = new StringBuffer("select login, fname, lname, hits, membersince, LEFT(description, 160) as info from hdlogin left join usertab on hdlogin.loginid=usertab.loginid left join yourkeywords on hdlogin.loginid=yourkeywords.loginid left join mykeywords on hdlogin.loginid=mykeywords.loginid where ");

    ArrayList columns = new ArrayList();
    columns.add("topic");
    columns.add("message");
    columns.add("usertags");
    sb.append(sqlSearch.getConstraint(columns, sString));
    sb.append(" group by login order by hits DESC");

    logger.info("search query string" + sb.toString());

    try {
        PreparedStatement stmt = conn.prepareStatement(sb.toString());
        rs = stmt.executeQuery();

        Vector columnNames = null;
        Blog pblog = null;
        HashSet pendingSet = new HashSet();

        if (rs != null) {
            columnNames = dbutils.getColumnNames(rs);
        } else {
            return null;
        }

        while (rs.next()) {
            pblog = (Blog) eop.newObject(DbConstants.BLOG);
            for (int j = 0; j < columnNames.size(); j++) {
                if (((String) (columnNames.elementAt(j))).equalsIgnoreCase("entrydate")) {
                    try {
                        pblog.setValue("entrydate",
                                GlobalConst.dncalendar.getDisplayDate(rs.getTimestamp("entrydate")));
                        logger.info("entrydate" + rs.getTimestamp("entrydate"));
                    } catch (ParseException e) {
                        throw new BaseDaoException(
                                "could not parse the date for entrydate in PblogSearchQuery()"
                                        + rs.getTimestamp("entrydate"),
                                e);
                    }
                } else {
                    pblog.setValue((String) columnNames.elementAt(j),
                            (String) rs.getString((String) columnNames.elementAt(j)));
                }
            }
            pendingSet.add(pblog);
        }
        return pendingSet;
    } catch (Exception e) {
        throw new BaseDaoException("Error occured while executing search in pblog run query ", e);
    }
}

From source file:com.alfaariss.oa.engine.session.jdbc.JDBCSessionFactory.java

/**
 * Retrieve the Session with the given id.
 * @param id The Session id.
 * @return The Session, or null if a Session with the given id does not exist.
 * @throws PersistenceException If retrieving fails.
 */
@SuppressWarnings("unchecked") //Serialize value can not be checked
public JDBCSession retrieve(Object id) throws PersistenceException {
    if (id == null || !(id instanceof String))
        throw new IllegalArgumentException("Suplied id is empty or invalid");

    Connection oConnection = null;
    JDBCSession session = null;
    PreparedStatement ps = null;
    ResultSet rs = null;

    try {
        oConnection = _oDataSource.getConnection();
        ps = oConnection.prepareStatement(_sSearchQuery);
        ps.setString(1, (String) id);
        rs = ps.executeQuery();
        if (rs.next()) {
            session = new JDBCSession(this, rs.getString(_sColumnREQUESTOR));
            session.setId((String) id);

            String sTGTID = rs.getString(_sColumnTGT_ID);
            if (sTGTID != null)
                session.setTGTId(sTGTID);

            session.setState(SessionState.values()[rs.getInt(_sColumnSTATE)]);

            String sUrl = rs.getString(_sColumnURL);
            if (sUrl != null)
                session.setProfileURL(sUrl);

            IUser oUser = (IUser) Serialize.decode(rs.getBytes(_sColumnOWNER));
            if (oUser != null)
                session.setUser(oUser);

            session.setExpTime(rs.getTimestamp(_sColumnEXPIRATION).getTime());
            session.setForcedAuthentication(rs.getBoolean(_sColumnFORCED_AUTHENTICATE));
            session.setPassive(rs.getBoolean(_sColumnPASSIVE));

            SessionAttributes oAttributes = (SessionAttributes) Serialize
                    .decode(rs.getBytes(_sColumnATTRIBUTES));
            if (oAttributes != null)
                session.setAttributes(oAttributes);

            String sForcedUid = rs.getString(_sColumnFORCED_USERID);
            if (sForcedUid != null)
                session.setForcedUserID(sForcedUid);

            Locale oLocale = (Locale) Serialize.decode(rs.getBytes(_sColumnLOCALE));
            if (oLocale != null)
                session.setLocale(oLocale);

            List listProfiles = (List) Serialize.decode(rs.getBytes(_sColumnAUTHN_PROFILES));
            if (listProfiles != null)
                session.setAuthNProfiles(listProfiles);

            AuthenticationProfile oProfile = (AuthenticationProfile) Serialize
                    .decode(rs.getBytes(_sColumnSELECTED_AUTHN_PROFILE));
            if (oProfile != null)
                session.setSelectedAuthNProfile(oProfile);
        }
    } catch (SQLException e) {
        _logger.error("Could not execute search query: " + _sSearchQuery, e);
        throw new PersistenceException(SystemErrors.ERROR_RESOURCE_RETRIEVE);
    } catch (ClassCastException e) {
        _logger.error("Could not decode, invalid class type", e);
        throw new PersistenceException(SystemErrors.ERROR_RESOURCE_RETRIEVE);
    } catch (Exception e) {
        _logger.error("Internal error during retrieve of session id: " + id, e);
        throw new PersistenceException(SystemErrors.ERROR_RESOURCE_RETRIEVE);
    } finally {
        try {
            if (rs != null)
                rs.close();
        } catch (SQLException e) {
            _logger.debug("Could not close resultset", e);
        }
        try {
            if (ps != null)
                ps.close();
        } catch (SQLException e) {
            _logger.debug("Could not close statement", e);
        }
        try {
            if (oConnection != null)
                oConnection.close();
        } catch (SQLException e) {
            _logger.debug("Could not close connection", e);
        }
    }
    return session;
}