Example usage for java.sql ResultSet TYPE_SCROLL_INSENSITIVE

Introduction

This page lists example usages of the java.sql ResultSet TYPE_SCROLL_INSENSITIVE field, collected from open-source projects.

Prototype

int TYPE_SCROLL_INSENSITIVE

Document

The constant indicating the type for a ResultSet object that is scrollable but generally not sensitive to changes to the data that underlies the ResultSet.
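
A minimal sketch of the typical pattern may help before the project examples: the statement is created with TYPE_SCROLL_INSENSITIVE plus a concurrency mode, which permits free cursor movement (first(), last(), absolute(), beforeFirst()) while changes committed to the underlying data after the ResultSet is opened generally remain invisible. The JDBC URL and table name below are placeholders, not taken from any project on this page.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ScrollInsensitiveDemo {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL; any JDBC driver that supports scrollable cursors will do.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                // Request a scrollable, read-only cursor.
                Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                        ResultSet.CONCUR_READ_ONLY);
                ResultSet rs = stmt.executeQuery("SELECT id, name FROM example_table")) {
            // A scrollable cursor can jump to the last row to learn the row count...
            if (rs.last()) {
                System.out.println("Rows: " + rs.getRow());
                // ...and rewind for an ordinary forward pass.
                rs.beforeFirst();
                while (rs.next()) {
                    System.out.println(rs.getInt("id") + ": " + rs.getString("name"));
                }
            }
        }
    }
}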

Usage

From source file:no.polaric.aprsdb.MyDBSession.java

/**
 * Return the mission that was (or is going to be) active for a station at a 
 * given time.
 * If time is null, return the mission currently active. 
 *
 * @param src Source callsign (or identifier)
 * @param at  Time when the mission (that we search for) is active. 
 */
public Mission getMission(String src, java.util.Date at) throws java.sql.SQLException {
    PreparedStatement stmt = getCon().prepareStatement(
            " SELECT src,alias,icon,start,end,descr FROM \"Mission\"" + " WHERE src=? AND time = ?",
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    stmt.setString(1, src);
    stmt.setTimestamp(2, date2ts(at));
    ResultSet rs = stmt.executeQuery();
    if (rs.next())
        return new Mission(rs.getString("src"), rs.getString("alias"), rs.getString("icon"),
                rs.getTimestamp("start"), rs.getTimestamp("end"), rs.getString("descr"));
    else
        return null;
}
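
The statement and result set above are never closed. A try-with-resources variant of the same lookup (a sketch, reusing the original getCon(), date2ts() and Mission pieces) releases both deterministically:

public Mission getMission(String src, java.util.Date at) throws java.sql.SQLException {
    String sql = "SELECT src,alias,icon,start,end,descr FROM \"Mission\" WHERE src=? AND time = ?";
    // try-with-resources closes the statement and result set even when an exception is thrown
    try (PreparedStatement stmt = getCon().prepareStatement(sql,
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
        stmt.setString(1, src);
        stmt.setTimestamp(2, date2ts(at));
        try (ResultSet rs = stmt.executeQuery()) {
            if (rs.next())
                return new Mission(rs.getString("src"), rs.getString("alias"), rs.getString("icon"),
                        rs.getTimestamp("start"), rs.getTimestamp("end"), rs.getString("descr"));
            return null;
        }
    }
}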

From source file:org.openbravo.database.ConnectionProviderImpl.java

public PreparedStatement getPreparedStatement(Connection conn, String SQLPreparedStatement)
        throws SQLException {
    if (conn == null || SQLPreparedStatement == null || SQLPreparedStatement.equals(""))
        return null;
    PreparedStatement ps = null;
    try {
        log4j.debug("preparedStatement requested");
        ps = conn.prepareStatement(SQLPreparedStatement, ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        log4j.debug("preparedStatement received");
    } catch (SQLException e) {
        log4j.error("getPreparedStatement: " + SQLPreparedStatement + "\n" + e);
        releaseConnection(conn);
        throw e;
    }
    return (ps);
}

From source file:com.udps.hive.jdbc.HiveQueryResultSet.java

@Override
public int getType() throws SQLException {
    if (isClosed) {
        throw new SQLException("Resultset is closed");
    }
    if (isScrollable) {
        return ResultSet.TYPE_SCROLL_INSENSITIVE;
    } else {
        return ResultSet.TYPE_FORWARD_ONLY;
    }
}
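
Callers can use getType() to guard scrolling calls, since repositioning a TYPE_FORWARD_ONLY cursor throws SQLException. A small sketch (rs is assumed to be any open ResultSet):

// Sketch: only scroll when the driver reports a scrollable type.
if (rs.getType() != ResultSet.TYPE_FORWARD_ONLY) {
    rs.absolute(10); // jump straight to row 10
} else {
    // forward-only: advance one row at a time instead
    for (int i = 0; i < 10 && rs.next(); i++) {
        // passes rows 1..9, stopping on row 10 if it exists
    }
}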

From source file:edu.ku.brc.specify.conversion.ConvertTaxonHelper.java

/**
 * Converts the taxonomy tree definition from the old taxonomicunittype table to the new table
 * pair: TaxonTreeDef & TaxonTreeDefItems.
 *
 * @param colInfo the CollectionInfo whose taxonomy tree definition is to be converted
 */
public void convertTaxonTreeDefinition(final CollectionInfo colInfo) {
    if (!colInfo.isInUse()) {
        return;
    }

    TaxonTreeDef taxonTreeDef = newTaxonInfoHash.get(colInfo.getTaxonNameId());
    if (taxonTreeDef != null) {
        colInfo.setTaxonTreeDef(taxonTreeDef);
        return;
    }

    Integer oldTaxonRootId = colInfo.getTaxonNameId();
    Integer taxonomyTypeId = colInfo.getTaxonomyTypeId();

    try {
        Statement st = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);

        taxonTreeDef = new TaxonTreeDef();
        taxonTreeDef.initialize();

        String sql = "SELECT TaxonomyTypeName, KingdomID FROM taxonomytype WHERE TaxonomyTypeID = "
                + taxonomyTypeId;
        log.debug(sql);
        ResultSet rs = st.executeQuery(sql);
        rs.next();
        String taxonomyTypeName = rs.getString(1);
        int kingdomID = rs.getInt(2);
        rs.close();

        taxonTreeDef.setName(taxonomyTypeName + " taxonomy tree");
        taxonTreeDef.setRemarks("Tree converted from " + oldDBName);
        taxonTreeDef.setFullNameDirection(TreeDefIface.FORWARD);

        sql = String.format(
                "SELECT RankID, RankName, RequiredParentRankID, TaxonomicUnitTypeID FROM taxonomicunittype "
                        + "WHERE TaxonomyTypeID = %d AND (Kingdom = %d  OR RankID = 0) ORDER BY RankID",
                taxonomyTypeId, kingdomID);
        log.debug(sql);
        rs = st.executeQuery(sql);

        Hashtable<Integer, Integer> rankId2TxnUntTypId = new Hashtable<Integer, Integer>();
        int rank;
        String name;
        int requiredRank;

        Vector<TaxonTreeDefItem> items = new Vector<TaxonTreeDefItem>();
        Vector<Integer> enforcedRanks = new Vector<Integer>();

        while (rs.next()) {
            rank = rs.getInt(1);
            name = rs.getString(2);
            requiredRank = rs.getInt(3);

            int taxUnitTypeId = rs.getInt(4);

            if (StringUtils.isEmpty(name) || (rank > 0 && requiredRank == 0)) {
                continue;
            }

            if (rankId2TxnUntTypId.get(rank) != null) {
                String msg = String.format(
                        "Old TreeDef has two of the same Rank %d, throwing it out.\n\nYou must fix this before proceeding.",
                        rank);
                tblWriter.logError(msg);
                log.debug(msg);
                UIRegistry.displayErrorDlg(msg);
                System.exit(0);
            }
            rankId2TxnUntTypId.put(rank, taxUnitTypeId);

            log.debug(rank + "  " + name + "  TaxonomicUnitTypeID: " + taxUnitTypeId);

            TaxonTreeDefItem ttdi = new TaxonTreeDefItem();
            ttdi.initialize();
            ttdi.setName(name);
            ttdi.setFullNameSeparator(" ");
            ttdi.setRankId(rank);
            ttdi.setTreeDef(taxonTreeDef);
            taxonTreeDef.getTreeDefItems().add(ttdi);

            ttdi.setIsInFullName(rank >= TaxonTreeDef.GENUS);

            // setup the parent/child relationship
            if (items.isEmpty()) {
                ttdi.setParent(null);
            } else {
                ttdi.setParent(items.lastElement());
            }
            items.add(ttdi);
            enforcedRanks.add(requiredRank);
        }
        rs.close();

        for (TaxonTreeDefItem i : items) {
            i.setIsEnforced(enforcedRanks.contains(i.getRankId()));
        }

        try {
            Session session = HibernateUtil.getNewSession();
            Transaction trans = session.beginTransaction();
            session.save(taxonTreeDef);
            trans.commit();
            session.close();

        } catch (Exception ex) {
            ex.printStackTrace();
            throw new RuntimeException(ex);
        }

        IdMapperMgr idMapperMgr = IdMapperMgr.getInstance();
        IdMapperIFace tutMapper = idMapperMgr.get("TaxonomicUnitType", "TaxonomicUnitTypeID");
        IdMapperIFace taxonomyTypeMapper = idMapperMgr.get("TaxonomyType", "TaxonomyTypeID");

        //tutMapper.reset();

        //if (taxonomyTypeMapper.get(taxonomyTypeId) == null)
        //{
        taxonomyTypeMapper.put(taxonomyTypeId, taxonTreeDef.getId());
        //}

        for (TaxonTreeDefItem ttdi : taxonTreeDef.getTreeDefItems()) {
            int ttdiId = rankId2TxnUntTypId.get(ttdi.getRankId());
            log.debug("Mapping " + ttdiId + " -> " + ttdi.getId() + "  RankId: " + ttdi.getRankId());
            tutMapper.put(ttdiId, ttdi.getId());
        }

        newTaxonInfoHash.put(oldTaxonRootId, taxonTreeDef);

        CollectionInfo ci = getCIByTaxonTypeId(taxonomyTypeId);
        ci.setTaxonTreeDef(taxonTreeDef);

        taxonTreeDefHash.put(taxonomyTypeId, taxonTreeDef);
        log.debug("Hashing taxonomyTypeId: " + taxonomyTypeId + " ->  taxonTreeDefId:" + taxonTreeDef.getId());

    } catch (SQLException ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    }
}

From source file:org.wso2.carbon.ml.project.mgt.DatabaseHandler.java

/**
 * Get a list of workflows associated with a given project.
 *
 * @param projectId    Unique identifier of the project for which the workflows are needed
 * @return             An array of workflow IDs and names
 * @throws             DatabaseHandlerException
 */
public String[][] getProjectWorkflows(String projectId) throws DatabaseHandlerException {
    Connection connection = null;
    PreparedStatement getProjectWorkflows = null;
    ResultSet result = null;
    String[][] workFlows = null;
    try {
        connection = dataSource.getConnection();
        connection.setAutoCommit(true);
        getProjectWorkflows = connection.prepareStatement(SQLQueries.GET_PROJECT_WORKFLOWS,
                ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        getProjectWorkflows.setString(1, projectId);
        result = getProjectWorkflows.executeQuery();

        // create a 2-d string array having the size of the result set
        result.last();
        int noOfWorkflows = result.getRow();
        if (noOfWorkflows > 0) {
            workFlows = new String[2][noOfWorkflows];
            result.beforeFirst();
            // put the result set to the string array
            for (int i = 0; i < noOfWorkflows; i++) {
                result.next();
                workFlows[0][i] = result.getString(1);
                workFlows[1][i] = result.getString(2);
            }
        }
        return workFlows;
    } catch (SQLException e) {
        MLDatabaseUtil.rollBack(connection);
        throw new DatabaseHandlerException(
                "Error occured while retrieving the Dataset Id of project " + projectId + ": " + e.getMessage(),
                e);
    } finally {
        // close the database resources
        MLDatabaseUtil.closeDatabaseResources(connection, getProjectWorkflows, result);
    }
}
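
The result.last()/getRow()/beforeFirst() idiom above is only legal because the statement was prepared with TYPE_SCROLL_INSENSITIVE; with the default TYPE_FORWARD_ONLY both positioning calls throw SQLException. Drivers may also silently downgrade the requested type, so defensive code can check support first. A hedged sketch, where connection is an open java.sql.Connection and sql stands for any query string:

// Sketch: verify driver support before relying on a scrollable cursor.
java.sql.DatabaseMetaData meta = connection.getMetaData();
int type = meta.supportsResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
        ? ResultSet.TYPE_SCROLL_INSENSITIVE
        : ResultSet.TYPE_FORWARD_ONLY;
PreparedStatement ps = connection.prepareStatement(sql, type, ResultSet.CONCUR_READ_ONLY);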

From source file:com.itemanalysis.jmetrik.graph.nicc.NonparametricCurveAnalysis.java

public void evaluateAll() throws SQLException {
    categoryRegression = new TreeMap<VariableAttributes, KernelRegressionCategories>();
    for (VariableAttributes v : variables) {
        KernelRegressionCategories kCat = new KernelRegressionCategories(v, kernelFunction, bandwidth,
                uniformDistributionApproximation);
        categoryRegression.put(v, kCat);
    }

    //connect to db
    Table sqlTable = new Table(tableName.getNameForDatabase());
    SelectQuery select = new SelectQuery();
    for (VariableAttributes v : variables) {
        select.addColumn(sqlTable, v.getName().nameForDatabase());
    }
    select.addColumn(sqlTable, regressorVariable.getName().nameForDatabase());
    if (hasGroupVariable)
        select.addColumn(sqlTable, groupByVariable.getName().nameForDatabase());

    ResultSet rs = null;
    Statement stmt = null;

    try {
        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(select.toString());

        KernelRegressionCategories kernelRegressionCategories;
        Object itemResponse;
        Double score;
        Object tempGroup;
        String group;

        while (rs.next()) {
            //increment kernel regression objects
            //omit examinees with missing data
            score = rs.getDouble(regressorVariable.getName().nameForDatabase());
            if (!rs.wasNull()) {
                for (VariableAttributes v : categoryRegression.keySet()) {
                    kernelRegressionCategories = categoryRegression.get(v);
                    itemResponse = rs.getObject(v.getName().nameForDatabase());
                    if (itemResponse != null)
                        kernelRegressionCategories.increment(score, itemResponse);
                }
            }
            updateProgress();
        }
    } catch (SQLException ex) {
        throw ex;
    } finally {
        if (rs != null)
            rs.close();
        if (stmt != null)
            stmt.close();
    }

    this.firePropertyChange("progress-ind-on", null, null);
}

From source file:edu.ku.brc.specify.conversion.IdHashMapper.java

public Integer get(final Integer oldId) {
    if (oldId == null) {
        return null;
    }

    try {
        if (stmtNew == null) {
            stmtNew = oldConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        }

        Integer newId = null;

        ResultSet rs = stmtNew.executeQuery("SELECT NewID FROM " + mapTableName + " WHERE OldID = " + oldId);
        if (rs.next()) {
            newId = rs.getInt(1);

        } else {
            oldIdNullList.add(oldId);

            if (showLogErrors) {
                String msg = "********** Couldn't find old index [" + oldId + "] for " + mapTableName;
                log.error(msg);
                if (tblWriter != null)
                    tblWriter.logError(msg);
            }
            rs.close();
            return null;
        }
        rs.close();

        return newId;

    } catch (SQLException ex) {
        String msg = "Couldn't find old index [" + oldId + "] for " + mapTableName;
        if (tblWriter != null)
            tblWriter.logError(msg);

        edu.ku.brc.af.core.UsageTracker.incrSQLUsageCount();
        edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(IdHashMapper.class, ex);
        ex.printStackTrace();
        log.error(ex);
        throw new RuntimeException(msg);
    }
}

From source file:jp.mathes.databaseWiki.db.postgres.PostgresBackend.java

@Override
public void deleteDocument(final String user, final String password, final String db, final String table,
        final String name) throws BackendException {
    Connection conn = null;
    Statement st = null;
    try {
        conn = this.connectToDB(user, password, db);
        st = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
        String queryString = String.format("delete from \"%s\".\"%s\" where \"%s\"='%s'",
                this.getSchemaName(table, db), this.getPlainTableName(table),
                this.getNameField(conn, table, db), name);
        this.logString(queryString, "?");
        st.executeUpdate(queryString);
    } catch (SQLException e) {
        throw new BackendException(e);
    } catch (ClassNotFoundException e) {
        throw new BackendException(e);
    } finally {
        DbUtils.closeQuietly(st);
        DbUtils.closeQuietly(conn);
    }
}
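
Unlike the read-only examples above, this one requests CONCUR_UPDATABLE. Note also that the delete statement is assembled by string concatenation, including the name value. A parameterized sketch of the same statement follows (identifiers cannot be bound as parameters, so the schema, table and field names must still be trusted or validated separately):

String queryString = String.format("delete from \"%s\".\"%s\" where \"%s\"=?",
        this.getSchemaName(table, db), this.getPlainTableName(table),
        this.getNameField(conn, table, db));
try (PreparedStatement ps = conn.prepareStatement(queryString,
        ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE)) {
    ps.setString(1, name); // the row key is bound, not concatenated
    ps.executeUpdate();
}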

From source file:edu.ku.brc.specify.conversion.StratToGTP.java

/**
 * @throws SQLException
 */
public void convertStratToGTPNDGS() throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;

    try {
        // get a Hibernate session for saving the new records
        Session localSession = HibernateUtil.getCurrentSession();
        HibernateUtil.beginTransaction();

        int count = BasicSQLUtils.getCountAsInt(oldDBConn, "SELECT COUNT(*) FROM stratigraphy");
        if (count < 1)
            return;

        if (hasFrame) {
            setProcess(0, count);
        }

        IdTableMapper gtpIdMapper = IdMapperMgr.getInstance().addTableMapper("geologictimeperiod",
                "GeologicTimePeriodID");

        Hashtable<Integer, Integer> ceToNewStratIdHash = new Hashtable<Integer, Integer>();

        IdMapperIFace ceMapper = IdMapperMgr.getInstance().get("collectingevent", "CollectingEventID");

        // get all of the old records
        //  Future GTP                           Period         Epoch         Age   
        String sql = "SELECT StratigraphyID, 'Placeholder',  SuperGroup,  `Group` FROM stratigraphy ORDER BY StratigraphyID";

        stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(sql);

        int counter = 0;
        // for each old record, convert the record
        while (rs.next()) {
            if (counter % 500 == 0) {
                if (hasFrame) {
                    setProcess(counter);

                } else {
                    log.info("Converted " + counter + " Stratigraphy records");
                }
            }

            // grab the important data fields from the old record
            int oldStratId = rs.getInt(1);
            String period = rs.getString(2);
            String epoch = rs.getString(3);
            String age = rs.getString(4);

            if (StringUtils.isEmpty(epoch)) {
                epoch = "(Empty)";
            }

            // create a new GeologicTimePeriod object from the old data
            GeologicTimePeriod newStrat = convertOldStratRecord(localSession, eraNode, null, null, null, period,
                    epoch, age);

            counter++;

            // Map Old GeologicTimePeriod ID to the new Tree Id
            gtpIdMapper.put(oldStratId, newStrat.getGeologicTimePeriodId());

            // Convert Old CEId to new CEId, then map the new CEId -> new StratId
            Integer ceId = ceMapper.get(oldStratId);
            if (ceId != null) {
                ceToNewStratIdHash.put(ceId, newStrat.getGeologicTimePeriodId());
            } else {
                String msg = String.format("No CE mapping for Old StratId %d, when they are a one-to-one.",
                        oldStratId);
                tblWriter.logError(msg);
                log.error(msg);
            }
        }
        stmt.close();

        if (hasFrame) {
            setProcess(counter);

        } else {
            log.info("Converted " + counter + " Stratigraphy records");
        }

        TreeHelper.fixFullnameForNodeAndDescendants(eraNode);
        eraNode.setNodeNumber(1);
        fixNodeNumbersFromRoot(eraNode);
        rs.close();

        HibernateUtil.commitTransaction();
        log.info("Converted " + counter + " Stratigraphy records");

    } catch (Exception ex) {
        ex.printStackTrace();
    }

    // Now in this Step we Add the PaleoContext to the Collecting Events

}

From source file:com.netspective.axiom.sql.Query.java

protected PreparedStatement createStatement(ConnectionContext cc, Object[] overrideParams, boolean scrollable,
        QueryExecutionLogEntry logEntry) throws NamingException, SQLException {
    logEntry.registerGetConnectionBegin();
    Connection conn = cc.getConnection();
    logEntry.registerGetConnectionEnd(conn);
    PreparedStatement stmt = null;
    DbmsSqlText sqlText = getSqlText(cc);
    String sql = sqlText.getSql(cc);
    if (scrollable)
        stmt = conn.prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    else
        stmt = conn.prepareStatement(sql);

    logEntry.registerBindParamsBegin();
    if (overrideParams != null) {
        for (int i = 0; i < overrideParams.length; i++)
            stmt.setObject(i + 1, overrideParams[i]);
    } else {
        final QueryParameters parameters = sqlText.getParams();
        if (parameters != null)
            parameters.apply(cc, stmt);
    }
    logEntry.registerBindParamsEnd();
    return stmt;
}
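
Making scrollability opt-in, as this method does, is a common design choice: some drivers emulate TYPE_SCROLL_INSENSITIVE by caching rows on the client, so the plain prepareStatement(sql) call remains the cheaper default and callers pay the scrolling cost only when they request it.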