Example usage for java.sql ResultSet TYPE_SCROLL_INSENSITIVE

Introduction

This page collects example usages of java.sql ResultSet.TYPE_SCROLL_INSENSITIVE from a range of open-source projects.

Prototype

int TYPE_SCROLL_INSENSITIVE
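
In the java.sql.ResultSet interface this field is declared as a constant (implicitly public static final):

int TYPE_SCROLL_INSENSITIVE = 1004;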

Document

The constant indicating the type for a ResultSet object that is scrollable but generally not sensitive to changes to the data that underlies the ResultSet.
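
A result set of this type is requested by passing the constant, together with a concurrency constant, to Connection.createStatement, prepareStatement, or prepareCall, as every example below does. The following is a minimal sketch of the typical pattern; the JDBC URL and the person table are placeholders, and a driver that cannot provide a scroll-insensitive cursor may fall back to another type and report an SQLWarning on the connection.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ScrollInsensitiveExample {
    public static void main(String[] args) throws SQLException {
        // Placeholder JDBC URL; any driver that supports scrollable cursors will do.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                // Ask for a scrollable cursor that ignores later changes to the underlying
                // data, with read-only concurrency.
                Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                        ResultSet.CONCUR_READ_ONLY);
                ResultSet rs = stmt.executeQuery("SELECT id, name FROM person")) {

            if (rs.last()) {                 // jump to the last row to learn the row count
                int rowCount = rs.getRow();
                rs.beforeFirst();            // rewind and iterate normally
                while (rs.next()) {
                    System.out.println(rs.getInt("id") + "  " + rs.getString("name"));
                }
                System.out.println(rowCount + " rows");
            }
        }
    }
}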

Usage

From source file:com.netspective.axiom.sql.StoredProcedure.java

/**
 * Executes the stored procedure without any statistical logging
 *
 * @param cc              Connection context
 * @param overrideIndexes parameter indexes to override
 * @param overrideValues  parameter override values
 */
protected QueryResultSet executeAndIgnoreStatistics(ConnectionContext cc, int[] overrideIndexes,
        Object[] overrideValues, boolean scrollable) throws NamingException, SQLException {
    if (log.isTraceEnabled())
        trace(cc, overrideIndexes, overrideValues);

    Connection conn = null;
    CallableStatement stmt = null;
    boolean closeConnection = true;
    try {
        getMetaData(cc);
        conn = cc.getConnection();
        String sql = StringUtils.strip(getSqlText(cc));
        if (scrollable)
            stmt = conn.prepareCall(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        else
            stmt = conn.prepareCall(sql);

        if (parameters != null) {
            parameters.apply(cc, stmt, overrideIndexes, overrideValues);
            stmt.execute();
            parameters.extract(cc, stmt);
            StoredProcedureParameter rsParameter = parameters.getResultSetParameter();
            if (rsParameter != null) {
                closeConnection = false;
                return (QueryResultSet) rsParameter.getExtractedValue(cc.getDatabaseValueContext());
            } else
                return null;
        } else {
            stmt.execute();
            return null;
        }
    } catch (SQLException e) {
        log.error(createExceptionMessage(cc, overrideIndexes, overrideValues), e);
        throw e;
    }
}

From source file:com.itemanalysis.jmetrik.graph.nicc.NonparametricCurveAnalysis.java

public void evaluate() throws SQLException {
    kernelRegression = new TreeMap<VariableAttributes, KernelRegressionItem>();
    for (VariableAttributes v : variables) {
        KernelRegressionItem kItem = new KernelRegressionItem(v, kernelFunction, bandwidth,
                uniformDistributionApproximation);
        kernelRegression.put(v, kItem);
    }

    ResultSet rs = null;
    Statement stmt = null;

    try {
        //connect to db
        Table sqlTable = new Table(tableName.getNameForDatabase());
        SelectQuery select = new SelectQuery();
        for (VariableAttributes v : variables) {
            select.addColumn(sqlTable, v.getName().nameForDatabase());
        }
        select.addColumn(sqlTable, regressorVariable.getName().nameForDatabase());
        if (hasGroupVariable)
            select.addColumn(sqlTable, groupByVariable.getName().nameForDatabase());

        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(select.toString());

        KernelRegressionItem kernelRegressionItem;
        Object itemResponse;
        Double score;
        Object tempGroup;
        String group;

        while (rs.next()) {
            //increment kernel regression objects
            //omit examinees with missing data
            score = rs.getDouble(regressorVariable.getName().nameForDatabase());
            if (!rs.wasNull()) {
                for (VariableAttributes v : kernelRegression.keySet()) {
                    kernelRegressionItem = kernelRegression.get(v);
                    itemResponse = rs.getObject(v.getName().nameForDatabase());
                    if (itemResponse != null)
                        kernelRegressionItem.increment(score, itemResponse);
                }
            }
            updateProgress();
        }
    } catch (SQLException ex) {
        throw ex;
    } finally {
        if (rs != null)
            rs.close();
        if (stmt != null)
            stmt.close();
    }

    this.firePropertyChange("progress-ind-on", null, null);

}

From source file:edu.ku.brc.specify.conversion.ConvertVerifier.java

/**
 * @param databaseNameSource
 * @param databaseNameDest
 * @throws Exception
 */
public void verifyDB(final String databaseNameSource, final String databaseNameDest) throws Exception {
    String path = UIRegistry.getUserHomeDir() + File.separator + "verify";
    convLogger.initialize(path, databaseNameDest);

    String title = "From " + databaseNameSource + " to " + databaseNameDest;
    System.out.println("************************************************************");
    System.out.println(title);
    System.out.println("************************************************************");

    HibernateUtil.shutdown();

    Properties initPrefs = BuildSampleDatabase.getInitializePrefs(databaseNameDest);

    String driverNameSource = "";
    String databaseHostSource = "";
    DatabaseDriverInfo driverInfoSource = null;

    String driverNameDest = "";
    String databaseHostDest = "";
    DatabaseDriverInfo driverInfoDest = null;

    log.debug("Running an non-custom MySQL convert, using old default login creds");
    driverNameSource = initPrefs.getProperty("initializer.driver", "MySQL");
    databaseHostSource = initPrefs.getProperty("initializer.host", "localhost");

    driverNameDest = initPrefs.getProperty("initializer.driver", "MySQL");
    databaseHostDest = initPrefs.getProperty("initializer.host", "localhost");

    log.debug("Custom Convert Source Properties ----------------------");
    log.debug("databaseNameSource: " + databaseNameSource);
    log.debug("driverNameSource: " + driverNameSource);
    log.debug("databaseHostSource: " + databaseHostSource);

    log.debug("Custom Convert Destination Properties ----------------------");
    log.debug("databaseNameDest: " + databaseNameDest);
    log.debug("driverNameDest: " + driverNameDest);
    log.debug("databaseHostDest: " + databaseHostDest);

    driverInfoSource = DatabaseDriverInfo.getDriver(driverNameSource);
    driverInfoDest = DatabaseDriverInfo.getDriver(driverNameDest);

    if (driverInfoSource == null) {
        throw new RuntimeException(
                "Couldn't find Source DB driver by name [" + driverInfoSource + "] in driver list.");
    }
    if (driverInfoDest == null) {
        throw new RuntimeException(
                "Couldn't find Destination driver by name [" + driverInfoDest + "] in driver list.");
    }

    if (driverNameDest.equals("MySQL"))
        BasicSQLUtils.myDestinationServerType = BasicSQLUtils.SERVERTYPE.MySQL;
    else if (driverNameDest.equals("SQLServer"))
        BasicSQLUtils.myDestinationServerType = BasicSQLUtils.SERVERTYPE.MS_SQLServer;

    if (driverNameSource.equals("MySQL"))
        BasicSQLUtils.mySourceServerType = BasicSQLUtils.SERVERTYPE.MySQL;
    else if (driverNameSource.equals("SQLServer"))
        BasicSQLUtils.mySourceServerType = BasicSQLUtils.SERVERTYPE.MS_SQLServer;

    else {
        log.error("Error setting ServerType for destination database for conversion.  Could affect the"
                + " way that SQL string are generated and executed on differetn DB egnines");
    }
    String destConnectionString = driverInfoDest.getConnectionStr(DatabaseDriverInfo.ConnectionType.Open,
            databaseHostDest, "", itUsrPwd.first, itUsrPwd.second, driverNameDest);
    log.debug("attempting login to destination: " + destConnectionString);
    // This will log us in and return true/false
    // This will connect without specifying a DB, which allows us to create the DB
    if (!UIHelper.tryLogin(driverInfoDest.getDriverClassName(), driverInfoDest.getDialectClassName(),
            databaseNameDest, destConnectionString, itUsrPwd.first, itUsrPwd.second)) {
        log.error("Failed connection string: "
                + driverInfoSource.getConnectionStr(DatabaseDriverInfo.ConnectionType.Open, databaseHostDest,
                        databaseNameDest, itUsrPwd.first, itUsrPwd.second, driverNameDest));
        throw new RuntimeException(
                "Couldn't login into [" + databaseNameDest + "] " + DBConnection.getInstance().getErrorMsg());
    }

    convLogger.setIndexTitle(databaseNameDest + " Verify "
            + (new SimpleDateFormat("yyy-MM-dd hh:mm:ss")).format(Calendar.getInstance().getTime()));

    //MEG WHY IS THIS COMMENTED OUT???
    //DataBuilder.setSession(HibernateUtil.getNewSession());

    log.debug("DESTINATION driver class: " + driverInfoDest.getDriverClassName());
    log.debug("DESTINATION dialect class: " + driverInfoDest.getDialectClassName());
    log.debug("DESTINATION Connection String: "
            + driverInfoDest.getConnectionStr(DatabaseDriverInfo.ConnectionType.Open, databaseHostDest,
                    databaseNameDest, itUsrPwd.first, itUsrPwd.second, driverNameDest));

    // This will log us in and return true/false
    if (!UIHelper.tryLogin(driverInfoDest.getDriverClassName(), driverInfoDest.getDialectClassName(),
            databaseNameDest,
            driverInfoDest.getConnectionStr(DatabaseDriverInfo.ConnectionType.Open, databaseHostDest,
                    databaseNameDest, itUsrPwd.first, itUsrPwd.second, driverNameDest),
            itUsrPwd.first, itUsrPwd.second)) {
        throw new RuntimeException(
                "Couldn't login into [" + databaseNameDest + "] " + DBConnection.getInstance().getErrorMsg());
    }

    String srcConStr = driverInfoSource.getConnectionStr(DatabaseDriverInfo.ConnectionType.Open,
            databaseHostSource, databaseNameSource, itUsrPwd.first, itUsrPwd.second, driverNameSource);
    DBConnection oldDB = DBConnection.createInstance(driverInfoSource.getDriverClassName(), null,
            databaseNameSource, srcConStr, itUsrPwd.first, itUsrPwd.second);
    oldDBConn = oldDB.getConnection();
    if (oldDBConn == null) {
        throw new RuntimeException(oldDB.getErrorMsg());
    }
    newDBConn = DBConnection.getInstance().createConnection();
    newDBStmt = newDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    oldDBStmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    IdMapperMgr.getInstance().setDBs(oldDBConn, newDBConn);

    long startTime = System.currentTimeMillis();

    String[] tableNames = { "CollectingEvent", "CollectingEvent", "Locality", "Locality" };
    for (int i = 0; i < tableNames.length; i += 2) {
        verifyTableCounts(tableNames[i].toLowerCase(), tableNames[i + 1].toLowerCase());
    }

    progressFrame = new ProgressFrame("Checking Catalog Objects....");
    progressFrame.adjustProgressFrame();

    String cntSQL = compareTo6DBs ? "SELECT COUNT(*) FROM collectionobject"
            : "SELECT COUNT(*) FROM collectionobjectcatalog WHERE CollectionObjectTypeID > 8 && CollectionObjectTypeID < 20";
    Integer numColObjs = BasicSQLUtils.getCount(oldDBConn, cntSQL);

    progressFrame.setProcess(0, numColObjs);
    //progressFrame.setDesc("Checking Catalog Objects....");

    progressFrame.setOverall(0, numColObjs * 4);
    progressFrame.setOverall(0);
    progressFrame.setDesc("");

    UIHelper.centerAndShow(progressFrame);

    SwingUtilities.invokeLater(new Runnable() {
        public void run() {
            UIHelper.centerAndShow(progressFrame);
        }
    });

    HashMap<Integer, TableWriter> tblWriterHash = new HashMap<Integer, TableWriter>();
    for (int i = 1; i < labels.length - 1; i++) {
        tblWriter = convLogger.getWriter(labels[i] + ".html", labels[i]);
        //printVerifyHeader(labels[i]);
        tblWriter.startTable();
        tblWriter.logHdr("ID", "Desc");
        tblWriterHash.put(codes[i], tblWriter);
        System.out.println(codes[i] + " - " + labels[i]);
    }

    boolean nullCEOk = false;
    File ceFile = new File(databaseNameDest + ".ce_all");
    if (ceFile.exists()) {
        nullCEOk = true;
        //ceFile.delete();
    }

    nullCEOk = true;

    // For Debug
    coOptions = DO_CO_ALL;

    //if (coOptions > NO_OPTIONS)
    {
        int i = 0;
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        String sql = compareTo6DBs ? "SELECT CatalogNumber FROM collectionobject ORDER BY CatalogNumber ASC"
                : "SELECT CatalogNumber FROM collectionobjectcatalog WHERE CollectionObjectTypeID > 8 && CollectionObjectTypeID < 20 AND SubNumber >= 0 ORDER BY CatalogNumber ASC";
        ResultSet rs = stmt.executeQuery(sql);
        while (rs.next()) {
            int oldCatNum = rs.getInt(1);
            String newCatNum = convertCatNum(oldCatNum);

            //if (oldCatNum < 1643) continue;

            if (isCOOn(DO_CO_DETERMINER)) {
                tblWriter = tblWriterHash.get(DO_CO_DETERMINER);
                if (!verifyDeterminer(oldCatNum, newCatNum)) {
                    catNumsInErrHash.put(newCatNum, oldCatNum);
                    errorCnts[DO_CO_DETERMINER]++;
                }
            }

            if (isCOOn(DO_CO_CATLOGER)) {
                tblWriter = tblWriterHash.get(DO_CO_CATLOGER);
                if (!verifyCataloger(oldCatNum, newCatNum)) {
                    catNumsInErrHash.put(newCatNum, oldCatNum);
                    errorCnts[DO_CO_CATLOGER]++;
                }
            }

            if (isCOOn(DO_CO_COLLECTORS)) {
                tblWriter = tblWriterHash.get(DO_CO_COLLECTORS);
                if (!verifyCollector(oldCatNum, newCatNum)) {
                    catNumsInErrHash.put(newCatNum, oldCatNum);
                    errorCnts[DO_CO_COLLECTORS]++;
                }
            }

            if (isCOOn(DO_CO_GEO)) {
                tblWriter = tblWriterHash.get(DO_CO_GEO);
                if (!verifyGeography(oldCatNum, newCatNum)) {
                    catNumsInErrHash.put(newCatNum, oldCatNum);
                    errorCnts[DO_CO_GEO]++;
                }
            }

            if (isCOOn(DO_CO_CE)) {
                tblWriter = tblWriterHash.get(DO_CO_CE);
                if (!verifyCollectingEvent(oldCatNum, newCatNum, nullCEOk)) {
                    catNumsInErrHash.put(newCatNum, oldCatNum);
                    errorCnts[DO_CO_CE]++;
                }
            }

            if (isCOOn(DO_CO_TAXON)) {
                tblWriter = tblWriterHash.get(DO_CO_TAXON);
                if (!verifyTaxon(oldCatNum, newCatNum)) {
                    catNumsInErrHash.put(newCatNum, oldCatNum);
                    errorCnts[DO_CO_TAXON]++;
                }
            }

            if (isCOOn(DO_CO_LOCALITY)) {
                tblWriter = tblWriterHash.get(DO_CO_LOCALITY);
                if (!verifyCOToLocality(oldCatNum, newCatNum)) {
                    catNumsInErrHash.put(newCatNum, oldCatNum);
                    errorCnts[DO_CO_LOCALITY]++;
                }
            }

            if (isCOOn(DO_CO_PREPARATION)) {
                tblWriter = tblWriterHash.get(DO_CO_PREPARATION);
                if (!verifyPreparation(oldCatNum, newCatNum)) {
                    catNumsInErrHash.put(newCatNum, oldCatNum);
                    errorCnts[DO_CO_PREPARATION]++;
                }
            }

            if (isCOOn(DO_CO_PREPARER)) {
                tblWriter = tblWriterHash.get(DO_CO_PREPARER);
                if (!verifyPreparer(oldCatNum, newCatNum)) {
                    catNumsInErrHash.put(newCatNum, oldCatNum);
                    errorCnts[DO_CO_PREPARER]++;
                }
            }

            if (isCOOn(DO_TAXON_CIT)) {
                tblWriter = tblWriterHash.get(DO_TAXON_CIT);
                if (!verifyTaxonCitations(oldCatNum, newCatNum)) {
                    catNumsInErrHash.put(newCatNum, oldCatNum);
                    errorCnts[DO_TAXON_CIT]++;
                }
            }

            if (isCOOn(DO_OTHER_IDENT)) {
                tblWriter = tblWriterHash.get(DO_OTHER_IDENT);
                if (!verifyOtherIdentifier(oldCatNum, newCatNum)) {
                    catNumsInErrHash.put(newCatNum, oldCatNum);
                    errorCnts[DO_OTHER_IDENT]++;
                }
            }

            if ((i % 100) == 0) {
                System.out.println(i + "  " + oldCatNum);
                progressFrame.setProcess(i);
                progressFrame.setOverall(i);
            }

            if ((i % 1000) == 0) {
                for (TableWriter tw : tblWriterHash.values()) {
                    tw.flush();
                }
            }
            i++;
        }

        rs.close();
        stmt.close();
    }

    for (int i = 0; i < errorCnts.length; i++) {
        if (errorCnts[i] > 0) {
            System.out.println(i + " -> " + errorCnts[i]);
        }
    }

    progressFrame.setProcess(numColObjs);

    if (isCOOn(DO_COLLECTORS)) {
        tblWriter = tblWriterHash.get(DO_COLLECTORS);
        //verifyCollectors();
    }

    if (isCOOn(DO_AGENTS)) {
        tblWriter = tblWriterHash.get(DO_AGENTS);
        verifyAgents();
    }

    progressFrame.setOverall(numColObjs * 2);
    if (isCOOn(DO_COLLEVENTS)) {
        tblWriter = tblWriterHash.get(DO_COLLEVENTS);
        verifyCEs();
    }

    //progressFrame.setOverall(numColObjs*2);
    if (isCOOn(DO_COLLEVENTS)) {
        tblWriter = tblWriterHash.get(DO_COLLEVENTS);
        verifyShipments();
    }

    if (isCOOn(DO_LOANS)) {
        tblWriter = tblWriterHash.get(DO_LOANS);
        verifyLoans();
        verifyGifts();
        verifyLoanRetPreps();
    }

    for (TableWriter tw : tblWriterHash.values()) {
        tw.endTable();
    }

    progressFrame.setOverall(numColObjs * 3);

    tblWriter = convLogger.getWriter("CatalogNumberSummary.html", "Catalog Nummber Summary");
    tblWriter.startTable();
    tblWriter.logHdr("Number", "Description");
    tblWriter.logErrors(Integer.toString(numErrors), "All Errors");
    tblWriter.logErrors(Integer.toString(catNumsInErrHash.size()), "Catalog Number with Errors");
    tblWriter.endTable();

    tblWriter.println("<BR>");
    tblWriter.println("Catalog Summary:<BR>");
    Vector<String> catNumList = new Vector<String>(catNumsInErrHash.keySet());
    Collections.sort(catNumList);
    for (String catNum : catNumList) {
        tblWriter.println(catNum + "<BR>");
    }
    tblWriter.println("<BR>");
    numErrors = 0;

    //-----------------------------------------------------------------------------------------------------------
    // Accessions
    //-----------------------------------------------------------------------------------------------------------
    // For Debug
    acOptions = DO_AC_ALL;

    HashMap<Long, TableWriter> accTblWriterHash = new HashMap<Long, TableWriter>();
    for (int i = 1; i < accLabels.length; i++) {
        long id = (long) Math.pow(2, i - 1);
        id = Math.max(id, 1);
        tblWriter = convLogger.getWriter("accession_" + accLabels[i] + ".html", "Accession " + accLabels[i]);
        tblWriter.startTable();
        tblWriter.logHdr("ID", "Desc");
        accTblWriterHash.put(id, tblWriter);
    }

    if (acOptions > NO_OPTIONS) {
        int i = 0;
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        ResultSet rs = stmt.executeQuery("SELECT Number FROM accession ORDER BY Number ASC");
        while (rs.next()) {
            String oldAccNum = rs.getString(1);
            String newAccNum = oldAccNum;

            if (isACOn(DO_ACCESSIONS)) {
                tblWriter = accTblWriterHash.get(DO_ACCESSIONS);
                if (!verifyAccessions(oldAccNum, newAccNum)) {
                    log.error("Accession Num: " + oldAccNum);
                    accNumsInErrHash.put(newAccNum, oldAccNum);
                }
                //log.error("New SQL: "+newSQL);
                //log.error("Old SQL: "+oldSQL);
                //break;
            }

            if (isACOn(DO_AC_AGENTS)) {
                tblWriter = accTblWriterHash.get(DO_AC_AGENTS);
                if (!verifyAccessionAgents(oldAccNum, newAccNum)) {
                    log.error("Accession Num: " + oldAccNum);
                    accNumsInErrHash.put(newAccNum, oldAccNum);
                }
                //log.error("New SQL: "+newSQL);
                //log.error("Old SQL: "+oldSQL);
                //break;
            }

            if ((i % 100) == 0) {
                System.out.println(i + "  " + oldAccNum);
            }
            i++;
        }

        rs.close();
        stmt.close();
    }
    progressFrame.setOverall(numColObjs * 4);
    newDBConn.close();
    oldDBConn.close();

    for (TableWriter tw : accTblWriterHash.values()) {
        tw.endTable();
    }

    printAccessionTotal("Accession");

    File indexFile = convLogger.closeAll();

    long endTime = System.currentTimeMillis();

    int convertTimeInSeconds = (int) ((endTime - startTime) / 1000.0);

    //ConvertStatSender sender = new ConvertStatSender("verify.php");
    //sender.senConvertInfo(databaseNameDest, numColObjs, convertTimeInSeconds);

    log.info("Done.");

    progressFrame.setVisible(false);

    AttachmentUtils.openURI(indexFile.toURI());

    System.exit(0);
}

From source file:org.integratedmodelling.sql.SQLServer.java

/**
 * Return one string corresponding to field 0 of row 0 of the result after
 * executing the passed query. Return null if no results are returned or
 * query generates errors.
 * 
 * @param sql
 * @return
 * @throws ThinklabStorageException 
 */
public String getResult(String sql) throws ThinklabStorageException {

    String ret = null;

    Connection conn = null;
    Statement stmt = null;
    ResultSet rset = null;

    try {
        conn = getConnection();
        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
        rset = stmt.executeQuery(sql);

        if (rset.first()) {
            for (; !rset.isAfterLast(); rset.next()) {
                ret = rset.getString(1);
                break;
            }
        }
    } catch (SQLException e) {
        throw new ThinklabStorageException(e);
    } finally {
        try {
            rset.close();
        } catch (Exception e) {
        }
        try {
            stmt.close();
        } catch (Exception e) {
        }
        try {
            conn.close();
        } catch (Exception e) {
        }
    }
    return ret;
}

From source file:org.wso2.carbon.repository.core.jdbc.dao.JDBCLogsDAO.java

private List<ResourceActivity> internalGetLogs(boolean paginate, String resourcePath, int action,
        String userName, Date from, Date to, boolean descending, Connection conn) throws RepositoryException {
    try {
        String dbName = conn.getMetaData().getDatabaseProductName();
        if (dbName.contains("Microsoft") || dbName.equals("Oracle")) {
            enableApiPagination = "false";
        }
    } catch (SQLException e) {
        throw new RepositoryDBException("Failed to get Database product name ", e);
    }

    if (conn == null) {
        log.fatal(
                "Failed to get Logs. Communications link failure. The connection to the database could not be acquired.");
        throw new RepositoryDBException(
                "Failed to get Logs. Communications link failure. The connection to the database could not be acquired.");
    }

    PreparedStatement s = null;
    ResultSet results = null;

    boolean paginated = false;
    int start = 0;
    int count = 0;
    //        String sortOrder ="";
    //        String sortBy  ="";
    //        MessageContext messageContext = null;
    /*
    //   enableApiPagination is the value of system property - enable.registry.api.paginating
    if (enableApiPagination == null || enableApiPagination.equals("true")) {
    messageContext = MessageContext.getCurrentMessageContext();
    if (messageContext != null && PaginationUtils.isPaginationHeadersExist(messageContext)) {
            
        PaginationContext paginationContext = PaginationUtils.initPaginationContext(messageContext);
        start = paginationContext.getStart();
        count = paginationContext.getCount();
        if(start == 0){
            start =1;
        }
        sortBy = paginationContext.getSortBy();
        sortOrder = paginationContext.getSortOrder();
        paginated = paginate;
    }
    }*/
    String sql = "SELECT REG_PATH, REG_USER_ID, REG_LOGGED_TIME, REG_ACTION, REG_ACTION_DATA FROM " + "REG_LOG";

    boolean queryStarted = false;
    sql = addWherePart(resourcePath, queryStarted, sql, userName, from, to, action);

    if (descending) {
        sql = sql + " ORDER BY REG_LOGGED_TIME DESC";
    }
    try {
        if (enableApiPagination == null || enableApiPagination.equals("true")) {
            s = conn.prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
        } else {
            s = conn.prepareStatement(sql);
        }
        int paramNumber = 1;

        if (resourcePath != null) {
            s.setString(paramNumber, resourcePath);
            paramNumber++;
        }

        if (userName != null) {
            s.setString(paramNumber, userName);
            paramNumber++;
        }

        if (from != null) {
            s.setTimestamp(paramNumber, new Timestamp(from.getTime()));
            paramNumber++;
        }

        if (to != null) {
            s.setTimestamp(paramNumber, new Timestamp(to.getTime()));
            paramNumber++;
        }

        if (action != -1) {
            s.setInt(paramNumber, action);
            paramNumber++;
        }
        s.setInt(paramNumber, CurrentContext.getTenantId());

        results = s.executeQuery();

        List<ResourceActivity> resultList = new ArrayList<ResourceActivity>();
        if (paginated) {
            if (results.relative(start)) {
                //This is to get cursor to correct position to execute results.next().
                results.previous();
                int i = 0;
                while (results.next() && i < count) {
                    i++;
                    resultList.add(getActivity(results));
                }
            } else {
                log.debug("start index doesn't exist in the result set");
            }
            //move the cursor to the last index
            if (results.last()) {
                log.debug("cursor move to the last index of result set");
            } else {
                log.debug("cursor doesn't move to the last index of result set");
            }
            //set row count to the message context.
            //                PaginationUtils.setRowCount(messageContext, Integer.toString(results.getRow()));

        } else {
            while (results.next()) {
                resultList.add(getActivity(results));
            }
            //                Activity[] logEntries = getPaginatedLogs(resultList.toArray(new Activity[resultList.size()]));
            //                resultList = Arrays.asList(logEntries);
        }
        return resultList;

    } catch (SQLException e) {

        String msg = "Failed to get logs. " + e.getMessage();
        log.error(msg, e);
        throw new RepositoryDBException(msg, e);
    } finally {
        try {
            try {
                if (results != null) {
                    results.close();
                }
            } finally {
                if (s != null) {
                    s.close();
                }
            }
        } catch (SQLException ex) {
            String msg = InternalConstants.RESULT_SET_PREPARED_STATEMENT_CLOSE_ERROR;
            log.error(msg, ex);
        }
    }
}
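
The pagination branch above relies on the scrollable cursor: relative(start) jumps to the requested offset, previous() steps back so the read loop's next() lands on that first row, and last()/getRow() reports the total row count. Below is a stand-alone sketch of the same pattern (the helper name is hypothetical and the table/column names simply mirror the example above), assuming the usual java.sql and java.util imports and an already open Connection:

private List<String> pageOfLogPaths(Connection conn, int start, int count) throws SQLException {
    PreparedStatement ps = conn.prepareStatement(
            "SELECT REG_PATH, REG_LOGGED_TIME FROM REG_LOG ORDER BY REG_LOGGED_TIME DESC",
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    ResultSet rs = ps.executeQuery();
    List<String> page = new ArrayList<String>();
    try {
        if (rs.relative(start)) {      // position the cursor on the first row of the page
            rs.previous();             // step back so the loop's next() lands on it again
            int i = 0;
            while (rs.next() && i < count) {
                page.add(rs.getString("REG_PATH"));
                i++;
            }
        }
        if (rs.last()) {               // total row count, e.g. for a pagination header
            System.out.println("total rows: " + rs.getRow());
        }
    } finally {
        try {
            rs.close();
        } finally {
            ps.close();
        }
    }
    return page;
}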

From source file:no.polaric.aprsdb.MyDBSession.java

public Tracker getTracker(String id) throws java.sql.SQLException {
    PreparedStatement stmt = getCon().prepareStatement(" SELECT * from \"Tracker\" " + " WHERE id=?",
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    stmt.setString(1, id);
    return new DbList<Tracker>(stmt.executeQuery(), rs -> {
        return new Tracker(_api.getDB(), rs.getString("id"), rs.getString("userid"), rs.getString("alias"),
                rs.getString("icon"));
    }).next();
}

From source file:org.sakaiproject.webservices.SakaiReport.java

protected String getQueryAsString(String query, Object[] args, int rowCount, String type) {

    Connection conn = null;
    PreparedStatement ps = null;
    ResultSet rs = null;
    try {
        conn = sqlService.borrowConnection();
        conn.setReadOnly(true);

        ps = conn.prepareStatement(query, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);

        if (rowCount > 0) {
            ps.setMaxRows(rowCount);
        }

        for (int i = 0; i < args.length; i++) {
            if (args[i] instanceof String) {
                ps.setString(i + 1, (String) args[i]);
            } else if (args[i] instanceof java.util.Date) {
                // select * from sakai_event where event_date between to_date('2001-12-12 12:12','YYYY-MM-DD HH24:MI') and to_date('2017-12-12 12:12','YYYY-MM-DD HH24:MI')
                if (sqlService.getVendor().equals("oracle")) {
                    ps.setString(i + 1, df.format(args[i]));
                    // select * from sakai_event where event_date between '2001-12-12 12:12' and '2017-12-12 12:12';
                } else {
                    ps.setString(i + 1, df.format(args[i]));
                }
            }
        }
        LOG.info("preparing query: " + ps.toString());

        rs = ps.executeQuery();
        //return toJsonString(rs);
        if (TYPE_CSV.equals(type)) {
            return stripInvalidXmlCharacters(toCsvString(rs));
        }
        if (TYPE_CSV_WITH_HEADER_ROW.equals(type)) {
            return stripInvalidXmlCharacters(toCsvString(rs, true));
        }

        if (TYPE_JSON.equals(type)) {
            return stripInvalidXmlCharacters(toJsonString(rs));
        }

        return Xml.writeDocumentToString(toDocument(rs));

    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
        throw new RuntimeException(e.getMessage(), e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
            }
        }

        if (ps != null) {
            try {
                ps.close();
            } catch (SQLException e) {
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
            }
        }
    }
}

From source file:edu.ku.brc.specify.conversion.StratToGTP.java

/**
 * @throws SQLException
 */
public void convertStratToGTPKUIVP() throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;

    try {
        // get a Hibernate session for saving the new records
        Session localSession = HibernateUtil.getCurrentSession();
        HibernateUtil.beginTransaction();

        int count = BasicSQLUtils.getCountAsInt(oldDBConn, "SELECT COUNT(*) FROM stratigraphy");
        if (count < 1)
            return;

        if (hasFrame) {
            setProcess(0, count);
        }

        IdTableMapper gtpIdMapper = IdMapperMgr.getInstance().addTableMapper("geologictimeperiod",
                "GeologicTimePeriodID");

        Hashtable<Integer, Integer> ceToNewStratIdHash = new Hashtable<Integer, Integer>();

        IdMapperIFace ceMapper = IdMapperMgr.getInstance().get("collectingevent", "CollectingEventID");

        // get all of the old records
        //  Future GTP                           System        Series       Stage
        String sql = "SELECT s.StratigraphyID, s.Formation, s.SuperGroup, s.Text1 FROM stratigraphy s ORDER BY s.StratigraphyID";

        stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(sql);

        int counter = 0;
        // for each old record, convert the record
        while (rs.next()) {
            if (counter % 500 == 0) {
                if (hasFrame) {
                    setProcess(counter);

                } else {
                    log.info("Converted " + counter + " Stratigraphy records");
                }
            }

            // grab the important data fields from the old record
            int oldStratId = rs.getInt(1);
            String system = rs.getString(2);
            String series = rs.getString(3);
            String stage = rs.getString(4);

            if (StringUtils.isNotEmpty(stage)) {
                if (StringUtils.isNotEmpty(series)) {
                    series += ' ' + stage;

                } else {
                    series = stage;
                }
            }

            if (StringUtils.isEmpty(series)) {
                series = "(Empty)";
            }

            // create a new Geography object from the old data
            GeologicTimePeriod newStrat = convertOldStratRecord(localSession, eraNode, null, null, null, system,
                    series, stage);

            counter++;

            // Map Old GeologicTimePeriod ID to the new Tree Id
            gtpIdMapper.put(oldStratId, newStrat.getGeologicTimePeriodId());

            // Convert Old CEId to new CEId, then map the new CEId -> new StratId
            Integer ceId = ceMapper.get(oldStratId);
            if (ceId != null) {
                ceToNewStratIdHash.put(ceId, newStrat.getGeologicTimePeriodId());
            } else {
                String msg = String.format("No CE mapping for Old StratId %d, when they are a one-to-one.",
                        oldStratId);
                tblWriter.logError(msg);
                log.error(msg);
            }
        }
        stmt.close();

        if (hasFrame) {
            setProcess(counter);

        } else {
            log.info("Converted " + counter + " Stratigraphy records");
        }

        TreeHelper.fixFullnameForNodeAndDescendants(eraNode);
        eraNode.setNodeNumber(1);
        fixNodeNumbersFromRoot(eraNode);
        rs.close();

        HibernateUtil.commitTransaction();
        log.info("Converted " + counter + " Stratigraphy records");

    } catch (Exception ex) {
        ex.printStackTrace();
    }

    // Now in this Step we Add the PaleoContext to the Collecting Events

}

From source file:org.wso2.carbon.registry.core.jdbc.dao.JDBCLogsDAO.java

private List<LogEntry> internalGetLogs(boolean paginate, String resourcePath, int action, String userName,
        Date from, Date to, boolean descending, Connection conn) throws RegistryException {
    try {
        String dbName = conn.getMetaData().getDatabaseProductName();
        if (dbName.contains("Microsoft") || dbName.equals("Oracle")) {
            enableApiPagination = "false";
        }
    } catch (SQLException e) {
        throw new RegistryException("Failed to get Database product name ", e);
    }

    if (conn == null) {
        log.fatal(
                "Failed to get Logs. Communications link failure. The connection to the database could not be acquired.");
        throw new RegistryException(
                "Failed to get Logs. Communications link failure. The connection to the database could not be acquired.");
    }

    PreparedStatement s = null;
    ResultSet results = null;

    boolean paginated = false;
    int start = 0;
    int count = 0;
    String sortOrder = "";
    String sortBy = "";
    MessageContext messageContext = null;
    //   enableApiPagination is the value of system property - enable.registry.api.paginating
    if (enableApiPagination == null || enableApiPagination.equals("true")) {
        messageContext = MessageContext.getCurrentMessageContext();
        if (messageContext != null && PaginationUtils.isPaginationHeadersExist(messageContext)) {

            PaginationContext paginationContext = PaginationUtils.initPaginationContext(messageContext);
            start = paginationContext.getStart();
            count = paginationContext.getCount();
            if (start == 0) {
                start = 1;
            }
            sortBy = paginationContext.getSortBy();
            sortOrder = paginationContext.getSortOrder();
            paginated = paginate;
        }
    }
    String sql = "SELECT REG_PATH, REG_USER_ID, REG_LOGGED_TIME, REG_ACTION, REG_ACTION_DATA FROM " + "REG_LOG";

    boolean queryStarted = false;
    sql = addWherePart(resourcePath, queryStarted, sql, userName, from, to, action);

    if (descending) {
        sql = sql + " ORDER BY REG_LOGGED_TIME DESC";
    }
    try {
        if (enableApiPagination == null || enableApiPagination.equals("true")) {
            // TYPE_SCROLL_INSENSITIVE and CONCUR_UPDATABLE should be set to move the cursor through the resultSet
            s = conn.prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
        } else {
            s = conn.prepareStatement(sql);
        }
        int paramNumber = 1;

        if (resourcePath != null) {
            s.setString(paramNumber, resourcePath);
            paramNumber++;
        }

        if (userName != null) {
            s.setString(paramNumber, userName);
            paramNumber++;
        }

        if (from != null) {
            s.setTimestamp(paramNumber, new Timestamp(from.getTime()));
            paramNumber++;
        }

        if (to != null) {
            s.setTimestamp(paramNumber, new Timestamp(to.getTime()));
            paramNumber++;
        }

        if (action != -1) {
            s.setInt(paramNumber, action);
            paramNumber++;
        }
        s.setInt(paramNumber, CurrentSession.getTenantId());

        results = s.executeQuery();

        List<LogEntry> resultList = new ArrayList<LogEntry>();
        if (paginated) {
            //Check start index is a valid one
            if (results.relative(start)) {
                //This is to get cursor to correct position to execute results.next().
                results.previous();
                int i = 0;
                while (results.next() && i < count) {
                    i++;
                    resultList.add(getLogEntry(results));
                }
            } else {
                log.debug("start index doesn't exist in the result set");
            }
            //move the cursor to the last index
            if (results.last()) {
                log.debug("cursor move to the last index of result set");
            } else {
                log.debug("cursor doesn't move to the last index of result set");
            }
            //set row count to the message context.
            PaginationUtils.setRowCount(messageContext, Integer.toString(results.getRow()));

        } else {
            while (results.next()) {
                resultList.add(getLogEntry(results));
            }
            LogEntry[] logEntries = resultList.toArray(new LogEntry[resultList.size()]);
            resultList = Arrays.asList(logEntries);
        }
        return resultList;

    } catch (SQLException e) {

        String msg = "Failed to get logs. " + e.getMessage();
        log.error(msg, e);
        throw new RegistryException(msg, e);
    } finally {
        try {
            try {
                if (results != null) {
                    results.close();
                }
            } finally {
                if (s != null) {
                    s.close();
                }
            }
        } catch (SQLException ex) {
            String msg = RegistryConstants.RESULT_SET_PREPARED_STATEMENT_CLOSE_ERROR;
            log.error(msg, ex);
        }
    }
}

From source file:no.polaric.aprsdb.MyDBSession.java

public DbList<Tracker> getTrackers(String user) throws java.sql.SQLException {
    PreparedStatement stmt = getCon().prepareStatement(
            " SELECT id, alias, icon FROM \"Tracker\"" + " WHERE userid=? ORDER BY id ASC",
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    stmt.setString(1, user);
    return new DbList<Tracker>(stmt.executeQuery(), rs -> {
        return new Tracker(_api.getDB(), rs.getString("id"), user, rs.getString("alias"), rs.getString("icon"));
    });
}