Example usage for java.sql ResultSet CONCUR_READ_ONLY

List of usage examples for java.sql ResultSet CONCUR_READ_ONLY

Introduction

On this page you can find usage examples for java.sql ResultSet CONCUR_READ_ONLY.

Prototype

int CONCUR_READ_ONLY

Document

The constant indicating the concurrency mode for a ResultSet object that may NOT be updated.

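A minimal, self-contained sketch of requesting a read-only result set is shown below before the project examples; the in-memory H2 JDBC URL and the trivial query are placeholders, and per the JDBC contract a driver that cannot honor the requested type or concurrency falls back to a supported mode and posts an SQLWarning.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ReadOnlyResultSetExample {
    public static void main(String[] args) throws Exception {
        // Placeholder connection; substitute your own JDBC URL, user and password.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
             // Ask for a forward-only cursor whose rows cannot be updated through the ResultSet.
             Statement stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY,
                     ResultSet.CONCUR_READ_ONLY);
             ResultSet rs = stmt.executeQuery("SELECT 1")) {
            while (rs.next()) {
                System.out.println(rs.getInt(1));
            }
        }
    }
}
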
Usage

From source file:com.itemanalysis.jmetrik.graph.irt.IrtPlotAnalysis.java

private void summarizeResponseData() throws SQLException {
    this.firePropertyChange("progress-ind-on", null, null);

    Statement stmt = null;
    ResultSet rs = null;

    //IRT observed score distribution
    NormalDistributionApproximation latentDist = new NormalDistributionApproximation(min, max, points);
    irtDist = new IrtObservedScoreDistribution(itemResponseModels, latentDist);
    irtDist.compute();
    nscores = irtDist.getNumberOfScores();
    eapScore = new double[nscores];
    for (int i = 0; i < nscores; i++) {
        eapScore[i] = irtDist.getEAP(i);
    }

    //Summarize item response vectors
    try {
        int nrow = dao.getRowCount(conn, responseTableName);
        responseVector = new ItemResponseVector[nrow];

        VariableTableName variableTableName = new VariableTableName(responseTableName.toString());
        ArrayList<VariableAttributes> variableAttributes = dao.getSelectedVariables(conn, variableTableName,
                variables);

        //Query the db. Variables include the selected items and the grouping variable if one is available.
        Table sqlTable = new Table(responseTableName.getNameForDatabase());
        SelectQuery select = new SelectQuery();
        for (VariableAttributes v : variableAttributes) {
            select.addColumn(sqlTable, v.getName().nameForDatabase());
        }
        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(select.toString());

        int i = 0;
        int c = 0;
        int ncol = itemResponseModels.length;
        byte[] rv = null;
        Object response = null;
        ItemResponseVector iVec = null;
        while (rs.next()) {
            c = 0;
            rv = new byte[ncol];

            for (VariableAttributes v : variableAttributes) {
                response = rs.getObject(v.getName().nameForDatabase());
                if ((response == null || response.equals("") || response.equals("NA"))) {
                    rv[c] = -1;//code for omitted responses
                } else {
                    rv[c] = (byte) v.getItemScoring().computeItemScore(response);
                }
                c++;
            }
            iVec = new ItemResponseVector(rv, 1.0);
            responseVector[i] = iVec;
            i++;
        } //end data summary

    } catch (SQLException ex) {
        throw (ex);
    } finally {
        if (rs != null)
            rs.close();
        if (stmt != null)
            stmt.close();
    }
}

From source file:net.sourceforge.msscodefactory.v1_10.MSSBamPg8.MSSBamPg8AccessFrequencyTable.java

public MSSBamAccessFrequencyBuff[] readBuffByUNameIdx(MSSBamAuthorization Authorization, String Name) {
    final String S_ProcName = "readBuffByUNameIdx";
    try {
        Connection cnx = schema.getCnx();
        String sql = S_sqlSelectAccessFrequencyBuff + "WHERE " + "afrq.Name = "
                + MSSBamPg8Schema.getQuotedString(Name) + " " + "ORDER BY " + "afrq.Id ASC";
        Statement stmt = cnx.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        ResultSet resultSet = stmt.executeQuery(sql);
        List<MSSBamAccessFrequencyBuff> buffList = new ArrayList<MSSBamAccessFrequencyBuff>();
        while (resultSet.next()) {
            MSSBamAccessFrequencyBuff buff = unpackAccessFrequencyResultSetToBuff(resultSet);
            buffList.add(buff);
        }
        return (buffList.toArray(new MSSBamAccessFrequencyBuff[0]));
    } catch (SQLException e) {
        throw CFLib.getDefaultExceptionFactory().newDbException(getClass(), S_ProcName, e);
    }
}

From source file:net.sourceforge.msscodefactory.v1_10.MSSBamPg8.MSSBamPg8AttachmentTagTable.java

public MSSBamAttachmentTagBuff readBuffByIdIdx(MSSBamAuthorization Authorization, long AttachmentId,
        long TagId) {
    final String S_ProcName = "readBuffByIdIdx";
    try {
        Connection cnx = schema.getCnx();
        String sql = S_sqlSelectAttachmentTagBuff + "WHERE " + "attg.AttachmentId = "
                + Long.toString(AttachmentId) + " " + "AND " + "attg.TagId = " + Long.toString(TagId) + " ";
        Statement stmt = cnx.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        ResultSet resultSet = stmt.executeQuery(sql);
        if (resultSet.next()) {
            MSSBamAttachmentTagBuff buff = unpackAttachmentTagResultSetToBuff(resultSet);
            if (resultSet.next()) {
                resultSet.last();
                throw CFLib.getDefaultExceptionFactory().newRuntimeException(getClass(), S_ProcName,
                        "Did not expect multi-buff response, " + resultSet.getRow() + " rows selected");
            }
            return (buff);
        } else {
            return (null);
        }
    } catch (SQLException e) {
        throw CFLib.getDefaultExceptionFactory().newDbException(getClass(), S_ProcName, e);
    }
}

From source file:edu.ku.brc.specify.tools.webportal.BuildSearchIndex.java

/**
 * Reads the collection object records from the database and writes one Lucene document per record to the index.
 */
public void index() {
    //    0            1           2              3                4               5      6     7
    // CatalogNumber, CountAmt, StartDate, StationFieldNumber TypeStatusName, FullName, Name, RankID,
    //    8          9            10            11            12          13       14     15       16        17         18           19
    // Latitude1, Longitude1, LocalityName, MaxElevation, MinElevation, FullName, Name, RankID, LastName, FirstName, MiddleInitial, Text1
    //         20              21            22              23           24           25         26          27          28
    //collectionObjectId, DeterminationID, TaxonID, CollectingEventID, LocalityID, GeographyID, AgentID, tx.ParentID, geo.ParentID

    //      0            1              2                3               4           5           6          7               8         9          10        11
    // CatalogNumber, StartDate, StationFieldNumber TypeStatusName, tx.FullName, Latitude1, Longitude1, LocalityName, geo.FullName, LastName, FirstName, MiddleInitial
    //                  0  1   2   3  4  5  6  7  8  9  0  1  2  3  4  5  6  7  8  9   20  1  2   3  4  5  6  7  8
    int[] colToTblId = { 1, 1, 10, 10, 4, 4, 4, 4, 2, 2, 2, 2, 2, 3, 3, 3, 5, 5, 5, 1, 1, 9, 4, 10, 2, 3, 5, 4,
            3 };
    int[] includeCol = { 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0 };

    // Index for ResultSet (which is one more than the array index)
    int idIndex = 20;
    int taxIndex = 23;
    int geoIndex = 26;
    int ceIndex = 24;
    int geoNameIndex = 15;
    int taxNameIndex = 7;
    int collDateIndex = 3;

    int taxParentIndex = 28;
    int geoParentIndex = 29;

    Calendar cal = Calendar.getInstance();

    long startTime = System.currentTimeMillis();

    IndexWriter[] writers = null;
    try {
        for (int i = 0; i < analyzers.length; i++) {
            files[i] = new File(fileNames[i]);
            analyzers[i] = new StandardAnalyzer(Version.LUCENE_30);
            FileUtils.deleteDirectory(files[i]);
        }

        System.out.println("Indexing to directory '" + INDEX_DIR + "'...");

        long totalRecs = BasicSQLUtils.getCount(dbConn, "SELECT COUNT(*) FROM collectionobject");
        long procRecs = 0;

        Statement stmt = null;
        Statement stmt2 = null;
        Statement stmt3 = null;
        //PreparedStatement pStmt = null;
        try {
            writers = new IndexWriter[analyzers.length];
            for (int i = 0; i < files.length; i++) {
                writers[i] = new IndexWriter(FSDirectory.open(files[i]), analyzers[i], true,
                        IndexWriter.MaxFieldLength.LIMITED);
            }

            System.out.println("Total Records: " + totalRecs);

            stmt = dbConn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(Integer.MIN_VALUE);

            stmt2 = dbConn2.createStatement();

            stmt3 = dbConn3.createStatement();
            stmt3.setFetchSize(Integer.MIN_VALUE);

            //pStmt = dbConn3.prepareStatement("SELECT Text1 FROM preparation WHERE CollectionObjectID = ? AND Text1 IS NOT NULL");

            String sql = createQuery();
            System.out.println(sql);

            ResultSet rs = stmt.executeQuery(sql);
            ResultSetMetaData md = rs.getMetaData();

            StringBuilder indexStr = new StringBuilder();
            StringBuilder contents = new StringBuilder();
            StringBuilder sb = new StringBuilder();
            while (rs.next()) {
                String id = rs.getString(idIndex + 1);
                Document doc = new Document();

                doc.add(new Field("id", id.toString(), Field.Store.YES, Field.Index.ANALYZED));

                indexStr.setLength(0);
                contents.setLength(0);
                sb.setLength(0);

                int cnt = 0;
                for (int i = 0; i < idIndex; i++) {
                    if (includeCol[i] == 1) {
                        String val = rs.getString(i + 1);
                        if (i == 0) {
                            val = val.replaceFirst("^0+(?!$)", "");
                        }

                        //System.out.println(i+" "+cnt+"  "+md.getColumnName(i+1)+" ["+(StringUtils.isNotEmpty(val) ? val : " ")+"] ");
                        contents.append(StringUtils.isNotEmpty(val) ? val : " ");
                        contents.append('\t');
                        cnt++;
                    }
                }

                indexStr.append(contents);

                Date collDate = rs.getDate(collDateIndex);
                if (collDate != null) {
                    cal.setTime(collDate);
                    String yearStr = Integer.toString(cal.get(Calendar.YEAR));
                    indexStr.append(yearStr);
                    indexStr.append('\t');
                    doc.add(new Field("yr", yearStr, Field.Store.YES, Field.Index.ANALYZED));
                }

                sb.setLength(0);
                for (int i = idIndex; i < colToTblId.length; i++) {
                    //if (i>idIndex) sb.append(',');
                    //sb.append(String.format("%d=%d", colToTblId[i], rs.getInt(i+1)));
                    doc.add(new Field(Integer.toString(colToTblId[i]), Integer.toString(rs.getInt(i + 1)),
                            Field.Store.YES, Field.Index.NOT_ANALYZED));
                }
                doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                //writers[0].addDocument(doc);

                ///////////////////////////////////////////////
                // Catalog Number
                ///////////////////////////////////////////////
                String catNum = rs.getString(1);
                if (StringUtils.isNotEmpty(catNum)) {
                    doc.add(new Field("cn", catNum, Field.Store.YES, Field.Index.ANALYZED));
                }

                ///////////////////////////////////////////////
                // Image Name in Text1
                ///////////////////////////////////////////////
                boolean hasName = false;
                /*try
                {
                int idd = Integer.parseInt(id);
                //pStmt.setInt(1, idd);
                //ResultSet rsp = pStmt.executeQuery();
                ResultSet rsp = stmt3.executeQuery(String.format("SELECT Text1 FROM preparation WHERE CollectionObjectID = %d AND Text1 IS NOT NULL", idd));
                if (rsp.next())
                {
                    String imgName = rsp.getString(1);
                    if (StringUtils.isNotEmpty(imgName))
                    {
                        String nm = FilenameUtils.getName(imgName);
                        doc.add(new Field("im", nm, Field.Store.NO, Field.Index.ANALYZED));
                        contents.append(nm);
                        hasName = true;
                    }
                }
                rsp.close();
                } catch (SQLException e) {e.printStackTrace();}
                */
                if (!hasName) {
                    contents.append(" ");
                }
                contents.append('\t');

                ///////////////////////////////////////////////
                // Collector  (Agent)
                ///////////////////////////////////////////////
                String dataStr = buildStr(rs, sb, 17, 18, 19);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("ag", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                ///////////////////////////////////////////////
                // Locality 
                ///////////////////////////////////////////////
                dataStr = buildStr(rs, sb, 9, 10, 11, 12, 13, 14);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("lc", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }
                //writers[2].addDocument(doc);

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                ///////////////////////////////////////////////
                // Taxon
                ///////////////////////////////////////////////
                dataStr = buildStr(rs, sb, 5, 6);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("tx", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }
                //writers[3].addDocument(doc);

                int taxId = rs.getInt(taxIndex);
                boolean taxOK = !rs.wasNull();
                int taxPId = rs.getInt(taxParentIndex);
                taxOK = taxOK && !rs.wasNull();

                int geoId = rs.getInt(geoIndex);
                boolean geoOK = !rs.wasNull();
                int geoPId = rs.getInt(geoParentIndex);
                geoOK = geoOK && !rs.wasNull();

                int ceId = rs.getInt(ceIndex);
                boolean ceOK = !rs.wasNull();

                if (taxOK) {
                    addHigherTaxa(stmt2, doc, indexStr, taxId, taxPId, rs.getInt(taxNameIndex + 1),
                            rs.getString(taxNameIndex));
                    addAuthor(stmt2, doc, indexStr, taxId);
                }

                if (geoOK) {
                    addCountry(stmt2, doc, indexStr, geoId, geoPId, rs.getInt(geoNameIndex + 1),
                            rs.getString(geoNameIndex));
                }

                if (ceOK) {
                    addHost(stmt2, doc, indexStr, ceId);
                }

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                doc.add(new Field("cs", indexStr.toString(), Field.Store.NO, Field.Index.ANALYZED));
                doc.add(new Field("contents", contents.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                writers[0].addDocument(doc);

                //System.out.println(procRecs+" "+rs.getString(1));
                procRecs++;
                if (procRecs % 1000 == 0) {
                    System.out.println(procRecs);
                }

                if (procRecs % 100000 == 0) {
                    System.out.println("Optimizing...");
                    writers[0].optimize();
                }
            }
            rs.close();

        } catch (SQLException sqlex) {
            sqlex.printStackTrace();

        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("IOException adding Lucene Document: " + e.getMessage());

        } finally {

            if (stmt != null) {
                try {
                    if (stmt != null)
                        stmt.close();
                    if (stmt2 != null)
                        stmt2.close();
                    if (stmt3 != null)
                        stmt3.close();

                } catch (SQLException e) {
                    e.printStackTrace();
                }
            }

        }

    } catch (IOException e) {
        e.printStackTrace();

        System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());

    } finally {
        for (Analyzer a : analyzers) {
            a.close();
        }
        analyzers = null;

        // writers may still be null here if an exception occurred before the array was allocated.
        for (IndexWriter writer : (writers != null ? writers : new IndexWriter[0])) {
            try {
                System.out.println("Optimizing...");
                writer.optimize();
                writer.close();
                System.out.println("Done Optimizing.");

            } catch (CorruptIndexException e) {
                e.printStackTrace();

            } catch (IOException e) {
                e.printStackTrace();
            }
            writer = null;
        }

        long endTime = System.currentTimeMillis();
        System.out.println("Time: " + (endTime - startTime) / 1000);
    }
}

From source file:com.linkedin.pinot.integration.tests.BaseClusterIntegrationTest.java

/**
 * Run equivalent Pinot and H2 query and compare the results.
 * <p>LIMITATIONS:
 * <ul>
 *   <li>Skip comparison for selection and aggregation group-by when H2 results are too large to exhaust.</li>
 *   <li>Do not examine the order of result records.</li>
 * </ul>
 *
 * @param pqlQuery Pinot PQL query.
 * @param sqlQueries H2 SQL queries.
 * @throws Exception
 */
protected void runQuery(String pqlQuery, @Nullable List<String> sqlQueries) throws Exception {
    try {
        _queryCount++;

        // Run the query.
        // TODO Use Pinot client API for this
        JSONObject response = postQuery(pqlQuery);

        // Check exceptions.
        JSONArray exceptions = response.getJSONArray("exceptions");
        if (exceptions.length() > 0) {
            String failureMessage = "Got exceptions: " + exceptions;
            failure(pqlQuery, sqlQueries, failureMessage, null);
            return;
        }

        // Check total docs.
        long numTotalDocs = response.getLong("totalDocs");
        if (numTotalDocs != TOTAL_DOCS) {
            String failureMessage = "Number of total documents does not match, expected: " + TOTAL_DOCS
                    + ", got: " + numTotalDocs;
            failure(pqlQuery, sqlQueries, failureMessage, null);
            return;
        }

        // Skip comparison if SQL queries not specified.
        if (sqlQueries == null) {
            return;
        }

        // Check results.
        Statement h2statement = _connection.createStatement(ResultSet.TYPE_FORWARD_ONLY,
                ResultSet.CONCUR_READ_ONLY);
        int numDocsScanned = response.getInt("numDocsScanned");
        if (response.has("aggregationResults")) {
            // Aggregation and Group-by results.

            // Get results type.
            JSONArray aggregationResultsArray = response.getJSONArray("aggregationResults");
            int numAggregationResults = aggregationResultsArray.length();
            int numSqlQueries = sqlQueries.size();
            if (numAggregationResults != numSqlQueries) {
                String failureMessage = "Number of aggregation results: " + numAggregationResults
                        + " does not match number of SQL queries: " + numSqlQueries;
                failure(pqlQuery, sqlQueries, failureMessage);
            }
            JSONObject firstAggregationResult = aggregationResultsArray.getJSONObject(0);

            if (firstAggregationResult.has("value")) {
                // Aggregation results.

                // Check over all aggregation functions.
                for (int i = 0; i < numAggregationResults; i++) {
                    // Get expected value for the aggregation.
                    h2statement.execute(sqlQueries.get(i));
                    ResultSet sqlResultSet = h2statement.getResultSet();
                    sqlResultSet.first();
                    String sqlValue = sqlResultSet.getString(1);

                    // If SQL value is null, it means no record selected in H2.
                    if (sqlValue == null) {
                        // Check number of documents scanned is 0.
                        if (numDocsScanned != 0) {
                            String failureMessage = "No record selected in H2 but number of records selected in Pinot: "
                                    + numDocsScanned;
                            failure(pqlQuery, sqlQueries, failureMessage);
                        }
                        // Skip further comparison.
                        return;
                    }

                    // Get actual value for the aggregation.
                    String pqlValue = aggregationResultsArray.getJSONObject(i).getString("value");

                    // Fuzzy compare expected value and actual value.
                    double expectedValue = Double.parseDouble(sqlValue);
                    double actualValue = Double.parseDouble(pqlValue);
                    if (Math.abs(actualValue - expectedValue) >= 1.0) {
                        String failureMessage = "Value: " + i + " does not match, expected: " + sqlValue
                                + ", got: " + pqlValue;
                        failure(pqlQuery, sqlQueries, failureMessage);
                        return;
                    }
                }
            } else if (firstAggregationResult.has("groupByResult")) {
                // Group-by results.

                // Get number of groups and number of group keys in each group.
                JSONArray firstGroupByResults = aggregationResultsArray.getJSONObject(0)
                        .getJSONArray("groupByResult");
                int numGroups = firstGroupByResults.length();
                // If no group-by result returned by Pinot, set numGroupKeys to 0 since no comparison needed.
                int numGroupKeys;
                if (numGroups == 0) {
                    numGroupKeys = 0;
                } else {
                    numGroupKeys = firstGroupByResults.getJSONObject(0).getJSONArray("group").length();
                }

                // Check over all aggregation functions.
                for (int i = 0; i < numAggregationResults; i++) {
                    // Get number of group keys.
                    JSONArray groupByResults = aggregationResultsArray.getJSONObject(i)
                            .getJSONArray("groupByResult");

                    // Construct expected result map from group keys to value.
                    h2statement.execute(sqlQueries.get(i));
                    ResultSet sqlResultSet = h2statement.getResultSet();
                    Map<String, String> expectedValues = new HashMap<>();
                    int sqlNumGroups;
                    for (sqlNumGroups = 0; sqlResultSet.next()
                            && sqlNumGroups < MAX_COMPARISON_LIMIT; sqlNumGroups++) {
                        if (numGroupKeys != 0) {
                            StringBuilder groupKey = new StringBuilder();
                            for (int groupKeyIndex = 1; groupKeyIndex <= numGroupKeys; groupKeyIndex++) {
                                // Convert boolean value to lower case.
                                groupKey.append(
                                        convertBooleanToLowerCase(sqlResultSet.getString(groupKeyIndex)))
                                        .append(' ');
                            }
                            expectedValues.put(groupKey.toString(), sqlResultSet.getString(numGroupKeys + 1));
                        }
                    }

                    if (sqlNumGroups == 0) {
                        // No record selected in H2.

                        // Check if no record selected in Pinot.
                        if (numGroups != 0) {
                            String failureMessage = "No group returned in H2 but number of groups returned in Pinot: "
                                    + numGroups;
                            failure(pqlQuery, sqlQueries, failureMessage);
                            return;
                        }

                        // Check if number of documents scanned is 0.
                        if (numDocsScanned != 0) {
                            String failureMessage = "No group returned in Pinot but number of records selected: "
                                    + numDocsScanned;
                            failure(pqlQuery, sqlQueries, failureMessage);
                        }

                        // Skip further comparison.
                        return;
                    } else if (sqlNumGroups < MAX_COMPARISON_LIMIT) {
                        // Only compare exhausted results.

                        // Check that all Pinot results are contained in the H2 results.
                        for (int resultIndex = 0; resultIndex < numGroups; resultIndex++) {
                            // Fetch Pinot group keys.
                            JSONObject groupByResult = groupByResults.getJSONObject(resultIndex);
                            JSONArray group = groupByResult.getJSONArray("group");
                            StringBuilder groupKeyBuilder = new StringBuilder();
                            for (int groupKeyIndex = 0; groupKeyIndex < numGroupKeys; groupKeyIndex++) {
                                groupKeyBuilder.append(group.getString(groupKeyIndex)).append(' ');
                            }
                            String groupKey = groupKeyBuilder.toString();

                            // Check if Pinot group keys contained in H2 results.
                            if (!expectedValues.containsKey(groupKey)) {
                                String failureMessage = "Group returned in Pinot but not in H2: " + groupKey;
                                failure(pqlQuery, sqlQueries, failureMessage);
                                return;
                            }

                            // Fuzzy compare expected value and actual value.
                            String sqlValue = expectedValues.get(groupKey);
                            String pqlValue = groupByResult.getString("value");
                            double expectedValue = Double.parseDouble(sqlValue);
                            double actualValue = Double.parseDouble(pqlValue);
                            if (Math.abs(actualValue - expectedValue) >= 1.0) {
                                String failureMessage = "Value: " + i + " does not match, expected: " + sqlValue
                                        + ", got: " + pqlValue + ", for group: " + groupKey;
                                failure(pqlQuery, sqlQueries, failureMessage);
                                return;
                            }
                        }
                    } else {
                        // Cannot get exhausted results.

                        // Skip further comparison.
                        LOGGER.debug("SQL: {} returned at least {} rows, skipping comparison.",
                                sqlQueries.get(0), MAX_COMPARISON_LIMIT);
                        return;
                    }
                }
            } else {
                // Neither aggregation or group-by results.
                String failureMessage = "Inside aggregation results, no aggregation or group-by results found";
                failure(pqlQuery, sqlQueries, failureMessage);
            }
        } else if (response.has("selectionResults")) {
            // Selection results.

            // Construct expected result set.
            h2statement.execute(sqlQueries.get(0));
            ResultSet sqlResultSet = h2statement.getResultSet();
            ResultSetMetaData sqlMetaData = sqlResultSet.getMetaData();

            Set<String> expectedValues = new HashSet<>();
            Map<String, String> reusableExpectedValueMap = new HashMap<>();
            Map<String, List<String>> reusableMultiValuesMap = new HashMap<>();
            List<String> reusableColumnOrder = new ArrayList<>();
            int numResults;
            for (numResults = 0; sqlResultSet.next() && numResults < MAX_COMPARISON_LIMIT; numResults++) {
                reusableExpectedValueMap.clear();
                reusableMultiValuesMap.clear();
                reusableColumnOrder.clear();

                int numColumns = sqlMetaData.getColumnCount();
                for (int i = 1; i <= numColumns; i++) {
                    String columnName = sqlMetaData.getColumnName(i);

                    // Handle null result and convert boolean value to lower case.
                    String columnValue = sqlResultSet.getString(i);
                    if (columnValue == null) {
                        columnValue = "null";
                    } else {
                        columnValue = convertBooleanToLowerCase(columnValue);
                    }

                    // Handle multi-value columns.
                    int length = columnName.length();
                    if (length > 5 && columnName.substring(length - 5, length - 1).equals("__MV")) {
                        // Multi-value column.
                        String multiValueColumnName = columnName.substring(0, length - 5);
                        List<String> multiValue = reusableMultiValuesMap.get(multiValueColumnName);
                        if (multiValue == null) {
                            multiValue = new ArrayList<>();
                            reusableMultiValuesMap.put(multiValueColumnName, multiValue);
                            reusableColumnOrder.add(multiValueColumnName);
                        }
                        multiValue.add(columnValue);
                    } else {
                        // Single-value column.
                        reusableExpectedValueMap.put(columnName, columnValue);
                        reusableColumnOrder.add(columnName);
                    }
                }

                // Add multi-value column results to the expected values.
                // The reason for this step is that Pinot does not maintain order of elements in multi-value columns.
                for (Map.Entry<String, List<String>> entry : reusableMultiValuesMap.entrySet()) {
                    List<String> multiValue = entry.getValue();
                    Collections.sort(multiValue);
                    reusableExpectedValueMap.put(entry.getKey(), multiValue.toString());
                }

                // Build expected value String.
                StringBuilder expectedValue = new StringBuilder();
                for (String column : reusableColumnOrder) {
                    expectedValue.append(column).append(':').append(reusableExpectedValueMap.get(column))
                            .append(' ');
                }

                expectedValues.add(expectedValue.toString());
            }

            JSONObject selectionColumnsAndResults = response.getJSONObject("selectionResults");
            JSONArray selectionColumns = selectionColumnsAndResults.getJSONArray("columns");
            JSONArray selectionResults = selectionColumnsAndResults.getJSONArray("results");
            int numSelectionResults = selectionResults.length();

            if (numResults == 0) {
                // No record selected in H2.

                // Check if no record selected in Pinot.
                if (numSelectionResults != 0) {
                    String failureMessage = "No record selected in H2 but number of records selected in Pinot: "
                            + numSelectionResults;
                    failure(pqlQuery, sqlQueries, failureMessage);
                    return;
                }

                // Check if number of documents scanned is 0.
                if (numDocsScanned != 0) {
                    String failureMessage = "No selection result returned in Pinot but number of records selected: "
                            + numDocsScanned;
                    failure(pqlQuery, sqlQueries, failureMessage);
                }
            } else if (numResults < MAX_COMPARISON_LIMIT) {
                // Only compare exhausted results.

                // Check that Pinot results are contained in the H2 results.
                int numColumns = selectionColumns.length();
                for (int i = 0; i < numSelectionResults; i++) {
                    // Build actual value String.
                    StringBuilder actualValueBuilder = new StringBuilder();
                    JSONArray selectionResult = selectionResults.getJSONArray(i);

                    for (int columnIndex = 0; columnIndex < numColumns; columnIndex++) {
                        // Convert column name to all uppercase to make it compatible with H2.
                        String columnName = selectionColumns.getString(columnIndex).toUpperCase();

                        Object columnResult = selectionResult.get(columnIndex);
                        if (columnResult instanceof JSONArray) {
                            // Multi-value column.
                            JSONArray columnResultsArray = (JSONArray) columnResult;
                            List<String> multiValue = new ArrayList<>();
                            int length = columnResultsArray.length();
                            for (int elementIndex = 0; elementIndex < length; elementIndex++) {
                                multiValue.add(columnResultsArray.getString(elementIndex));
                            }
                            for (int elementIndex = length; elementIndex < MAX_ELEMENTS_IN_MULTI_VALUE; elementIndex++) {
                                multiValue.add("null");
                            }
                            Collections.sort(multiValue);
                            actualValueBuilder.append(columnName).append(':').append(multiValue.toString())
                                    .append(' ');
                        } else {
                            // Single-value column.
                            actualValueBuilder.append(columnName).append(':').append((String) columnResult)
                                    .append(' ');
                        }
                    }
                    String actualValue = actualValueBuilder.toString();

                    // Check actual value in expected values set.
                    if (!expectedValues.contains(actualValue)) {
                        String failureMessage = "Selection result returned in Pinot but not in H2: "
                                + actualValue;
                        failure(pqlQuery, sqlQueries, failureMessage);
                        return;
                    }
                }
            } else {
                // Cannot get exhausted results.
                LOGGER.debug("SQL: {} returned at least {} rows, skipping comparison.", sqlQueries.get(0),
                        MAX_COMPARISON_LIMIT);
            }
        } else {
            // Neither aggregation or selection results.
            String failureMessage = "No aggregation or selection results found for query: " + pqlQuery;
            failure(pqlQuery, sqlQueries, failureMessage);
        }
    } catch (Exception e) {
        String failureMessage = "Caught exception while running query.";
        failure(pqlQuery, sqlQueries, failureMessage, e);
    }
}
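
A hypothetical call from a concrete subclass's test method might look like the snippet below; the table name and the query text are placeholders, since the real integration tests generate their PQL/SQL pairs elsewhere.

    String pqlQuery = "SELECT COUNT(*) FROM mytable WHERE Carrier = 'AA'";
    runQuery(pqlQuery, java.util.Collections.singletonList(pqlQuery));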

From source file:edu.ku.brc.specify.conversion.StratToGTP.java

/**
 * @throws SQLException
 */
public void convertStratToGTP() throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;

    try {
        // get a Hibernate session for saving the new records
        Session localSession = HibernateUtil.getCurrentSession();
        HibernateUtil.beginTransaction();

        int count = BasicSQLUtils.getCountAsInt(oldDBConn, "SELECT COUNT(*) FROM stratigraphy");
        if (count < 1)
            return;

        if (hasFrame) {
            setProcess(0, count);
        }

        IdTableMapper gtpIdMapper = IdMapperMgr.getInstance().addTableMapper("geologictimeperiod",
                "GeologicTimePeriodID");

        Hashtable<Integer, Integer> ceToNewStratIdHash = new Hashtable<Integer, Integer>();

        IdMapperIFace ceMapper = IdMapperMgr.getInstance().get("collectingevent", "CollectingEventID");

        // get all of the old records
        //  Future GTP                           Period        Epoch       EML        Age    EML(age)    Text1   Text2     Remarks
        String sql = "SELECT s.StratigraphyID, s.SuperGroup, s.Group, s.Formation, s.Member, s.Bed,    s.Text1, s.Text2,  s.Remarks FROM stratigraphy s ORDER BY s.StratigraphyID";

        stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(sql);

        int counter = 0;
        // for each old record, convert the record
        while (rs.next()) {
            if (counter % 500 == 0) {
                if (hasFrame) {
                    setProcess(counter);

                } else {
                    log.info("Converted " + counter + " Stratigraphy records");
                }
            }

            // grab the important data fields from the old record
            int oldStratId = rs.getInt(1);
            String period = rs.getString(2);
            String epoch = rs.getString(3);
            String eml = rs.getString(4);
            String age = rs.getString(5);
            String emlAge = rs.getString(6);

            String text1 = rs.getString(7);
            String text2 = rs.getString(8);
            String remarks = rs.getString(9);

            if (StringUtils.isNotEmpty(text2) && text2.length() > 128) {
                remarks += "; " + text2;
                text2 = text2.substring(0, 128);
            }

            if (StringUtils.isNotEmpty(eml)) {
                if (StringUtils.isNotEmpty(epoch)) {
                    epoch += ' ' + eml;

                } else {
                    epoch = eml;
                }
            }

            if (StringUtils.isEmpty(epoch)) {
                epoch = "(Empty)";
            }

            // create a new Geography object from the old data
            GeologicTimePeriod newStrat = convertOldStratRecord(localSession, eraNode, text1, text2, remarks,
                    period, epoch, eml, age, emlAge);

            counter++;

            // Map Old GeologicTimePeriod ID to the new Tree Id
            gtpIdMapper.put(oldStratId, newStrat.getGeologicTimePeriodId());

            // Convert Old CEId to new CEId, then map the new CEId -> new StratId
            Integer ceId = ceMapper.get(oldStratId);
            if (ceId != null) {
                ceToNewStratIdHash.put(ceId, newStrat.getGeologicTimePeriodId());
            } else {
                String msg = String.format("No CE mapping for Old StratId %d, when they are a one-to-one.",
                        oldStratId);
                tblWriter.logError(msg);
                log.error(msg);
            }
        }
        stmt.close();

        if (hasFrame) {
            setProcess(counter);

        } else {
            log.info("Converted " + counter + " Stratigraphy records");
        }

        TreeHelper.fixFullnameForNodeAndDescendants(eraNode);
        eraNode.setNodeNumber(1);
        fixNodeNumbersFromRoot(eraNode);
        rs.close();

        HibernateUtil.commitTransaction();
        log.info("Converted " + counter + " Stratigraphy records");

    } catch (Exception ex) {
        ex.printStackTrace();
    }

    // Now in this Step we Add the PaleoContext to the Collecting Events

}

From source file:dk.netarkivet.common.utils.DBUtils.java

/**
 * Prepare a statement for iteration given a query string, fetch size
 * and some args.
 *
 * NB: the provided connection is not closed.
 *
 * @param c a Database connection
 * @param fetchSize hint to JDBC driver on number of results to cache
 * @param query a query string  (must not be null or empty)
 * @param args some args to insert into this query string (must not be null)
 * @return a prepared statement
 * @throws SQLException If unable to prepare a statement
 * @throws ArgumentNotValid If unable to handle the type of one of the args, or
 * the arguments are either null or an empty String.
 */
public static PreparedStatement prepareStatement(Connection c, int fetchSize, String query, Object... args)
        throws SQLException {
    ArgumentNotValid.checkNotNull(c, "Connection c");
    ArgumentNotValid.checkPositive(fetchSize, "int fetchSize");
    ArgumentNotValid.checkNotNullOrEmpty(query, "String query");
    ArgumentNotValid.checkNotNull(args, "Object... args");
    c.setAutoCommit(false);
    PreparedStatement s = c.prepareStatement(query, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    s.setFetchSize(fetchSize);
    int i = 1;
    for (Object arg : args) {
        if (arg instanceof String) {
            s.setString(i, (String) arg);
        } else if (arg instanceof Integer) {
            s.setInt(i, (Integer) arg);
        } else if (arg instanceof Long) {
            s.setLong(i, (Long) arg);
        } else if (arg instanceof Boolean) {
            s.setBoolean(i, (Boolean) arg);
        } else if (arg instanceof Date) {
            s.setTimestamp(i, new Timestamp(((Date) arg).getTime()));
        } else {
            throw new ArgumentNotValid("Cannot handle type '" + arg.getClass().getName()
                    + "'. We can only handle string, " + "int, long, date or boolean args for query: " + query);
        }
        i++;
    }
    return s;
}
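
A short usage sketch for this helper follows; the in-memory H2 URL, the "jobs" table, and its columns are assumptions made only to show how the fetch size and the varargs line up, not anything defined by NetarchiveSuite.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

import dk.netarkivet.common.utils.DBUtils;

public class PrepareStatementUsage {
    public static void main(String[] args) throws Exception {
        // Placeholder JDBC URL and schema; substitute a real database and table.
        try (Connection c = DriverManager.getConnection("jdbc:h2:mem:demo");
             PreparedStatement s = DBUtils.prepareStatement(c, 100,
                     "SELECT job_id FROM jobs WHERE status = ?", "DONE");
             ResultSet res = s.executeQuery()) {
            while (res.next()) {
                System.out.println(res.getLong(1));
            }
        }
    }
}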

From source file:rems.Global.java

public static ResultSet selectDataNoParams(String selSql) {
    ResultSet selDtSt = null;
    Statement stmt = null;
    try {
        Connection mycon = null;
        Class.forName("org.postgresql.Driver");
        mycon = DriverManager.getConnection(Global.connStr, Global.Uname, Global.Pswd);
        mycon.setAutoCommit(false);
        //System.out.println("Opened database successfully");

        stmt = mycon.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        selDtSt = stmt.executeQuery(selSql);
        //stmt.close();
        //mycon.close();
        return selDtSt;
    } catch (Exception ex) {
        Global.errorLog = selSql + "\r\n" + ex.getMessage();
        Global.writeToLog();
        return selDtSt;
    } finally {
    }
}

From source file:net.sourceforge.msscodefactory.v1_10.MSSBamPg8.MSSBamPg8AtomDefTable.java

public void createAtomDef(MSSBamAuthorization Authorization, MSSBamAtomDefBuff Buff) {
    final String S_ProcName = "createAtomDef ";
    try {
        Connection cnx = schema.getCnx();
        long Id = Buff.getRequiredId();
        String DbName = Buff.getOptionalDbName();
        String sql = "INSERT INTO mssbam110.atom_def( " + "id, " + "dbname" + " )" + "VALUES ( " + Id + ", "
                + MSSBamPg8Schema.getQuotedString(DbName) + " )";
        Statement stmt = cnx.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        int rowsAffected = stmt.executeUpdate(sql);
        if (rowsAffected != 1) {
            throw CFLib.getDefaultExceptionFactory().newRuntimeException(getClass(), S_ProcName,
                    "Expected 1 row to be affected by insert, not " + rowsAffected);
        }
    } catch (SQLException e) {
        throw CFLib.getDefaultExceptionFactory().newDbException(getClass(), S_ProcName, e);
    }
}

From source file:com.itemanalysis.jmetrik.graph.nicc.NonparametricCurveAnalysis.java

public void evaluateDIF() throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;

    //create focal map
    focalRegression = new TreeMap<VariableAttributes, KernelRegressionItem>();
    for (VariableAttributes v : variables) {
        KernelRegressionItem kItem = new KernelRegressionItem(v, kernelFunction, bandwidth,
                uniformDistributionApproximation);
        focalRegression.put(v, kItem);
    }

    //create reference map
    if (hasGroupVariable) {
        referenceRegression = new TreeMap<VariableAttributes, KernelRegressionItem>();
        for (VariableAttributes v : variables) {
            KernelRegressionItem kItem = new KernelRegressionItem(v, kernelFunction, bandwidth,
                    uniformDistributionApproximation);
            referenceRegression.put(v, kItem);
        }
    }

    //determine whether group variable is double or not
    boolean groupVariableIsDouble = false;
    if (groupByVariable.getType().getDataType() == DataType.DOUBLE)
        groupVariableIsDouble = true;

    try {
        //connect to db
        Table sqlTable = new Table(tableName.getNameForDatabase());
        SelectQuery select = new SelectQuery();
        for (VariableAttributes v : variables) {
            select.addColumn(sqlTable, v.getName().nameForDatabase());
        }
        select.addColumn(sqlTable, regressorVariable.getName().nameForDatabase());
        select.addColumn(sqlTable, groupByVariable.getName().nameForDatabase());

        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(select.toString());

        KernelRegressionItem kernelRegressionItem;
        Object itemResponse;
        Double score;
        Object tempGroup;
        String group;

        //analyze by groups
        while (rs.next()) {
            tempGroup = rs.getObject(groupByVariable.getName().nameForDatabase());
            if (tempGroup == null) {
                group = "";//will not be counted if does not match focal or reference code
            } else {
                if (groupVariableIsDouble) {
                    group = Double.valueOf((Double) tempGroup).toString();
                } else {
                    group = ((String) tempGroup).trim();
                }
            }

            //get independent variable value
            //omit examinees with missing data
            //examinees with missing group code omitted
            score = rs.getDouble(regressorVariable.getName().nameForDatabase());
            if (!rs.wasNull()) {
                if (focalCode.equals(group)) {
                    for (VariableAttributes v : focalRegression.keySet()) {
                        kernelRegressionItem = focalRegression.get(v);
                        itemResponse = rs.getObject(v.getName().nameForDatabase());
                        if (itemResponse != null)
                            kernelRegressionItem.increment(score, itemResponse);
                    }
                } else if (referenceCode.equals(group)) {
                    for (VariableAttributes v : referenceRegression.keySet()) {
                        kernelRegressionItem = referenceRegression.get(v);
                        itemResponse = rs.getObject(v.getName().nameForDatabase());
                        if (itemResponse != null)
                            kernelRegressionItem.increment(score, itemResponse);
                    }
                }
            }
            updateProgress();
        }
    } catch (SQLException ex) {
        throw ex;
    } finally {
        if (rs != null)
            rs.close();
        if (stmt != null)
            stmt.close();
    }

    this.firePropertyChange("progress-ind-on", null, null);
}