Example usage for java.sql Statement setFetchSize

List of usage examples for java.sql Statement setFetchSize

Introduction

On this page you can find example usages of the java.sql Statement method setFetchSize.

Prototype

void setFetchSize(int rows) throws SQLException;

Source Link

Document

Gives the JDBC driver a hint as to the number of rows that should be fetched from the database when more rows are needed for ResultSet objects generated by this Statement.

Usage

From source file:edu.ku.brc.specify.toycode.mexconabio.CopyFromGBIF.java

/**
 * /*from  w ww.ja  v a  2 s. c  o  m*/
 */
public void process() {
    boolean doQueryForCollNum = true;

    String pSQL = "INSERT INTO raw (old_id,data_provider_id,data_resource_id,resource_access_point_id, institution_code, collection_code, "
            + "catalogue_number, scientific_name, author, rank, kingdom, phylum, class, order_rank, family, genus, species, subspecies, latitude, longitude,  "
            + "lat_long_precision, max_altitude, min_altitude, altitude_precision, min_depth, max_depth, depth_precision, continent_ocean, country, state_province, county, collector_name, "
            + "locality,year, month, day, basis_of_record, identifier_name, identification_date,unit_qualifier, created, modified, deleted, collector_num) "
            + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";

    String gbifSQLBase = "SELECT r.id, r.data_provider_id, r.data_resource_id, r.resource_access_point_id, r.institution_code, r.collection_code, "
            + "r.catalogue_number, r.scientific_name, r.author, r.rank, r.kingdom, r.phylum, r.class, r.order_rank, r.family, r.genus, r.species, r.subspecies, "
            + "r.latitude, r.longitude, r.lat_long_precision, r.max_altitude, r.min_altitude, r.altitude_precision, r.min_depth, r.max_depth, r.depth_precision, "
            + "r.continent_ocean, r.country, r.state_province, r.county, r.collector_name, r.locality, r.year, r.month, r.day, r.basis_of_record, r.identifier_name, "
            + "r.identification_date, r.unit_qualifier, r.created, r.modified, r.deleted";

    String gbifSQL;
    if (doQueryForCollNum) {
        gbifSQL = gbifSQLBase + " FROM raw_occurrence_record r";
    } else {
        gbifSQL = gbifSQLBase
                + ", i.identifier FROM raw_occurrence_record r, identifier_record i WHERE r.id = i.occurrence_id AND i.identifier_type = 3";
    }

    BasicSQLUtils.update(srcDBConn, "DELETE FROM raw WHERE id > 0");

    long totalRecs = BasicSQLUtils.getCount(dbConn, "SELECT COUNT(*) FROM raw_occurrence_record");
    long procRecs = 0;
    long startTime = System.currentTimeMillis();
    int secsThreshold = 0;

    PrintWriter pw = null;

    final double HRS = 1000.0 * 60.0 * 60.0;

    Statement gStmt = null;
    PreparedStatement pStmt = null;
    PreparedStatement stmt = null;

    try {
        pw = new PrintWriter("gbif.log");

        pStmt = srcDBConn.prepareStatement(pSQL);

        stmt = dbConn2.prepareStatement(
                "SELECT identifier FROM identifier_record WHERE occurrence_id = ? AND identifier_type = 3");
        //stmt.setFetchSize(Integer.MIN_VALUE);

        System.out.println("Total Records: " + totalRecs);
        pw.println("Total Records: " + totalRecs);

        gStmt = dbConn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        gStmt.setFetchSize(Integer.MIN_VALUE);

        String fullSQL = gbifSQL;
        System.out.println(fullSQL);

        ResultSet gRS = gStmt.executeQuery(fullSQL);
        ResultSetMetaData rsmd = gRS.getMetaData();
        int lastColInx = rsmd.getColumnCount() + (doQueryForCollNum ? 1 : 0);

        while (gRS.next()) {
            int id = gRS.getInt(1);
            pStmt.setObject(1, id);

            for (int i = 2; i <= rsmd.getColumnCount(); i++) {
                Object obj = gRS.getObject(i);
                pStmt.setObject(i, obj);
            }

            String collNum = null;
            if (doQueryForCollNum) {
                //String tmpSQL = String.format("SELECT identifier FROM identifier_record WHERE occurrence_id = %d AND identifier_type = 3", id);
                //System.out.println(tmpSQL);
                stmt.setInt(1, id);
                ResultSet rs = stmt.executeQuery();
                if (rs.next()) {
                    collNum = rs.getString(1);
                }
                rs.close();
            } else {
                collNum = gRS.getString(lastColInx - 1);
            }

            if (StringUtils.isNotEmpty(collNum)) {
                if (collNum.length() < 256) {
                    pStmt.setString(lastColInx, collNum);

                } else {
                    pStmt.setString(lastColInx, collNum.substring(0, 255));
                }
            } else {
                pStmt.setObject(lastColInx, null);
            }

            try {
                pStmt.executeUpdate();

            } catch (Exception ex) {
                System.err.println("For ID[" + gRS.getObject(1) + "]");
                ex.printStackTrace();
                pw.print("For ID[" + gRS.getObject(1) + "] " + ex.getMessage());
                pw.flush();
            }

            procRecs++;
            if (procRecs % 10000 == 0) {
                long endTime = System.currentTimeMillis();
                long elapsedTime = endTime - startTime;

                double avergeTime = (double) elapsedTime / (double) procRecs;

                double hrsLeft = (((double) elapsedTime / (double) procRecs) * (double) totalRecs - procRecs)
                        / HRS;

                int seconds = (int) (elapsedTime / 60000.0);
                if (secsThreshold != seconds) {
                    secsThreshold = seconds;

                    String msg = String.format(
                            "Elapsed %8.2f hr.mn   Ave Time: %5.2f    Percent: %6.3f  Hours Left: %8.2f ",
                            ((double) (elapsedTime)) / HRS, avergeTime,
                            100.0 * ((double) procRecs / (double) totalRecs), hrsLeft);
                    System.out.println(msg);
                    pw.println(msg);
                    pw.flush();
                }
            }
        }

    } catch (Exception ex) {
        ex.printStackTrace();

    } finally {
        try {
            if (gStmt != null) {
                gStmt.close();
            }
            if (pStmt != null) {
                pStmt.close();
            }
            if (stmt != null) {
                stmt.close();
            }
            pw.close();

        } catch (Exception ex) {

        }
    }
    System.out.println("Done transferring.");
    pw.println("Done transferring.");

    /*
    int     count = 0;
    boolean cont = true;
    while (cont)
    {
    long start = System.currentTimeMillis();
            
    Statement         gStmt = null;
    PreparedStatement pStmt = null;
            
    try
    {
        gStmt = dbConn.createStatement(ResultSet.TYPE_FORWARD_ONLY,ResultSet.CONCUR_READ_ONLY);
                
        String fullSQL = gbifSQL + String.format(" AND r.id >= %d AND r.id < %d", count, count+recordStep);
        //System.out.println(fullSQL);
                
        int               cnt  = 0;
        ResultSet         rs   = gStmt.executeQuery(fullSQL);
        ResultSetMetaData rsmd = rs.getMetaData();
                
        //System.out.println("Done with query.");
                
        pStmt = srcDBConn.prepareStatement(pSQL);
        count += recordStep;
                
        while (rs.next())
        {
            Integer id  = rs.getInt(1);
            pStmt.setInt(1, id);
                    
            for (int i=2;i<=rsmd.getColumnCount();i++)
            {
                Object obj = rs.getObject(i);
                pStmt.setObject(i, obj);
            }
                    
            pStmt.executeUpdate();
                
            cnt++;
            procRecs++;
        }
        rs.close();
                
        if (count == 0)
        {
            break;
        }
                
    } catch (Exception ex)
    {
        ex.printStackTrace();
                
    } finally 
    {
        try
        {
            if (gStmt != null)
            {
                gStmt.close();
            }
            if (pStmt != null)
            {
                pStmt.close();
            }
        } catch (Exception ex)
        {
                    
        }
    }
            
    long endTime   = System.currentTimeMillis();
    long deltaTime = endTime - start;
               
    long elapsedTime = endTime - startTime;
            
    double avergeTime = (double)elapsedTime / (double)procRecs;
            
    double hrsLeft = (((double)procRecs / (double)elapsedTime) * (double)totalRecs)  / 3600000.0;
            
    int seconds = (int)(elapsedTime / 60000.0);
    if (secsThreshold != seconds)
    {
        secsThreshold = seconds;
                
        System.out.println(String.format("Elapsed %8.2f hr.mn   Time: %5.2f  Ave Time: %5.2f    Percent: %6.3f  Hours Left: Elapsed %8.2f ", 
                ((double)(elapsedTime)) / 3600000.0, 
                ((double)(deltaTime)) / 1000.0, 
                avergeTime,
                100.0 * ((double)procRecs / (double)totalRecs),
                hrsLeft));
    }
    }
    System.out.println("Done transferring.");*/

    /*Statement uStmt = null;
    try
    {
    uStmt = srcDBConn.createStatement();
    int rv = uStmt.executeUpdate("ALTER TABLE raw ADD FULLTEXT(catalogue_number, genus, species, subspecies, collector_num)");
            
    System.out.println("Indexing rv = "+rv);
            
    } catch (Exception ex)
    {
    ex.printStackTrace();
            
    } finally 
    {
    try
    {
        if (uStmt != null)
        {
            uStmt.close();
        }
    } catch (Exception ex)
    {
        ex.printStackTrace();
    }
    }
    System.out.println("Done Indexing.");*/
}

From source file:lib.JdbcTemplate.java

/**
 * Prepare the given JDBC Statement (or PreparedStatement or CallableStatement),
 * applying statement settings such as fetch size, max rows, and query timeout.
 * @param stmt the JDBC Statement to prepare
 * @throws SQLException if thrown by JDBC API
 * @see #setFetchSize/*from  ww  w .  j  a  v a2 s.  co m*/
 * @see #setMaxRows
 * @see #setQueryTimeout
 * @see org.springframework.jdbc.datasource.DataSourceUtils#applyTransactionTimeout
 */
protected void applyStatementSettings(Statement stmt) throws SQLException {
    int fetchSize = getFetchSize();
    if (fetchSize >= 0) {
        stmt.setFetchSize(fetchSize);
    }
    int maxRows = getMaxRows();
    if (maxRows >= 0) {
        stmt.setMaxRows(maxRows);
    }
    DataSourceUtils.applyTimeout(stmt, getDataSource(), getQueryTimeout());
}

From source file:edu.ku.brc.specify.toycode.BuildSearchIndex.java

/**
 * //from w  ww .j  a v a 2  s.  c om
 */
public void index() {
    //    0            1           2              3                4               5      6     7
    // CatalogNumber, CountAmt, StartDate, StationFieldNumber TypeStatusName, FullName, Name, RankID,
    //    8          9            10            11            12          13       14     15       16        17         18           19
    // Latitude1, Longitude1, LocalityName, MaxElevation, MinElevation, FullName, Name, RankID, LastName, FirstName, MiddleInitial, Text1
    //         20              21            22              23           24           25         26          27          28
    //collectionObjectId, DeterminationID, TaxonID, CollectingEventID, LocalityID, GeographyID, AgentID, tx.ParentID, geo.ParentID

    //      0            1              2                3               4           5           6          7               8         9          10        11
    // CatalogNumber, StartDate, StationFieldNumber TypeStatusName, tx.FullName, Latitude1, Longitude1, LocalityName, geo.FullName, LastName, FirstName, MiddleInitial
    //                  0  1   2   3  4  5  6  7  8  9  0  1  2  3  4  5  6  7  8  9   20  1  2   3  4  5  6  7  8
    int[] colToTblId = { 1, 1, 10, 10, 4, 4, 4, 4, 2, 2, 2, 2, 2, 3, 3, 3, 5, 5, 5, 1, 1, 9, 4, 10, 2, 3, 5, 4,
            3 };
    int[] includeCol = { 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0 };

    // Index for ResultSet (which is one more than the array index)
    int idIndex = 20;
    int taxIndex = 23;
    int geoIndex = 26;
    int ceIndex = 24;
    int geoNameIndex = 15;
    int taxNameIndex = 7;
    int collDateIndex = 3;

    int taxParentIndex = 28;
    int geoParentIndex = 29;

    Calendar cal = Calendar.getInstance();

    long startTime = System.currentTimeMillis();

    IndexWriter[] writers = null;
    try {
        for (int i = 0; i < analyzers.length; i++) {
            files[i] = new File(fileNames[i]);
            analyzers[i] = new StandardAnalyzer(Version.LUCENE_30);
            FileUtils.deleteDirectory(files[i]);
        }

        System.out.println("Indexing to directory '" + INDEX_DIR + "'...");

        long totalRecs = BasicSQLUtils.getCount(dbConn, "SELECT COUNT(*) FROM collectionobject");
        long procRecs = 0;

        Statement stmt = null;
        Statement stmt2 = null;
        Statement stmt3 = null;
        //PreparedStatement pStmt = null;
        try {
            writers = new IndexWriter[analyzers.length];
            for (int i = 0; i < files.length; i++) {
                writers[i] = new IndexWriter(FSDirectory.open(files[i]), analyzers[i], true,
                        IndexWriter.MaxFieldLength.LIMITED);
            }

            System.out.println("Total Records: " + totalRecs);

            stmt = dbConn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(Integer.MIN_VALUE);

            stmt2 = dbConn2.createStatement();

            stmt3 = dbConn3.createStatement();
            stmt3.setFetchSize(Integer.MIN_VALUE);

            //pStmt = dbConn3.prepareStatement("SELECT Text1 FROM preparation WHERE CollectionObjectID = ? AND Text1 IS NOT NULL");

            String sql = createQuery();
            System.out.println(sql);

            ResultSet rs = stmt.executeQuery(sql);
            ResultSetMetaData md = rs.getMetaData();

            StringBuilder indexStr = new StringBuilder();
            StringBuilder contents = new StringBuilder();
            StringBuilder sb = new StringBuilder();
            while (rs.next()) {
                String id = rs.getString(idIndex + 1);
                Document doc = new Document();

                doc.add(new Field("id", id.toString(), Field.Store.YES, Field.Index.ANALYZED));

                indexStr.setLength(0);
                contents.setLength(0);
                sb.setLength(0);

                int cnt = 0;
                for (int i = 0; i < idIndex; i++) {
                    if (includeCol[i] == 1) {
                        String val = rs.getString(i + 1);
                        if (i == 0) {
                            val = val.replaceFirst("^0+(?!$)", "");
                        }

                        //System.out.println(i+" "+cnt+"  "+md.getColumnName(i+1)+" ["+(StringUtils.isNotEmpty(val) ? val : " ")+"] ");
                        contents.append(StringUtils.isNotEmpty(val) ? val : " ");
                        contents.append('\t');
                        cnt++;
                    }
                }

                indexStr.append(contents);

                Date collDate = rs.getDate(collDateIndex);
                if (collDate != null) {
                    cal.setTime(collDate);
                    String yearStr = Integer.toString(cal.get(Calendar.YEAR));
                    indexStr.append(yearStr);
                    indexStr.append('\t');
                    doc.add(new Field("yr", yearStr, Field.Store.YES, Field.Index.ANALYZED));
                }

                sb.setLength(0);
                for (int i = idIndex; i < colToTblId.length; i++) {
                    if (i > idIndex)
                        sb.append(',');
                    sb.append(String.format("%d=%d", colToTblId[i], rs.getInt(i + 1)));
                }
                doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                //writers[0].addDocument(doc);

                ///////////////////////////////////////////////
                // Catalog Number
                ///////////////////////////////////////////////
                String catNum = rs.getString(1);
                if (StringUtils.isNotEmpty(catNum)) {
                    doc.add(new Field("cn", catNum, Field.Store.YES, Field.Index.ANALYZED));
                }

                ///////////////////////////////////////////////
                // Image Name in Text1
                ///////////////////////////////////////////////
                boolean hasName = false;
                /*try
                {
                int idd = Integer.parseInt(id);
                //pStmt.setInt(1, idd);
                //ResultSet rsp = pStmt.executeQuery();
                ResultSet rsp = stmt3.executeQuery(String.format("SELECT Text1 FROM preparation WHERE CollectionObjectID = %d AND Text1 IS NOT NULL", idd));
                if (rsp.next())
                {
                    String imgName = rsp.getString(1);
                    if (StringUtils.isNotEmpty(imgName))
                    {
                        String nm = FilenameUtils.getName(imgName);
                        doc.add(new Field("im", nm, Field.Store.NO, Field.Index.ANALYZED));
                        contents.append(nm);
                        hasName = true;
                    }
                }
                rsp.close();
                } catch (SQLException e) {e.printStackTrace();}
                */
                if (!hasName) {
                    contents.append(" ");
                }
                contents.append('\t');

                ///////////////////////////////////////////////
                // Collector  (Agent)
                ///////////////////////////////////////////////
                String dataStr = buildStr(rs, sb, 17, 18, 19);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("ag", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                ///////////////////////////////////////////////
                // Locality 
                ///////////////////////////////////////////////
                dataStr = buildStr(rs, sb, 9, 10, 11, 12, 13, 14);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("lc", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }
                //writers[2].addDocument(doc);

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                ///////////////////////////////////////////////
                // Taxon
                ///////////////////////////////////////////////
                dataStr = buildStr(rs, sb, 5, 6);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("tx", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }
                //writers[3].addDocument(doc);

                int taxId = rs.getInt(taxIndex);
                boolean taxOK = !rs.wasNull();
                int taxPId = rs.getInt(taxParentIndex);
                taxOK = taxOK && !rs.wasNull();

                int geoId = rs.getInt(geoIndex);
                boolean geoOK = !rs.wasNull();
                int geoPId = rs.getInt(geoParentIndex);
                geoOK = geoOK && !rs.wasNull();

                int ceId = rs.getInt(ceIndex);
                boolean ceOK = !rs.wasNull();

                if (taxOK) {
                    addHigherTaxa(stmt2, doc, indexStr, taxId, taxPId, rs.getInt(taxNameIndex + 1),
                            rs.getString(taxNameIndex));
                    addAuthor(stmt2, doc, indexStr, taxId);
                }

                if (geoOK) {
                    addCountry(stmt2, doc, indexStr, geoId, geoPId, rs.getInt(geoNameIndex + 1),
                            rs.getString(geoNameIndex));
                }

                if (ceOK) {
                    addHost(stmt2, doc, indexStr, ceId);
                }

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                doc.add(new Field("cs", indexStr.toString(), Field.Store.NO, Field.Index.ANALYZED));
                //doc.add(new Field("contents", contents.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                writers[0].addDocument(doc);

                //System.out.println(procRecs+" "+rs.getString(1));
                procRecs++;
                if (procRecs % 1000 == 0) {
                    System.out.println(procRecs);
                }

                if (procRecs % 100000 == 0) {
                    System.out.println("Optimizing...");
                    writers[0].optimize();
                }
            }
            rs.close();

        } catch (SQLException sqlex) {
            sqlex.printStackTrace();

        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("IOException adding Lucene Document: " + e.getMessage());

        } finally {

            if (stmt != null) {
                try {
                    if (stmt != null)
                        stmt.close();
                    if (stmt2 != null)
                        stmt2.close();
                    if (stmt3 != null)
                        stmt3.close();

                } catch (SQLException e) {
                    e.printStackTrace();
                }
            }

        }

    } catch (IOException e) {
        e.printStackTrace();

        System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());

    } finally {
        for (Analyzer a : analyzers) {
            a.close();
        }
        analyzers = null;

        for (IndexWriter writer : writers) {
            try {
                System.out.println("Optimizing...");
                writer.optimize();
                writer.close();
                System.out.println("Done Optimizing.");

            } catch (CorruptIndexException e) {
                e.printStackTrace();

            } catch (IOException e) {
                e.printStackTrace();
            }
            writer = null;
        }

        long endTime = System.currentTimeMillis();
        System.out.println("Time: " + (endTime - startTime) / 1000);
    }
}

From source file:edu.ku.brc.specify.tools.webportal.BuildSearchIndex.java

/**
 * //  ww w  .j  a v  a2  s. c om
 */
public void index() {
    //    0            1           2              3                4               5      6     7
    // CatalogNumber, CountAmt, StartDate, StationFieldNumber TypeStatusName, FullName, Name, RankID,
    //    8          9            10            11            12          13       14     15       16        17         18           19
    // Latitude1, Longitude1, LocalityName, MaxElevation, MinElevation, FullName, Name, RankID, LastName, FirstName, MiddleInitial, Text1
    //         20              21            22              23           24           25         26          27          28
    //collectionObjectId, DeterminationID, TaxonID, CollectingEventID, LocalityID, GeographyID, AgentID, tx.ParentID, geo.ParentID

    //      0            1              2                3               4           5           6          7               8         9          10        11
    // CatalogNumber, StartDate, StationFieldNumber TypeStatusName, tx.FullName, Latitude1, Longitude1, LocalityName, geo.FullName, LastName, FirstName, MiddleInitial
    //                  0  1   2   3  4  5  6  7  8  9  0  1  2  3  4  5  6  7  8  9   20  1  2   3  4  5  6  7  8
    int[] colToTblId = { 1, 1, 10, 10, 4, 4, 4, 4, 2, 2, 2, 2, 2, 3, 3, 3, 5, 5, 5, 1, 1, 9, 4, 10, 2, 3, 5, 4,
            3 };
    int[] includeCol = { 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0 };

    // Index for ResultSet (which is one more than the array index)
    int idIndex = 20;
    int taxIndex = 23;
    int geoIndex = 26;
    int ceIndex = 24;
    int geoNameIndex = 15;
    int taxNameIndex = 7;
    int collDateIndex = 3;

    int taxParentIndex = 28;
    int geoParentIndex = 29;

    Calendar cal = Calendar.getInstance();

    long startTime = System.currentTimeMillis();

    IndexWriter[] writers = null;
    try {
        for (int i = 0; i < analyzers.length; i++) {
            files[i] = new File(fileNames[i]);
            analyzers[i] = new StandardAnalyzer(Version.LUCENE_30);
            FileUtils.deleteDirectory(files[i]);
        }

        System.out.println("Indexing to directory '" + INDEX_DIR + "'...");

        long totalRecs = BasicSQLUtils.getCount(dbConn, "SELECT COUNT(*) FROM collectionobject");
        long procRecs = 0;

        Statement stmt = null;
        Statement stmt2 = null;
        Statement stmt3 = null;
        //PreparedStatement pStmt = null;
        try {
            writers = new IndexWriter[analyzers.length];
            for (int i = 0; i < files.length; i++) {
                writers[i] = new IndexWriter(FSDirectory.open(files[i]), analyzers[i], true,
                        IndexWriter.MaxFieldLength.LIMITED);
            }

            System.out.println("Total Records: " + totalRecs);

            stmt = dbConn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(Integer.MIN_VALUE);

            stmt2 = dbConn2.createStatement();

            stmt3 = dbConn3.createStatement();
            stmt3.setFetchSize(Integer.MIN_VALUE);

            //pStmt = dbConn3.prepareStatement("SELECT Text1 FROM preparation WHERE CollectionObjectID = ? AND Text1 IS NOT NULL");

            String sql = createQuery();
            System.out.println(sql);

            ResultSet rs = stmt.executeQuery(sql);
            ResultSetMetaData md = rs.getMetaData();

            StringBuilder indexStr = new StringBuilder();
            StringBuilder contents = new StringBuilder();
            StringBuilder sb = new StringBuilder();
            while (rs.next()) {
                String id = rs.getString(idIndex + 1);
                Document doc = new Document();

                doc.add(new Field("id", id.toString(), Field.Store.YES, Field.Index.ANALYZED));

                indexStr.setLength(0);
                contents.setLength(0);
                sb.setLength(0);

                int cnt = 0;
                for (int i = 0; i < idIndex; i++) {
                    if (includeCol[i] == 1) {
                        String val = rs.getString(i + 1);
                        if (i == 0) {
                            val = val.replaceFirst("^0+(?!$)", "");
                        }

                        //System.out.println(i+" "+cnt+"  "+md.getColumnName(i+1)+" ["+(StringUtils.isNotEmpty(val) ? val : " ")+"] ");
                        contents.append(StringUtils.isNotEmpty(val) ? val : " ");
                        contents.append('\t');
                        cnt++;
                    }
                }

                indexStr.append(contents);

                Date collDate = rs.getDate(collDateIndex);
                if (collDate != null) {
                    cal.setTime(collDate);
                    String yearStr = Integer.toString(cal.get(Calendar.YEAR));
                    indexStr.append(yearStr);
                    indexStr.append('\t');
                    doc.add(new Field("yr", yearStr, Field.Store.YES, Field.Index.ANALYZED));
                }

                sb.setLength(0);
                for (int i = idIndex; i < colToTblId.length; i++) {
                    //if (i>idIndex) sb.append(',');
                    //sb.append(String.format("%d=%d", colToTblId[i], rs.getInt(i+1)));
                    doc.add(new Field(Integer.toString(colToTblId[i]), Integer.toString(rs.getInt(i + 1)),
                            Field.Store.YES, Field.Index.NOT_ANALYZED));
                }
                doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                //writers[0].addDocument(doc);

                ///////////////////////////////////////////////
                // Catalog Number
                ///////////////////////////////////////////////
                String catNum = rs.getString(1);
                if (StringUtils.isNotEmpty(catNum)) {
                    doc.add(new Field("cn", catNum, Field.Store.YES, Field.Index.ANALYZED));
                }

                ///////////////////////////////////////////////
                // Image Name in Text1
                ///////////////////////////////////////////////
                boolean hasName = false;
                /*try
                {
                int idd = Integer.parseInt(id);
                //pStmt.setInt(1, idd);
                //ResultSet rsp = pStmt.executeQuery();
                ResultSet rsp = stmt3.executeQuery(String.format("SELECT Text1 FROM preparation WHERE CollectionObjectID = %d AND Text1 IS NOT NULL", idd));
                if (rsp.next())
                {
                    String imgName = rsp.getString(1);
                    if (StringUtils.isNotEmpty(imgName))
                    {
                        String nm = FilenameUtils.getName(imgName);
                        doc.add(new Field("im", nm, Field.Store.NO, Field.Index.ANALYZED));
                        contents.append(nm);
                        hasName = true;
                    }
                }
                rsp.close();
                } catch (SQLException e) {e.printStackTrace();}
                */
                if (!hasName) {
                    contents.append(" ");
                }
                contents.append('\t');

                ///////////////////////////////////////////////
                // Collector  (Agent)
                ///////////////////////////////////////////////
                String dataStr = buildStr(rs, sb, 17, 18, 19);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("ag", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                ///////////////////////////////////////////////
                // Locality 
                ///////////////////////////////////////////////
                dataStr = buildStr(rs, sb, 9, 10, 11, 12, 13, 14);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("lc", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }
                //writers[2].addDocument(doc);

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                ///////////////////////////////////////////////
                // Taxon
                ///////////////////////////////////////////////
                dataStr = buildStr(rs, sb, 5, 6);
                if (StringUtils.isNotEmpty(dataStr)) {
                    doc.add(new Field("tx", dataStr, Field.Store.NO, Field.Index.ANALYZED));
                }
                //writers[3].addDocument(doc);

                int taxId = rs.getInt(taxIndex);
                boolean taxOK = !rs.wasNull();
                int taxPId = rs.getInt(taxParentIndex);
                taxOK = taxOK && !rs.wasNull();

                int geoId = rs.getInt(geoIndex);
                boolean geoOK = !rs.wasNull();
                int geoPId = rs.getInt(geoParentIndex);
                geoOK = geoOK && !rs.wasNull();

                int ceId = rs.getInt(ceIndex);
                boolean ceOK = !rs.wasNull();

                if (taxOK) {
                    addHigherTaxa(stmt2, doc, indexStr, taxId, taxPId, rs.getInt(taxNameIndex + 1),
                            rs.getString(taxNameIndex));
                    addAuthor(stmt2, doc, indexStr, taxId);
                }

                if (geoOK) {
                    addCountry(stmt2, doc, indexStr, geoId, geoPId, rs.getInt(geoNameIndex + 1),
                            rs.getString(geoNameIndex));
                }

                if (ceOK) {
                    addHost(stmt2, doc, indexStr, ceId);
                }

                //sb.setLength(0);
                //sb.append(String.format("%d=%d", 1, rs.getInt(17))); // Collection Object
                //doc.add(new Field("xref", sb.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); 

                doc.add(new Field("cs", indexStr.toString(), Field.Store.NO, Field.Index.ANALYZED));
                doc.add(new Field("contents", contents.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                writers[0].addDocument(doc);

                //System.out.println(procRecs+" "+rs.getString(1));
                procRecs++;
                if (procRecs % 1000 == 0) {
                    System.out.println(procRecs);
                }

                if (procRecs % 100000 == 0) {
                    System.out.println("Optimizing...");
                    writers[0].optimize();
                }
            }
            rs.close();

        } catch (SQLException sqlex) {
            sqlex.printStackTrace();

        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("IOException adding Lucene Document: " + e.getMessage());

        } finally {

            if (stmt != null) {
                try {
                    if (stmt != null)
                        stmt.close();
                    if (stmt2 != null)
                        stmt2.close();
                    if (stmt3 != null)
                        stmt3.close();

                } catch (SQLException e) {
                    e.printStackTrace();
                }
            }

        }

    } catch (IOException e) {
        e.printStackTrace();

        System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());

    } finally {
        for (Analyzer a : analyzers) {
            a.close();
        }
        analyzers = null;

        for (IndexWriter writer : writers) {
            try {
                System.out.println("Optimizing...");
                writer.optimize();
                writer.close();
                System.out.println("Done Optimizing.");

            } catch (CorruptIndexException e) {
                e.printStackTrace();

            } catch (IOException e) {
                e.printStackTrace();
            }
            writer = null;
        }

        long endTime = System.currentTimeMillis();
        System.out.println("Time: " + (endTime - startTime) / 1000);
    }
}

From source file:edu.ku.brc.specify.tools.webportal.BuildSearchIndex2.java

License:asdf

/**
 * Builds the Lucene index for the web portal from the export-mapping cache table and writes
 * the companion Solr schema XML, portal field JSON and portal instance JSON files, finally
 * bundling everything via {@code buildZipFile()}.
 *
 * <p>Each cache row becomes one Lucene {@code Document}: result-set column 1 is stored
 * verbatim as "spid"; every other mapped column is added under its short name with a field
 * type derived from the mapping (string/boolean/int/double/long/float). All values are also
 * concatenated into the stored "contents" field and the indexed "cs" field. latitude1 and
 * longitude1 are combined into a "geoc" field, and collection-object attachments are indexed
 * under "img".
 *
 * @param progressListener optional listener notified of loading/loaded, the total row count
 *                         and per-row progress; may be null
 * @return true if the index and all supporting files were written successfully; false if any
 *         step failed (failures are recorded via UsageTracker/ExceptionTracker)
 */
public boolean index(QBDataSourceListenerIFace progressListener) {

    if (progressListener != null) {
        progressListener.loading();
    }

    long startTime = System.currentTimeMillis();

    IndexWriter[] writers = null;
    long totalRecs = 0;
    List<String> solrFldXml = null;
    List<String> portalFldJson = null;
    // Failure flag instead of the original "return false inside finally" pattern: a return
    // executed while a finally block runs silently discards in-flight exceptions/returns.
    boolean success = true;

    try {
        for (int i = 0; i < analyzers.length; i++) {
            files[i] = new File(fileNames[i]);
            analyzers[i] = new StandardAnalyzer(Version.LUCENE_47, CharArraySet.EMPTY_SET);
            FileUtils.deleteDirectory(files[i]);
        }

        System.out.println("Indexing to directory '" + INDEX_DIR + "'...");

        ExportMappingHelper map = new ExportMappingHelper(dbConn, mapping.getId());

        Map<Integer, String> shortNames = getShortNamesForFields(map);

        totalRecs = BasicSQLUtils.getCount(dbConn, "SELECT COUNT(*) FROM " + map.getCacheTblName());
        if (progressListener != null) {
            progressListener.loaded();
            progressListener.rowCount(totalRecs);
        }
        long procRecs = 0;

        Statement stmt = null;
        Statement stmt2 = null;
        Statement stmt3 = null;
        try {
            writers = new IndexWriter[analyzers.length];
            for (int i = 0; i < files.length; i++) {
                writers[i] = new IndexWriter(FSDirectory.open(files[i]),
                        new IndexWriterConfig(Version.LUCENE_47, analyzers[i]));
            }

            System.out.println("Total Records: " + totalRecs);

            stmt = dbConn.createStatement();
            // MySQL streaming mode: rows are fetched one at a time so a giant cache table
            // does not have to fit in memory all at once.
            stmt.setFetchSize(Integer.MIN_VALUE);

            stmt2 = dbConn2.createStatement();

            stmt3 = dbConn3.createStatement();
            stmt3.setFetchSize(Integer.MIN_VALUE);

            String sql = createQuery(map.getCacheTblName());
            System.out.println(sql);

            ResultSet rs = stmt.executeQuery(sql);
            ResultSetMetaData md = rs.getMetaData();

            StringBuilder indexStr = new StringBuilder();
            StringBuilder contents = new StringBuilder();
            String lat1 = null, lng1 = null, lat2 = null, lng2 = null;
            solrFldXml = getFldsXmlForSchema(map, shortNames);
            portalFldJson = getModelInJson(map, shortNames);
            while (rs.next()) {
                Document doc = new Document();
                indexStr.setLength(0);
                contents.setLength(0);
                lat1 = null;
                lng1 = null;
                lat2 = null;
                lng2 = null;
                for (int c = 1; c <= md.getColumnCount(); c++) {
                    if (!includeColumn(c)) {
                        continue;
                    }
                    String value = "";
                    try {
                        value = rs.getString(c);
                    } catch (Exception ex) {
                        ex.printStackTrace();
                    }
                    if (c == 1) {
                        // Column 1 is the record id; stored verbatim, never tokenized.
                        doc.add(new StringField("spid", value, Field.Store.YES));
                    } else if (value != null) {
                        // Mapped columns start at result-set column 2, hence the c - 2 offset.
                        ExportMappingInfo info = map.getMappingByColIdx(c - 2);
                        String fldType = getSolrFldType(info);
                        if (fldType.equals("string")) {
                            if (info.isFullTextSearch()) {
                                doc.add(new TextField(shortNames.get(c - 2), value, Field.Store.YES));
                            } else {
                                doc.add(new StringField(shortNames.get(c - 2), value, Field.Store.YES));
                            }
                        } else if (fldType.equals("boolean")) {
                            doc.add(new StringField(shortNames.get(c - 2), value, Field.Store.YES));
                        } else if (fldType.endsWith("int")) {
                            doc.add(new IntField(shortNames.get(c - 2), rs.getInt(c), Field.Store.YES));
                        } else if (fldType.endsWith("double")) {
                            doc.add(new DoubleField(shortNames.get(c - 2), rs.getDouble(c), Field.Store.YES));
                        } else if (fldType.endsWith("long")) {
                            doc.add(new LongField(shortNames.get(c - 2), rs.getLong(c), Field.Store.YES));
                        } else if (fldType.endsWith("float")) {
                            doc.add(new FloatField(shortNames.get(c - 2), rs.getFloat(c), Field.Store.YES));
                        }
                        contents.append(StringUtils.isNotEmpty(value) ? value : " ");
                        contents.append('\t');
                        // Remember coordinate columns for the "geoc" field below.
                        String spFldName = info.getSpFldName();
                        if ("latitude1".equalsIgnoreCase(spFldName)) {
                            lat1 = value;
                        } else if ("latitude2".equalsIgnoreCase(spFldName)) {
                            lat2 = value;
                        } else if ("longitude1".equalsIgnoreCase(spFldName)) {
                            lng1 = value;
                        } else if ("longitude2".equalsIgnoreCase(spFldName)) {
                            lng2 = value;
                        }
                    }
                }
                indexStr.append(contents);

                // NOTE(review): the store/tokenize choices for cs/contents (and geoc/img
                // below) were questioned by the original author and never justified.
                doc.add(new TextField("cs", indexStr.toString(), Field.Store.NO));
                doc.add(new StringField("contents", contents.toString(), Field.Store.YES));

                if (lat1 != null && lng1 != null) {
                    // lat2/lng2 (the second bounding-box corner) are collected above but
                    // deliberately not indexed yet.
                    String geoc = lat1 + " " + lng1;
                    doc.add(new StringField("geoc", geoc, Field.Store.NO));
                }

                String attachments = getAttachments(dbConn2, "collectionobject", rs.getInt(1), false);
                if (attachments != null && attachments.length() > 0) {
                    doc.add(new StringField("img", attachments, Field.Store.YES));
                }
                writers[0].addDocument(doc);

                procRecs++;
                if (procRecs % 1000 == 0) {
                    System.out.println(procRecs);
                    if (progressListener != null) {
                        progressListener.currentRow(procRecs - 1);
                    }
                }
            }
            rs.close();

            writePortalJsonToFile(portalFldJson);
            writeSolrFldXmlToFile(solrFldXml);
            writePortalInstanceJsonToFile();

        } catch (Exception ex) {
            UsageTracker.incrHandledUsageCount();
            edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(this.getClass(), ex);
            success = false;
        } finally {
            try {
                if (stmt != null) {
                    stmt.close();
                }
                if (stmt2 != null) {
                    stmt2.close();
                }
                if (stmt3 != null) {
                    stmt3.close();
                }
            } catch (SQLException e) {
                UsageTracker.incrHandledUsageCount();
                edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(this.getClass(), e);
                success = false;
            }
        }

    } catch (Exception ex) {
        UsageTracker.incrHandledUsageCount();
        edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(this.getClass(), ex);
        success = false;

    } finally {
        for (Analyzer a : analyzers) {
            if (a != null) {
                a.close();
            }
        }
        analyzers = null;

        // Guard against the NPE the original had here: writers is still null if an
        // exception occurred before the IndexWriter array was allocated.
        if (writers != null) {
            for (IndexWriter writer : writers) {
                if (writer == null) {
                    continue; // allocation may have failed part-way through
                }
                try {
                    System.out.println("Optimizing...");
                    writer.close();
                    System.out.println("Done Optimizing.");

                } catch (Exception ex) {
                    UsageTracker.incrHandledUsageCount();
                    edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(this.getClass(), ex);
                    success = false;
                }
            }
        }
    }

    if (!success) {
        return false;
    }

    buildZipFile();

    if (progressListener != null) {
        progressListener.done(totalRecs);
    }
    long endTime = System.currentTimeMillis();
    System.out.println("Time: " + (endTime - startTime) / 1000);
    return true;

}

From source file:com.feedzai.commons.sql.abstraction.engine.AbstractDatabaseEngine.java

/**
 * Creates an iterator over the results of the given query, applying the requested JDBC
 * fetch-size hint to the underlying statement.
 *
 * @param query     the SQL query to execute
 * @param fetchSize hint for the number of rows fetched per round trip
 * @return a {@code ResultIterator} over the query results; it takes ownership of the
 *         created statement
 * @throws DatabaseEngineException if the connection, statement or iterator cannot be created
 */
@Override
public ResultIterator iterator(String query, int fetchSize) throws DatabaseEngineException {
    Statement stmt = null;
    try {
        getConnection();
        stmt = conn.createStatement();
        stmt.setFetchSize(fetchSize);
        logger.trace(query);
        return createResultIterator(stmt, query);

    } catch (final Exception e) {
        // Don't leak the statement when configuring it or creating the iterator fails;
        // on success ownership has already passed to the returned ResultIterator.
        if (stmt != null) {
            try {
                stmt.close();
            } catch (final Exception ignored) {
                // best effort — the original failure is the one worth reporting
            }
        }
        throw new DatabaseEngineException("Error querying", e);
    }
}

From source file:com.streamsets.pipeline.stage.origin.jdbc.JdbcSource.java

/**
 * Produces the next batch of records from the configured JDBC query.
 *
 * <p>Either sleeps (in ≤1s increments) until the configured query interval has elapsed, or
 * reads up to {@code batchSize} rows from the open result set, (re)running the query first
 * if no result set is open. In CDC mode (a transaction-id column is configured) a batch
 * never spans two transactions. Emits query-success/failure events and, when a query's
 * result set is exhausted, a no-more-data event per the incremental/non-incremental rules
 * documented inline below.
 *
 * @param lastSourceOffset offset returned by the previous call; null on a fresh pipeline,
 *                         in which case {@code initialOffset} is used
 * @param maxBatchSize     upper bound on records for this batch (further capped by the
 *                         stage's own configured maximum)
 * @param batchMaker       sink the produced records are added to
 * @return the offset to resume from on the next call
 * @throws StageException after more consecutive SQL errors than the configured retry limit
 */
@Override
public String produce(String lastSourceOffset, int maxBatchSize, BatchMaker batchMaker) throws StageException {
    int batchSize = Math.min(this.commonSourceConfigBean.maxBatchSize, maxBatchSize);
    String nextSourceOffset = lastSourceOffset == null ? initialOffset : lastSourceOffset;

    long now = System.currentTimeMillis();
    long delay = Math.max(0, (lastQueryCompletedTime + queryIntervalMillis) - now);

    if (delay > 0) {
        // Sleep in one second increments so we don't tie up the app.
        LOG.debug("{}ms remaining until next fetch.", delay);
        ThreadUtil.sleep(Math.min(delay, 1000));
    } else {
        Statement statement = null;
        Hasher hasher = HF.newHasher();
        try {
            if (null == resultSet || resultSet.isClosed()) {
                // The result set got closed outside of us, so we also clean up the connection (if any)
                closeQuietly(connection);

                connection = dataSource.getConnection();

                if (!txnColumnName.isEmpty()) {
                    // CDC requires scrollable cursors.
                    statement = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                            ResultSet.CONCUR_READ_ONLY);
                } else {
                    statement = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY,
                            ResultSet.CONCUR_READ_ONLY);
                }

                int fetchSize = batchSize;
                // MySQL does not support cursors or fetch size except 0 and "streaming" (1 at a time).
                if (hikariConfigBean.getConnectionString().toLowerCase().contains("mysql")) {
                    // Enable MySQL streaming mode.
                    fetchSize = Integer.MIN_VALUE;
                }
                LOG.debug("Using query fetch size: {}", fetchSize);
                statement.setFetchSize(fetchSize);

                if (getContext().isPreview()) {
                    // Preview mode only needs one batch worth of rows.
                    statement.setMaxRows(batchSize);
                }
                preparedQuery = prepareQuery(query, lastSourceOffset);
                LOG.trace("Executing query: " + preparedQuery);
                // Only the hash of the query is logged at debug level (the full text may be
                // large or sensitive); trace level above has the full query.
                hashedQuery = hasher.putString(preparedQuery, Charsets.UTF_8).hash().toString();
                LOG.debug("Executing query: " + hashedQuery);
                resultSet = statement.executeQuery(preparedQuery);
                // Fresh query: reset per-query counters and error tracking.
                queryRowCount = 0;
                numQueryErrors = 0;
                firstQueryException = null;
            }

            // Read Data and track last offset
            int rowCount = 0;
            String lastTransactionId = "";
            // haveNext stays true when the loop stops because the batch is full rather
            // than because the cursor is exhausted.
            boolean haveNext = true;
            while (continueReading(rowCount, batchSize) && (haveNext = resultSet.next())) {
                final Record record = processRow(resultSet, rowCount);

                if (null != record) {
                    if (!txnColumnName.isEmpty()) {
                        // CDC mode: a batch must not span transactions.
                        String newTransactionId = resultSet.getString(txnColumnName);
                        if (lastTransactionId.isEmpty()) {
                            lastTransactionId = newTransactionId;
                            batchMaker.addRecord(record);
                        } else if (lastTransactionId.equals(newTransactionId)) {
                            batchMaker.addRecord(record);
                        } else {
                            // The Transaction ID Column Name config should not be used with MySQL as it
                            // does not provide a change log table and the JDBC driver may not support scrollable cursors.
                            resultSet.relative(-1);
                            break; // Complete this batch without including the new record.
                        }
                    } else {
                        batchMaker.addRecord(record);
                    }
                }

                // Get the offset column value for this record
                if (isIncrementalMode) {
                    nextSourceOffset = resultSet.getString(offsetColumn);
                } else {
                    nextSourceOffset = initialOffset;
                }
                ++rowCount;
                ++queryRowCount;
                ++noMoreDataRecordCount;
                shouldFire = true;
            }
            LOG.debug("Processed rows: " + rowCount);

            if (!haveNext || rowCount == 0) {
                // We didn't have any data left in the cursor. Close everything
                // We may not have the statement here if we're not producing the
                // same batch as when we got it, so get it from the result set
                // Get it before we close the result set, just to be safe!
                statement = resultSet.getStatement();
                closeQuietly(resultSet);
                closeQuietly(statement);
                closeQuietly(connection);
                lastQueryCompletedTime = System.currentTimeMillis();
                LOG.debug("Query completed at: {}", lastQueryCompletedTime);
                QUERY_SUCCESS.create(getContext()).with(QUERY, preparedQuery)
                        .with(TIMESTAMP, lastQueryCompletedTime).with(ROW_COUNT, queryRowCount)
                        .with(SOURCE_OFFSET, nextSourceOffset).createAndSend();

                // In case of non-incremental mode, we need to generate no-more-data event as soon as we hit end of the
                // result set. Incremental mode will try to run the query again and generate the event if and only if
                // the next query results in zero rows.
                if (!isIncrementalMode) {
                    generateNoMoreDataEvent();
                }
            }

            /*
             * We want to generate no-more data event on next batch if:
             * 1) We run a query in this batch and returned empty.
             * 2) We consumed at least some data since last time (to not generate the event all the time)
             */

            if (isIncrementalMode && rowCount == 0 && !haveNext && shouldFire && !firstTime) {
                generateNoMoreDataEvent();
                shouldFire = false;
            }
            firstTime = false;

        } catch (SQLException e) {
            // Track the first error of a retry run so it can be reported if retries are
            // ultimately exhausted.
            if (++numQueryErrors == 1) {
                firstQueryException = e;
            }
            String formattedError = jdbcUtil.formatSqlException(e);
            LOG.error(formattedError, e);
            if (resultSet != null) {
                try {
                    statement = resultSet.getStatement();
                } catch (SQLException e1) {
                    LOG.debug("Error while getting statement from result set: {}", e1.toString(), e1);
                }
                closeQuietly(resultSet);
                closeQuietly(statement);
            }
            closeQuietly(connection);
            lastQueryCompletedTime = System.currentTimeMillis();
            QUERY_FAILURE.create(getContext()).with(QUERY, preparedQuery)
                    .with(TIMESTAMP, lastQueryCompletedTime).with(ERROR, formattedError)
                    .with(ROW_COUNT, queryRowCount).with(SOURCE_OFFSET, nextSourceOffset).createAndSend();
            LOG.debug("Query '{}' failed at: {}; {} errors so far", preparedQuery, lastQueryCompletedTime,
                    numQueryErrors);
            if (numQueryErrors > commonSourceConfigBean.numSQLErrorRetries) {
                throw new StageException(JdbcErrors.JDBC_77, e.getClass().getSimpleName(), preparedQuery,
                        numQueryErrors, jdbcUtil.formatSqlException(firstQueryException));
            } // else allow nextSourceOffset to be returned, to retry
        }
    }
    return nextSourceOffset;
}

From source file:jade.domain.DFDBKB.java

/**
 * Retrieves the DFAgentDescriptions matching the given template.
 *
 * @param template  the {@code DFAgentDescription} template to match against
 * @param maxResult maximum number of results to return; a negative value means no limit
 * @return a List of the matching, fully reconstructed DFAgentDescriptions
 * @throws SQLException if the SELECT cannot be built or executed
 */
protected List searchSingle(Object template, int maxResult) throws SQLException {
    List matchingAIDs = new ArrayList();

    // Get the names of all DFDs matching the template
    String select = null;
    ResultSet rs = null;
    Statement s = null;

    try {
        select = createSelect((DFAgentDescription) template);

        s = getConnectionWrapper().getConnection().createStatement();
        if (maxResult >= 0) {
            // Cap both the hard row limit and the fetch hint to the caller's maximum.
            s.setMaxRows(maxResult);
            s.setFetchSize(maxResult);
        }
        rs = s.executeQuery(select);

        while (rs.next()) {
            matchingAIDs.add(rs.getString("aid"));
        }
    } catch (SQLException sqle) {
        // Let real SQL errors through unwrapped; the generic handler below is only
        // for failures while *building* the SELECT.
        throw sqle;
    } catch (Exception e) {
        logger.log(Logger.SEVERE, "Couldn't create the SQL SELECT statement.", e);
        // Preserve the original exception as the cause instead of flattening it
        // into the message string and losing its stack trace.
        SQLException wrapped = new SQLException("Couldn't create the SQL SELECT statement. " + e.getMessage());
        wrapped.initCause(e);
        throw wrapped;
    } finally {
        closeResultSet(rs);
        closeStatement(s);
    }

    // For each matching AID reconstruct the complete DFD
    List dfds = new ArrayList(matchingAIDs.size());
    Iterator it = matchingAIDs.iterator();
    // FIXME: Define a proper constant and possibly a proper configuration option
    if (matchingAIDs.size() < 10) {
        while (it.hasNext()) {
            dfds.add(getDFD((String) it.next()));
        }
    } else {
        // If we found several matching agents we preload protocols, languages and
        // ontologies once for all instead of making several queries, one per agent.
        PreparedStatements pss = getPreparedStatements();
        Map allLanguages = preloadIdValueTable(pss.stm_selCountAllLanguages, pss.stm_selAllLanguages);
        Map allOntologies = preloadIdValueTable(pss.stm_selCountAllOntologies, pss.stm_selAllOntologies);
        Map allProtocols = preloadIdValueTable(pss.stm_selCountAllProtocols, pss.stm_selAllProtocols);
        while (it.hasNext()) {
            dfds.add(getDFD((String) it.next(), allLanguages, allOntologies, allProtocols));
        }
    }

    return dfds;
}

From source file:net.sf.farrago.namespace.jdbc.MedJdbcDataServer.java

/**
 * Creates a statement allocation for executing the given SQL at runtime.
 *
 * <p>Uses an ownership-transfer idiom for exception safety: on success the returned
 * {@code FarragoStatementAllocation} owns (and later closes) the statement and, when
 * pooling is enabled, the connection; on failure the finally block releases them here.
 *
 * @param param the SQL text to execute (passed as {@code Object} per the runtime-support
 *              contract; must be a {@code String})
 * @return a {@code FarragoStatementAllocation} wrapping the prepared statement
 * @throws SQLException if a connection or statement cannot be created or configured
 */
public Object getRuntimeSupport(Object param) throws SQLException {
    String sql = (String) param;

    FarragoStatementAllocation stmtAlloc;
    Statement stmt;
    if (disableConnectionPool) {
        Connection conn = getConnection();
        stmt = conn.createStatement();

        // Leave connection open (closed by release resources)
        stmtAlloc = new FarragoStatementAllocation(stmt);
    } else {
        // N.B.: do not invoke getConnection(): We want to obtain multiple
        // connections if there are multiple XOs requiring runtime support.
        // MySQL (with streaming results) and loopback connections require
        // this behavior.
        Connection conn = newConnection();
        stmt = conn.createStatement();

        // Closes connection when no longer needed, which returns it to the
        // pool.
        stmtAlloc = new FarragoStatementAllocation(conn, stmt);
    }

    try {
        if (fetchSize != DEFAULT_FETCH_SIZE) {
            stmt.setFetchSize(fetchSize);
        }
        stmtAlloc.setSql(sql);
        // Ownership transfer: nulling stmt tells the finally block the allocation
        // is being handed to the caller and must not be torn down here.
        stmt = null;
        return stmtAlloc;
    } finally {
        // stmt still non-null means setFetchSize/setSql threw: release the
        // allocation (closing the statement and any pooled connection).
        if (stmt != null) {
            stmtAlloc.closeAllocation();
        }
    }
}

From source file:com.hangum.tadpole.rdb.core.editors.main.composite.ResultSetComposite.java

/**
 * Executes the given SELECT request and builds a QueryExecuteResultDTO from the results.
 * 
 * @param reqQuery
 * @param queryTimeOut
 * @param strUserEmail
 * @param intSelectLimitCnt
 * @param intStartCnt
 * @param strNullValue 
 * @return
 * @throws Exception
 */
public QueryExecuteResultDTO runSelect(final RequestQuery reqQuery, final int queryTimeOut,
        final String strUserEmail, final int intSelectLimitCnt, final int intStartCnt) throws Exception {
    String strSQL = reqQuery.getSql();
    if (!PermissionChecker.isExecute(getDbUserRoleType(), getUserDB(), strSQL)) {
        throw new Exception(Messages.get().MainEditor_21);
    }
    if (logger.isDebugEnabled())
        logger.debug("==> real execute query : " + strSQL);

    tadpole_system_message = "";
    QueryExecuteResultDTO queryResultDAO = null;

    //  ??   ???  .
    IMainEditorExtension[] extensions = getRdbResultComposite().getMainEditor().getMainEditorExtions();
    if (extensions != null) {
        for (IMainEditorExtension iMainEditorExtension : extensions) {
            String strCostumSQL = iMainEditorExtension.sqlCostume(strSQL);
            if (!strCostumSQL.equals(strSQL)) {
                if (logger.isDebugEnabled())
                    logger.debug("** extension costume sql is : " + strCostumSQL); //$NON-NLS-1$
                strSQL = strCostumSQL;
            }
        }
    }

    //  ??   ???  .
    ResultSet resultSet = null;
    java.sql.Connection javaConn = null;
    Statement statement = null;
    PreparedStatement preparedStatement = null;

    try {
        if (DBGroupDefine.TAJO_GROUP == getUserDB().getDBGroup()) {
            javaConn = ConnectionPoolManager.getDataSource(getUserDB()).getConnection();
        } else {
            if (reqQuery.isAutoCommit()) {
                SqlMapClient client = TadpoleSQLManager.getInstance(getUserDB());
                javaConn = client.getDataSource().getConnection();
            } else {
                javaConn = TadpoleSQLTransactionManager.getInstance(strUserEmail, getUserDB());
            }
        }
        if (javaConn == null) {
            throw new Exception("Cann't create session. Please check system.");
        }

        // if statement type is prepared statement?
        if (reqQuery.getSqlStatementType() == SQL_STATEMENT_TYPE.NONE) {
            statement = javaConn.createStatement();

            statement.setFetchSize(intSelectLimitCnt);
            if (DBGroupDefine.HIVE_GROUP != getUserDB().getDBGroup()) {
                statement.setQueryTimeout(queryTimeOut);
                statement.setMaxRows(intSelectLimitCnt);
            }

            // check stop thread
            esCheckStop = Executors.newSingleThreadExecutor();
            CheckStopThread cst = new CheckStopThread(statement);
            cst.setName("TDB Query Stop checker"); //$NON-NLS-1$
            esCheckStop.execute(cst);

            // execute query
            execServiceQuery = Executors.newSingleThreadExecutor();
            if (intStartCnt == 0) {
                resultSet = _runSQLSelect(statement, strSQL);
            } else {
                strSQL = PartQueryUtil.makeSelect(getUserDB(), strSQL, intStartCnt, intSelectLimitCnt);

                if (logger.isDebugEnabled())
                    logger.debug("part sql called : " + strSQL);
                resultSet = _runSQLSelect(statement, strSQL);
            }

        } else if (reqQuery.getSqlStatementType() == SQL_STATEMENT_TYPE.PREPARED_STATEMENT) {
            preparedStatement = javaConn.prepareStatement(strSQL);

            preparedStatement.setFetchSize(intSelectLimitCnt);
            if (DBGroupDefine.HIVE_GROUP != getUserDB().getDBGroup()) {
                preparedStatement.setQueryTimeout(queryTimeOut);
                preparedStatement.setMaxRows(intSelectLimitCnt);
            }

            // check stop thread
            esCheckStop = Executors.newSingleThreadExecutor();
            CheckStopThread cst = new CheckStopThread(preparedStatement);
            cst.setName("TDB Query Stop checker"); //$NON-NLS-1$
            esCheckStop.execute(cst);

            // execute query
            execServiceQuery = Executors.newSingleThreadExecutor();
            if (intStartCnt == 0) {
                resultSet = _runSQLSelect(preparedStatement, reqQuery.getStatementParameter());
            } else {
                strSQL = PartQueryUtil.makeSelect(getUserDB(), strSQL, intStartCnt, intSelectLimitCnt);

                if (logger.isDebugEnabled())
                    logger.debug("part sql called : " + strSQL);
                resultSet = _runSQLSelect(preparedStatement, reqQuery.getStatementParameter());
            }
        }

        queryResultDAO = new QueryExecuteResultDTO(getUserDB(), reqQuery.getSql(), true, resultSet,
                intSelectLimitCnt, intStartCnt);
        if (resultSet == null) {
            if (StringUtils.isEmpty(StringUtils.deleteWhitespace(tadpole_system_message))) {
                tadpole_system_message = CMD_COMPLETE_MSG;
            }

        }
        queryResultDAO.setQueryMsg(tadpole_system_message);

    } catch (Exception e) {
        throw e;
    } finally {
        isCheckRunning = false;
        try {
            if (preparedStatement != null)
                preparedStatement.close();
        } catch (Exception e) {
        }
        try {
            if (statement != null)
                statement.close();
        } catch (Exception e) {
        }
        try {
            if (resultSet != null)
                resultSet.close();
        } catch (Exception e) {
        }

        if (reqQuery.isAutoCommit()) {
            try {
                if (javaConn != null)
                    javaConn.close();
            } catch (Exception e) {
            }
        }
    }

    return queryResultDAO;
}