List of usage examples for java.sql ResultSet CONCUR_READ_ONLY
int CONCUR_READ_ONLY
The constant indicating the concurrency mode for a ResultSet object that may NOT be updated.
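Before the project examples below, here is a minimal, self-contained sketch of how the constant is typically passed to Connection.createStatement. The in-memory H2 JDBC URL and the trivial query are assumptions made purely for illustration; they are not taken from any project quoted on this page.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ConcurReadOnlyExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection; any JDBC driver on the classpath would do.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
             // Request a scroll-insensitive, read-only result set.
             Statement stmt = conn.createStatement(
                     ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
             ResultSet rs = stmt.executeQuery("SELECT 1")) {
            // With CONCUR_READ_ONLY the rows can be read and scrolled,
            // but updateRow()/deleteRow() are not available.
            while (rs.next()) {
                System.out.println(rs.getInt(1));
            }
        }
    }
}

Note that a driver which cannot honor the requested type/concurrency combination may downgrade it and issue an SQLWarning; ResultSet.getConcurrency() reports what was actually granted.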
From source file:com.cloudera.sqoop.TestAvroImportExportRoundtrip.java
private void deleteTableData() throws SQLException {
    Connection conn = getConnection();
    PreparedStatement statement = conn.prepareStatement("DELETE FROM " + getTableName(),
            ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    try {
        statement.executeUpdate();
        conn.commit();
    } finally {
        statement.close();
    }
}
From source file:fr.gael.dhus.server.http.webapp.symmetricDS.SymmetricDSWebapp.java
@Override
public void checkInstallation() throws Exception {
    if (!scalabilityManager.isMaster())
        return;

    // Check database is ready for SymmetricDS
    PreparedStatement ps = datasource.getConnection().prepareStatement(
            "SELECT * FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE CONSTRAINT_NAME='SYM_FK_TRGPLT_2_TR';",
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    while (!ps.executeQuery().first()) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    ps.close();

    // Check if init.sql has already been executed
    ps = datasource.getConnection().prepareStatement(
            "SELECT node_group_id FROM SYM_NODE_GROUP WHERE node_group_id = 'dhus-replica-group';",
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    if (!ps.executeQuery().first()) {
        // Wait for master group to be inserted
        PreparedStatement ps2 = datasource.getConnection().prepareStatement(
                "SELECT * FROM SYM_NODE_GROUP WHERE node_group_id='dhus-master-group';",
                ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        while (!ps2.executeQuery().first()) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        ps2.close();

        ScriptUtils.executeSqlScript(datasource.getConnection(),
                new ClassPathResource("fr/gael/dhus/server/http/webapp/symmetricDS/init.sql"));
        LOGGER.info("SymmetricDS initialization script loaded");

        // Force the synchronizers to be reloaded
        HttpClient httpclient = HttpClients.createDefault();
        HttpPost httppost = new HttpPost(
                cfgManager.getServerConfiguration().getLocalUrl() + "/sync/api/engine/synctriggers");
        httpclient.execute(httppost);
    }
    ps.close();
}
From source file:com.norconex.collector.http.db.impl.derby.DerbyCrawlURLDatabase.java
@Override
public Iterator<CrawlURL> getCacheIterator() {
    try {
        final Connection conn = datasource.getConnection();
        final Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        final ResultSet rs = stmt.executeQuery(
                "SELECT url, depth, smLastMod, smChangeFreq, smPriority " + "FROM " + TABLE_CACHE);
        if (rs == null || !rs.first()) {
            return null;
        }
        rs.beforeFirst();
        return new CrawlURLIterator(rs, conn, stmt);
    } catch (SQLException e) {
        throw new CrawlURLDatabaseException("Problem getting database cache iterator.", e);
    }
}
From source file:edu.ku.brc.specify.toycode.mexconabio.MakeGBIFProcessHash.java
@Override
public void process(final int type, final int options) {
    final double HRS = 1000.0 * 60.0 * 60.0;
    final long PAGE_CNT = 1000000;

    totalRecs = BasicSQLUtils.getCount(dbGBIFConn, "SELECT COUNT(*) FROM raw");
    int minIndex = BasicSQLUtils.getCount(dbGBIFConn, "SELECT MIN(id) FROM raw");
    //int maxIndex = BasicSQLUtils.getCount(dbGBIFConn, "SELECT MAX(id) FROM raw");

    int segs = (int) (totalRecs / PAGE_CNT) + 1;

    try {
        pw = new PrintWriter("GroupHash.log");
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }

    long procRecs = 0;
    long startTime = System.currentTimeMillis();
    int secsThreshold = 0;

    try {
        String idsInsert = "INSERT INTO group_hash_ids (GrpID, RawID) VALUES (?,?)";
        insertIds = dbDstConn.prepareStatement(idsInsert);

        String gbifsnibInsert = "INSERT INTO group_hash (collnum, genus, year, mon, cnt) VALUES (?,?,?,?,?)";
        insertStmt = dbDstConn.prepareStatement(gbifsnibInsert);

        String gbifsnibUpdate = "UPDATE group_hash SET cnt=? WHERE id = ?";
        updateStmt = dbDstConn.prepareStatement(gbifsnibUpdate);

        String gbifsnibCheck = "SELECT id FROM group_hash WHERE collnum=? AND genus=? AND year=?";
        checkStmt = dbDstConn.prepareStatement(gbifsnibCheck);
    } catch (SQLException ex) {
        ex.printStackTrace();
    }

    for (int pc = 0; pc < segs; pc++) {
        try {
            String clause = String.format(" FROM raw WHERE id > %d AND id < %d",
                    (pc * PAGE_CNT) + minIndex, ((pc + 1) * PAGE_CNT) + minIndex + 1);
            String gbifSQL = "SELECT id, collector_num, genus, year, month " + clause;
            System.out.println(gbifSQL);
            pw.println(gbifSQL);

            stmt = dbGBIFConn.createStatement(ResultSet.FETCH_FORWARD, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(Integer.MIN_VALUE);

            String msg = "Starting Query... " + totalRecs;
            System.out.println(msg);
            pw.println(msg);

            ResultSet rs = stmt.executeQuery(gbifSQL);

            msg = String.format("Starting Processing... Total Records %d Max Score: %d Threshold: %d",
                    totalRecs, maxScore, thresholdScore);
            System.out.println(msg);
            pw.println(msg);

            while (rs.next()) {
                procRecs++;

                String year = rs.getString(4);
                year = StringUtils.isNotEmpty(year) ? year.trim() : null;
                if (StringUtils.isNotEmpty(year) && !StringUtils.isNumeric(year)) {
                    continue;
                }

                int rawId = rs.getInt(1);
                String collnum = rs.getString(2);
                String genus = rs.getString(3);
                String mon = rs.getString(5);

                collnum = StringUtils.isNotEmpty(collnum) ? collnum.trim() : null;
                genus = StringUtils.isNotEmpty(genus) ? genus.trim() : null;
                mon = StringUtils.isNotEmpty(mon) ? mon.trim() : null;

                int c = 0;
                if (collnum == null) c++;
                if (genus == null) c++;
                if (year == null) c++;
                if (c == 2) {
                    continue;
                }

                collnum = collnum != null ? collnum : "";
                genus = genus != null ? genus : "";
                year = year != null ? year : "";
                mon = mon != null ? mon : "";

                if (collnum.length() > 64) {
                    collnum = collnum.substring(0, 63);
                }
                if (genus.length() > 64) {
                    genus = genus.substring(0, 63);
                }
                if (year.length() > 8) {
                    year = year.substring(0, 8);
                }
                if (mon.length() > 8) {
                    mon = year.substring(0, 8);
                }

                String name = String.format("%s_%s_%s", collnum, genus, year);
                DataEntry de = groupHash.get(name);
                if (de != null) {
                    de.cnt++;
                } else {
                    de = getDataEntry(collnum, genus, year, mon);
                    groupHash.put(name, de);
                }
                de.ids.add(rawId);

                if (groupHash.size() > MAX_RECORDS_SEG) {
                    writeHash();
                }
            }
            rs.close();

            if (groupHash.size() > 0) {
                writeHash();
            }
            System.out.println("Done with seg " + pc);
            pw.println("Done with seg " + pc);

        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            try {
                if (stmt != null) {
                    stmt.close();
                }
            } catch (Exception ex) {
            }
        }

        long endTime = System.currentTimeMillis();
        long elapsedTime = endTime - startTime;
        double timePerRecord = (elapsedTime / procRecs);
        double hrsLeft = ((totalRecs - procRecs) * timePerRecord) / HRS;

        int seconds = (int) (elapsedTime / 60000.0);
        if (secsThreshold != seconds) {
            secsThreshold = seconds;
            String msg = String.format("Elapsed %8.2f hr.mn Percent: %6.3f Hours Left: %8.2f ",
                    ((double) (elapsedTime)) / HRS,
                    100.0 * ((double) procRecs / (double) totalRecs), hrsLeft);
            System.out.println(msg);
            pw.println(msg);
            pw.flush();
        }
    }

    try {
        if (insertStmt != null) {
            insertStmt.close();
        }
        if (updateStmt != null) {
            updateStmt.close();
        }
        if (checkStmt != null) {
            checkStmt.close();
        }
    } catch (SQLException ex) {
        ex.printStackTrace();
    }

    String msg = String.format("Done - Writes: %d Updates: %d", writeCnt, updateCnt);
    System.out.println(msg);
    pw.println(msg);
    pw.flush();
    pw.close();
}
From source file:br.org.indt.ndg.server.client.TemporaryOpenRosaBussinessDelegate.java
public Map<String, String> getSurveyIdToUrlMap() {
    Map<String, String> surveyIdsToUrlMap = new HashMap<String, String>();
    PreparedStatement listSurveysStmt = null;
    Connection conn = null;
    try {
        conn = getDbConnection();
        listSurveysStmt = conn.prepareStatement(SELECT_ALL_SURVEYS_STATEMENT,
                ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        ResultSet surveysSet = listSurveysStmt.executeQuery();
        boolean isValidRow = surveysSet.first();
        final String FAKE_DEVICE_ID = "0";
        while (isValidRow) {
            String surveyId = surveysSet.getString(SURVEY_ID_COLUMN);
            surveyIdsToUrlMap.put(surveyId, getSurveyDownloadUrl(FAKE_DEVICE_ID, surveyId));
            isValidRow = surveysSet.next();
        }
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        try {
            listSurveysStmt.close();
            conn.close();
        } catch (Exception e) {
        }
    }
    return surveyIdsToUrlMap;
}
From source file:com.taobao.datax.plugins.reader.postgrereader.PostgreReader.java
@Override
public int startRead(LineSender lineSender) {
    DBResultSetSender proxy = DBResultSetSender.newSender(lineSender);
    proxy.setMonitor(getMonitor());
    proxy.setDateFormatMap(genDateFormatMap());

    String sql = param.getValue(ParamKey.sql);
    logger.info(String.format("PostgreReader start to query %s .", sql));

    ResultSet rs = null;
    Connection connection = null;
    try {
        connection = getCon();
        connection.setAutoCommit(false);
        Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY,
                ResultSet.CONCUR_READ_ONLY);
        logger.info("set postgre statement fetch size");
        stmt.setFetchSize(fetchSize);
        logger.info("connection info autoCommit:" + connection.getAutoCommit() + " fetchSize"
                + stmt.getFetchSize());
        rs = DBUtils.query(stmt, sql);
        logger.info("get a ResultSet");
        proxy.sendToWriter(rs);
        logger.info("send to writer");
        proxy.flush();
        getMonitor().setStatus(PluginStatus.READ_OVER);
        return PluginStatus.SUCCESS.value();
    } catch (SQLException e) {
        logger.error(ExceptionTracker.trace(e));
        throw new DataExchangeException(e);
    } finally {
        if (null != rs) {
            DBUtils.closeResultSet(rs);
        }
        if (connection != null) {
            try {
                connection.close();
            } catch (Exception ignore) {
            }
        }
    }
}
From source file:com.cloudera.sqoop.testutil.ExportJobTestCase.java
/**
 * Check that we got back the expected row set.
 * @param expectedNumRecords The number of records we expected to load
 *     into the database.
 * @param conn the db connection to use.
 */
protected void verifyExport(int expectedNumRecords, Connection conn) throws IOException, SQLException {
    LOG.info("Verifying export: " + getTableName());

    // Check that we got back the correct number of records.
    PreparedStatement statement = conn.prepareStatement("SELECT COUNT(*) FROM " + getTableName(),
            ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    int actualNumRecords = 0;
    ResultSet rs = null;
    try {
        rs = statement.executeQuery();
        try {
            rs.next();
            actualNumRecords = rs.getInt(1);
        } finally {
            rs.close();
        }
    } finally {
        statement.close();
    }
    assertEquals("Got back unexpected row count", expectedNumRecords, actualNumRecords);

    if (expectedNumRecords == 0) {
        return; // Nothing more to verify.
    }

    // Check that we start with row 0.
    int minVal = getMinRowId(conn);
    assertEquals("Minimum row was not zero", 0, minVal);

    // Check that the last row we loaded is numRows - 1
    int maxVal = getMaxRowId(conn);
    assertEquals("Maximum row had invalid id", expectedNumRecords - 1, maxVal);

    // Check that the string values associated with these points match up.
    statement = conn.prepareStatement("SELECT msg FROM " + getTableName() + " WHERE id = " + minVal,
            ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    String minMsg = "";
    try {
        rs = statement.executeQuery();
        try {
            rs.next();
            minMsg = rs.getString(1);
        } finally {
            rs.close();
        }
    } finally {
        statement.close();
    }
    assertEquals("Invalid msg field for min value", getMsgPrefix() + minVal, minMsg);

    statement = conn.prepareStatement("SELECT msg FROM " + getTableName() + " WHERE id = " + maxVal,
            ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    String maxMsg = "";
    try {
        rs = statement.executeQuery();
        try {
            rs.next();
            maxMsg = rs.getString(1);
        } finally {
            rs.close();
        }
    } finally {
        statement.close();
    }
    assertEquals("Invalid msg field for min value", getMsgPrefix() + maxVal, maxMsg);
}
From source file:net.sourceforge.msscodefactory.v1_10.MSSBamPg8.MSSBamPg8AccessSecurityTable.java
public MSSBamAccessSecurityBuff readBuff(MSSBamAuthorization Authorization, MSSBamAccessSecurityPKey PKey) {
    final String S_ProcName = "readBuff";
    if (!schema.isTransactionOpen()) {
        throw CFLib.getDefaultExceptionFactory().newUsageException(getClass(), S_ProcName,
                "Transaction not open");
    }
    try {
        Connection cnx = schema.getCnx();
        short Id = PKey.getRequiredId();
        String sql = S_sqlSelectAccessSecurityBuff + "WHERE " + "asec.Id = " + Short.toString(Id) + " ";
        Statement stmt = cnx.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        ResultSet resultSet = stmt.executeQuery(sql);
        if (resultSet.next()) {
            MSSBamAccessSecurityBuff buff = unpackAccessSecurityResultSetToBuff(resultSet);
            if (resultSet.next()) {
                resultSet.last();
                throw CFLib.getDefaultExceptionFactory().newRuntimeException(getClass(), S_ProcName,
                        "Did not expect multi-buff response, " + resultSet.getRow() + " rows selected");
            }
            return (buff);
        } else {
            return (null);
        }
    } catch (SQLException e) {
        throw CFLib.getDefaultExceptionFactory().newDbException(getClass(), S_ProcName, e);
    }
}
From source file:net.sourceforge.msscodefactory.v1_10.MSSBamPg8.MSSBamPg8AccessFrequencyTable.java
public MSSBamAccessFrequencyBuff readBuff(MSSBamAuthorization Authorization, MSSBamAccessFrequencyPKey PKey) {
    final String S_ProcName = "readBuff";
    if (!schema.isTransactionOpen()) {
        throw CFLib.getDefaultExceptionFactory().newUsageException(getClass(), S_ProcName,
                "Transaction not open");
    }
    try {
        Connection cnx = schema.getCnx();
        short Id = PKey.getRequiredId();
        String sql = S_sqlSelectAccessFrequencyBuff + "WHERE " + "afrq.Id = " + Short.toString(Id) + " ";
        Statement stmt = cnx.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        ResultSet resultSet = stmt.executeQuery(sql);
        if (resultSet.next()) {
            MSSBamAccessFrequencyBuff buff = unpackAccessFrequencyResultSetToBuff(resultSet);
            if (resultSet.next()) {
                resultSet.last();
                throw CFLib.getDefaultExceptionFactory().newRuntimeException(getClass(), S_ProcName,
                        "Did not expect multi-buff response, " + resultSet.getRow() + " rows selected");
            }
            return (buff);
        } else {
            return (null);
        }
    } catch (SQLException e) {
        throw CFLib.getDefaultExceptionFactory().newDbException(getClass(), S_ProcName, e);
    }
}
From source file:edu.ku.brc.specify.toycode.mexconabio.CopyFromGBIF.java
public void process() {
    boolean doQueryForCollNum = true;

    String pSQL = "INSERT INTO raw (old_id,data_provider_id,data_resource_id,resource_access_point_id, institution_code, collection_code, "
            + "catalogue_number, scientific_name, author, rank, kingdom, phylum, class, order_rank, family, genus, species, subspecies, latitude, longitude, "
            + "lat_long_precision, max_altitude, min_altitude, altitude_precision, min_depth, max_depth, depth_precision, continent_ocean, country, state_province, county, collector_name, "
            + "locality,year, month, day, basis_of_record, identifier_name, identification_date,unit_qualifier, created, modified, deleted, collector_num) "
            + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";

    String gbifSQLBase = "SELECT r.id, r.data_provider_id, r.data_resource_id, r.resource_access_point_id, r.institution_code, r.collection_code, "
            + "r.catalogue_number, r.scientific_name, r.author, r.rank, r.kingdom, r.phylum, r.class, r.order_rank, r.family, r.genus, r.species, r.subspecies, "
            + "r.latitude, r.longitude, r.lat_long_precision, r.max_altitude, r.min_altitude, r.altitude_precision, r.min_depth, r.max_depth, r.depth_precision, "
            + "r.continent_ocean, r.country, r.state_province, r.county, r.collector_name, r.locality, r.year, r.month, r.day, r.basis_of_record, r.identifier_name, "
            + "r.identification_date, r.unit_qualifier, r.created, r.modified, r.deleted";

    String gbifSQL;
    if (doQueryForCollNum) {
        gbifSQL = gbifSQLBase + " FROM raw_occurrence_record r";
    } else {
        gbifSQL = gbifSQLBase + ", i.identifier FROM raw_occurrence_record r, identifier_record i WHERE r.id = i.occurrence_id AND i.identifier_type = 3";
    }

    BasicSQLUtils.update(srcDBConn, "DELETE FROM raw WHERE id > 0");

    long totalRecs = BasicSQLUtils.getCount(dbConn, "SELECT COUNT(*) FROM raw_occurrence_record");
    long procRecs = 0;
    long startTime = System.currentTimeMillis();
    int secsThreshold = 0;

    PrintWriter pw = null;
    final double HRS = 1000.0 * 60.0 * 60.0;

    Statement gStmt = null;
    PreparedStatement pStmt = null;
    PreparedStatement stmt = null;

    try {
        pw = new PrintWriter("gbif.log");

        pStmt = srcDBConn.prepareStatement(pSQL);
        stmt = dbConn2.prepareStatement(
                "SELECT identifier FROM identifier_record WHERE occurrence_id = ? AND identifier_type = 3");
        //stmt.setFetchSize(Integer.MIN_VALUE);

        System.out.println("Total Records: " + totalRecs);
        pw.println("Total Records: " + totalRecs);

        gStmt = dbConn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        gStmt.setFetchSize(Integer.MIN_VALUE);

        String fullSQL = gbifSQL;
        System.out.println(fullSQL);

        ResultSet gRS = gStmt.executeQuery(fullSQL);
        ResultSetMetaData rsmd = gRS.getMetaData();
        int lastColInx = rsmd.getColumnCount() + (doQueryForCollNum ? 1 : 0);

        while (gRS.next()) {
            int id = gRS.getInt(1);
            pStmt.setObject(1, id);
            for (int i = 2; i <= rsmd.getColumnCount(); i++) {
                Object obj = gRS.getObject(i);
                pStmt.setObject(i, obj);
            }

            String collNum = null;
            if (doQueryForCollNum) {
                //String tmpSQL = String.format("SELECT identifier FROM identifier_record WHERE occurrence_id = %d AND identifier_type = 3", id);
                //System.out.println(tmpSQL);
                stmt.setInt(1, id);
                ResultSet rs = stmt.executeQuery();
                if (rs.next()) {
                    collNum = rs.getString(1);
                }
                rs.close();
            } else {
                collNum = gRS.getString(lastColInx - 1);
            }

            if (StringUtils.isNotEmpty(collNum)) {
                if (collNum.length() < 256) {
                    pStmt.setString(lastColInx, collNum);
                } else {
                    pStmt.setString(lastColInx, collNum.substring(0, 255));
                }
            } else {
                pStmt.setObject(lastColInx, null);
            }

            try {
                pStmt.executeUpdate();
            } catch (Exception ex) {
                System.err.println("For ID[" + gRS.getObject(1) + "]");
                ex.printStackTrace();
                pw.print("For ID[" + gRS.getObject(1) + "] " + ex.getMessage());
                pw.flush();
            }

            procRecs++;
            if (procRecs % 10000 == 0) {
                long endTime = System.currentTimeMillis();
                long elapsedTime = endTime - startTime;

                double avergeTime = (double) elapsedTime / (double) procRecs;
                double hrsLeft = (((double) elapsedTime / (double) procRecs) * (double) totalRecs - procRecs) / HRS;

                int seconds = (int) (elapsedTime / 60000.0);
                if (secsThreshold != seconds) {
                    secsThreshold = seconds;
                    String msg = String.format(
                            "Elapsed %8.2f hr.mn Ave Time: %5.2f Percent: %6.3f Hours Left: %8.2f ",
                            ((double) (elapsedTime)) / HRS, avergeTime,
                            100.0 * ((double) procRecs / (double) totalRecs), hrsLeft);
                    System.out.println(msg);
                    pw.println(msg);
                    pw.flush();
                }
            }
        }

    } catch (Exception ex) {
        ex.printStackTrace();

    } finally {
        try {
            if (gStmt != null) {
                gStmt.close();
            }
            if (pStmt != null) {
                pStmt.close();
            }
            if (stmt != null) {
                stmt.close();
            }
            pw.close();
        } catch (Exception ex) {
        }
    }
    System.out.println("Done transferring.");
    pw.println("Done transferring.");

    /*
    int count = 0;
    boolean cont = true;
    while (cont) {
        long start = System.currentTimeMillis();
        Statement gStmt = null;
        PreparedStatement pStmt = null;
        try {
            gStmt = dbConn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            String fullSQL = gbifSQL + String.format(" AND r.id >= %d AND r.id < %d", count, count + recordStep);
            //System.out.println(fullSQL);
            int cnt = 0;
            ResultSet rs = gStmt.executeQuery(fullSQL);
            ResultSetMetaData rsmd = rs.getMetaData();
            //System.out.println("Done with query.");
            pStmt = srcDBConn.prepareStatement(pSQL);
            count += recordStep;
            while (rs.next()) {
                Integer id = rs.getInt(1);
                pStmt.setInt(1, id);
                for (int i = 2; i <= rsmd.getColumnCount(); i++) {
                    Object obj = rs.getObject(i);
                    pStmt.setObject(i, obj);
                }
                pStmt.executeUpdate();
                cnt++;
                procRecs++;
            }
            rs.close();
            if (count == 0) {
                break;
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            try {
                if (gStmt != null) {
                    gStmt.close();
                }
                if (pStmt != null) {
                    pStmt.close();
                }
            } catch (Exception ex) {
            }
        }
        long endTime = System.currentTimeMillis();
        long deltaTime = endTime - start;
        long elapsedTime = endTime - startTime;
        double avergeTime = (double) elapsedTime / (double) procRecs;
        double hrsLeft = (((double) procRecs / (double) elapsedTime) * (double) totalRecs) / 3600000.0;
        int seconds = (int) (elapsedTime / 60000.0);
        if (secsThreshold != seconds) {
            secsThreshold = seconds;
            System.out.println(String.format(
                    "Elapsed %8.2f hr.mn Time: %5.2f Ave Time: %5.2f Percent: %6.3f Hours Left: Elapsed %8.2f ",
                    ((double) (elapsedTime)) / 3600000.0, ((double) (deltaTime)) / 1000.0, avergeTime,
                    100.0 * ((double) procRecs / (double) totalRecs), hrsLeft));
        }
    }
    System.out.println("Done transferring.");
    */

    /*
    Statement uStmt = null;
    try {
        uStmt = srcDBConn.createStatement();
        int rv = uStmt.executeUpdate("ALTER TABLE raw ADD FULLTEXT(catalogue_number, genus, species, subspecies, collector_num)");
        System.out.println("Indexing rv = " + rv);
    } catch (Exception ex) {
        ex.printStackTrace();
    } finally {
        try {
            if (uStmt != null) {
                uStmt.close();
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
    System.out.println("Done Indexing.");
    */
}