List of usage examples for java.sql ResultSet TYPE_SCROLL_INSENSITIVE
int TYPE_SCROLL_INSENSITIVE
The constant indicating the type for a ResultSet object that is scrollable but generally not sensitive to changes to the data that underlies the ResultSet.
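A minimal sketch of how the constant is typically used, assuming an already open Connection (the query and column name are placeholders, not taken from the examples below). A scroll-insensitive, read-only ResultSet can be traversed backward as well as forward:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ScrollInsensitiveExample {
    public static void printInReverse(Connection con) throws SQLException {
        // Request a scrollable cursor that does not reflect later changes to the underlying data
        try (Statement stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
             ResultSet rs = stmt.executeQuery("SELECT name FROM person")) {
            rs.afterLast();             // position the cursor after the last row
            while (rs.previous()) {     // walk the rows from last to first
                System.out.println(rs.getString("name"));
            }
        }
    }
}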
From source file: rems.Global.java
public static ResultSet selectDataNoParams(String selSql) {
    ResultSet selDtSt = null;
    Statement stmt = null;
    try {
        Connection mycon = null;
        Class.forName("org.postgresql.Driver");
        mycon = DriverManager.getConnection(Global.connStr, Global.Uname, Global.Pswd);
        mycon.setAutoCommit(false);
        //System.out.println("Opened database successfully");
        stmt = mycon.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        selDtSt = stmt.executeQuery(selSql);
        //stmt.close();
        //mycon.close();
        return selDtSt;
    } catch (Exception ex) {
        Global.errorLog = selSql + "\r\n" + ex.getMessage();
        Global.writeToLog();
        return selDtSt;
    } finally {
    }
}
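The example above hands back an open ResultSet and leaves the Connection and Statement for the caller to close. Where the rows can be consumed on the spot, a try-with-resources variant keeps the cleanup local; this is a sketch under that assumption (placeholder connection settings, not the rems.Global fields):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ScrollAndClose {
    public static int countRows(String url, String user, String password, String sql) throws SQLException {
        // try-with-resources closes the ResultSet, Statement and Connection in reverse order
        try (Connection con = DriverManager.getConnection(url, user, password);
             Statement stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                     ResultSet.CONCUR_READ_ONLY);
             ResultSet rs = stmt.executeQuery(sql)) {
            // A scrollable ResultSet can jump to the last row, so the row count is just getRow()
            return rs.last() ? rs.getRow() : 0;
        }
    }
}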
From source file: com.itemanalysis.jmetrik.graph.nicc.NonparametricCurveAnalysis.java
public void evaluateDIF() throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;

    //create focal map
    focalRegression = new TreeMap<VariableAttributes, KernelRegressionItem>();
    for (VariableAttributes v : variables) {
        KernelRegressionItem kItem = new KernelRegressionItem(v, kernelFunction, bandwidth,
                uniformDistributionApproximation);
        focalRegression.put(v, kItem);
    }

    //create reference map
    if (hasGroupVariable) {
        referenceRegression = new TreeMap<VariableAttributes, KernelRegressionItem>();
        for (VariableAttributes v : variables) {
            KernelRegressionItem kItem = new KernelRegressionItem(v, kernelFunction, bandwidth,
                    uniformDistributionApproximation);
            referenceRegression.put(v, kItem);
        }
    }

    //determine whether group variable is double or not
    boolean groupVariableIsDouble = false;
    if (groupByVariable.getType().getDataType() == DataType.DOUBLE)
        groupVariableIsDouble = true;

    try {
        //connect to db
        Table sqlTable = new Table(tableName.getNameForDatabase());
        SelectQuery select = new SelectQuery();
        for (VariableAttributes v : variables) {
            select.addColumn(sqlTable, v.getName().nameForDatabase());
        }
        select.addColumn(sqlTable, regressorVariable.getName().nameForDatabase());
        select.addColumn(sqlTable, groupByVariable.getName().nameForDatabase());
        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(select.toString());

        KernelRegressionItem kernelRegressionItem;
        Object itemResponse;
        Double score;
        Object tempGroup;
        String group;

        //analyze by groups
        while (rs.next()) {
            tempGroup = rs.getObject(groupByVariable.getName().nameForDatabase());
            if (tempGroup == null) {
                group = ""; //will not be counted if does not match focal or reference code
            } else {
                if (groupVariableIsDouble) {
                    group = Double.valueOf((Double) tempGroup).toString();
                } else {
                    group = ((String) tempGroup).trim();
                }
            }

            //get independent variable value
            //omit examinees with missing data
            //examinees with missing group code omitted
            score = rs.getDouble(regressorVariable.getName().nameForDatabase());
            if (!rs.wasNull()) {
                if (focalCode.equals(group)) {
                    for (VariableAttributes v : focalRegression.keySet()) {
                        kernelRegressionItem = focalRegression.get(v);
                        itemResponse = rs.getObject(v.getName().nameForDatabase());
                        if (itemResponse != null)
                            kernelRegressionItem.increment(score, itemResponse);
                    }
                } else if (referenceCode.equals(group)) {
                    for (VariableAttributes v : referenceRegression.keySet()) {
                        kernelRegressionItem = referenceRegression.get(v);
                        itemResponse = rs.getObject(v.getName().nameForDatabase());
                        if (itemResponse != null)
                            kernelRegressionItem.increment(score, itemResponse);
                    }
                }
            }
            updateProgress();
        }
    } catch (SQLException ex) {
        throw ex;
    } finally {
        if (rs != null)
            rs.close();
        if (stmt != null)
            stmt.close();
    }
    this.firePropertyChange("progress-ind-on", null, null);
}
From source file: eu.stratosphere.api.io.jdbc.JDBCInputFormat.java
private void executeQuery() throws SQLException {
    statement = dbConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    resultSet = statement.executeQuery(this.query);
}
From source file: BQJDBC.QueryResultTest.BQScrollableResultSetFunctionTest.java
public void QueryLoad() {
    final String sql = "SELECT TOP(word,10) AS word, COUNT(*) as count FROM publicdata:samples.shakespeare";
    final String description = "The top 10 word from shakespeare #TOP #COUNT";
    String[][] expectation = new String[][] {
            { "you", "yet", "would", "world", "without", "with", "your", "young", "words", "word" },
            { "42", "42", "42", "42", "42", "42", "41", "41", "41", "41" } };
    /** somehow the result changed with time
     { "you", "yet", "would", "world", "without", "with", "will", "why", "whose", "whom" },
     { "42", "42", "42", "42", "42", "42", "42", "42", "42", "42" } };
     */
    this.logger.info("Test number: 01");
    this.logger.info("Running query:" + sql);
    try {
        //Statement stmt = BQResultSetFunctionTest.con.createStatement();
        Statement stmt = BQScrollableResultSetFunctionTest.con
                .createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        stmt.setQueryTimeout(500);
        BQScrollableResultSetFunctionTest.Result = stmt.executeQuery(sql);
    } catch (SQLException e) {
        this.logger.error("SQLexception" + e.toString());
        Assert.fail("SQLException" + e.toString());
    }
    Assert.assertNotNull(BQScrollableResultSetFunctionTest.Result);
    this.logger.debug(description);
    HelperFunctions.printer(expectation);
    try {
        Assert.assertTrue("Comparing failed in the String[][] array", this.comparer(expectation,
                BQSupportMethods.GetQueryResult(BQScrollableResultSetFunctionTest.Result)));
    } catch (SQLException e) {
        this.logger.error("SQLexception" + e.toString());
        Assert.fail(e.toString());
    }
}
From source file: edu.ku.brc.specify.toycode.mexconabio.MexConvToSQL.java
/** * //from ww w.j a v a2s.com */ public void process(final String databaseName) { boolean doingMapTable = false; cal.set(Calendar.SECOND, 0); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.HOUR, 0); cal.set(Calendar.MILLISECOND, 0); for (int i = 0; i < histo.length; i++) { histo[i] = 0; } convLogger.initialize("mich_conabio", databaseName); StringBuilder extraStyle = new StringBuilder(); extraStyle.append("TD.yw { color: rgb(200,200,0); }\n"); extraStyle.append("TD.gr { color: rgb(200,0,200); }\n"); extraStyle.append("TD.bgr { color: rgb(0,255,0); }\n"); extraStyle.append("TD.byw { color: yellow; }\n"); extraStyle.append("TD.df { color: rgb(100,255,200); }\n"); String mapTableName = "maptable"; Statement stmt = null; try { conn = DriverManager.getConnection( "jdbc:mysql://localhost/" + databaseName + "?characterEncoding=UTF-8&autoReconnect=true", "root", "root"); BasicSQLUtils.setDBConnection(conn); int matches = BasicSQLUtils.getCountAsInt("SELECT COUNT(*) FROM maptable"); //tblWriter.log(String.format("Number of records that match - Genus, Species, CollectorNumber, CatalogNumber: "+matches)); mpStmt = conn.prepareStatement("INSERT INTO " + mapTableName + " VALUES (?,?)"); String sqlMain = "SELECT E.IdEjemplar, E.NumeroDeCatalogo, NumeroDeColecta, Grupo.DescripcionGpo AS Colector, " + "NombreLocalidad.NombreOriginal, CategoriaTaxonomica.NombreCategoriaTaxonomica, IF (CategoriaTaxonomica.IdNivel1<7,Nombre.Nombre, " + "IF (CategoriaTaxonomica.IdNivel3 = 0, CONCAT(Nombre_1.Nombre,\" \", Nombre.Nombre),CONCAT(Nombre_2.Nombre, \" \",Nombre_1.Nombre,\" \", " + "Nombre.Nombre))) AS Nombre, Nombre_2.Nombre AS N2, Nombre_1.Nombre AS N1, Nombre.Nombre AS N0, " + "AnioColecta, MesColecta, DiaColecta FROM Grupo " + "INNER JOIN (CategoriaTaxonomica INNER JOIN (NombreLocalidad " + "INNER JOIN (((Nombre INNER JOIN Nombre AS Nombre_1 ON Nombre.IdAscendObligatorio = Nombre_1.IdNombre) " + "INNER JOIN Nombre AS Nombre_2 ON Nombre_1.IdAscendObligatorio = Nombre_2.IdNombre) " + "INNER JOIN Ejemplar E ON Nombre.IdNombre = E.IdNombre) ON NombreLocalidad.IdNombreLocalidad = E.IdNombreLocalidad) " + "ON CategoriaTaxonomica.IdCategoriaTaxonomica = Nombre.IdCategoriaTaxonomica) ON Grupo.IdGrupo = E.IdColector " + "ORDER BY E.NumeroDeCatalogo "; String sql = sqlMain; //if (!doingMapTable) //{ // sql = "SELECT * FROM (" + sqlMain + ") T1 LEFT JOIN maptable ON IdEjemplar = OldID WHERE NewID IS NULL"; //} TableWriter tblWriter = convLogger.getWriter("MichConabio1.html", "Matches Cat No. / Collector No. 
/ Genus / Species", extraStyle.toString()); tblWriter.startTable(); tblWriter.log("<TR><TH COLSPAN=\"7\">Conabio</TH><TH COLSPAN=\"7\">Michigan</TH><TD> </TH></TR>"); tblWriter.logHdr("Col Num", "Cat Num", "Genus", "Species", "Collector", "Locality", "Date Collected", "Col Num", "Cat Num", "Genus", "Species", "Collector", "Locality", "Date Collected", "Score"); fndCnt = 0; numRecs = 0; System.out.println(sql); stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); ResultSet rs = stmt.executeQuery(sql); while (rs.next()) { process(0, rs, tblWriter, doingMapTable); } rs.close(); tblWriter.endTable(); tblWriter.log("<BR>"); tblWriter.log(String.format("Number of records that match: %d of %d records.", fndCnt, numRecs)); tblWriter.log(String.format("Average Score: %5.2f", ((double) totalScore / (double) fndCnt))); tblWriter.log(String.format("Mode Score: %d", getModeScore())); totalScore = 0; sql = "SELECT * FROM (" + sqlMain + ") T1 LEFT JOIN maptable ON IdEjemplar = OldID WHERE NewID IS NULL"; fndCnt = 0; numRecs = 0; tblWriter = convLogger.getWriter("MichConabio2.html", "Matches Cat No. / Collector No. ", extraStyle.toString()); tblWriter.startTable(); tblWriter.log("<TR><TH COLSPAN=\"7\">Conabio</TH><TH COLSPAN=\"7\">Michigan</TH><TD> </TH></TR>"); tblWriter.logHdr("Col Num", "Cat Num", "Genus", "Species", "Collector", "Locality", "Date Collected", "Col Num", "Cat Num", "Genus", "Species", "Collector", "Locality", "Date Collected", "Score"); rs = stmt.executeQuery(sql); while (rs.next()) { process(1, rs, tblWriter, doingMapTable); } rs.close(); tblWriter.endTable(); tblWriter.log("<BR>"); tblWriter.log(String.format("Number of records that match: %d of %d records.", fndCnt, numRecs)); tblWriter.log(String.format("Average Score: %5.2f", ((double) totalScore / (double) fndCnt))); tblWriter.log(String.format("Mode Score: %d", getModeScore())); totalScore = 0; fndCnt = 0; numRecs = 0; tblWriter = convLogger.getWriter("MichConabio3.html", "Matches Collector No. ", extraStyle.toString()); tblWriter.startTable(); tblWriter.log("<TR><TH COLSPAN=\"7\">Conabio</TH><TH COLSPAN=\"7\">Michigan</TH><TD> </TH></TR>"); tblWriter.logHdr("Col Num", "Cat Num", "Genus", "Species", "Collector", "Locality", "Date Collected", "Col Num", "Cat Num", "Genus", "Species", "Collector", "Locality", "Date Collected", "Score"); rs = stmt.executeQuery(sql); while (rs.next()) { process(2, rs, tblWriter, doingMapTable); } rs.close(); tblWriter.endTable(); tblWriter.log("<BR>"); tblWriter.log(String.format("Number of records that match: %d of %d records.", fndCnt, numRecs)); tblWriter.log(String.format("Average Score: %5.2f", ((double) totalScore / (double) fndCnt))); tblWriter.log(String.format("Mode Score: %d", getModeScore())); totalScore = 0; fndCnt = 0; numRecs = 0; tblWriter = convLogger.getWriter("MichConabio4.html", "Matches Cat No. 
", extraStyle.toString()); tblWriter.startTable(); tblWriter.log("<TR><TH COLSPAN=\"7\">Conabio</TH><TH COLSPAN=\"7\">Michigan</TH><TD> </TH></TR>"); tblWriter.logHdr("Col Num", "Cat Num", "Genus", "Species", "Collector", "Locality", "Date Collected", "Col Num", "Cat Num", "Genus", "Species", "Collector", "Locality", "Date Collected", "Score"); rs = stmt.executeQuery(sql); while (rs.next()) { process(3, rs, tblWriter, doingMapTable); } rs.close(); tblWriter.endTable(); tblWriter.log("<BR>"); tblWriter.log(String.format("Number of records that match: %d of %d records.", fndCnt, numRecs)); tblWriter.log(String.format("Average Score: %5.2f", ((double) totalScore / (double) fndCnt))); tblWriter.log(String.format("Mode Score: %d", getModeScore())); totalScore = 0; tblWriter.flush(); } catch (Exception ex) { ex.printStackTrace(); } finally { try { stmt.close(); mpStmt.close(); conn.close(); } catch (Exception ex) { ex.printStackTrace(); } } File indexFile = convLogger.closeAll(); try { AttachmentUtils.openURI(indexFile.toURI()); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } //int n = StringUtils.getLevenshteinDistance("guatemaliense", "guatemalense"); //System.out.println(n); // tblWriter.println("<BR>"); // for (int i=0;i<histo.length;i++) // { // if (histo[i] > 0) // { // tblWriter.print(String.format("%3d - ", i)); // for (int x=0;x<histo[i]/2;x++) // { // tblWriter.print('*'); // } // tblWriter.println("<BR>"); // } // } // tblWriter.flush(); }
From source file: com.amazon.carbonado.repo.jdbc.JDBCRepository.java
/**
 * @param name name to give repository instance
 * @param isMaster when true, storables in this repository must manage
 * version properties and sequence properties
 * @param dataSource provides JDBC database connections
 * @param catalog optional catalog to search for tables -- actual meaning
 * is database independent
 * @param schema optional schema to search for tables -- actual meaning
 * is database independent
 * @param forceStoredSequence tells the repository to use a stored sequence
 * even if the database supports native sequences
 */
@SuppressWarnings("unchecked")
JDBCRepository(AtomicReference<Repository> rootRef, String name, boolean isMaster,
        Iterable<TriggerFactory> triggerFactories, DataSource dataSource, boolean dataSourceClose,
        String catalog, String schema, Integer fetchSize, Map<String, Boolean> autoVersioningMap,
        Map<String, Boolean> suppressReloadMap, String sequenceSelectStatement, boolean forceStoredSequence,
        boolean primaryKeyCheckDisabled, SchemaResolver resolver) throws RepositoryException {
    super(name);
    if (dataSource == null) {
        throw new IllegalArgumentException("DataSource cannot be null");
    }
    mIsMaster = isMaster;
    mTriggerFactories = triggerFactories;
    mRootRef = rootRef;
    mDataSource = dataSource;
    mDataSourceClose = dataSourceClose;
    mCatalog = catalog;
    mSchema = schema;
    mFetchSize = fetchSize;
    mPrimaryKeyCheckDisabled = primaryKeyCheckDisabled;
    mAutoVersioningMap = autoVersioningMap;
    mSuppressReloadMap = suppressReloadMap;
    mResolver = resolver;

    mOpenConnections = new IdentityHashMap<Connection, Object>();
    mOpenConnectionsLock = new ReentrantLock(true);

    // Temporarily set to generic one, in case there's a problem during initialization.
    mExceptionTransformer = new JDBCExceptionTransformer();

    mTxnMgr = new JDBCTransactionManager(this);

    getLog().info("Opening repository \"" + getName() + '"');

    // Test connectivity and get some info on transaction isolation levels.
    Connection con = getConnection();
    try {
        DatabaseMetaData md = con.getMetaData();
        if (md == null || !md.supportsTransactions()) {
            throw new RepositoryException("Database does not support transactions");
        }
        mDatabaseProductName = md.getDatabaseProductName();
        boolean supportsSavepoints;
        try {
            supportsSavepoints = md.supportsSavepoints();
        } catch (AbstractMethodError e) {
            supportsSavepoints = false;
        }
        if (supportsSavepoints) {
            con.setAutoCommit(false);
            // Some JDBC drivers (HSQLDB) lie about their savepoint support.
            try {
                con.setSavepoint();
            } catch (SQLException e) {
                mLog.warn("JDBC driver for " + mDatabaseProductName
                        + " reports supporting savepoints, but it "
                        + "doesn't appear to work: " + e);
                supportsSavepoints = false;
            } finally {
                con.rollback();
                con.setAutoCommit(true);
            }
        }
        mSupportsSavepoints = supportsSavepoints;
        mSupportsSelectForUpdate = md.supportsSelectForUpdate();
        mSupportsScrollInsensitiveReadOnly = md.supportsResultSetConcurrency(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);

        mJdbcDefaultIsolationLevel = md.getDefaultTransactionIsolation();
        mDefaultIsolationLevel = mapIsolationLevelFromJdbc(mJdbcDefaultIsolationLevel);

        mReadUncommittedLevel = selectIsolationLevel(md, IsolationLevel.READ_UNCOMMITTED);
        mReadCommittedLevel = selectIsolationLevel(md, IsolationLevel.READ_COMMITTED);
        mRepeatableReadLevel = selectIsolationLevel(md, IsolationLevel.REPEATABLE_READ);
        mSerializableLevel = selectIsolationLevel(md, IsolationLevel.SERIALIZABLE);
    } catch (SQLException e) {
        throw toRepositoryException(e);
    } finally {
        try {
            closeConnection(con);
        } catch (SQLException e) {
            // Don't care.
        }
    }

    mSupportStrategy = JDBCSupportStrategy.createStrategy(this);
    if (forceStoredSequence) {
        mSupportStrategy.setSequenceSelectStatement(null);
    } else if (sequenceSelectStatement != null && sequenceSelectStatement.length() > 0) {
        mSupportStrategy.setSequenceSelectStatement(sequenceSelectStatement);
    }
    mSupportStrategy.setForceStoredSequence(forceStoredSequence);
    mExceptionTransformer = mSupportStrategy.createExceptionTransformer();

    getLog().info("Opened repository \"" + getName() + '"');

    setAutoShutdownEnabled(true);
}
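The constructor above probes DatabaseMetaData before relying on scroll-insensitive, read-only result sets. A minimal sketch of that capability check in isolation (the class and method names here are illustrative, not part of the Carbonado source):

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ScrollSupportCheck {
    public static Statement createScrollableIfPossible(Connection con) throws SQLException {
        DatabaseMetaData md = con.getMetaData();
        // Ask the driver whether TYPE_SCROLL_INSENSITIVE with CONCUR_READ_ONLY is supported
        if (md.supportsResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                && md.supportsResultSetConcurrency(ResultSet.TYPE_SCROLL_INSENSITIVE,
                        ResultSet.CONCUR_READ_ONLY)) {
            return con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        }
        // Fall back to a plain forward-only statement when scrolling is not available
        return con.createStatement();
    }
}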
From source file: edu.ku.brc.specify.conversion.AgentConverter.java
/** * Specify 5.x points at AgentAdress instead of an Agent. The idea was that to point at an Agent * and possibly a differnt address that represents what that person does. This was really * confusing so we are changing it to point at an Agent instead. * //from ww w.j av a2 s . co m * So that means we need to pull apart these relationships and have all foreign keys that point * to an AgentAddress now point at an Agent and we then need to add in the Agents and then add * the Address to the Agents. * * The AgentAdress, Agent and Address (triple) can have a NULL Address but it cannot have a NULL * Agent. If there is a NULL Agent then this method will throw a RuntimeException. */ public boolean convertAgents(final boolean doFixAgents) { boolean debugAgents = false; log.debug("convert Agents"); BasicSQLUtils.removeForeignKeyConstraints(newDBConn, BasicSQLUtils.myDestinationServerType); // Create the mappers here, but fill them in during the AgentAddress Process IdTableMapper agentIDMapper = idMapperMgr.addTableMapper("agent", "AgentID"); IdTableMapper agentAddrIDMapper = idMapperMgr.addTableMapper("agentaddress", "AgentAddressID"); agentIDMapper.setInitialIndex(4); if (shouldCreateMapTables) { log.info("Mapping Agent Ids"); agentIDMapper.mapAllIds("SELECT AgentID FROM agent ORDER BY AgentID"); } // Just like in the conversion of the CollectionObjects we // need to build up our own SELECT clause because the MetaData of columns names returned // FROM // a query doesn't include the table names for all columns, this is far more predictable List<String> oldFieldNames = new ArrayList<String>(); StringBuilder agtAdrSQL = new StringBuilder("SELECT "); List<String> agentAddrFieldNames = getFieldNamesFromSchema(oldDBConn, "agentaddress"); agtAdrSQL.append(buildSelectFieldList(agentAddrFieldNames, "agentaddress")); agtAdrSQL.append(", "); GenericDBConversion.addNamesWithTableName(oldFieldNames, agentAddrFieldNames, "agentaddress"); List<String> agentFieldNames = getFieldNamesFromSchema(oldDBConn, "agent"); agtAdrSQL.append(buildSelectFieldList(agentFieldNames, "agent")); log.debug("MAIN: " + agtAdrSQL); agtAdrSQL.append(", "); GenericDBConversion.addNamesWithTableName(oldFieldNames, agentFieldNames, "agent"); List<String> addrFieldNames = getFieldNamesFromSchema(oldDBConn, "address"); log.debug(agtAdrSQL); agtAdrSQL.append(buildSelectFieldList(addrFieldNames, "address")); GenericDBConversion.addNamesWithTableName(oldFieldNames, addrFieldNames, "address"); // Create a Map FROM the full table/fieldname to the index in the resultset (start at 1 not zero) HashMap<String, Integer> indexFromNameMap = new HashMap<String, Integer>(); agtAdrSQL.append( " FROM agent INNER JOIN agentaddress ON agentaddress.AgentID = agent.AgentID INNER JOIN address ON agentaddress.AddressID = address.AddressID Order By agentaddress.AgentAddressID Asc"); // These represent the New columns of Agent Table // So the order of the names are for the new table // the names reference the old table String[] agentColumns = { "agent.AgentID", "agent.TimestampModified", "agent.AgentType", "agentaddress.JobTitle", "agent.FirstName", "agent.LastName", "agent.MiddleInitial", "agent.Title", "agent.Interests", "agent.Abbreviation", "agentaddress.Email", "agentaddress.URL", "agent.Remarks", "agent.TimestampCreated", // User/Security changes "agent.ParentOrganizationID" }; HashMap<Integer, AddressInfo> addressHash = new HashMap<Integer, AddressInfo>(); // Create a HashMap to track which IDs have been handled during the conversion process try { 
log.info("Hashing Address Ids"); Integer agentCnt = BasicSQLUtils.getCount(oldDBConn, "SELECT COUNT(AddressID) FROM address ORDER BY AddressID"); // So first we hash each AddressID and the value is set to 0 (false) Statement stmtX = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); ResultSet rsX = stmtX .executeQuery("SELECT AgentAddressID, AddressID FROM agentaddress ORDER BY AgentAddressID"); conv.setProcess(0, agentCnt); int cnt = 0; // Needed to add in case AgentAddress table wasn't used. while (rsX.next()) { int agentAddrId = rsX.getInt(1); int addrId = rsX.getInt(2); addressHash.put(addrId, new AddressInfo(agentAddrId, addrId)); if (cnt % 100 == 0) { conv.setProcess(0, cnt); } cnt++; } rsX.close(); stmtX.close(); conv.setProcess(0, 0); // Next we hash all the Agents and set their values to 0 (false) log.info("Hashing Agent Ids"); stmtX = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); agentCnt = BasicSQLUtils.getCount(oldDBConn, "SELECT COUNT(*) FROM agent ORDER BY AgentID"); rsX = stmtX.executeQuery( "SELECT AgentID, AgentType, LastName, Name, FirstName FROM agent ORDER BY AgentID"); conv.setProcess(0, agentCnt); cnt = 0; while (rsX.next()) { int agentId = rsX.getInt(1); agentHash.put(agentId, new AgentInfo(agentId, agentIDMapper.get(agentId), rsX.getByte(2), rsX.getString(3), rsX.getString(4), rsX.getString(5))); if (cnt % 100 == 0) { conv.setProcess(0, cnt); } cnt++; } rsX.close(); stmtX.close(); conv.setProcess(0, 0); // Now we map all the Agents to their Addresses AND // All the Addresses to their Agents. // // NOTE: A single Address Record May be used by more than one Agent so // we will need to Duplicate the Address records later // log.info("Cross Mapping Agents and Addresses"); String post = " FROM agentaddress WHERE AddressID IS NOT NULL and AgentID IS NOT NULL"; agentCnt = BasicSQLUtils.getCount(oldDBConn, "SELECT COUNT(AgentAddressID)" + post); stmtX = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); String asSQL = "SELECT AgentAddressID, AgentID" + post; log.debug(asSQL); rsX = stmtX.executeQuery(asSQL); conv.setProcess(0, agentCnt); cnt = 0; // Needed to add in case AgentAddress table wasn't used. while (rsX.next()) { int agentAddrId = rsX.getInt(1); int agentId = rsX.getInt(2); // /////////////////////// // Add Address to Agent // /////////////////////// AgentInfo agentInfo = agentHash.get(agentId); if (agentInfo == null) { String msg = "The AgentID [" + agentId + "] in AgentAddress table id[" + agentAddrId + "] desn't exist"; log.error(msg); tblWriter.logError(msg); } else { agentInfo.add(agentAddrId, agentAddrId); } if (cnt % 100 == 0) { conv.setProcess(0, cnt); } cnt++; } rsX.close(); stmtX.close(); //dumpInfo("beforeInfo.txt", addressHash); conv.setProcess(0, 0); // It OK if the address is NULL, but the Agent CANNOT be NULL log.info("Checking for null Agents"); agentCnt = BasicSQLUtils.getCount(oldDBConn, "SELECT COUNT(AgentAddressID) FROM agentaddress a where AddressID IS NOT NULL and AgentID is null"); // If there is a Single Record With a NULL Agent this would be BAD! 
if (agentCnt != null && agentCnt > 0) { showError("There are " + agentCnt + " AgentAddress Records where the AgentID is null and the AddressId IS NOT NULL!"); } // //////////////////////////////////////////////////////////////////////////////// // This does the part of AgentAddress where it has both an Address AND an Agent // //////////////////////////////////////////////////////////////////////////////// log.info(agtAdrSQL.toString()); Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); log.debug("AgentAddress: " + agtAdrSQL.toString()); // Create Map of column name to column index number int inx = 1; for (String fldName : oldFieldNames) { // log.info("["+fldName+"] "+inx+" ["+rsmd.getColumnName(inx)+"]"); indexFromNameMap.put(fldName, inx++); } Statement updateStatement = newDBConn.createStatement(); // Figure out certain column indexes we will need alter int agentIdInx = indexFromNameMap.get("agent.AgentID"); int agentTypeInx = indexFromNameMap.get("agent.AgentType"); int lastEditInx = indexFromNameMap.get("agent.LastEditedBy"); int nameInx = indexFromNameMap.get("agent.Name"); int lastNameInx = indexFromNameMap.get("agent.LastName"); int firstNameInx = indexFromNameMap.get("agent.FirstName"); int recordCnt = 0; ResultSet rs = stmt.executeQuery(agtAdrSQL.toString()); while (rs.next()) { int agentAddressId = rs.getInt(1); int agentId = rs.getInt(agentIdInx); String lastEditedBy = rs.getString(lastEditInx); AgentInfo agentInfo = agentHash.get(agentId); // Deal with Agent FirstName, LastName and Name] String lastName = rs.getString(lastNameInx); String name = rs.getString(nameInx); namePair.second = StringUtils.isNotEmpty(name) && StringUtils.isEmpty(lastName) ? name : lastName; namePair.first = rs.getString(firstNameInx); // Now tell the AgentAddress Mapper the New ID to the Old AgentAddressID if (shouldCreateMapTables) { agentAddrIDMapper.setShowLogErrors(false); if (debugAgents) log.info(String.format("Map - agentAddressId (Old) %d to Agent -> New ID: %d", agentAddressId, agentInfo.getNewAgentId())); if (agentAddrIDMapper.get(agentAddressId) == null) { agentAddrIDMapper.put(agentAddressId, agentInfo.getNewAgentId()); } else { log.debug(String.format("ERROR - agentAddressId %d Already mapped to New ID: %d", agentAddressId, agentInfo.getNewAgentId())); } agentAddrIDMapper.setShowLogErrors(true); } // Because of the old DB relationships we want to make sure we only add each agent // in one time // So start by checking the HashMap to see if it has already been added if (!agentInfo.wasAdded()) { agentInfo.setWasAdded(true); //agentInfo.addWrittenAddrOldId(addrInfo.getOldAddrId()); BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "agent", BasicSQLUtils.myDestinationServerType); // It has not been added yet so Add it StringBuilder sqlStr = new StringBuilder(); sqlStr.append("INSERT INTO agent "); sqlStr.append( "(AgentID, DivisionId, TimestampModified, AgentType, JobTitle, FirstName, LastName, MiddleInitial, "); sqlStr.append("Title, Interests, Abbreviation, Email, URL, Remarks, TimestampCreated, "); sqlStr.append("ParentOrganizationID, CreatedByAgentID, ModifiedByAgentID, Version)"); sqlStr.append(" VALUES ("); for (int i = 0; i < agentColumns.length; i++) { if (i > 0) { sqlStr.append(","); } if (i == 0) { if (debugAgents) log.info("Adding: " + agentColumns[i] + " New ID: " + agentInfo.getNewAgentId()); sqlStr.append(agentInfo.getNewAgentId()); sqlStr.append(","); sqlStr.append(conv.getCurDivisionID()); } else if 
(agentColumns[i].equals("agent.ParentOrganizationID")) { Object obj = rs.getObject(indexFromNameMap.get(agentColumns[i])); if (obj != null) { int oldId = rs.getInt(agentColumns[i]); Integer newID = agentIDMapper.get(oldId); if (newID == null) { log.error("Couldn't map ParentOrganizationID [" + oldId + "]"); } sqlStr.append(BasicSQLUtils.getStrValue(newID)); } else { sqlStr.append("NULL"); } } else if (agentColumns[i].equals("agent.LastName") || agentColumns[i].equals("LastName")) { int lastNameLen = 120; String lstName = namePair.second; lstName = lstName == null ? null : lstName.length() <= lastNameLen ? lstName : lstName.substring(0, lastNameLen); sqlStr.append(BasicSQLUtils.getStrValue(lstName)); } else if (agentColumns[i].equals("agent.FirstName") || agentColumns[i].equals("FirstName")) { sqlStr.append(BasicSQLUtils.getStrValue(namePair.first)); } else { inx = indexFromNameMap.get(agentColumns[i]); sqlStr.append(BasicSQLUtils.getStrValue(rs.getObject(inx))); } } sqlStr.append("," + conv.getCreatorAgentIdForAgent(lastEditedBy) + "," + conv.getModifiedByAgentIdForAgent(lastEditedBy) + ",0"); sqlStr.append(")"); try { if (debugAgents) { log.info(sqlStr.toString()); } updateStatement.executeUpdate(sqlStr.toString(), Statement.RETURN_GENERATED_KEYS); Integer newAgentId = BasicSQLUtils.getInsertedId(updateStatement); if (newAgentId == null) { throw new RuntimeException("Couldn't get the Agent's inserted ID"); } //conv.addAgentDisciplineJoin(newAgentId, conv.getDisciplineId()); } catch (SQLException e) { log.error(sqlStr.toString()); log.error("Count: " + recordCnt); e.printStackTrace(); log.error(e); System.exit(0); throw new RuntimeException(e); } } BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "agent", BasicSQLUtils.myDestinationServerType); if (recordCnt % 250 == 0) { log.info("AgentAddress Records: " + recordCnt); } recordCnt++; } // while BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "address", BasicSQLUtils.myDestinationServerType); log.info("AgentAddress Records: " + recordCnt); rs.close(); stmt.close(); // //////////////////////////////////////////////////////////////////////////////// // This does the part of AgentAddress where it has JUST Agent // //////////////////////////////////////////////////////////////////////////////// log.info("******** Doing AgentAddress JUST Agent"); int newRecordsAdded = 0; StringBuilder justAgentSQL = new StringBuilder(); justAgentSQL.setLength(0); justAgentSQL.append("SELECT "); justAgentSQL.append(buildSelectFieldList(agentAddrFieldNames, "agentaddress")); justAgentSQL.append(", "); getFieldNamesFromSchema(oldDBConn, "agent", agentFieldNames); justAgentSQL.append(buildSelectFieldList(agentFieldNames, "agent")); justAgentSQL.append( " FROM agent INNER JOIN agentaddress ON agentaddress.AgentID = agent.AgentID WHERE agentaddress.AddressID IS NULL ORDER BY agentaddress.AgentAddressID ASC"); log.info(justAgentSQL.toString()); stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); rs = stmt.executeQuery(justAgentSQL.toString()); oldFieldNames.clear(); GenericDBConversion.addNamesWithTableName(oldFieldNames, agentAddrFieldNames, "agentaddress"); GenericDBConversion.addNamesWithTableName(oldFieldNames, agentFieldNames, "agent"); indexFromNameMap.clear(); inx = 1; for (String fldName : oldFieldNames) { indexFromNameMap.put(fldName, inx++); } agentIdInx = indexFromNameMap.get("agent.AgentID"); lastEditInx = indexFromNameMap.get("agent.LastEditedBy"); agentTypeInx = 
indexFromNameMap.get("agent.AgentType"); recordCnt = 0; while (rs.next()) { byte agentType = rs.getByte(agentTypeInx); int agentAddressId = rs.getInt(1); int agentId = rs.getInt(agentIdInx); String lastEditedBy = rs.getString(lastEditInx); AgentInfo agentInfo = agentHash.get(agentId); // Now tell the AgentAddress Mapper the New ID to the Old AgentAddressID if (shouldCreateMapTables) { agentAddrIDMapper.put(agentAddressId, agentInfo.getNewAgentId()); } recordCnt++; if (!agentInfo.wasAdded()) { agentInfo.setWasAdded(true); BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "agent", BasicSQLUtils.myDestinationServerType); // Create Agent StringBuilder sqlStr = new StringBuilder("INSERT INTO agent "); sqlStr.append( "(AgentID, DivisionID, TimestampModified, AgentType, JobTitle, FirstName, LastName, MiddleInitial, Title, Interests, "); sqlStr.append("Abbreviation, Email, URL, Remarks, TimestampCreated, ParentOrganizationID, "); sqlStr.append("CreatedByAgentID, ModifiedByAgentID, Version)"); sqlStr.append(" VALUES ("); for (int i = 0; i < agentColumns.length; i++) { if (i > 0) sqlStr.append(","); if (i == 0) { if (debugAgents) log.info(agentColumns[i]); sqlStr.append(agentInfo.getNewAgentId()); sqlStr.append(","); sqlStr.append(conv.getCurDivisionID()); } else if (i == lastEditInx) { // Skip the field } else if (agentColumns[i].equals("agent.LastName")) { if (debugAgents) log.info(agentColumns[i]); int srcColInx = agentType != 1 ? nameInx : lastNameInx; String lName = BasicSQLUtils.getStrValue(rs.getObject(srcColInx)); sqlStr.append(lName); } else { if (debugAgents) log.info(agentColumns[i]); inx = indexFromNameMap.get(agentColumns[i]); sqlStr.append(BasicSQLUtils.getStrValue(rs.getObject(inx))); } } sqlStr.append("," + conv.getCreatorAgentIdForAgent(lastEditedBy) + "," + conv.getModifiedByAgentIdForAgent(lastEditedBy) + ", 0"); // '0' is Version sqlStr.append(")"); try { if (debugAgents) { log.info(sqlStr.toString()); } updateStatement.executeUpdate(sqlStr.toString(), Statement.RETURN_GENERATED_KEYS); Integer newAgentId = BasicSQLUtils.getInsertedId(updateStatement); if (newAgentId == null) { throw new RuntimeException("Couldn't get the Agent's inserted ID"); } newRecordsAdded++; } catch (SQLException e) { log.error(sqlStr.toString()); log.error("Count: " + recordCnt); e.printStackTrace(); log.error(e); throw new RuntimeException(e); } } if (recordCnt % 250 == 0) { log.info("AgentAddress (Agent Only) Records: " + recordCnt); } } // while log.info("AgentAddress (Agent Only) Records: " + recordCnt + " newRecordsAdded " + newRecordsAdded); rs.close(); updateStatement.close(); conv.setProcess(0, BasicSQLUtils.getNumRecords(oldDBConn, "agent")); conv.setDesc("Adding Agents"); // Now Copy all the Agents that where part of an Agent Address Conversions stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); rs = stmt.executeQuery("SELECT AgentID FROM agent"); recordCnt = 0; while (rs.next()) { Integer agentId = rs.getInt(1); AgentInfo agentInfo = agentHash.get(agentId); if (agentInfo == null || !agentInfo.wasAdded()) { copyAgentFromOldToNew(agentId, agentIDMapper); } recordCnt++; if (recordCnt % 50 == 0) { conv.setProcess(recordCnt); } } conv.setProcess(recordCnt); BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "agent", BasicSQLUtils.myDestinationServerType); //------------------------------------------------------------ // Now Copy all the Agents that where missed //------------------------------------------------------------ 
conv.setProcess(0); stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); rs = stmt.executeQuery("SELECT AgentID FROM agent"); recordCnt = 0; while (rs.next()) { Integer agentId = rs.getInt(1); Integer newId = agentIDMapper.get(agentId); if (newId != null) { Integer isThere = BasicSQLUtils.getCount(newDBConn, "SELECT COUNT(*) FROM agent WHERE AgentID = " + newId); if (isThere == null || isThere == 0) { copyAgentFromOldToNew(agentId, agentIDMapper); } } else { tblWriter.logError("Mapping missing for old Agent id[" + agentId + "]"); } recordCnt++; if (recordCnt % 50 == 0) { conv.setProcess(recordCnt); } } conv.setProcess(recordCnt); if (doFixAgents) { fixAgentsLFirstLastName(); } //---------------------------------------------------------------------------------------------------------------------------------- // Now loop through the Agents hash and write the addresses. If the address has already been written then it will need to be // duplicate in the second step. //---------------------------------------------------------------------------------------------------------------------------------- StringBuilder sqlStr1 = new StringBuilder("INSERT INTO address "); sqlStr1.append( "(TimestampModified, Address, Address2, City, State, Country, PostalCode, Remarks, TimestampCreated, "); sqlStr1.append( "IsPrimary, IsCurrent, Phone1, Phone2, Fax, RoomOrBuilding, PositionHeld, AgentID, CreatedByAgentID, ModifiedByAgentID, Version, Ordinal)"); sqlStr1.append(" VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"); PreparedStatement pStmt = newDBConn.prepareStatement(sqlStr1.toString(), Statement.RETURN_GENERATED_KEYS); // 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 String addrSQL = "SELECT a.TimestampModified, a.Address, a.City, a.State, a.Country, a.Postalcode, a.Remarks, a.TimestampCreated, aa.Phone1, aa.Phone2, aa.Fax, aa.RoomOrBuilding , aa.IsCurrent, a.LastEditedBy, aa.JobTitle " + "FROM address AS a " + "INNER JOIN agentaddress AS aa ON a.AddressID = aa.AddressID WHERE aa.AgentAddressID = %d"; BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "address", BasicSQLUtils.myDestinationServerType); int fixCnt = 0; for (AgentInfo agentInfo : agentHash.values()) { HashMap<Integer, Integer> addrs = agentInfo.getAddrs(); for (Integer oldAgentAddrId : addrs.keySet()) { String adrSQL = String.format(addrSQL, oldAgentAddrId); rs = stmt.executeQuery(adrSQL); if (!rs.next()) { rs.close(); continue; } String lastEditedBy = rs.getString(14); String posHeld = rs.getString(15); if (posHeld != null && posHeld.length() > 32) { posHeld = posHeld.substring(0, 32); } String addr1 = rs.getString(2); String addr2 = null; if (addr1 != null && addr1.length() > 255) { addr1 = addr1.substring(0, 255); addr2 = addr1.substring(255); } pStmt.setTimestamp(1, rs.getTimestamp(1)); pStmt.setString(2, addr1); pStmt.setString(3, addr2); // Address 2 pStmt.setString(4, rs.getString(3)); pStmt.setString(5, rs.getString(4)); pStmt.setString(6, rs.getString(5)); pStmt.setString(7, rs.getString(6)); pStmt.setString(8, rs.getString(7)); pStmt.setTimestamp(9, rs.getTimestamp(8)); pStmt.setBoolean(10, rs.getByte(13) != 0); pStmt.setBoolean(11, rs.getByte(13) != 0); pStmt.setString(12, rs.getString(9)); pStmt.setString(13, rs.getString(10)); pStmt.setString(14, rs.getString(11)); pStmt.setString(15, rs.getString(12)); pStmt.setString(16, posHeld); pStmt.setInt(17, agentInfo.getNewAgentId()); pStmt.setInt(18, conv.getCreatorAgentIdForAgent(lastEditedBy)); pStmt.setInt(19, 
conv.getModifiedByAgentIdForAgent(lastEditedBy)); pStmt.setInt(20, 0); pStmt.setInt(21, agentInfo.addrOrd); Integer newID = BasicSQLUtils.getInsertedId(pStmt); log.debug(String.format("Saved New Id %d", newID)); //agentInfo.addWrittenAddrOldId(addrInfo.getOldAddrId()); agentInfo.addrOrd++; rs.close(); try { if (debugAgents) { log.info(sqlStr1.toString()); } if (pStmt.executeUpdate() != 1) { log.error("Error inserting address.)"); } //addrInfo.setWasAdded(true); } catch (SQLException e) { log.error(sqlStr1.toString()); log.error("Count: " + recordCnt); e.printStackTrace(); log.error(e); throw new RuntimeException(e); } } } log.info(String.format("Added %d new Addresses", fixCnt)); pStmt.close(); //------------------------------------------------------------------ // Step #2 - Now duplicate the addresses for the agents that had // already been written to the database //------------------------------------------------------------------ /*fixCnt = 0; for (AgentInfo agentInfo : agentHash.values()) { for (Integer oldAgentAddrId : agentInfo.getUnwrittenOldAddrIds()) { Integer oldAddrId = agentInfo.getAddrs().get(oldAgentAddrId); //AddressInfo addrInfo = addressHash.get(oldAddrId); System.out.println(String.format("%d %d", oldAgentAddrId, oldAddrId)); //duplicateAddress(newDBConn, addrInfo.getOldAddrId(), addrInfo.getNewAddrId(), agentInfo.getNewAgentId()); } } log.info(String.format("Duplicated %d new Addresses", fixCnt)); */ //---------------------------------------------------------------------------------------------------------------------------------- // Now loop through the Agents hash and write the addresses. If the address has already been written then it will need to be // duplicate in the second step. //---------------------------------------------------------------------------------------------------------------------------------- /*BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "address", BasicSQLUtils.myDestinationServerType); sqlStr1 = new StringBuilder("INSERT INTO address "); sqlStr1.append("(TimestampModified, Address, Address2, City, State, Country, PostalCode, Remarks, TimestampCreated, "); sqlStr1.append("IsPrimary, IsCurrent, Phone1, Phone2, Fax, RoomOrBuilding, AgentID, CreatedByAgentID, ModifiedByAgentID, Version, Ordinal)"); sqlStr1.append(" VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"); pStmt = newDBConn.prepareStatement(sqlStr1.toString()); // 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 String addrOnlySQL = "SELECT aa.TimestampModified, a.Address, a.City, a.State, a.Country, a.Postalcode, a.Remarks, aa.TimestampCreated, aa.Phone1, aa.Phone2, aa.Fax, aa.RoomOrBuilding , aa.IsCurrent, a.LastEditedBy, aa.AgentID " + "FROM agentaddress AS aa " + "LEFT JOIN address AS a ON a.AddressID = aa.AddressID " + "WHERE a.addressID IS NULL AND aa.AgentID IS NOT NULL"; fixCnt = 0; rs = stmt.executeQuery(addrOnlySQL); while (rs.next()) { int agentId = rs.getInt(15); int newAgentId = agentIDMapper.get(agentId); String lastEditedBy = rs.getString(14); pStmt.setTimestamp(1, rs.getTimestamp(1)); pStmt.setString(2, rs.getString(2)); pStmt.setString(3, null); // Address 2 pStmt.setString(4, rs.getString(3)); pStmt.setString(5, rs.getString(4)); pStmt.setString(6, rs.getString(5)); pStmt.setString(7, rs.getString(6)); pStmt.setString(8, rs.getString(7)); pStmt.setTimestamp(9, rs.getTimestamp(8)); pStmt.setBoolean(10, rs.getByte(13) != 0); pStmt.setBoolean(11, rs.getByte(13) != 0); pStmt.setString(12, rs.getString(9)); pStmt.setString(13, rs.getString(10)); pStmt.setString(14, 
rs.getString(11)); pStmt.setString(15, rs.getString(12)); pStmt.setInt(16, newAgentId); pStmt.setInt(17, conv.getCreatorAgentIdForAgent(lastEditedBy)); pStmt.setInt(18, conv.getModifiedByAgentIdForAgent(lastEditedBy)); pStmt.setInt(19, 0); pStmt.setInt(20, 1); try { if (debugAgents) { log.info(sqlStr1.toString()); } if (pStmt.executeUpdate() != 1) { log.error("Error inserting address.)"); } else { fixCnt++; } } catch (SQLException e) { log.error(sqlStr1.toString()); log.error("Count: " + recordCnt); e.printStackTrace(); log.error(e); throw new RuntimeException(e); } } rs.close(); log.info(String.format("Added %d new Addresses", fixCnt)); pStmt.close();*/ stmt.close(); //dumpInfo("afterInfo.txt", addressHash); BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "agent", BasicSQLUtils.myDestinationServerType); return true; } catch (SQLException ex) { log.error(ex); ex.printStackTrace(); System.exit(0); throw new RuntimeException(ex); } }
From source file: com.wabacus.system.dataset.sqldataset.AbsGetAllDataSetBySQL.java
protected ResultSet executeQuery(ReportRequest rrequest, ReportBean rbean, String datasource, String sql)
        throws SQLException {
    if (Config.show_sql) {
        log.info("Execute sql: " + sql);
    }
    if (this.isPreparedStatement()) {
        PreparedStatement pstmt = rrequest.getConnection(datasource).prepareStatement(sql,
                ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        if (lstConditions.size() > 0) {
            AbsDatabaseType dbtype = rrequest.getDbType(datasource);
            for (int j = 0; j < lstConditions.size(); j++) {
                if (Config.show_sql) {
                    log.info("param" + (j + 1) + "=" + lstConditions.get(j));
                }
                lstConditionsTypes.get(j).setPreparedStatementValue(j + 1, lstConditions.get(j), pstmt, dbtype);
            }
        }
        rrequest.addUsedStatement(pstmt);
        return pstmt.executeQuery();
    } else {
        Statement stmt = rrequest.getConnection(datasource).createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        rrequest.addUsedStatement(stmt);
        return stmt.executeQuery(sql);
    }
}
From source file: br.org.indt.ndg.server.client.TemporaryOpenRosaBussinessDelegate.java
/********** Downloading OpenRosa Surveys List and specific surveys **********/
public String getFormattedSurveyAvailableToDownloadList(String imei) {
    String result = null;
    PreparedStatement listSurveysToDownloadStmt = null;
    Connection conn = null;
    try {
        conn = getDbConnection();
        listSurveysToDownloadStmt = conn.prepareStatement(SELECT_SURVEYS_AVAILABLE_FOR_USER_STATEMENT,
                ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        listSurveysToDownloadStmt.setString(1, imei);
        ResultSet results = listSurveysToDownloadStmt.executeQuery();
        OpenRosaSurveysList list = new OpenRosaSurveysList(results);
        result = list.toString();
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        try {
            listSurveysToDownloadStmt.close();
            conn.close();
        } catch (Exception e) {
        }
    }
    return result;
}
From source file: no.polaric.aprsdb.MyDBSession.java
/**
 * Return a list of the last n APRS packets from a given call.
 *
 * @param src from callsign
 * @param n number of elements of list
 */
public DbList<AprsPacket> getAprsPackets(String src, int n) throws java.sql.SQLException {
    _log.debug("MyDbSession", "getAprsPackets: " + src);
    PreparedStatement stmt = getCon().prepareStatement(
            " SELECT * FROM \"AprsPacket\"" + " WHERE src=?" + " ORDER BY time DESC LIMIT ?",
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    stmt.setString(1, src);
    stmt.setInt(2, n);

    return new DbList(stmt.executeQuery(), rs -> {
        AprsPacket p = new AprsPacket();
        String path = rs.getString("path");
        String ipath = rs.getString("ipath");

        p.source = _api.getChanManager().get(rs.getString("channel"));
        p.from = rs.getString("src");
        p.to = rs.getString("dest");
        p.via = (path == null ? "" : rs.getString("path") + ", ") + rs.getString("ipath");
        p.report = rs.getString("info");
        p.time = rs.getTimestamp("time");
        return p;
    });
}
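Several of the examples above pair TYPE_SCROLL_INSENSITIVE with CONCUR_READ_ONLY so the cursor can be repositioned instead of re-queried. A small paging sketch along those lines, assuming a caller-supplied Connection and query (not taken from any of the sources above):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ScrollPaging {
    // Prints one page of pageSize rows starting at the 1-based row number offset.
    public static void printPage(Connection con, String sql, int offset, int pageSize) throws SQLException {
        try (Statement stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
             ResultSet rs = stmt.executeQuery(sql)) {
            if (!rs.absolute(offset)) {     // jump straight to the first row of the page
                return;                     // the result has fewer rows than the requested offset
            }
            int printed = 0;
            do {
                System.out.println(rs.getString(1));
                printed++;
            } while (printed < pageSize && rs.next());
        }
    }
}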