List of usage examples for java.sql.ResultSet.first()

boolean first() throws SQLException;

Moves the cursor to the first row in this ResultSet object.
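Before the project examples below, here is a minimal, self-contained sketch of typical usage. The JDBC URL ("jdbc:h2:mem:demo", which assumes the H2 driver is on the classpath) and the query are placeholders; the relevant points are that first() is only guaranteed to work on a scrollable result set (TYPE_SCROLL_INSENSITIVE or TYPE_SCROLL_SENSITIVE) and that it returns false when the result set contains no rows.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ResultSetFirstExample {
    public static void main(String[] args) throws SQLException {
        // Placeholder connection details; adjust for your own database/driver.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
             // first() requires a scrollable result set type.
             Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                                                   ResultSet.CONCUR_READ_ONLY);
             ResultSet rs = stmt.executeQuery("SELECT 1")) {
            if (rs.first()) {                  // false if the result set is empty
                System.out.println(rs.getInt(1));
            } else {
                System.out.println("No rows returned");
            }
        }
    }
}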
From source file:net.starschema.clouddb.jdbc.list.TreeBuilder.java
/**
 * Makes a JDBC call to get the columns of the specified table, and project,
 * and schema if it finds out that this query has already been run,
 * uses the stored results instead
 *
 * @param srcTable - the SourceTable name which columns we want to know<br>
 *     <li>if it's project has been set we'll use it
 *     <li>if it's schema has been set we'll use it
 * @return - The columns of the Table
 */
@SuppressWarnings("rawtypes")
List<String> GetColumns(SourceTable srcTable) {
    // we make jdbc calls to get columns
    this.logger.debug("making a jdbc call to get the columns");
    List<String> Columns = new ArrayList<String>();
    try {
        // Try to get result from container first
        this.logger.debug("Try to get result from container first");
        Class[] args = new Class[4];
        args[0] = String.class;
        args[1] = String.class;
        args[2] = String.class;
        args[3] = String.class;
        Method method = null;
        try {
            this.logger.debug("getting the method: MetaData.getColumns");
            method = this.connection.getMetaData().getClass().getMethod("getColumns", args);
        } catch (SecurityException e) {
            // Should not occur
            this.logger.warn("failed to get the method getColumns " + e);
        } catch (NoSuchMethodException e) {
            // Should not occur
            this.logger.warn("failed to get the method getColumns " + e);
        }
        List<Parameter> params = new ArrayList<Parameter>();
        String projectName = null; // to be the 1st parameter of getColumns
        if (srcTable.project != null) {
            // if there's a projectname stored in the srctable
            projectName = srcTable.getProject().replace(".", "_").replace(":", "__");
        } else {
            // else we use the connections default
            projectName = this.connection.getCatalog();
        }
        String dataset;
        if (srcTable.dataset != null) {
            dataset = srcTable.dataset;
        } else {
            dataset = "%";
        }
        params.add(new Parameter(projectName));
        params.add(new Parameter(dataset));
        params.add(new Parameter(srcTable.getName()));
        params.add(new Parameter("%"));
        ResultSet res = this.callContainer.getresult(method, params);
        if (res == null) {
            res = this.connection.getMetaData().getColumns(projectName, dataset, srcTable.getName(), "%");
            this.callContainer.AddCall(res, method, params);
        }
        res.first();
        // Iterating through the results
        while (!res.isAfterLast()) {
            Columns.add(res.getString(4));
            res.next();
        }
    } catch (SQLException e) {
        // should not happen
        this.logger.warn("failed to get columns for the table: \"" + srcTable.getName()
                + "\" using * instead of columns", e);
        Columns.add("*");
    }
    return Columns;
}
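This example rewinds with first() and then walks the cursor with isAfterLast()/next(). The two helper methods below contrast that pattern with the conventional while (rs.next()) walk; the class, method, and list names are illustrative, column index 4 is COLUMN_NAME in a DatabaseMetaData.getColumns() result, and both methods assume a scrollable ResultSet (beforeFirst() and first() are not guaranteed on TYPE_FORWARD_ONLY).

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

class ColumnReader {
    /** Rewind-and-test pattern used above: first(), then isAfterLast()/next(). */
    static List<String> readWithFirst(ResultSet rs) throws SQLException {
        List<String> columns = new ArrayList<String>();
        if (rs.first()) {                       // false when the result set is empty
            while (!rs.isAfterLast()) {
                columns.add(rs.getString(4));   // COLUMN_NAME in getColumns() results
                rs.next();
            }
        }
        return columns;
    }

    /** Conventional walk: next() both advances and reports whether a row exists. */
    static List<String> readWithNext(ResultSet rs) throws SQLException {
        List<String> columns = new ArrayList<String>();
        rs.beforeFirst();                       // rewind in case the cursor has moved
        while (rs.next()) {
            columns.add(rs.getString(4));
        }
        return columns;
    }
}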
From source file:org.wso2.carbon.ml.model.internal.DatabaseHandler.java
/**
 * This helper class is used to extract model execution start/end time
 *
 * @param modelId
 * @param query
 * @return
 * @throws DatabaseHandlerException
 */
public long getModelExecutionTime(String modelId, String query) throws DatabaseHandlerException {
    Connection connection = null;
    ResultSet result = null;
    PreparedStatement statement = null;
    try {
        connection = dataSource.getConnection();
        statement = connection.prepareStatement(query);
        statement.setString(1, modelId);
        result = statement.executeQuery();
        if (result.first()) {
            Timestamp time = result.getTimestamp(1);
            if (time != null) {
                return time.getTime();
            }
            return 0;
        } else {
            throw new DatabaseHandlerException("No timestamp data associated with model id: " + modelId);
        }
    } catch (SQLException e) {
        throw new DatabaseHandlerException(
                " An error has occurred while reading execution time from the database: " + e.getMessage(), e);
    } finally {
        // closing database resources
        MLDatabaseUtils.closeDatabaseResources(connection, statement, result);
    }
}
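Here result.first() doubles as the existence check for a single-row query. Note that connection.prepareStatement(query) with no extra arguments yields TYPE_FORWARD_ONLY by default, and the JDBC contract only guarantees first() on scrollable result sets (some drivers tolerate it anyway). The sketch below requests a scrollable result set explicitly; the class name, query text, and column position are placeholders, not the project's code.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;

class ExecutionTimeReader {
    /** Illustrative single-row lookup relying on first() as the existence test. */
    static long readTimestampMillis(Connection connection, String query, String modelId) throws SQLException {
        // Ask for a scrollable result set, since first() is only guaranteed on TYPE_SCROLL_*.
        try (PreparedStatement statement = connection.prepareStatement(query,
                ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
            statement.setString(1, modelId);
            try (ResultSet result = statement.executeQuery()) {
                if (result.first()) {           // false -> the query matched no rows
                    Timestamp time = result.getTimestamp(1);
                    return time != null ? time.getTime() : 0L;
                }
                return 0L;
            }
        }
    }
}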
From source file:edu.ku.brc.specify.conversion.IdTableMapper.java
/**
 * Map all the old IDs to new IDs
 * @param sqlArg the string to use to fill the map
 */
public void mapAllIds(final String sqlArg) {
    log.debug("mapAllIds with sql: " + sqlArg);
    this.sql = sqlArg;
    int mappingCount = getMapCount(mapTableName);
    wasEmpty = mappingCount == 0;
    if (doDelete || mappingCount == 0) {
        BasicSQLUtils.deleteAllRecordsFromTable(oldConn, mapTableName, BasicSQLUtils.myDestinationServerType);
        if (frame != null) {
            String dMsg = "Mapping " + mapTableName;
            frame.setDesc(dMsg);
            log.debug(dMsg);
        }
        try {
            log.debug("Executing: " + sql);
            PreparedStatement pStmt = oldConn.prepareStatement("INSERT INTO " + mapTableName + " VALUES (?,?)");
            Statement stmtOld = oldConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            ResultSet rs = stmtOld.executeQuery(sql);
            if (rs.last()) {
                if (frame != null) {
                    frame.setProcess(0, rs.getRow());
                }
            }
            if (rs.first()) {
                int newIndex = initialIndex;
                do {
                    int oldIndex = rs.getInt(1);
                    //log.debug("map "+mapTableName+" old[" + oldIndex + "] new [" + newIndex +"]");
                    if (indexIncremeter != null) {
                        newIndex = indexIncremeter.getNextIndex();
                    }
                    pStmt.setInt(1, oldIndex); // Old Index
                    pStmt.setInt(2, newIndex); // New Index
                    if (pStmt.executeUpdate() != 1) {
                        String msg = String.format("Error writing to Map table[%s] old: %d new: %d",
                                mapTableName, oldIndex, newIndex);
                        log.error(msg);
                        throw new RuntimeException(msg);
                    }
                    newIndex++; // incrementing doesn't matter when there is an indexIncremeter
                    if (frame != null) {
                        if (newIndex % 1000 == 0) {
                            frame.setProcess(newIndex);
                        }
                    } else {
                        if (newIndex % 2000 == 0) {
                            log.debug("Mapped " + newIndex + " records from " + tableName);
                        }
                    }
                } while (rs.next());
                log.info("Mapped " + newIndex + " records from " + tableName);
            } else {
                log.info("No records to map in " + tableName);
            }
            rs.close();
            stmtOld.close();
            pStmt.close();
        } catch (SQLException ex) {
            ex.printStackTrace();
            edu.ku.brc.af.core.UsageTracker.incrSQLUsageCount();
            edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(IdTableMapper.class, ex);
            log.error(ex);
            throw new RuntimeException(ex);
        }
    } else {
        log.debug("Skipping the build of mapper: " + mapTableName);
    }
    if (frame != null) {
        frame.setProcess(0, 0);
    }
}
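This mapper sizes its progress bar by jumping to the end with last()/getRow(), then rewinds with first() and walks the rows in a do/while. A condensed sketch of that count-then-rewind pattern follows; the class name and the processing placeholder are illustrative, and the pattern again requires a scrollable result set.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

class ScrollCountExample {
    /** Count the rows of a query up front, then rewind and process them. */
    static int process(Connection conn, String sql) throws SQLException {
        try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                                                   ResultSet.CONCUR_READ_ONLY);
             ResultSet rs = stmt.executeQuery(sql)) {
            int total = 0;
            if (rs.last()) {          // jump to the last row...
                total = rs.getRow();  // ...its row number is the total row count
            }
            if (rs.first()) {         // rewind to the first row
                do {
                    // process the current row, e.g. rs.getInt(1)
                } while (rs.next());
            }
            return total;
        }
    }
}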
From source file:com.redoute.datamap.dao.impl.DatamapDAO.java
@Override
public Datamap findDatamapByKey(String id) {
    Datamap result = null;
    final String query = "SELECT * FROM datamap WHERE id = ?";

    Connection connection = this.databaseSpring.connect();
    try {
        PreparedStatement preStat = connection.prepareStatement(query);
        try {
            preStat.setString(1, id);
            ResultSet resultSet = preStat.executeQuery();
            try {
                if (resultSet.first()) {
                    result = loadDatamapFromResultSet(resultSet);
                }
            } catch (SQLException exception) {
                Logger.log(DatamapDAO.class.getName(), Level.ERROR, exception.toString());
            } finally {
                resultSet.close();
            }
        } catch (SQLException exception) {
            Logger.log(DatamapDAO.class.getName(), Level.ERROR, exception.toString());
        } finally {
            preStat.close();
        }
    } catch (SQLException exception) {
        Logger.log(DatamapDAO.class.getName(), Level.ERROR, exception.toString());
    } finally {
        try {
            if (connection != null) {
                connection.close();
            }
        } catch (SQLException e) {
            Logger.log(DatamapDAO.class.getName(), Level.WARN, e.toString());
        }
    }
    return result;
}
From source file:edu.ku.brc.specify.conversion.IdTableMapper.java
/**
 * Map all the old IDs to new IDs
 * @param sqlArg the string to use to fill the map
 */
public void mapAllIdsNoIncrement(final String sqlArg, final Integer numRecords) {
    log.debug("mapAllIdsNoIncrement with sql: " + sqlArg);
    this.sql = sqlArg;
    int mappingCount = numRecords != null ? numRecords : getMapCount(mapTableName);
    wasEmpty = mappingCount == 0;
    if (doDelete || mappingCount == 0) {
        BasicSQLUtils.deleteAllRecordsFromTable(oldConn, mapTableName, BasicSQLUtils.myDestinationServerType);
        if (frame != null) {
            String dMsg = "Mapping " + mapTableName;
            frame.setDesc(dMsg);
            log.debug(dMsg);
        }
        try {
            log.debug("Executing: " + sql);
            PreparedStatement pStmt = oldConn.prepareStatement("INSERT INTO " + mapTableName + " VALUES (?,?)");
            Statement stmtOld = oldConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            ResultSet rs = stmtOld.executeQuery(sql);
            if (rs.last()) {
                if (frame != null) {
                    frame.setProcess(0, rs.getRow());
                }
            }
            int cnt = 0;
            if (rs.first()) {
                do {
                    int oldIndex = rs.getInt(1);
                    int toIndex = rs.getInt(2);
                    pStmt.setInt(1, oldIndex); // Old Index
                    pStmt.setInt(2, toIndex); // New Index
                    if (pStmt.executeUpdate() != 1) {
                        String msg = String.format("Error writing to Map table[%s] old: %d new: %d",
                                mapTableName, oldIndex, toIndex);
                        log.error(msg);
                        throw new RuntimeException(msg);
                    }
                    if (frame != null) {
                        if (cnt % 1000 == 0) {
                            frame.setProcess(cnt);
                        }
                    } else {
                        if (cnt % 2000 == 0) {
                            log.debug("Mapped " + cnt + " records from " + tableName);
                        }
                    }
                    cnt++;
                } while (rs.next());
                log.info("Mapped " + cnt + " records from " + tableName);
            } else {
                log.info("No records to map in " + tableName);
            }
            rs.close();
            stmtOld.close();
            pStmt.close();
        } catch (SQLException ex) {
            ex.printStackTrace();
            edu.ku.brc.af.core.UsageTracker.incrSQLUsageCount();
            edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(IdTableMapper.class, ex);
            log.error(ex);
            throw new RuntimeException(ex);
        }
    } else {
        log.debug("Skipping the build of mapper: " + mapTableName);
    }
    if (frame != null) {
        frame.setProcess(0, 0);
    }
}
From source file:com.darkenedsky.reddit.traders.RedditTraders.java
/**
 * Check if two users are banned, using the appropriate method defined in
 * the checkBan parameter
 *
 * @param author
 *            first user to check
 * @param tradeWith
 *            second user to check
 * @param subreddit
 *            subreddit we are checking in (if not ALL)
 * @param checkBan
 *            checkban mode on the subreddit
 * @return true if either user is banned
 * @throws Exception
 */
public boolean checkBans(String author, String tradeWith, String subreddit, int checkBan) throws Exception {
    // checking is turned off
    switch (checkBan) {
    case 1:
        return this.isBanned(author, tradeWith, subreddit);
    case 2:
        PreparedStatement ps = config.getJDBC().prepareStatement("select subreddit from subreddits;");
        ResultSet subs = ps.executeQuery();
        if (subs.first()) {
            while (true) {
                if (isBanned(author, tradeWith, subs.getString("subreddit"))) {
                    subs.close();
                    return true;
                }
                if (subs.isLast()) {
                    break;
                }
                subs.next();
            }
        }
        subs.close();
        return false;
    // checking is turned off
    default:
        return false;
    }
}
From source file:com.darkenedsky.reddit.traders.RedditTraders.java
/**
 * Internal method to do the actual setting of flair
 *
 * @param user
 *            Redditor's username
 * @param subreddit
 *            The subreddit being traded on
 * @param doTextFlair
 *            'true' if the TEXTFLAIR option is turned on for this subreddit
 *
 * @throws MalformedURLException
 * @throws IOException
 * @throws ParseException
 * @throws SQLException
 */
public void setUserFlair(String user, String subreddit, boolean doTextFlair)
        throws SQLException, MalformedURLException, IOException, ParseException {
    int trades = 0;
    String flair = null;
    PreparedStatement ps1 = config.getJDBC().prepareStatement(
            "select * from get_trade_count_with_countall((select redditorid from redditors where username ilike ?), (select redditid from subreddits where subreddit ilike ?));");
    ps1.setString(1, user);
    ps1.setString(2, subreddit);
    ResultSet set1 = ps1.executeQuery();
    if (set1.first()) {
        trades = set1.getInt("get_trade_count_with_countall");
    }
    set1.close();

    PreparedStatement ps2 = config.getJDBC().prepareStatement("select * from get_flair_class(?,?,?);");
    ps2.setString(1, user);
    ps2.setString(2, subreddit);
    ps2.setBoolean(3, isModerator(user, subreddit));
    ResultSet set2 = ps2.executeQuery();
    if (set2.first()) {
        flair = set2.getString("get_flair_class");
    }
    set2.close();

    if (flair == null) {
        LOG.debug("No flair matching criteria for user.");
        return;
    }

    String tradeCount = "";
    if (doTextFlair) {
        tradeCount = "&text=" + Integer.toString(trades) + " trade" + ((trades != 1) ? "s" : "");
    }
    LOG.debug("Posting flair...");
    String post = "uh=" + config.getBotUser().getModhash() + "&name=" + user + "&r=" + subreddit
            + "&css_class=" + flair + tradeCount;
    LOG.debug(post);
    Utils.post(post, new URL("http://www.reddit.com/api/flair"), config.getBotUser().getCookie());
    LOG.debug("Flair posted.");
}
From source file:com.redoute.datamap.dao.impl.DatamapDAO.java
@Override
public Integer getNumberOfDatamapPerCrtiteria(String searchTerm, String inds) {
    Integer result = 0;
    StringBuilder query = new StringBuilder();
    StringBuilder gSearch = new StringBuilder();
    String searchSQL = "";

    query.append("SELECT count(*) FROM datamap");

    gSearch.append(" where (`id` like '%");
    gSearch.append(searchTerm);
    gSearch.append("%'");
    gSearch.append(" or `page` like '%");
    gSearch.append(searchTerm);
    gSearch.append("%'");
    gSearch.append(" or `application` like '%");
    gSearch.append(searchTerm);
    gSearch.append("%'");
    gSearch.append(" or `stream` like '%");
    gSearch.append(searchTerm);
    gSearch.append("%'");
    gSearch.append(" or `implemented` like '%");
    gSearch.append(searchTerm);
    gSearch.append("%'");
    gSearch.append(" or `zone` like '%");
    gSearch.append(searchTerm);
    gSearch.append("%'");
    gSearch.append(" or `picture` like '%");
    gSearch.append(searchTerm);
    gSearch.append("%'");
    gSearch.append(" or `comment` like '%");
    gSearch.append(searchTerm);
    gSearch.append("%'");
    gSearch.append(" or `locationTYpe` like '%");
    gSearch.append(searchTerm);
    gSearch.append("%'");
    gSearch.append(" or `locationValue` like '%");
    gSearch.append(searchTerm);
    gSearch.append("%')");

    if (!searchTerm.equals("") && !inds.equals("")) {
        searchSQL = gSearch.toString() + " and " + inds;
    } else if (!inds.equals("")) {
        searchSQL = " where " + inds;
    } else if (!searchTerm.equals("")) {
        searchSQL = gSearch.toString();
    }
    query.append(searchSQL);

    Connection connection = this.databaseSpring.connect();
    try {
        PreparedStatement preStat = connection.prepareStatement(query.toString());
        try {
            ResultSet resultSet = preStat.executeQuery();
            try {
                if (resultSet.first()) {
                    result = resultSet.getInt(1);
                }
            } catch (SQLException exception) {
                Logger.log(DatamapDAO.class.getName(), Level.ERROR, exception.toString());
            } finally {
                resultSet.close();
            }
        } catch (SQLException exception) {
            Logger.log(DatamapDAO.class.getName(), Level.ERROR, exception.toString());
        } finally {
            preStat.close();
        }
    } catch (SQLException exception) {
        Logger.log(DatamapDAO.class.getName(), Level.ERROR, exception.toString());
    } finally {
        try {
            if (connection != null) {
                connection.close();
            }
        } catch (SQLException e) {
            Logger.log(DatamapDAO.class.getName(), Level.ERROR, e.toString());
        }
    }
    return result;
}
From source file:org.wso2.carbon.ml.model.internal.DatabaseHandler.java
/**
 * This method returns machine learning model summary
 *
 * @param modelID Model ID
 * @return Model summary
 * @throws DatabaseHandlerException
 */
public ModelSummary getModelSummary(String modelID) throws DatabaseHandlerException {
    Connection connection = null;
    ResultSet result = null;
    PreparedStatement getStatement = null;
    try {
        connection = dataSource.getConnection();
        connection.setAutoCommit(false);
        getStatement = connection.prepareStatement(SQLQueries.GET_MODEL_SUMMARY);
        getStatement.setString(1, modelID);
        result = getStatement.executeQuery();
        if (result.first()) {
            return (ModelSummary) result.getObject(1);
        } else {
            throw new DatabaseHandlerException("Invalid model ID: " + modelID);
        }
    } catch (SQLException e) {
        throw new DatabaseHandlerException("An error occured while reading model summary for " + modelID
                + " from the database: " + e.getMessage(), e);
    } finally {
        // enable auto commit
        MLDatabaseUtils.enableAutoCommit(connection);
        // Close the database resources.
        MLDatabaseUtils.closeDatabaseResources(connection, getStatement, result);
    }
}
From source file:org.wso2.carbon.ml.project.mgt.DatabaseHandler.java
/**
 * Returns the ID of the data-set associated with the project.
 *
 * @param projectId Unique identifier for the project.
 * @return ID of the data-set associated with the project.
 * @throws DatabaseHandlerException
 */
public String getdatasetID(String projectId) throws DatabaseHandlerException {
    Connection connection = null;
    PreparedStatement getDatasetID = null;
    ResultSet result = null;
    try {
        connection = dataSource.getConnection();
        connection.setAutoCommit(true);
        getDatasetID = connection.prepareStatement(SQLQueries.GET_DATASET_ID);
        getDatasetID.setString(1, projectId);
        result = getDatasetID.executeQuery();
        result.first();
        return result.getObject(1).toString();
    } catch (SQLException e) {
        MLDatabaseUtil.rollBack(connection);
        throw new DatabaseHandlerException(
                "Error occured while retrieving the Dataset Id of project " + projectId + ": "
                        + e.getMessage(), e);
    } finally {
        // close the database resources
        MLDatabaseUtil.closeDatabaseResources(connection, getDatasetID, result);
    }
}
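Unlike the earlier examples, this one ignores the boolean returned by first(), so an empty result only surfaces later as an SQLException from getObject(1). The sketch below is a hedged, guarded variant rather than the project's actual code; the class name, query parameter, and error message are illustrative.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

class DatasetIdLookup {
    /** Guarded single-row lookup: check first()'s return value before reading. */
    static String getDatasetId(Connection connection, String query, String projectId) throws SQLException {
        try (PreparedStatement statement = connection.prepareStatement(query,
                ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
            statement.setString(1, projectId);
            try (ResultSet result = statement.executeQuery()) {
                if (!result.first()) {  // first() returns false for an empty result set
                    throw new SQLException("No dataset associated with project: " + projectId);
                }
                return result.getObject(1).toString();
            }
        }
    }
}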