List of usage examples for java.sql ResultSet isClosed
boolean isClosed() throws SQLException;
Retrieves whether this ResultSet object has been closed. A ResultSet is closed if the method close has been called on it, or if it is automatically closed.
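Before the project-specific examples below, here is a minimal, self-contained sketch of the usual guard pattern around isClosed(): close a ResultSet only when it is still open, and treat a closed result set as exhausted. The class name, the in-memory H2 JDBC URL, and the SELECT 1 query are placeholders chosen only so the sketch runs on its own (with an H2 driver on the classpath); they are not taken from any of the projects below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class IsClosedDemo {

    // Close a result set only if it is still open; calling safeClose()
    // more than once is then clearly a no-op.
    static void safeClose(ResultSet rs) throws SQLException {
        if (rs != null && !rs.isClosed()) {
            rs.close();
        }
    }

    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement()) {
            ResultSet rs = stmt.executeQuery("SELECT 1");
            System.out.println(rs.isClosed()); // false: the cursor is open
            while (rs.next()) {
                System.out.println(rs.getInt(1));
            }
            safeClose(rs);
            System.out.println(rs.isClosed()); // true: close() has been called
            safeClose(rs);                     // safe to call again
        }
    }
}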
From source file:de.tudarmstadt.lt.nlkg.DT.java

public Entry get(String word, int max_dt_words) {
    try {
        String query = String.format(
                "SELECT word2,count FROM `dt` WHERE word1 LIKE '%s' ORDER BY count DESC LIMIT %d;",
                word, max_dt_words + 1);
        final Connection c = connect();
        final Statement s = c.createStatement();
        final ResultSet r = s.executeQuery(query);
        if (!r.next())
            return Entry.EMPTY;
        Entry result = new Entry() {
            {
                word = new Word() {
                    {
                        word = r.getString(1);
                        significance = r.getDouble(2);
                    }
                };
                dtwords = new Iterator<Word>() {
                    @Override
                    public boolean hasNext() {
                        try {
                            // A closed result set means iteration already finished.
                            if (r.isClosed())
                                return false;
                            if (r.next())
                                return true;
                            r.close();
                            s.close();
                            return false;
                        } catch (SQLException e) {
                            e.printStackTrace();
                            return false;
                        }
                    }

                    @Override
                    public Word next() {
                        try {
                            return new Word() {
                                {
                                    word = r.getString(1);
                                    significance = r.getDouble(2);
                                }
                            };
                        } catch (SQLException e) {
                            e.printStackTrace();
                            return Word.EMPTY;
                        }
                    }

                    @Override
                    public void remove() {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    protected void finalize() throws Throwable {
                        // Release both the result set and its statement if the
                        // iterator is collected before being fully consumed.
                        r.close();
                        s.close();
                        super.finalize();
                    }
                };
            }
        };
        return result;
    } catch (Exception e) {
        e.printStackTrace();
        return Entry.EMPTY;
    }
}
From source file:org.executequery.gui.resultset.ResultSetTableModel.java
private boolean isOpenAndValid(ResultSet resultSet) {
    try {
        return (resultSet != null && !resultSet.isClosed());
    } catch (SQLException e) {
        Log.debug("Error checking if result set is open and valid - " + e.getMessage());
        return false;
    }
}
From source file:com.googlecode.fascinator.portal.services.impl.DatabaseServicesImpl.java
/**
 * Parse the results of the query into a basic Java data structure. Users
 * wanting the original result set should call getResultSet() directly
 * against the prepared statement.
 *
 * @param sql The prepared statement to get the results from.
 * @return List<Map<String, String>> A list of result rows as key/value
 *         pairs in HashMaps
 * @throws Exception if there is an error.
 */
@Override
public List<Map<String, String>> getResults(PreparedStatement sql) throws Exception {
    // Prepare variables
    List<Map<String, String>> response = new ArrayList<Map<String, String>>();
    ResultSet results = null;
    ResultSetMetaData columns = null;

    try {
        // Run the search
        results = sql.executeQuery();

        // Process the results
        columns = results.getMetaData();
        if (results.isClosed()) {
            log.error("!!! ResultSet is closed");
            return response;
        }
        while (results.next()) {
            Map<String, String> row = new HashMap<String, String>();
            for (int i = 1; i <= columns.getColumnCount(); i++) {
                // log.debug("getResults(): Storing '{}' ({}) => " +
                //         results.getString(i), columns.getColumnName(i),
                //         columns.getColumnLabel(i));
                row.put(columns.getColumnName(i), results.getString(i));
            }
            response.add(row);
        }

        // Finish up
        results.close();
        return response;
    } catch (SQLException ex) {
        throw new Exception("Error executing query:", ex);
    }
}
From source file:com.datatorrent.lib.db.jdbc.AbstractJdbcPollInputOperator.java
/**
 * Replays the tuples in sync mode for replayed windows
 */
public void emitReplayedTuples(PreparedStatement ps) {
    ResultSet rs = null;
    try (PreparedStatement pStat = ps;) {
        pStat.setFetchSize(getFetchSize());
        rs = pStat.executeQuery();
        if (rs == null || rs.isClosed()) {
            return;
        }
        while (rs.next()) {
            emitTuple(getTuple(rs));
            lastEmittedRow++;
        }
    } catch (SQLException ex) {
        throw new RuntimeException(ex);
    }
}
From source file:com.sqewd.open.dal.core.persistence.db.AbstractDbPersister.java
private ResultSet select(final String query, final List<KeyValuePair<Class<?>>> types, final int limit,
        final Connection conn) throws Exception {
    NativeJoinGraph jg = new NativeJoinGraph(types, query);

    // Make sure the type for the class is available.
    SQLQuery parser = new SQLQuery(jg);
    String selectsql = parser.parse("", limit);
    log.debug("SELECT SQL [" + selectsql + "]");

    Statement stmnt = conn.createStatement();
    LocalResultSet entities = new LocalResultSet();
    try {
        log.debug("SELECT SQL [" + selectsql + "]");
        ResultSet rs = stmnt.executeQuery(selectsql);
        try {
            entities.create(key, rs);
        } finally {
            if (rs != null && !rs.isClosed()) {
                rs.close();
            }
        }
        return entities;
    } finally {
        if (stmnt != null && !stmnt.isClosed()) {
            stmnt.close();
        }
    }
}
From source file:edu.uga.cs.fluxbuster.features.FeatureCalculator.java
/**
 * Retrieves the number of dns queries per domain for each cluster
 * generated on a specific run date.
 *
 * @param log_date the run date
 * @return a table of values where the keys are cluster ids and the values
 *         are the queries per domain value
 * @throws SQLException if there is an error retrieving the queries
 *         per domain values
 */
private Hashtable<Integer, Double> getQueriesPerDomain(Date log_date) throws SQLException {
    Hashtable<Integer, Double> retval = new Hashtable<Integer, Double>();
    StringBuffer querybuf = new StringBuffer();
    Formatter formatter = new Formatter(querybuf);
    formatter.format(properties.getProperty(PREVCLUSTER_QUERY3KEY), df.format(log_date));
    ResultSet rs = null;
    try {
        rs = dbi.executeQueryWithResult(querybuf.toString());
        while (rs.next()) {
            retval.put(rs.getInt(1), rs.getDouble(2));
        }
    } catch (Exception e) {
        if (log.isErrorEnabled()) {
            log.error(e);
        }
    } finally {
        if (rs != null && !rs.isClosed()) {
            rs.close();
        }
        formatter.close();
    }
    return retval;
}
From source file:edu.uga.cs.fluxbuster.features.FeatureCalculator.java
/**
 * Calculates the domains per network feature for each cluster generated
 * on a specific run date.
 *
 * @param log_date the run date
 * @param window the number of days previous to use in feature calculation
 * @return a table of values where the keys are cluster ids and the values
 *         are the feature values
 * @throws SQLException if there is an error calculating the feature values
 */
public Map<Integer, Double> calculateDomainsPerNetwork(Date log_date, int window) throws SQLException {
    HashMap<Integer, Double> retval = new HashMap<Integer, Double>();
    ArrayList<Date> prevDates = getPrevDates(log_date, window);

    if (prevDates.size() > 0) {
        String logDateStr = df.format(log_date);
        StringBuffer add_query = new StringBuffer();
        Formatter formatter = new Formatter(add_query);
        for (Date prevDate : prevDates) {
            String prevDateStr = df.format(prevDate);
            formatter.format(" " + properties.getProperty(DOMAINSPERNETWORK_QUERY1KEY) + " ",
                    logDateStr, prevDateStr, prevDateStr);
        }
        formatter.close();

        StringBuffer querybuf = new StringBuffer();
        formatter = new Formatter(querybuf);
        formatter.format(properties.getProperty(DOMAINSPERNETWORK_QUERY2KEY), logDateStr, logDateStr,
                logDateStr, add_query.toString());
        ResultSet rs = null;
        try {
            rs = dbi.executeQueryWithResult(querybuf.toString());
            while (rs.next()) {
                retval.put(rs.getInt(1), rs.getDouble(2));
            }
        } catch (Exception e) {
            if (log.isErrorEnabled()) {
                log.error(e);
            }
        } finally {
            if (rs != null && !rs.isClosed()) {
                rs.close();
            }
            formatter.close();
        }
    }
    return retval;
}
From source file:edu.uga.cs.fluxbuster.features.FeatureCalculator.java
/**
 * Gets run dates previous to a specific date within a window
 * of days from that date.
 *
 * @param log_date the run date
 * @param window the number of days previous to the current date
 * @return the list of previous run dates
 * @throws SQLException if there is an error retrieving the previous
 *         run dates
 */
public ArrayList<Date> getPrevDates(Date log_date, int window) throws SQLException {
    ArrayList<Date> prevDates = new ArrayList<Date>();
    if (prevDateBufDate != null && prevDateBuf != null && prevDateBufDate.equals(log_date)
            && prevDateBufWindow >= window) {
        // pull the dates within the day window from the prevDateBuf cache
        Date pd = null;
        int windowcount = 0;
        for (Date d : prevDateBuf) {
            if (windowcount >= window) {
                break;
            }
            if (pd == null) {
                pd = d;
                windowcount++;
            } else {
                DateTime morerecent = new DateTime(d.getTime());
                DateTime lessrecent = new DateTime(pd.getTime());
                Days days = Days.daysBetween(morerecent, lessrecent);
                windowcount += days.getDays();
                pd = d;
            }
            prevDates.add(d);
        }
    } else {
        String domainsprefix = properties.getProperty(DOMAINSPREFIXKEY);
        String resipsprefix = properties.getProperty(RESIPSPREFIXKEY);
        ArrayList<String> tablenames = new ArrayList<String>();
        ResultSet rs1 = null;
        try {
            rs1 = dbi.executeQueryWithResult(properties.getProperty(TABLES_QUERY1KEY));
            while (rs1.next()) {
                tablenames.add(rs1.getString(1));
            }
        } catch (Exception e) {
            if (log.isErrorEnabled()) {
                log.error(e);
            }
        } finally {
            if (rs1 != null && !rs1.isClosed()) {
                rs1.close();
            }
        }

        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(log_date);
        for (int i = 0; i < window; i++) {
            cal.roll(Calendar.DAY_OF_YEAR, false);
            Date temp = cal.getTime();
            String datestr = df.format(temp);
            if (tablenames.contains(domainsprefix + "_" + datestr)
                    && tablenames.contains(resipsprefix + "_" + datestr)) {
                prevDates.add(temp);
            }
        }

        // cache the values for later
        if (prevDateBuf == null) {
            prevDateBuf = new ArrayList<Date>();
        } else {
            prevDateBuf.clear();
        }
        prevDateBuf.addAll(prevDates);
        prevDateBufDate = log_date;
        prevDateBufWindow = window;
    }
    return prevDates;
}
From source file:edu.uga.cs.fluxbuster.features.FeatureCalculator.java
/**
 * Calculates the previous cluster ratio feature for each cluster generated
 * on a specific run date and within a specific window.
 *
 * @param log_date the run date
 * @param window the number of days previous to use in feature calculation
 * @return a table of results, the keys of the table are cluster ids and the
 *         values are lists of two elements. The first element is the
 *         last_growth_ratio_prev_clusters value and the second element is the
 *         last_growth_prefix_ratio_prev_clusters value
 * @throws SQLException if there is an error calculating the feature
 */
public Hashtable<Integer, List<Double>> calculatePrevClusterRatios(Date log_date, int window)
        throws SQLException {
    Hashtable<Integer, List<Double>> retval = new Hashtable<Integer, List<Double>>();
    ArrayList<Date> prevDates = getPrevDates(log_date, window);
    String query1 = properties.getProperty(PREVCLUSTER_QUERY1KEY);
    String query2 = properties.getProperty(PREVCLUSTER_QUERY2KEY);
    String logDateStr = df.format(log_date);
    String completequery = new String();

    StringBuffer addQueryBuff = new StringBuffer();
    for (int i = 0; i < prevDates.size(); i++) {
        String prevDateStr = df.format(prevDates.get(i));
        StringBuffer querybuf = new StringBuffer();
        Formatter formatter = new Formatter(querybuf);
        formatter.format(query1, logDateStr, logDateStr, prevDateStr, prevDateStr, prevDateStr);
        addQueryBuff.append(querybuf.toString());
        if (i < prevDates.size() - 1) {
            addQueryBuff.append(" UNION ");
        }
        formatter.close();
    }

    if (addQueryBuff.length() > 0) {
        StringBuffer querybuf = new StringBuffer();
        Formatter formatter = new Formatter(querybuf);
        formatter.format(query2, logDateStr, logDateStr, addQueryBuff.toString());
        completequery = querybuf.toString();
        formatter.close();
    }

    if (completequery.length() > 0) {
        ResultSet rs = null;
        try {
            rs = dbi.executeQueryWithResult(completequery);
            while (rs.next()) {
                ArrayList<Double> temp = new ArrayList<Double>();
                temp.add(rs.getDouble(3));
                temp.add(rs.getDouble(4));
                retval.put(rs.getInt(1), temp);
            }
        } catch (Exception e) {
            if (log.isErrorEnabled()) {
                log.error(e);
            }
        } finally {
            if (rs != null && !rs.isClosed()) {
                rs.close();
            }
        }

        Hashtable<Integer, Double> queryPerDomain = getQueriesPerDomain(log_date);
        for (Integer clusterid : retval.keySet()) {
            List<Double> values = retval.get(clusterid);
            values.set(0, values.get(0) / queryPerDomain.get(clusterid));
            values.set(1, values.get(1) / queryPerDomain.get(clusterid));
        }
    }
    return retval;
}
From source file:com.openddal.test.BaseTestCase.java
public void close(Connection connection, Statement statement, ResultSet rs) {
    if (rs != null) {
        try {
            if (!rs.isClosed())
                rs.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
    if (statement != null) {
        try {
            if (!statement.isClosed())
                statement.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
    if (connection != null) {
        try {
            if (!connection.isClosed())
                connection.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
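The helper above closes each resource manually, in reverse order of creation, using isClosed() to skip resources that are already closed. On Java 7 and later the same cleanup is often written with try-with-resources, which closes the ResultSet, Statement, and Connection automatically even when an exception is thrown. The sketch below is an illustrative alternative rather than code from any of the projects above; the class name, JDBC URL, and query are placeholders.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class TryWithResourcesSketch {

    public static void main(String[] args) throws SQLException {
        // Placeholder in-memory H2 URL; any JDBC driver on the classpath will do.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT 1")) {
            while (rs.next()) {
                System.out.println(rs.getInt(1));
            }
            // Resources are closed automatically in reverse declaration order,
            // so no explicit isClosed()/close() bookkeeping is needed here.
        }
    }
}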