List of usage examples for java.sql SQLException toString
public String toString()
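SQLException.toString() is inherited from Throwable and returns the exception's class name together with its message; the source listings below typically use it either to log a failure or to carry the original error text into a wrapping exception. As a minimal, self-contained sketch of both patterns (the JDBC URL and the table name are placeholders, not taken from any of the examples that follow):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class SQLExceptionToStringDemo {
    public static void main(String[] args) {
        // Placeholder settings; substitute a real JDBC URL, user, and password.
        String url = "jdbc:h2:mem:demo";
        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement()) {
            stmt.execute("SELECT * FROM no_such_table");
        } catch (SQLException e) {
            // Pattern 1: log the compact string form (exception class name plus message).
            System.err.println("Query failed: " + e.toString());
            // Pattern 2: carry the original text into another exception, as several examples below do.
            throw new RuntimeException(e.toString(), e);
        }
    }
}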
From source file:com.mysql.stresstool.RunnableQueryDelete.java
public void run() {
    if (doDelete) {
        Connection conn = null;
        try {
            if (jdbcUrlMap.get("dbType") != null && !((String) jdbcUrlMap.get("dbType")).equals("MySQL")) {
                conn = DriverManager.getConnection((String) jdbcUrlMap.get("dbType"), "test", "test");
            } else
                conn = DriverManager.getConnection((String) jdbcUrlMap.get("jdbcUrl"));
        } catch (SQLException ex) {
            ex.printStackTrace();
        }
        if (conn != null) {
            try {
                Statement stmt = null;
                ResultSet rs = null;
                conn.setAutoCommit(false);
                {
                    SoftReference sf = new SoftReference(conn.createStatement());
                    stmt = (Statement) sf.get();
                }
                // stmt2 = conn.createStatement();
                stmt.execute("SET AUTOCOMMIT=0");
                long execTime = 0;
                int pkStart = 0;
                int pkEnds = 0;
                ThreadInfo thInfo;
                long threadTimeStart = System.currentTimeMillis();
                active = true;
                thInfo = new ThreadInfo();
                thInfo.setId(this.ID);
                thInfo.setType("delete");
                thInfo.setStatusActive(this.isActive());
                StressTool.setInfoDelete(this.ID, thInfo);
                int deletedRows = 0;
                int[] pkStartAr = null;
                int[] pkEndsAr = null;
                String[][] sqlParameterValues;
                int[] iLine = { 0, 0 };
                // for (int repeat = 0; repeat < repeatNumber; repeat++) {
                //     pkEndsAr[repeat] = StressTool.getNumberFromRandom(2147483647).intValue();
                //     pkStartAr[repeat] = StressTool.getNumberFromRandom(pkEndsAr[repeat] - 10).intValue();
                //
                // }
                for (int repeat = 0; repeat < repeatNumber; repeat++) {
                    int maxDel = 0;
                    totalLineDeleted = 0;
                    // pkStart = pkStartAr[repeat];
                    // pkEnds = pkEndsAr[repeat];
                    // System.gc();
                    String deleteCheck1 = "";
                    long timeStart = System.currentTimeMillis();
                    try {
                        stmt.execute("BEGIN");
                        for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
                            ResultSet rsToDelete = stmt.executeQuery("Select max(a),min(a) from tbtest" + iTable);
                            rsToDelete.next();
                            DecimalFormat df = new DecimalFormat("#.000000");
                            long maxDelete = rsToDelete.getLong(1);
                            long minDelete = rsToDelete.getLong(2);
                            long maxToDelete = new Double(
                                    ((double) this.getDeleterowmaxpct() * maxDelete) / 100).longValue();
                            PreparedStatement pstmt = null;
                            {
                                SoftReference sf = new SoftReference(conn.prepareStatement(
                                        "DELETE FROM tbtest" + iTable + " where a between ? and ?"));
                                pstmt = (PreparedStatement) sf.get();
                            }
                            int deleted = 0;
                            if (maxDelete > 0) {
                                for (long iCdelete = minDelete; iCdelete < maxToDelete; iCdelete += getDeleterowsinterval()) {
                                    pstmt.setLong(1, iCdelete);
                                    pstmt.setLong(2, iCdelete += getDeleterowsinterval());
                                    int rows = pstmt.executeUpdate();
                                    if (rows > 0)
                                        deleted += rows;
                                    if (deleted >= maxToDelete) {
                                        totalLineDeleted += deleted;
                                        break;
                                    }
                                    stmt.execute("COMMIT");
                                }
                            }
                            stmt.execute("COMMIT");
                        }
                        if (!doSimplePk) {
                            for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
                                ResultSet rsToDelete = stmt
                                        .executeQuery("Select max(a),min(a) from tbtest_child" + iTable);
                                rsToDelete.next();
                                DecimalFormat df = new DecimalFormat("#.000000");
                                long maxDelete = rsToDelete.getLong(1);
                                long minDelete = rsToDelete.getLong(2);
                                long maxToDelete = new Double(
                                        ((double) this.getDeleterowmaxpct() * maxDelete) / 100).longValue();
                                PreparedStatement pstmt = conn.prepareStatement(
                                        "DELETE FROM tbtest_child" + iTable + " where a between ? and ?");
                                int deleted = 0;
                                if (maxDelete > 0) {
                                    for (long iCdelete = minDelete; iCdelete < maxToDelete; iCdelete += getDeleterowsinterval()) {
                                        pstmt.setLong(1, iCdelete);
                                        pstmt.setLong(2, iCdelete += getDeleterowsinterval());
                                        int rows = pstmt.executeUpdate();
                                        if (rows > 0)
                                            deleted += rows;
                                        if (deleted >= maxToDelete) {
                                            totalLineDeleted += deleted;
                                            break;
                                        }
                                        stmt.execute("COMMIT");
                                    }
                                }
                                stmt.execute("COMMIT");
                            }
                        }
                        long timeEnds = System.currentTimeMillis();
                        execTime = (timeEnds - timeStart);
                    } catch (SQLException sqle) {
                        conn.rollback();
                        // System.out.println("Query Delete1 = " + deleteCheck1);
                        /** Silently skip any deadlock **/
                        if (StressTool.getErrorLogHandler() != null) {
                            StressTool.getErrorLogHandler().appendToFile(sqle.toString());
                        }
                        // sqle.printStackTrace();
                    } finally {
                    }
                    if (doLog) {
                        System.out.println("Query Delete TH = " + this.getID() + " Id = " + pkStart
                                + " IdEnd = " + pkEnds + " " + "Deleted lines " + (totalLineDeleted)
                                + " Exec Time(ms) =" + execTime);
                    }
                    thInfo.setExecutedLoops(repeat);
                    Thread.sleep(sleepFor);
                }
                stmt.close();
                // stmt2.close();
                conn.close();
                long threadTimeEnd = System.currentTimeMillis();
                this.executionTime = (threadTimeEnd - threadTimeStart);
                this.setExecutionTime(executionTime);
                active = false;
                // System.out.println("Query Delete TH = " + this.getID() + " Id = " + pkStart +
                //         " IdEnd = " + pkEnds + " " + "Deleted lines " +
                //         deletedRows + " Exec Time(ms) =" + execTime + " Sec =" + (execTime / 1000));
                thInfo.setExecutionTime(executionTime);
                thInfo.setStatusActive(false);
                StressTool.setInfoDelete(this.ID, thInfo);
                return;
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }
    }
}
From source file:org.apache.hive.jdbc.HiveStatement.java
TGetOperationStatusResp waitForOperationToComplete() throws SQLException {
    TGetOperationStatusReq statusReq = new TGetOperationStatusReq(stmtHandle);
    boolean shouldGetProgressUpdate = inPlaceUpdateStream != InPlaceUpdateStream.NO_OP;
    statusReq.setGetProgressUpdate(shouldGetProgressUpdate);
    if (!shouldGetProgressUpdate) {
        /**
         * progress bar is completed if there is nothing we want to request in the first place.
         */
        inPlaceUpdateStream.getEventNotifier().progressBarCompleted();
    }
    TGetOperationStatusResp statusResp = null;
    // Poll on the operation status, till the operation is complete
    while (!isOperationComplete) {
        try {
            /**
             * For an async SQLOperation, GetOperationStatus will use the long polling approach. It will
             * essentially return after the HIVE_SERVER2_LONG_POLLING_TIMEOUT (a server config) expires.
             */
            statusResp = client.GetOperationStatus(statusReq);
            inPlaceUpdateStream.update(statusResp.getProgressUpdateResponse());
            Utils.verifySuccessWithInfo(statusResp.getStatus());
            if (statusResp.isSetOperationState()) {
                switch (statusResp.getOperationState()) {
                case CLOSED_STATE:
                case FINISHED_STATE:
                    isOperationComplete = true;
                    isLogBeingGenerated = false;
                    break;
                case CANCELED_STATE:
                    // 01000 -> warning
                    throw new SQLException("Query was cancelled", "01000");
                case TIMEDOUT_STATE:
                    throw new SQLTimeoutException("Query timed out after " + queryTimeout + " seconds");
                case ERROR_STATE:
                    // Get the error details from the underlying exception
                    throw new SQLException(statusResp.getErrorMessage(), statusResp.getSqlState(),
                            statusResp.getErrorCode());
                case UKNOWN_STATE:
                    throw new SQLException("Unknown query", "HY000");
                case INITIALIZED_STATE:
                case PENDING_STATE:
                case RUNNING_STATE:
                    break;
                }
            }
        } catch (SQLException e) {
            isLogBeingGenerated = false;
            throw e;
        } catch (Exception e) {
            isLogBeingGenerated = false;
            throw new SQLException(e.toString(), "08S01", e);
        }
    }
    /* we set progress bar to be completed when hive query execution has completed */
    inPlaceUpdateStream.getEventNotifier().progressBarCompleted();
    return statusResp;
}
From source file:edu.isi.pfindr.servlets.QueryServlet.java
/**
 * Set the query from a bookmark.
 *
 * @param request
 *            the servlet request
 * @param conn
 *            the DB connection
 */
private void setBookmarkQuery(HttpServletRequest request, Connection conn) throws ServletException {
    HttpSession session = request.getSession(false);
    try {
        String bookmark = request.getParameter("qid");
        String sqlQuery = "SELECT description FROM bookmarks WHERE userid = ? AND name = ?";
        PreparedStatement stmt = conn.prepareStatement(sqlQuery);
        stmt.setString(1, (String) session.getAttribute("user"));
        stmt.setString(2, bookmark);
        JSONArray rows = Utils.executeSQL(stmt);
        stmt.close();
        String description = ((JSONArray) rows.get(0)).getString(0);
        logger.info("Description " + description);
        JSONObject json = new JSONObject(((JSONArray) rows.get(0)).getString(0));
        session.setAttribute("query", json);
    } catch (SQLException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        throw new ServletException(e.toString());
    } catch (JSONException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
From source file:edu.isi.pfindr.servlets.QueryServlet.java
/**
 * Bookmark a query.
 *
 * @param request
 *            the servlet request
 * @param conn
 *            the DB connection
 * @return the bookmark
 */
private JSONObject bookmarkQuery(HttpServletRequest request, Connection conn) throws ServletException {
    JSONObject res = new JSONObject();
    HttpSession session = request.getSession(false);
    try {
        String bookmark = request.getParameter("bookmark");
        JSONObject json = new JSONObject(request.getParameter("sql"));
        json.put("template", request.getParameter("template"));
        String sqlQuery = "INSERT INTO bookmarks(name, description, userid) VALUES(?, ?, ?)";
        PreparedStatement stmt = conn.prepareStatement(sqlQuery);
        stmt.setString(1, bookmark);
        stmt.setString(2, json.toString());
        stmt.setString(3, (String) session.getAttribute("user"));
        stmt.executeUpdate();
        stmt.close();
        res.put("bookmark", bookmark);
    } catch (SQLException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        throw new ServletException(e.toString());
    } catch (JSONException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return res;
}
From source file:org.apache.hadoop.hive.jdbc.TestJdbcDriver.java
private void doTestSelectAll(String tableName, int maxRows, int fetchSize) throws Exception {
    boolean isPartitionTable = tableName.equals(partitionedTableName);
    Statement stmt = con.createStatement();
    if (maxRows >= 0) {
        stmt.setMaxRows(maxRows);
    }
    if (fetchSize > 0) {
        stmt.setFetchSize(fetchSize);
        assertEquals(fetchSize, stmt.getFetchSize());
    }
    // JDBC says that 0 means return all, which is the default
    int expectedMaxRows = maxRows < 1 ? 0 : maxRows;
    assertNotNull("Statement is null", stmt);
    assertEquals("Statement max rows not as expected", expectedMaxRows, stmt.getMaxRows());
    assertFalse("Statement should not be closed", stmt.isClosed());

    ResultSet res;
    // run some queries
    res = stmt.executeQuery("select * from " + tableName);
    assertNotNull("ResultSet is null", res);
    assertTrue("getResultSet() not returning expected ResultSet", res == stmt.getResultSet());
    assertEquals("get update count not as expected", 0, stmt.getUpdateCount());
    int i = 0;

    ResultSetMetaData meta = res.getMetaData();
    int expectedColCount = isPartitionTable ? 3 : 2;
    assertEquals("Unexpected column count", expectedColCount, meta.getColumnCount());

    String colQualifier = ((tableName != null) && !tableName.isEmpty()) ? tableName.toLowerCase() + "." : "";
    boolean moreRow = res.next();
    while (moreRow) {
        try {
            i++;
            assertEquals(res.getInt(1), res.getInt(colQualifier + "under_col"));
            assertEquals(res.getString(1), res.getString(colQualifier + "under_col"));
            assertEquals(res.getString(2), res.getString(colQualifier + "value"));
            if (isPartitionTable) {
                assertEquals(res.getString(3), partitionedColumnValue);
                assertEquals(res.getString(3), res.getString(colQualifier + partitionedColumnName));
            }
            assertFalse("Last result value was not null", res.wasNull());
            assertNull("No warnings should be found on ResultSet", res.getWarnings());
            res.clearWarnings(); // verifying that method is supported
            // System.out.println(res.getString(1) + " " + res.getString(2));
            assertEquals("getInt and getString don't align for the same result value",
                    String.valueOf(res.getInt(1)), res.getString(1));
            assertEquals("Unexpected result found", "val_" + res.getString(1), res.getString(2));
            moreRow = res.next();
        } catch (SQLException e) {
            System.out.println(e.toString());
            e.printStackTrace();
            throw new Exception(e.toString());
        }
    }
    // supposed to get 500 rows if maxRows isn't set
    int expectedRowCount = maxRows > 0 ? maxRows : 500;
    assertEquals("Incorrect number of rows returned", expectedRowCount, i);
    // should have no more rows
    assertEquals(false, moreRow);
    assertNull("No warnings should be found on statement", stmt.getWarnings());
    stmt.clearWarnings(); // verifying that method is supported
    assertNull("No warnings should be found on connection", con.getWarnings());
    con.clearWarnings(); // verifying that method is supported
    stmt.close();
    assertTrue("Statement should be closed", stmt.isClosed());
}
From source file:com.darksky.seller.SellerServlet.java
/**
 * Delete a dish.
 *
 * @param request
 * @param response
 * @throws ServletException
 * @throws IOException
 */
public void deleteDish(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    System.out.println();
    System.out.println("-------------------delete dish-----------------");
    if (!LoginState) {
        request.getRequestDispatcher("HomeServlet?method=showShops").forward(request, response);
        return;
    }
    String dishID = request.getParameter("dishID");
    String sellerID = request.getParameter("sellerID");
    String sql = "delete from dishinfo where dishID='" + dishID + "' and sellerId = '" + sellerID + "'";
    System.out.println(sql);
    boolean Bdelete = false;
    try {
        statement.execute(sql);
        Bdelete = true;
    } catch (SQLException e) {
        System.out.println(e.toString());
    }
    if (Bdelete) {
        System.out.println("delete dish success");
        // request.setAttribute("delState", true);
    } else {
        System.out.println("delete dish fail");
        // request.setAttribute("delState", false);
    }
    System.out.println("-------------------delete dish-----------------");
    System.out.println();
    getDish(sellerID);
    request.getSession().setAttribute("dish", DishList);
    request.getRequestDispatcher("?.jsp").forward(request, response);
}
From source file:com.darksky.seller.SellerServlet.java
/**
 * Process an order.
 *
 * @param request
 * @param response
 * @throws ServletException
 * @throws IOException
 * @throws ParseException
 */
public void dealOrder(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException, ParseException {
    System.out.println();
    System.out.println("-------------------dealOrder-----------------");
    if (!LoginState) {
        request.getRequestDispatcher("HomeServlet?method=showShops").forward(request, response);
        return;
    }
    int orderID = Integer.parseInt(request.getParameter("orderID"));
    int state = Integer.parseInt(request.getParameter("state"));
    String sql = "update historyorders set state=" + state + " where orderID=" + orderID + "";
    System.out.println(sql);
    try {
        statement.execute(sql);
    } catch (SQLException e) {
        System.out.println(e.toString());
    }
    if (state == 2) {
        double totalPrice = 0;
        String customerID = null;
        int quantity = 0;
        String dishID = null;
        sql = "Select * from historyorders where OrderID=" + orderID;
        try {
            resultSet = statement.executeQuery(sql);
            while (resultSet.next()) {
                totalPrice = Double.parseDouble(resultSet.getString("totalPrice"));
                customerID = resultSet.getString("customerID");
                quantity = resultSet.getInt("quantity");
                dishID = resultSet.getString("dishID");
            }
            String sql2 = "update customer set customerMoney= customerMoney+" + totalPrice
                    + " where customerID='" + customerID + "'";
            statement.execute(sql2);
            String sql3 = "update dishinfo set dishStock= dishStock+" + quantity
                    + " where dishID='" + dishID + "'";
            statement.execute(sql3);
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    System.out.println("------------------dealOrder-----------------");
    System.out.println();
    getOrder(Seller.getSellerID());
    request.getSession().setAttribute("order", SellerOrders);
    request.getRequestDispatcher("SellerServlet?method=sellerOrder&sellerID=" + Seller.getSellerID())
            .forward(request, response);
}
From source file:edu.education.ucsb.muster.MusterServlet.java
private String testConnectivity(DatabaseDefinition db) {
    // load driver
    try {
        DriverManager.getDriver(db.url);
    } catch (SQLException e) {
        try {
            DriverManager.registerDriver(
                    (Driver) Class.forName(db.driver).getConstructor().newInstance((Object[]) null));
        } catch (Exception e1) {
            addException(e1, "A driver couldn't be loaded. Check the config file and try again. driver: `"
                    + db.driver + "`, confPath: `" + confPath + "`");
            return "FAIL";
        }
    }
    // connect and test setReadOnly
    // Add the connection to our list and try setting readOnly to test
    Connection connection = null;
    try {
        connection = DriverManager.getConnection(db.url, db.username, db.password);
        connection.setReadOnly(true);
        connection.close();
    } catch (Exception e) {
        addException(e, "Setting readonly failed on " + db.url);
        return e.toString();
    }
    return "OK";
}
From source file:edu.isi.pfindr.servlets.QueryServlet.java
/**
 * Collect distinct study metadata (gender, race, platform, study type, genetic type, diseases).
 *
 * @param request
 *            the servlet request
 * @param conn
 *            the DB connection
 * @return the metadata as a JSON object
 */
private JSONObject studyMetadata(HttpServletRequest request, Connection conn) throws ServletException {
    JSONObject res = new JSONObject();
    try {
        String sqlQuery = "select distinct sex from " + Utils.STUDIES_TABLE + " order by sex";
        PreparedStatement stmt = conn.prepareStatement(sqlQuery);
        JSONArray rows = Utils.executeSQL(stmt);
        // System.out.println(rows);
        JSONArray gender = new JSONArray();
        for (int i = 0; i < rows.length(); i++) {
            JSONArray row = rows.getJSONArray(i);
            if (!row.isNull(0)) {
                gender.put(row.getString(0));
            }
        }
        stmt.close();
        res.put("gender", gender);

        sqlQuery = "select distinct race from " + Utils.STUDIES_TABLE + " order by race";
        stmt = conn.prepareStatement(sqlQuery);
        rows = Utils.executeSQL(stmt);
        HashSet<String> raceSet = new HashSet<String>();
        for (int i = 0; i < rows.length(); i++) {
            JSONArray row = rows.getJSONArray(i);
            if (!row.isNull(0)) {
                String values = row.getString(0);
                StringTokenizer tokenizer = new StringTokenizer(values, ",");
                while (tokenizer.hasMoreTokens()) {
                    raceSet.add(tokenizer.nextToken().trim());
                }
            }
        }
        Object raceValues[] = raceSet.toArray();
        Utils.sortArray(raceValues);
        JSONArray races = new JSONArray();
        for (int i = 0; i < raceValues.length; i++) {
            races.put(raceValues[i]);
        }
        stmt.close();
        res.put("races", races);

        sqlQuery = "select distinct platform from " + Utils.STUDIES_TABLE + " order by platform";
        stmt = conn.prepareStatement(sqlQuery);
        rows = Utils.executeSQL(stmt);
        HashSet<String> platformSet = new HashSet<String>();
        for (int i = 0; i < rows.length(); i++) {
            JSONArray row = rows.getJSONArray(i);
            if (!row.isNull(0)) {
                String values = row.getString(0);
                StringTokenizer tokenizer = new StringTokenizer(values, "|");
                while (tokenizer.hasMoreTokens()) {
                    platformSet.add(tokenizer.nextToken().trim());
                }
            }
        }
        Object platformValues[] = platformSet.toArray();
        Utils.sortArray(platformValues);
        JSONArray platform = new JSONArray();
        for (int i = 0; i < platformValues.length; i++) {
            platform.put(platformValues[i]);
        }
        stmt.close();
        res.put("platform", platform);

        sqlQuery = "select distinct study_type from " + Utils.STUDIES_TABLE + " order by study_type";
        stmt = conn.prepareStatement(sqlQuery);
        rows = Utils.executeSQL(stmt);
        HashSet<String> study_typeSet = new HashSet<String>();
        for (int i = 0; i < rows.length(); i++) {
            JSONArray row = rows.getJSONArray(i);
            if (!row.isNull(0)) {
                String values = row.getString(0);
                StringTokenizer tokenizer = new StringTokenizer(values, ",");
                while (tokenizer.hasMoreTokens()) {
                    study_typeSet.add(tokenizer.nextToken().trim());
                }
            }
        }
        Object study_typeValues[] = study_typeSet.toArray();
        Utils.sortArray(study_typeValues);
        JSONArray study_type = new JSONArray();
        for (int i = 0; i < study_typeValues.length; i++) {
            study_type.put(study_typeValues[i]);
        }
        stmt.close();
        res.put("study_type", study_type);

        sqlQuery = "select distinct genetic_type from " + Utils.STUDIES_TABLE + " order by genetic_type";
        stmt = conn.prepareStatement(sqlQuery);
        rows = Utils.executeSQL(stmt);
        HashSet<String> genetic_typeSet = new HashSet<String>();
        for (int i = 0; i < rows.length(); i++) {
            JSONArray row = rows.getJSONArray(i);
            if (!row.isNull(0)) {
                String values = row.getString(0);
                StringTokenizer tokenizer = new StringTokenizer(values, ",");
                while (tokenizer.hasMoreTokens()) {
                    genetic_typeSet.add(tokenizer.nextToken().trim());
                }
            }
        }
        Object genetic_typeValues[] = genetic_typeSet.toArray();
        Utils.sortArray(genetic_typeValues);
        JSONArray genetic_type = new JSONArray();
        for (int i = 0; i < genetic_typeValues.length; i++) {
            genetic_type.put(genetic_typeValues[i]);
        }
        stmt.close();
        res.put("genetic_type", genetic_type);

        sqlQuery = "select distinct(unnest (a.da)) b from (SELECT diseases, regexp_split_to_array(diseases, ';') as da FROM "
                + Utils.STUDIES_TABLE + ") a order by b";
        stmt = conn.prepareStatement(sqlQuery);
        rows = Utils.executeSQL(stmt);
        HashSet<String> diseasesSet = new HashSet<String>();
        for (int i = 0; i < rows.length(); i++) {
            JSONArray row = rows.getJSONArray(i);
            if (!row.isNull(0)) {
                String values = row.getString(0);
                diseasesSet.add(values.trim());
            }
        }
        Object diseasesValues[] = diseasesSet.toArray();
        Utils.sortArray(diseasesValues);
        JSONArray diseases = new JSONArray();
        for (int i = 0; i < diseasesValues.length; i++) {
            diseases.put(diseasesValues[i]);
        }
        stmt.close();
        res.put("diseases", diseases);
    } catch (SQLException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        throw new ServletException(e.toString());
    } catch (JSONException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return res;
}
From source file:org.artifactory.storage.db.build.dao.BuildsDao.java
/**
 * Get Module Artifact diff total count.
 *
 * @param offset - row offset
 * @param limit - row limit
 * @return
 */
public int getModuleArtifactsForDiffCount(BuildParams buildParams, String offset, String limit) {
    ResultSet rs = null;
    try {
        Object[] diffParams = getArtifactDiffCountParam(buildParams);
        String buildQuery = getArtifactDiffCount(buildParams);
        rs = jdbcHelper.executeSelect(buildQuery, diffParams);
        if (rs.next()) {
            return rs.getInt(1);
        }
    } catch (SQLException e) {
        log.error(e.toString());
    } finally {
        DbUtils.close(rs);
    }
    return 0;
}