Example usage for java.sql SQLException toString

List of usage examples for java.sql SQLException toString

Introduction

On this page you can find example usages of java.sql SQLException.toString().

Prototype

public String toString() 

Source Link

Document

Returns a short description of this throwable.

Usage

From source file:com.boldust.general.LocalDAO.java

/**
 * Executes the prepared statement registered under the given index.
 *
 * On success the statement is removed from the {@code chmps} registry and
 * its result set is returned. On failure the error is reported to stderr
 * and {@code null} is returned (callers rely on this null-on-failure
 * contract).
 *
 * @param psIndex index of the prepared statement to execute
 * @return the query's ResultSet, or null when an SQLException occurred
 */
public synchronized ResultSet executeQuery(int psIndex) {
    try {
        ResultSet results = getPstmt(psIndex).executeQuery();
        chmps.remove(psIndex);
        return results;
    } catch (SQLException sqle) {
        // Signal failure via a null result rather than propagating.
        System.err.print("\nQuery DataBase Fail! Returning null resultset!! Error: = " + sqle.toString());
        return null;
    }
}

From source file:org.apache.sqoop.mapreduce.db.BasicRetrySQLFailureHandler.java

/**
 * Provide specific handling for the failure and return a new valid
 * connection.//from  w  ww  .  j  a  va2s .com
 */
/**
 * Provide specific handling for the failure and return a new valid
 * connection.
 *
 * Retries with a quadratically increasing back-off (0, 1x, 4x, 9x ... of
 * the base interval, clamped to {@code retryWaitMax}); the attempt made at
 * the clamped wait is the final one.
 *
 * @return a validated replacement connection
 * @throws IOException if recovery is interrupted, hits an unexpected
 *         error, or all retry attempts are exhausted
 */
public Connection recover() throws IOException {
    long nextRetryWait = 0;
    int retryAttempts = 0;
    boolean doRetry = true;
    boolean validConnection = false;
    Connection conn = null;

    do {
        validConnection = false;

        // Use increasing wait interval (quadratic back-off).
        nextRetryWait = (long) Math.pow(retryAttempts, 2) * retryWaitInterval;

        // Increase the number of retry attempts
        ++retryAttempts;

        // If we exceeded max retry attempts, try one last time with max value
        if (nextRetryWait > retryWaitMax) {
            nextRetryWait = retryWaitMax;
            doRetry = false;
        }

        try {
            // Wait before trying to recover the connection
            Thread.sleep(nextRetryWait);

            // Discard the connection
            discardConnection(conn);

            // Try to get a new connection
            conn = super.getConnection();
            if (!validateConnection(conn)) {
                // Log failure and continue
                LOG.warn("Connection not valid");
            } else {
                LOG.info("A new connection has been established");

                // Connection has been recovered so stop recovery retries
                doRetry = false;
                validConnection = true;
            }
        } catch (InterruptedException ie) {
            // Fixed: restore the interrupt flag so callers can observe the
            // interruption, then abort recovery (previously this fell into
            // the generic catch without re-interrupting).
            Thread.currentThread().interrupt();
            throw new IOException(ie);
        } catch (SQLException sqlEx) {
            // Fixed: added the missing space between message fragments.
            LOG.warn("Connection recovery attempt [" + retryAttempts + "] failed. " + "Exception details: "
                    + sqlEx.toString());
        } catch (Exception ex) {
            // Handle unexpected exceptions
            LOG.error("Failed while recovering the connection. Exception details:" + ex.toString());
            throw new IOException(ex);
        }
    } while (doRetry);

    if (!validConnection) {
        throw new IOException("Failed to recover connection after " + retryAttempts + " retries. Giving up");
    }
    return conn;
}

From source file:eionet.gdem.dcm.business.WorkqueueManager.java

/**
 * Remove the job from the queue and delete temporary files.
 *
 * @param job//from  w  ww  . ja  v a2  s  .  c o  m
 * @throws GDEMException
 */
/**
 * Remove the job from the queue and delete temporary files.
 *
 * File deletion failures are logged but never fail the call; only a
 * database error is escalated.
 *
 * @param job the finished workqueue job to remove
 * @throws DCMException if deleting the job row from the database fails
 */
public void endXQJob(WorkqueueJob job) throws DCMException {
    // remove the job from the queue / DB when the status won't change= FATAL or READY
    try {
        jobDao.endXQJob(job.getJobId());
        LOGGER.info("Delete expired job: " + job.getJobId());
    } catch (SQLException sqle) {
        throw new DCMException("Error getting XQJob data from DB: " + sqle.toString());
    }
    // delete files only, if debug is not enabled (keep them for troubleshooting)
    if (!LOGGER.isDebugEnabled()) {
        // delete the result from filesystem
        String resultFile = job.getResultFile();
        try {
            Utils.deleteFile(resultFile);
        } catch (Exception e) {
            LOGGER.error("Could not delete job result file: " + resultFile + "." + e.getMessage());
        }
        // delete XQuery file, if it is stored in tmp folder
        String xqFile = job.getScriptFile();
        try {
            // Important!!!: delete only, when the file is stored in tmp folder
            if (xqFile.startsWith(Properties.tmpFolder)) {
                Utils.deleteFile(xqFile);
            }
        } catch (Exception e) {
            // Fixed copy-paste error: this message previously said "result file"
            // even though it reports a failure to delete the script file.
            LOGGER.error("Could not delete job script file: " + xqFile + "." + e.getMessage());
        }
    }
}

From source file:net.riezebos.thoth.configuration.persistence.dbs.DDLExecuter.java

/**
 * Translates a single DDL command to the active SQL dialect and executes it.
 *
 * Blank translations are skipped. On failure the error is logged with its
 * stack trace and wrapped in a DDLException that names the statement that
 * was actually attempted.
 *
 * @param conn    the connection the statement belongs to (unused directly here)
 * @param stmt    the JDBC statement used to run the command
 * @param command the raw DDL command before dialect translation
 * @throws DDLException when executing the translated command fails
 */
protected void executeSingleStmt(Connection conn, Statement stmt, String command) throws DDLException {
    String actual = command;
    try {
        String translated = applyDialect(command);
        println(translated);
        actual = translated;
        if (StringUtils.isNotBlank(translated)) {
            stmt.execute(translated);
        }
    } catch (SQLException e) {
        LOG.error(e.getMessage(), e);
        throw new DDLException("Error executing " + actual + "\n" + e.toString());
    }
}

From source file:org.etudes.jforum.dao.generic.GenericTopicMarkTimeDAO.java

/**
 * @see org.etudes.jforum.dao.TopicMarkTimeDAO#addMarkTime(int, int, Date, boolean)
 *//*from w  w w .  j av  a  2 s . c om*/
/**
 * Records the time at which a user read a topic.
 *
 * Inserts a mark-time row for the (topic, user) pair. A duplicate-key
 * error is tolerated (the mark already exists); any other SQLException is
 * rethrown. The statement is always closed.
 *
 * @see org.etudes.jforum.dao.TopicMarkTimeDAO#addMarkTime(int, int, Date, boolean)
 */
public void addMarkTime(int topicId, int userId, Date markTime, boolean isRead) throws Exception {
    PreparedStatement stmt = JForum.getConnection()
            .prepareStatement(SystemGlobals.getSql("TopicMarkModel.addMarkTime"));

    try {
        stmt.setInt(1, topicId);
        stmt.setInt(2, userId);
        stmt.setTimestamp(3, new java.sql.Timestamp(markTime.getTime()));
        // Column 4 stores the "unread" flag: 0 when read, 1 when unread.
        stmt.setInt(4, isRead ? 0 : 1);

        stmt.executeUpdate();
    } catch (SQLException e) {
        // Fixed typo in the diagnostic message ("Exceptio" -> "Exception").
        System.out.println("Exception " + e.toString());
        // Ignore duplicate key warnings
        if (e.getErrorCode() != DUPLICATE_KEY) {
            throw e;
        }
    } finally {
        if (stmt != null) {
            stmt.close();
        }
    }

}

From source file:com.vangent.hieos.logbrowser.servlets.AuthenticationServlet.java

/**
 *
 * @param req//from   w w w  .j ava 2s. c o m
 * @param res
 */
/**
 * Stores the company name and e-mail supplied by the client against its
 * IP address and reports the outcome as a small XML document.
 *
 * @param req servlet request carrying the "company" and "email" parameters
 * @param res servlet response; receives a &lt;response .../&gt; or
 *            &lt;error&gt;...&lt;/error&gt; XML body
 */
public void doGet(HttpServletRequest req, HttpServletResponse res) {
    PreparedStatement updateCompany = null;
    PreparedStatement selectCompanyName = null;
    Log log = new Log();
    try {
        Connection con = log.getConnection();
        updateCompany = con.prepareStatement("update ip set company_name=? , email=? where ip=? ;");
        selectCompanyName = con.prepareStatement("SELECT company_name,email FROM ip where ip = ? ; ");
    } catch (SQLException e) {
        // Fixed: these failures were previously swallowed silently, making
        // the early exit below impossible to diagnose. Log them in the same
        // style used elsewhere in this method.
        Logger.getLogger(AuthenticationServlet.class.getName()).log(Level.SEVERE, null, e);
    } catch (LoggerException e) {
        Logger.getLogger(AuthenticationServlet.class.getName()).log(Level.SEVERE, null, e);
    }
    // Check to see if processing should continue.
    if (updateCompany == null || selectCompanyName == null) {
        try {
            log.closeConnection();
        } catch (LoggerException ex) {
            Logger.getLogger(AuthenticationServlet.class.getName()).log(Level.SEVERE, null, ex);
        }
        return; // EARLY EXIT: Can not continue.
    }
    res.setContentType("text/xml");
    // NOTE(review): the manual quote escaping looks redundant with the
    // prepared statement's own parameter handling — confirm before removing.
    String company = req.getParameter("company").replaceAll("'", "\\\\'");
    String email = req.getParameter("email");
    String ipFrom = req.getRemoteAddr();
    try {
        updateCompany.setString(1, company);
        updateCompany.setString(2, email);
        updateCompany.setString(3, ipFrom);
        boolean ret = updateCompany.execute();
        logger.fatal(ipFrom);
        logger.fatal(email);
        if (ret) {
            res.getWriter().write("<response update='ok' />");
        } else {
            res.getWriter().write("<response update='error' />");
        }
    } catch (SQLException e2) {
        try {
            res.getWriter().write("<error>" + e2.toString() + "</error>");
        } catch (IOException e) {
            e.printStackTrace();
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // Always release the log DB connection.
        try {
            log.closeConnection();
        } catch (LoggerException ex) {
            Logger.getLogger(AuthenticationServlet.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}

From source file:com.cloudera.sqoop.orm.TestClassWriter.java

/**
 * Prepares each test: resets the HSQLDB test server, wires up the manager
 * and options from it, verifies the generation directories live under the
 * temp base dir (sanity check before deleting anything), and clears any
 * previously generated code/jar output.
 */
@Before
public void setUp() {
    testServer = new HsqldbTestServer();
    org.apache.log4j.Logger root = org.apache.log4j.Logger.getRootLogger();
    root.setLevel(org.apache.log4j.Level.DEBUG);
    try {
        testServer.resetServer();
    } catch (SQLException sqlE) {
        LOG.error("Got SQLException: " + sqlE.toString());
        fail("Got SQLException: " + sqlE.toString());
    } catch (ClassNotFoundException cnfe) {
        LOG.error("Could not find class for db driver: " + cnfe.toString());
        fail("Could not find class for db driver: " + cnfe.toString());
    }

    manager = testServer.getManager();
    options = testServer.getSqoopOptions();

    // Sanity check: make sure we're in a tmp dir before we blow anything away.
    assertTrue("Test generates code in non-tmp dir!", CODE_GEN_DIR.startsWith(ImportJobTestCase.TEMP_BASE_DIR));
    assertTrue("Test generates jars in non-tmp dir!", JAR_GEN_DIR.startsWith(ImportJobTestCase.TEMP_BASE_DIR));

    // Remove prior output ahead of time to ensure the test truly regenerates it.
    removeGenDir(new File(CODE_GEN_DIR), "code gen dir");
    removeGenDir(new File(JAR_GEN_DIR), "class gen dir");
}

/**
 * Deletes the given generated-output directory if it exists, logging a
 * warning when the deletion fails.
 *
 * @param dir   directory to remove
 * @param label human-readable name used in the debug log message
 */
private void removeGenDir(File dir, String label) {
    if (dir.exists()) {
        LOG.debug("Removing " + label + ": " + dir);
        if (!DirUtil.deleteDir(dir)) {
            LOG.warn("Could not delete " + dir + " prior to test");
        }
    }
}

From source file:org.fabrican.extension.variable.provider.servlet.ManagementServlet.java

@Override
protected void doDelete(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    Pair<String, String> params = gePathParams(req);
    String cmd = params.left();/*from   w ww .j a v  a 2  s .c o  m*/
    String name = params.right();
    RuleSetDAO dao = RuleSetDAO.getInstance();
    try {
        switch (cmd) {
        case "providers":
            if (name == null || name.trim().length() == 0) {
                resp.setStatus(400);
                output(resp, "missing provider identity");
            } else {

                if (dao.removeRuleSet(name)) {
                    resp.setStatus(200);
                    //output(resp, getListJSON(dao).toString());
                } else {
                    resp.setStatus(404);
                    output(resp, "failed to delete");
                }
            }
            return;
        }
    } catch (InterruptedException ex) {
        resp.setStatus(500);
        output(resp, ex.toString());
    } catch (SQLException e) {
        resp.setStatus(500);
        output(resp, e.toString());
        //} catch (JSONException e) {
        //    resp.setStatus(500);
        //    output(resp, e.toString());
    }
    resp.setStatus(400);
    output(resp, "unrecognized request");
}

From source file:org.apache.hadoop.chukwa.database.TestDatabaseWebJson.java

/**
 * Fetches the JSON view of the given table over HTTP for the last 30
 * minutes and asserts that every field of every returned row matches the
 * corresponding database value.
 *
 * @param table name of the table to verify
 */
protected void verifyTableData(String table) {
    Calendar startCalendar = new GregorianCalendar();
    startCalendar.add(Calendar.MINUTE, -30);
    long startTime = startCalendar.getTime().getTime();

    Calendar endCalendar = new GregorianCalendar();
    long endTime = endCalendar.getTime().getTime();

    String url = data_url + "?table=" + table + "&start=" + startTime + "&end=" + endTime;
    System.out.println(url);

    HttpClient client = new HttpClient();
    GetMethod method = new GetMethod(url);

    try {

        /*
         * 1. get the json result for the specified table
         */
        int statusCode = client.executeMethod(method);
        if (statusCode != HttpStatus.SC_OK) {
            System.out.println("Http Error: " + method.getStatusLine());
        }
        BufferedReader reader = new BufferedReader(
                new InputStreamReader(method.getResponseBodyAsStream(), method.getResponseCharSet()));
        // Fixed: accumulate via StringBuilder instead of O(n^2) string
        // concatenation, and close the reader when done (it was leaked).
        StringBuilder jsonBuffer = new StringBuilder();
        try {
            String str;
            while ((str = reader.readLine()) != null) {
                jsonBuffer.append(str);
            }
        } finally {
            reader.close();
        }
        String json_str = jsonBuffer.toString();

        /*
         * 2. convert the json string to individual field and compare it 
         * with the database
         */

        String cluster = "demo";
        DatabaseWriter db = new DatabaseWriter(cluster);

        JSONArray json_array = new JSONArray(json_str);
        for (int i = 0; i < json_array.length(); i++) {
            JSONObject row_obj = json_array.getJSONObject(i);

            // get the matching database row

            String queryString = getDatabaseQuery(table, row_obj);
            Macro m = new Macro(startTime, endTime, queryString);
            ResultSet rs = db.query(m.toString());
            // move to the first record
            rs.next();
            ResultSetMetaData md = rs.getMetaData();
            Iterator names = row_obj.keys();
            while (names.hasNext()) {
                String name = (String) names.next();
                String jsonValue = (String) row_obj.get(name);
                String dbValue = rs.getString(name);
                int dbCol = rs.findColumn(name);
                int dbType = md.getColumnType(dbCol);
                if (dbType == 93) {
                    // 93 == java.sql.Types.TIMESTAMP: compare as epoch millis
                    dbValue = Long.toString(rs.getTimestamp(name).getTime());
                }
                assertEquals(dbValue, jsonValue);
            }
        }

        db.close();
    } catch (SQLException e) {
        System.out.println("Exception: " + e.toString() + ":" + e.getMessage());
        System.out.println("Exception: " + e.toString() + ":" + e.getSQLState());
        System.out.println("Exception: " + e.toString() + ":" + e.getErrorCode());
        fail("SQL Error:" + ExceptionUtil.getStackTrace(e));
    } catch (Exception eOther) {
        System.out.println("Other Exception: " + eOther.toString());
        eOther.printStackTrace();
        fail("Error:" + ExceptionUtil.getStackTrace(eOther));
    } finally {
        // Fixed: release the HTTP connection (the finally block was empty).
        method.releaseConnection();
    }
}

From source file:org.apache.flume.sink.customhdfs.add.ImpalaTableFill.java

/**
 * Opens a JDBC connection to Impala through the Hive driver.
 *
 * @return a new Connection, or null when the driver class is missing or
 *         the connection attempt fails (errors are logged, not thrown)
 */
private Connection getConnection() {
    Connection connection = null;

    try {
        // Make sure the Hive JDBC driver is registered before connecting.
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        connection = DriverManager.getConnection(this.impalaUrl);
        LOG.info("impalaurl:" + this.impalaUrl);
    } catch (ClassNotFoundException missingDriver) {
        LOG.error("impalaTableFillError:" + missingDriver.toString());
    } catch (SQLException connectFailure) {
        LOG.error("impalaTableFillError:" + connectFailure.toString());
    }

    return connection;
}