Example usage for java.sql SQLException getNextException

Introduction

On this page you will find example usages of java.sql.SQLException.getNextException().

Prototype

public SQLException getNextException() 

Document

Retrieves the exception chained to this SQLException object by setNextException(SQLException ex).
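Before the individual examples, here is a minimal, self-contained sketch of the basic pattern every example below builds on: walking the chain until getNextException() returns null. The class and helper names are hypothetical.

import java.sql.SQLException;

public class SqlExceptionChainDemo {

    // Walks the chain built via setNextException and prints every link.
    static void printChain(SQLException e) {
        for (SQLException cur = e; cur != null; cur = cur.getNextException()) {
            System.err.println("SQLState=" + cur.getSQLState() + " : " + cur.getMessage());
        }
    }

    public static void main(String[] args) {
        SQLException first = new SQLException("batch entry 0 failed", "23505");
        first.setNextException(new SQLException("duplicate key value violates unique constraint", "23505"));
        printChain(first); // prints both links of the chain
    }
}

Note that since Java 6, SQLException also implements Iterable<Throwable>, so a for-each loop over the exception covers both the cause chain and the nextException chain.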

Usage

From source file:com.xpn.xwiki.store.XWikiHibernateBaseStore.java

/**
 * Hibernate and JDBC will wrap the exception thrown by a trigger in another exception (a
 * java.sql.BatchUpdateException), and that exception is sometimes wrapped again. Also, the
 * java.sql.BatchUpdateException stores the underlying trigger exception in nextException rather
 * than in the cause property. The following method helps you get to the underlying trigger message.
 */
private String getExceptionMessage(Throwable t) {
    StringBuilder sb = new StringBuilder();
    Throwable next = null;
    for (Throwable current = t; current != null; current = next) {
        next = current.getCause();
        if (next == current) {
            next = null;
        }
        if (current instanceof SQLException) {
            SQLException sx = (SQLException) current;
            while (sx.getNextException() != null) {
                sx = sx.getNextException();
                sb.append("\nSQL next exception = [" + sx + "]");
            }
        }
    }
    return sb.toString();
}
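A hedged usage sketch of the helper above. The catch site is illustrative, not from the XWiki source; the Hibernate session and LOGGER are assumed to exist in the surrounding class:

try {
    session.flush();
} catch (HibernateException e) {
    // getExceptionMessage() surfaces the trigger error hidden in nextException
    LOGGER.error("Flush failed: " + getExceptionMessage(e));
    throw e;
}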

From source file:io.stallion.dataAccess.db.DB.java

/**
 * Find a list of objects of the given model via arbitrary SQL
 *
 * @param model
 * @param sql
 * @param args
 * @param <T>
 * @return
 */
public <T extends Model> List<T> query(Class<T> model, String sql, Object... args) {
    QueryRunner runner = new QueryRunner(dataSource);
    Schema schema = null;
    if (Model.class.isAssignableFrom(model)) {
        schema = getSchemaForModelClass(model);
    }
    if (schema != null) {
        ModelListHandler<T> handler = new ModelListHandler<T>(schema);
        try {
            return runner.query(sql, handler, args);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    } else {
        BeanListHandler<T> handler = new BeanListHandler<>(model);
        try {
            return runner.query(sql, handler, args);
        } catch (SQLException e) {
            Log.exception(e.getNextException(), "Root exception in query");
            throw new RuntimeException(e);
        }
    }
}
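A usage sketch for the method above. Widget is a hypothetical model class, and the DB.instance() accessor is an assumption about how a DB object is obtained:

// Widget extends io.stallion.dataAccess.db.Model (hypothetical)
DB db = DB.instance();
List<Widget> expensive = db.query(Widget.class,
        "SELECT * FROM widgets WHERE price > ?", 100);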

From source file:com.oltpbenchmark.benchmarks.auctionmark.AuctionMarkLoader.java

/**
 * Load the tuples for the given table name
 * @param tableName
 */
protected void generateTableData(String tableName) throws SQLException {
    LOG.info("*** START " + tableName);
    final AbstractTableGenerator generator = this.generators.get(tableName);
    assert (generator != null);

    // Generate Data
    final Table catalog_tbl = benchmark.getCatalog().getTable(tableName);
    assert (catalog_tbl != null) : tableName;
    final List<Object[]> volt_table = generator.getVoltTable();
    final String sql = SQLUtil.getInsertSQL(catalog_tbl);
    final PreparedStatement stmt = conn.prepareStatement(sql);
    final int types[] = catalog_tbl.getColumnTypes();

    while (generator.hasMore()) {
        generator.generateBatch();

        //            StringBuilder sb = new StringBuilder();
        //            if (tableName.equalsIgnoreCase("USER_FEEDBACK")) { //  || tableName.equalsIgnoreCase("USER_ATTRIBUTES")) {
        //                sb.append(tableName + "\n");
        //                for (int i = 0; i < volt_table.size(); i++) {
        //                    sb.append(String.format("[%03d] %s\n", i, StringUtil.abbrv(Arrays.toString(volt_table.get(i)), 100)));
        //                }
        //                LOG.info(sb.toString() + "\n");
        //            }

        for (Object row[] : volt_table) {
            for (int i = 0; i < row.length; i++) {
                if (row[i] != null) {
                    stmt.setObject(i + 1, row[i]);
                } else {
                    stmt.setNull(i + 1, types[i]);
                }
            } // FOR
            stmt.addBatch();
        } // FOR
        try {
            stmt.executeBatch();
            conn.commit();
            stmt.clearBatch();
        } catch (SQLException ex) {
            if (ex.getNextException() != null)
                ex = ex.getNextException();
            LOG.warn(tableName + " - " + ex.getMessage());
            throw ex;
            // SKIP
        }

        this.tableSizes.put(tableName, volt_table.size());

        // Release anything to the sub-generators if we have it
        // We have to do this to ensure that all of the parent tuples get
        // insert first for foreign-key relationships
        generator.releaseHoldsToSubTableGenerators();
    } // WHILE
    stmt.close();

    // Mark as finished
    if (!this.fail) {
        generator.markAsFinished();
        synchronized (this) {
            this.finished.add(tableName);
            LOG.info(String.format("*** FINISH %s - %d tuples - [%d / %d]", tableName,
                    this.tableSizes.get(tableName), this.finished.size(), this.generators.size()));
            if (LOG.isDebugEnabled()) {
                LOG.debug("Remaining Tables: "
                        + CollectionUtils.subtract(this.generators.keySet(), this.finished));
            }
        } // SYNCH
    }
}
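The catch block in generateTableData() shows a pattern that matters with drivers such as PostgreSQL's, which chain the real server-side failure onto a BatchUpdateException via setNextException rather than initCause. Distilled to its core (stmt, conn, tableName and LOG assumed in scope):

try {
    stmt.executeBatch();
    conn.commit();
} catch (SQLException ex) {
    // prefer the chained exception: it carries the actual server error
    SQLException root = (ex.getNextException() != null) ? ex.getNextException() : ex;
    LOG.warn(tableName + " - " + root.getMessage());
    throw root;
}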

From source file:gemlite.core.internal.db.DBSynchronizer.java

/**
 * Returns an {@link SqlExceptionHandler} for the given {@link SQLException}.
 */
protected SqlExceptionHandler handleSQLException(SQLException sqle) {
    String sqlState = sqle.getSQLState();
    // What to do if SQLState is null? Checking the exception message for
    // common strings for now, but DB-specific errorCode and similar checks
    // would be better. A null SQLState has been seen due to a bug in a
    // wrapper OracleDriver; normally it can never be null.
    if (sqlState == null) {
        // no SQLState so fallback to string matching in the message
        // for BatchUpdateException it will look at the nextException
        if (sqle instanceof BatchUpdateException && sqle.getNextException() != null) {
            // "42Y96" represents an unknown exception but batch exception
            // will
            // look at the nextException in any case
            sqlState = "42Y96";
        } else {
            // if connection has been closed then refresh it
            try {
                synchronized (this) {
                    if (this.conn == null || this.conn.isClosed()) {
                        return SqlExceptionHandler.REFRESH;
                    }
                }
            } catch (Exception e) {
                return SqlExceptionHandler.REFRESH;
            }
            // treat like a connection failure by default
            return checkExceptionString(sqle.toString().toLowerCase(), SqlExceptionHandler.REFRESH);
        }
    }
    // check for exception type first
    SqlExceptionHandler handler = checkExceptionType(sqle);
    if (handler != null) {
        return handler;
    }
    // next check SQLStates
    //about SQLStates see http://blog.csdn.net/cangyingaoyou/article/details/7402243
    if (sqlState.startsWith("25") || sqlState.startsWith("42")) {
        // constraint violations can happen in retries, so the default action
        // is to IGNORE them; when errorFile is provided it will be logged
        // there in XML format in any case
        return SqlExceptionHandler.IGNORE;
    } else if (sqlState.startsWith("22") || sqlState.startsWith("23")) {
        // if numErrorTries is defined, retry some number of times, else
        // ignore after logging a warning since a retry is not likely to help
        return this.numErrorTries > 0 ? SqlExceptionHandler.IGNORE_BREAK_LOOP : SqlExceptionHandler.IGNORE;
    } else if (sqlState.startsWith("08")) {
        return SqlExceptionHandler.REFRESH;
    } else if (sqlState.startsWith("40")) {
        // these are transient transaction/lock exceptions, so retry the
        // whole batch
        return SqlExceptionHandler.IGNORE_BREAK_LOOP;
    } else {
        if (sqle instanceof BatchUpdateException && sqle.getNextException() != null) {
            return handleSQLException(sqle.getNextException());
        }
        // if connection has been closed then refresh it
        try {
            synchronized (this) {
                if (this.conn == null || this.conn.isClosed()) {
                    //return SqlExceptionHandler.REFRESH;
                    return SqlExceptionHandler.IGNORE;
                }
            }
        } catch (Exception e) {
            return SqlExceptionHandler.REFRESH;
        }
        return checkExceptionString(sqle.toString().toLowerCase(), SqlExceptionHandler.REFRESH);
    }
}
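For reference, the SQLState class prefixes tested above correspond to standard SQL condition classes: "08" connection exception, "22" data exception, "23" integrity constraint violation, "25" invalid transaction state, "40" transaction rollback, and "42" syntax error or access rule violation.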

From source file:org.executequery.gui.importexport.ImportDelimitedWorker.java

private Object doWork() {

    // the process result
    String processResult = null;

    // are we halting on any error
    int onError = getParent().getOnError();
    haltOnError = (onError == ImportExportProcess.STOP_TRANSFER);

    boolean isBatch = getParent().runAsBatchProcess();

    appendProgressText("Beginning import from delimited file process...");
    appendProgressText("Using connection: " + getParent().getDatabaseConnection().getName());

    // ---------------------------------------
    // table specific counters

    // the table statement result
    int tableInsertCount = 0;

    // the records processed for this table
    int tableRowCount = 0;

    // the table commit count
    int tableCommitCount = 0;

    // ---------------------------------------
    // total import process counters

    // the current commit block size
    int commitCount = 0;

    // the total records inserted
    int totalInsertCount = 0;

    // the total records processed
    int totalRecordCount = 0;

    // the error count
    int errorCount = 0;

    // the current line number
    int lineNumber = 0;

    int rollbackSize = getParent().getRollbackSize();
    int rollbackCount = 0;

    FileReader fileReader = null;
    BufferedReader reader = null;
    DateFormat dateFormat = null;

    try {
        // retrieve the import files
        Vector files = getParent().getDataFileVector();
        int fileCount = files.size();

        // whether to trim whitespace
        boolean trimWhitespace = getParent().trimWhitespace();

        // whether this table has a date/time field
        boolean hasDate = false;

        // whether we are parsing date formats
        boolean parsingDates = parseDateValues();

        // column names are first row
        boolean hasColumnNames = getParent().includeColumnNames();

        // currently bound variables in the prepared statement
        Map<ColumnData, String> boundVariables = null;

        // ignored indexes of columns from the file
        List<Integer> ignoredIndexes = null;

        if (hasColumnNames) {
            boundVariables = new HashMap<ColumnData, String>();
            ignoredIndexes = new ArrayList<Integer>();
            appendProgressText("Using column names from input file's first row.");
        }

        // columns to be imported that are in the file
        Map<ColumnData, String> fileImportedColumns = new HashMap<ColumnData, String>();

        // whether the data format failed (switch structure)
        boolean failed = false;

        // define the delimiter
        String delim = getParent().getDelimiter();

        // ---------------------------
        // --- initialise counters ---
        // ---------------------------

        // the table's column count
        int columnCount = -1;

        // the length of each line in the file
        int rowLength = -1;

        // progress bar values
        int progressStatus = -1;

        // ongoing progress value
        int progressCheck = -1;

        // the import file size
        long fileSize = -1;

        // set the date format

        if (parseDateValues()) {

            try {

                dateFormat = createDateFormatter();

            } catch (IllegalArgumentException e) {

                errorCount++;
                outputExceptionError("Error applying date mask", e);

                return FAILED;
            }

        }

        // record the start time
        start();

        // setup the regex matcher for delims

        // ----------------------------------------------------------------
        // below was the original pattern from the O'Reilly book.
        // discovered issues when parsing values with quotes
        // in them - not only around them.
        /*
        String regex =
            "(?:^|\\" +
            delim +
            ") (?: \" ( (?> [^\"]*+ ) (?> \"\" [^\"]*+ )*+ ) \" | ( [^\"\\" +
            delim + "]*+ ) )";
        Matcher matcher = Pattern.compile(regex, Pattern.COMMENTS).matcher("");
        Matcher qMatcher = Pattern.compile("\"\"", Pattern.COMMENTS).matcher("");
        */
        // ----------------------------------------------------------------

        // modified above to regex below
        // added the look-ahead after the close quote
        // and removed the quote from the last regex pattern

        String escapedDelim = escapeDelim(delim);

        String regex = "(?:^|" + escapedDelim + ") (?: \" ( (?> [^\"]*+ ) (?> \"\" [^\"]*+ )*+ ) \"(?="
                + escapedDelim + "?) | ( [^" + escapedDelim + "]*+ ) )";

        // ----------------------------------------------------------------
        // changed above to the following - seems to work for now
        // regex pattern in full - where <delim> is the delimiter to use
        //      \"([^\"]+?)\"<delim>?|([^<delim>]+)<delim>?|<delim>
        //
        // fixed oreilly one - not running this one
        // ----------------------------------------------------------------

        Matcher matcher = Pattern.compile(regex, Pattern.COMMENTS).matcher("");
        Matcher qMatcher = Pattern.compile("\"\"", Pattern.COMMENTS).matcher("");

        // ----------------------------------------
        // --- begin looping through the tables ---
        // ----------------------------------------

        // ensure the connection has auto-commit to false
        conn = getConnection();
        conn.setAutoCommit(false);

        int currentRowLength = 0;
        boolean insertLine = false;

        // the number of columns actually available in the file
        int filesColumnCount = 0;

        for (int i = 0; i < fileCount; i++) {

            lineNumber = 0;
            tableInsertCount = 0;
            tableCommitCount = 0;
            rollbackCount = 0;
            tableRowCount = 0;
            rowLength = 0;

            if (Thread.interrupted()) {
                setProgressStatus(100);
                throw new InterruptedException();
            }

            tableCount++;

            DataTransferObject dto = (DataTransferObject) files.elementAt(i);

            // initialise the file object
            File inputFile = new File(dto.getFileName());

            outputBuffer.append("---------------------------\nTable: ");
            outputBuffer.append(dto.getTableName());
            outputBuffer.append("\nImport File: ");
            outputBuffer.append(inputFile.getName());
            appendProgressText(outputBuffer);

            // setup the reader objects
            fileReader = new FileReader(inputFile);
            reader = new BufferedReader(fileReader);

            // retrieve the columns to be imported (or all)
            Vector<ColumnData> columns = getColumns(dto.getTableName());
            columnCount = columns.size();
            filesColumnCount = columnCount;

            // the entire row read
            String row = null;

            // the current delimited value
            String value = null;

            // the ignored column count
            int ignoredCount = 0;

            // clear the file columns cache
            fileImportedColumns.clear();

            // if the first row in the file has the column
            // names compare these with the columns selected
            if (hasColumnNames) {

                // init the bound vars cache with the selected columns
                boundVariables.clear();

                for (int k = 0; k < columnCount; k++) {

                    boundVariables.put(columns.get(k), VARIABLE_NOT_BOUND);
                }

                row = reader.readLine();
                lineNumber++;

                String[] _columns = MiscUtils.splitSeparatedValues(row, delim);
                if (_columns != null && _columns.length > 0) {

                    filesColumnCount = _columns.length;

                    // --------------------------------------
                    // first determine if we have any columns in the
                    // input file that were not selected for import

                    // reset the ignored columns
                    ignoredIndexes.clear();

                    // set up another list to re-add the columns in
                    // the order in which they appear in the file.
                    // all other columns will be added to the end
                    Vector<ColumnData> temp = new Vector<ColumnData>(columnCount);

                    ColumnData cd = null;
                    int ignoredIndex = -1;
                    for (int j = 0; j < _columns.length; j++) {
                        ignoredIndex = j;
                        String column = _columns[j];

                        for (int k = 0; k < columnCount; k++) {
                            cd = columns.get(k);
                            String _column = cd.getColumnName();

                            if (_column.equalsIgnoreCase(column)) {
                                temp.add(cd);
                                fileImportedColumns.put(cd, INCLUDED_COLUMN);
                                ignoredIndex = -1;
                                break;
                            }

                        }

                        if (ignoredIndex != -1) {

                            ignoredIndexes.add(Integer.valueOf(ignoredIndex));
                        }

                    }
                    ignoredCount = ignoredIndexes.size();

                    // if we didn't find any columns at all, show warning
                    if (temp.isEmpty()) {

                        String message = "No matching column names were "
                                + "found within the specified file's first line.\n"
                                + "The current file will be ignored.";

                        outputBuffer.append(message);
                        appendProgressWarningText(outputBuffer);

                        int yesNo = GUIUtilities.displayYesNoDialog(message + "\nDo you wish to continue?",
                                "Warning");

                        if (yesNo == JOptionPane.YES_OPTION) {
                            continue;
                        } else {
                            throw new InterruptedException();
                        }

                    } else {

                        // add any other selected columns to the
                        // end of the temp list with the columns
                        // available in the file
                        boolean addColumn = false;
                        for (int k = 0; k < columnCount; k++) {
                            addColumn = false;
                            cd = columns.get(k);
                            for (int j = 0, n = temp.size(); j < n; j++) {
                                addColumn = true;
                                if (temp.get(j) == cd) {
                                    addColumn = false;
                                    break;
                                }
                            }

                            if (addColumn) {
                                temp.add(cd);
                            }

                        }
                        columns = temp; // note: size should not have changed
                    }

                }
            }
            // otherwise just populate the columns in the file
            // with all the selected columns
            else {

                for (int j = 0; j < columnCount; j++) {

                    fileImportedColumns.put(columns.get(j), INCLUDED_COLUMN);
                }

            }

            /*
            Log.debug("ignored count: " + ignoredCount);
            for (int j = 0; j < columnCount; j++) {
            Log.debug("column: " + columns.get(j));
            }
            */

            fileSize = inputFile.length();
            progressStatus = 10;
            progressCheck = (int) (fileSize / progressStatus);

            // prepare the statement
            prepareStatement(dto.getTableName(), columns);

            if (parsingDates && dateFormat == null) {

                // check for a date data type
                for (int j = 0; j < columnCount; j++) {

                    if (dateFormat == null && !hasDate) {

                        ColumnData cd = columns.get(j);

                        if (fileImportedColumns.containsKey(cd)) {

                            if (cd.isDateDataType()) {

                                hasDate = true;
                                break;
                            }

                        }

                    }
                }

                if (hasDate && dateFormat == null) {

                    String pattern = verifyDate();

                    if (StringUtils.isNotBlank(pattern)) {

                        fileReader.close();
                        setProgressStatus(100);
                        throw new InterruptedException();
                    }

                    dateFormat = createDateFormatter(pattern);
                }

            }

            rowLength = 0;

            while ((row = reader.readLine()) != null) {

                insertLine = true;
                lineNumber++;
                tableRowCount++;
                totalRecordCount++;

                if (Thread.interrupted()) {

                    fileReader.close();
                    printTableResult(tableRowCount, tableCommitCount, dto.getTableName());

                    setProgressStatus(100);
                    throw new InterruptedException();
                }

                currentRowLength = row.length();

                if (currentRowLength == 0) {

                    outputBuffer.append("Line ");
                    outputBuffer.append(lineNumber);
                    outputBuffer.append(" contains no delimited values");
                    appendProgressWarningText(outputBuffer);

                    int yesNo = GUIUtilities.displayYesNoDialog("No values provided from line " + lineNumber
                            + " - the row is blank.\n" + "Do you wish to continue?", "Warning");

                    if (yesNo == JOptionPane.YES_OPTION) {
                        continue;
                    } else {
                        throw new InterruptedException();
                    }
                }

                rowLength += currentRowLength;
                if (progressCheck < rowLength) {

                    setProgressStatus(progressStatus);
                    progressStatus += 10;
                    rowLength = 0;
                }

                // reset matcher with current row
                matcher.reset(row);

                int index = 0;
                int lastIndex = -1;
                int loopIgnoredCount = 0;

                //Log.debug(row);

                for (int j = 0; j < filesColumnCount; j++) {

                    if (matcher.find(index)) {

                        String first = matcher.group(2);

                        if (first != null) {

                            value = first;

                        } else {

                            qMatcher.reset(matcher.group(1));
                            value = qMatcher.replaceAll("\"");
                        }

                        index = matcher.end();

                        // check if its an ignored column
                        if (ignoredCount > 0) {

                            if (isIndexIgnored(ignoredIndexes, j)) {

                                loopIgnoredCount++;
                                continue;
                            }

                        }

                    } else {

                        // not enough delims check
                        if (j < (filesColumnCount - 1) && index > (currentRowLength - 1)) {

                            outputBuffer.append("Insufficient number of column ");
                            outputBuffer.append("values provided at line ");
                            outputBuffer.append(lineNumber);
                            appendProgressErrorText(outputBuffer);

                            int yesNo = GUIUtilities
                                    .displayYesNoDialog("Insufficient number of values provided from line "
                                            + lineNumber + ".\n" + "Do you wish to continue?", "Warning");

                            if (yesNo == JOptionPane.YES_OPTION) {

                                insertLine = false;
                                break;

                            } else {

                                throw new InterruptedException();
                            }

                        } else {

                            // check if we're on a delim the matcher didn't pick up

                            int delimLength = delim.length();

                            if (row.substring(index, index + delimLength).equals(delim)) {

                                // increment index
                                index++;
                                // assign as null value
                                value = null;
                            }

                        }

                    }

                    // check if we landed on the same index - likely null value
                    if (index == lastIndex) {
                        index++;
                    }
                    lastIndex = index;

                    if (value != null && value.trim().length() == 0) {
                        value = null;
                    }

                    try {
                        ColumnData cd = columns.get(j - loopIgnoredCount);
                        setValue(value, getIndexOfColumn(columns, cd) + 1, cd.getSQLType(), trimWhitespace,
                                dateFormat);

                        if (hasColumnNames) {
                            boundVariables.put(cd, VARIABLE_BOUND);
                        }

                    } catch (ParseException e) {

                        errorCount++;
                        failed = true;
                        outputBuffer.append("Error parsing date value - ");
                        outputBuffer.append(value);
                        outputBuffer.append(" - on line ");
                        outputBuffer.append(lineNumber);
                        outputBuffer.append(" at position ");
                        outputBuffer.append(j);
                        outputExceptionError(null, e);
                        break;

                    } catch (NumberFormatException e) {

                        errorCount++;
                        failed = true;
                        outputBuffer.append("Error parsing value - ");
                        outputBuffer.append(value);
                        outputBuffer.append(" - on line ");
                        outputBuffer.append(lineNumber);
                        outputBuffer.append(" at position ");
                        outputBuffer.append(j);
                        outputExceptionError(null, e);
                        break;
                    }

                }

                if (!insertLine) {

                    prepStmnt.clearParameters();
                    continue;
                }

                if (failed && haltOnError) {

                    processResult = FAILED;
                    break;
                }

                // execute the statement
                try {

                    // check all variables are bound if we used
                    // the column names from the first row
                    if (hasColumnNames) {

                        index = 0;
                        // check all variables are bound - insert NULL otherwise

                        for (Map.Entry<ColumnData, String> entry : boundVariables.entrySet()) {

                            ColumnData cd = entry.getKey();

                            if (VARIABLE_NOT_BOUND.equals(entry.getValue())) {

                                index = getIndexOfColumn(columns, cd);
                                prepStmnt.setNull(index + 1, cd.getSQLType());
                            }

                        }

                    }

                    if (isBatch) {
                        prepStmnt.addBatch();
                    } else {
                        int result = prepStmnt.executeUpdate();
                        tableInsertCount += result;
                        commitCount += result;
                    }

                    rollbackCount++;
                    // check the rollback segment
                    if (rollbackCount == rollbackSize) {
                        if (isBatch) {
                            int result = getBatchResult(prepStmnt.executeBatch())[0];
                            tableInsertCount += result;
                            commitCount += result;
                            prepStmnt.clearBatch();
                        }
                        conn.commit();
                        totalInsertCount += commitCount;
                        tableCommitCount = tableInsertCount;
                        rollbackCount = 0;
                        commitCount = 0;
                    }

                    // reset bound variables
                    if (hasColumnNames) {
                        for (int k = 0; k < columnCount; k++) {
                            boundVariables.put(columns.get(k), VARIABLE_NOT_BOUND);
                        }
                    }

                } catch (SQLException e) {
                    logException(e);
                    errorCount++;

                    if (!isBatch) {
                        outputBuffer.append("Error inserting data from line ");
                        outputBuffer.append(lineNumber);
                        outputExceptionError(null, e);
                    } else {
                        outputBuffer.append("Error on last batch execution");
                        outputExceptionError(null, e);
                    }

                    if (haltOnError) {
                        processResult = FAILED;
                        conn.rollback();
                        getParent().cancelTransfer();
                        throw new InterruptedException();
                    }

                }

            }

            // ----------------------------
            // file/table has ended here

            if (isBatch) {

                int[] batchResult = null;

                try {
                    batchResult = getBatchResult(prepStmnt.executeBatch());
                    int result = batchResult[0];
                    tableInsertCount += result;
                    commitCount += result;
                    tableCommitCount = tableInsertCount;
                } catch (BatchUpdateException e) {
                    logException(e);
                    int[] updateCounts = e.getUpdateCounts();
                    batchResult = getBatchResult(updateCounts);
                    errorCount += batchResult[1];
                    if (errorCount == 0) {
                        errorCount = 1;
                    }

                    outputBuffer.append("An error occured during the batch process: ");
                    outputBuffer.append(e.getMessage());

                    SQLException _e = e.getNextException();
                    while (_e != null) {
                        outputBuffer.append("\nNext Exception: ");
                        outputBuffer.append(_e.getMessage());
                        _e = _e.getNextException();
                    }

                    outputBuffer.append("\n\nRecords processed to the point ");
                    outputBuffer.append("where this error occurred: ");
                    outputBuffer.append(updateCounts.length);
                    appendProgressErrorText(outputBuffer);
                    processResult = FAILED;
                }

                //  Log.debug("commitCount: " + commitCount +
                //                      " batch: " + batchResult[0]);

                if (tableRowCount != tableInsertCount) {
                    conn.rollback();

                    if (onError == ImportExportProcess.STOP_TRANSFER) {
                        getParent().cancelTransfer();
                        processResult = FAILED;
                        throw new InterruptedException();
                    }

                }

            }

            boolean doCommit = true;
            if (failed && !isBatch && rollbackSize != ImportExportProcess.COMMIT_END_OF_ALL_FILES) {

                int yesNo = GUIUtilities.displayYesNoDialog(
                        "The process completed with errors.\n" + "Do you wish to commit the last block?",
                        "Confirm commit");

                doCommit = (yesNo == JOptionPane.YES_OPTION);
            }

            // do the commit if ok from above
            // and if rollback size selected is end of file
            if (rollbackSize == ImportExportProcess.COMMIT_END_OF_FILE) {
                if (doCommit) {
                    conn.commit();
                    totalInsertCount += commitCount;
                    tableCommitCount = tableInsertCount;
                    commitCount = 0;
                } else {
                    conn.rollback();
                }
            }

            // update the progress display
            printTableResult(tableRowCount, tableInsertCount, dto.getTableName());
            setProgressStatus(100);

            // reset the checks
            hasDate = false;
            failed = false;

        }

        // commit the last remaining block or where
        // set to commit at the end of all files
        if (rollbackSize != ImportExportProcess.COMMIT_END_OF_FILE) {
            setProgressStatus(100);
            boolean doCommit = true;
            if (errorCount > 0 && errorCount != totalRecordCount) {
                int yesNo = GUIUtilities.displayYesNoDialog(
                        "The process completed with errors.\n" + "Do you wish to commit the changes?",
                        "Confirm commit");
                doCommit = (yesNo == JOptionPane.YES_OPTION);
            }

            if (doCommit) {
                conn.commit();
                totalInsertCount += commitCount;
            } else {
                conn.rollback();
            }

        }

        processResult = SUCCESS;
    } catch (InterruptedException e) {

        if (processResult != FAILED) {
            processResult = CANCELLED;
        }

        try {
            if (prepStmnt != null) {
                prepStmnt.cancel();
            }
            if (conn != null) {
                conn.rollback();
            }
        } catch (SQLException e2) {
            outputExceptionError("Error rolling back transaction", e);
        }

    } catch (Exception e) {
        logException(e);
        outputBuffer.append("Error processing data from line ");
        outputBuffer.append(lineNumber);
        outputExceptionError("\nUnrecoverable error importing table data from file", e);

        int yesNo = GUIUtilities.displayYesNoDialog(
                "The process encountered errors.\n" + "Do you wish to commit the last transaction block?",
                "Confirm commit");
        boolean doCommit = (yesNo == JOptionPane.YES_OPTION);

        try {
            if (doCommit) {
                conn.commit();
                totalInsertCount += commitCount;
            } else {
                conn.rollback();
            }
        } catch (SQLException e2) {
            logException(e2);
            outputExceptionError("Error processing last transaction block", e2);
        }
        processResult = FAILED;
    } finally {
        finish();
        releaseResources(getParent().getDatabaseConnection());

        if (totalRecordCount == 0 || errorCount > 0) {
            processResult = FAILED;
        }

        setTableCount(tableCount);
        setRecordCount(totalRecordCount);
        setRecordCountProcessed(totalInsertCount);
        setErrorCount(errorCount);

        setProgressStatus(100);
        GUIUtilities.scheduleGC();

        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
            }
        }
        if (fileReader != null) {
            try {
                fileReader.close();
            } catch (IOException e) {
            }
        }
        if (prepStmnt != null) {
            try {
                prepStmnt.close();
            } catch (SQLException e) {
            }
        }

    }

    return processResult;
}
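Two details of the BatchUpdateException handling above are worth noting. First, per the JDBC contract, when a driver stops processing at the first failure, getUpdateCounts() returns counts only for the commands that succeeded before the error, which is why updateCounts.length is reported as the number of records processed. Second, the while loop draining getNextException() is needed because a driver may chain several server-side errors onto one batch exception.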

From source file:com.flexive.core.storage.genericSQL.GenericTreeStorage.java

/**
 * {@inheritDoc}
 */
@Override
public void removeNode(Connection con, FxTreeMode mode, ContentEngine ce, long nodeId, boolean removeChildren)
        throws FxApplicationException {
    if (mode == FxTreeMode.Live)
        removeChildren = true; //always delete child nodes in live mode
    Statement stmt = null;
    if (nodeId == FxTreeNode.ROOT_NODE)
        throw new FxNoAccessException("ex.tree.delete.root");

    FxTreeNodeInfo nodeInfo = getTreeNodeInfo(con, mode, nodeId);
    ScriptingEngine scripting = EJBLookup.getScriptingEngine();
    final List<Long> scriptBeforeIds = scripting.getByScriptEvent(FxScriptEvent.BeforeTreeNodeRemoved);
    final List<Long> scriptAfterIds = scripting.getByScriptEvent(FxScriptEvent.AfterTreeNodeRemoved);
    //warning: removedNodes will only be available if script mappings for event AfterTreeNodeRemoved exist!
    List<FxTreeNode> removedNodes = scriptAfterIds.size() > 0 ? new ArrayList<FxTreeNode>(100) : null;
    final String TRUE = StorageManager.getBooleanTrueExpression();
    try {
        stmt = con.createStatement();
        if (StorageManager.isDisableIntegrityTransactional()) {
            stmt.execute(StorageManager.getReferentialIntegrityChecksStatement(false));
        }
        List<FxPK> references = new ArrayList<FxPK>(50);
        UserTicket ticket = FxContext.getUserTicket();

        // lock all affected rows
        final List<Long> removeNodeIds = selectAllChildNodeIds(con, mode, nodeInfo.getLeft(),
                nodeInfo.getRight(), true);
        acquireLocksForUpdate(con, mode,
                Iterables.concat(removeNodeIds, Arrays.asList(nodeInfo.getParentId())));
        final Map<FxPK, FxContentSecurityInfo> securityInfos = Maps
                .newHashMapWithExpectedSize(removeNodeIds.size());

        if (removeChildren) {
            //FX-102: edit permission checks on references
            ResultSet rs = stmt.executeQuery("SELECT DISTINCT REF FROM " + getTable(mode) + " WHERE " + " LFT>="
                    + nodeInfo.getLeft() + " AND RGT<=" + nodeInfo.getRight() + " ");
            while (rs != null && rs.next()) {
                try {
                    if (ce != null) {
                        final FxPK pk = new FxPK(rs.getLong(1));
                        final FxContentSecurityInfo info = ce.getContentSecurityInfo(pk);
                        FxPermissionUtils.checkPermission(ticket, ACLPermission.EDIT, info, true);
                        securityInfos.put(pk, info);
                    }
                    references.add(new FxPK(rs.getLong(1)));
                } catch (FxLoadException e) {
                    //ignore, might have been removed meanwhile
                }
            }
            // call BeforeTreeNodeRemoved scripts
            if (scriptBeforeIds.size() > 0 || scriptAfterIds.size() > 0) {
                final FxScriptBinding binding = new FxScriptBinding();
                for (long removedId : removeNodeIds) {
                    final FxTreeNode n = getNode(con, mode, removedId);
                    if (removedNodes != null)
                        removedNodes.add(n);
                    for (long scriptId : scriptBeforeIds) {
                        binding.setVariable("node", n);
                        scripting.runScript(scriptId, binding);
                    }
                }
            }

            for (List<Long> removeIds : Iterables.partition(removeNodeIds, SQL_IN_PARTSIZE)) {
                stmt.addBatch("DELETE FROM " + getTable(mode) + " WHERE id IN ("
                        + StringUtils.join(removeIds, ',') + ")");
            }
        } else {
            //FX-102: edit permission checks on references
            try {
                if (ce != null) {
                    final FxContentSecurityInfo info = ce.getContentSecurityInfo(nodeInfo.getReference());
                    FxPermissionUtils.checkPermission(FxContext.getUserTicket(), ACLPermission.EDIT, info,
                            true);
                    securityInfos.put(nodeInfo.getReference(), info);
                }
                references.add(nodeInfo.getReference());
            } catch (FxLoadException e) {
                //ignore, might have been removed meanwhile
            }
            stmt.addBatch("UPDATE " + getTable(mode) + " SET PARENT=" + nodeInfo.getParentId()
                    + " WHERE PARENT=" + nodeId);
            for (List<Long> part : Iterables.partition(removeNodeIds, SQL_IN_PARTSIZE)) {
                stmt.addBatch("UPDATE " + getTable(mode) + " SET DEPTH=DEPTH-1,DIRTY="
                        + StorageManager.getBooleanExpression(mode != FxTreeMode.Live) + " WHERE id IN ("
                        + StringUtils.join(part, ',') + ") AND DEPTH>0");
            }
            stmt.addBatch("DELETE FROM " + getTable(mode) + " WHERE ID=" + nodeId);
        }

        // Update the childcount of the parents
        if (removeChildren) {
            stmt.addBatch("UPDATE " + getTable(mode) + " SET CHILDCOUNT=CHILDCOUNT-1 WHERE ID="
                    + nodeInfo.getParentId());
        } else {
            stmt.addBatch("UPDATE " + getTable(mode) + " SET CHILDCOUNT=CHILDCOUNT+"
                    + (nodeInfo.getDirectChildCount() - 1) + " WHERE ID=" + nodeInfo.getParentId());
        }

        // Set the dirty flag for the parent if needed
        if (mode != FxTreeMode.Live) {
            stmt.addBatch(
                    "UPDATE " + getTable(mode) + " SET DIRTY=" + TRUE + " WHERE ID=" + nodeInfo.getParentId());
        }

        if (mode == FxTreeMode.Live && exists(con, FxTreeMode.Edit, nodeId)) {
            //check if a node with the same id that has been removed in the live tree exists in the edit tree,
            //the node and all its children will be flagged as dirty in the edit tree
            FxTreeNodeInfo editNode = getTreeNodeInfo(con, FxTreeMode.Edit, nodeId);
            List<Long> editNodeIds = selectAllChildNodeIds(con, FxTreeMode.Edit, editNode.getLeft(),
                    editNode.getRight(), true);

            acquireLocksForUpdate(con, FxTreeMode.Edit, editNodeIds);
            for (List<Long> part : Iterables.partition(editNodeIds, SQL_IN_PARTSIZE)) {
                stmt.addBatch("UPDATE " + getTable(FxTreeMode.Edit) + " SET DIRTY=" + TRUE + " WHERE ID IN ("
                        + StringUtils.join(part, ',') + ")");
            }
        }
        stmt.executeBatch();
        if (ce != null) {
            //if the referenced content is a folder, remove it
            final Set<Long> folderTypeIds = Sets.newHashSet(FxSharedUtils.getSelectableObjectIdList(
                    CacheAdmin.getEnvironment().getType(FxType.FOLDER).getDerivedTypes(true, true)));
            for (FxPK ref : references) {
                FxContentSecurityInfo si = securityInfos.get(ref);
                if (si == null) {
                    si = ce.getContentSecurityInfo(ref);
                }
                if (folderTypeIds.contains(si.getTypeId())) {
                    final int contentCount = ce.getReferencedContentCount(si.getPk());
                    if (contentCount == 0) {
                        ce.remove(ref);
                    }
                }
            }
        }
        afterNodeRemoved(con, nodeInfo, removeChildren);
        if (removedNodes != null) {
            final FxScriptBinding binding = new FxScriptBinding();
            for (long scriptId : scriptAfterIds) {
                for (FxTreeNode n : removedNodes) {
                    binding.setVariable("node", n);
                    scripting.runScript(scriptId, binding);
                }

            }
        }
    } catch (SQLException exc) {
        String next = "";
        if (exc.getNextException() != null)
            next = " next:" + exc.getNextException().getMessage();
        throw new FxRemoveException(LOG, exc, "ex.tree.delete.failed", nodeId, exc.getMessage() + next);
    } finally {
        try {
            if (stmt != null) {
                if (StorageManager.isDisableIntegrityTransactional()) {
                    try {
                        stmt.execute(StorageManager.getReferentialIntegrityChecksStatement(true));
                    } catch (SQLException e) {
                        LOG.error(e);
                    }
                }
                stmt.close();
            }
        } catch (Exception exc) {
            //ignore
        }
    }
}

From source file:gemlite.core.internal.db.DBSynchronizer.java

protected SqlExceptionHandler checkExceptionType(SQLException sqle) {
    if (sqle != null) {
        if (sqle instanceof SQLNonTransientConnectionException) {
            // will need to connect again
            return SqlExceptionHandler.REFRESH;
        }
        if (sqle instanceof SQLIntegrityConstraintViolationException) {
            // constraint violations can happen in retries, so default
            // action is to
            // IGNORE them; when errorFile is provided then it will be
            // logged to
            // that in XML format in any case
            return SqlExceptionHandler.IGNORE;
        }
        if (sqle instanceof SQLNonTransientException) {
            // if numErrorTries is defined, then retry some number of times
            // else
            // ignore after having logged warning since retry is not likely
            // to help
            return this.numErrorTries > 0 ? SqlExceptionHandler.IGNORE_BREAK_LOOP : SqlExceptionHandler.IGNORE;
        }
        if (sqle instanceof SQLTransientException) {
            // skip the remaining batch and retry whole batch again
            return SqlExceptionHandler.IGNORE_BREAK_LOOP;
        }
        if (sqle instanceof BatchUpdateException) {
            return checkExceptionType(sqle.getNextException());
        }
    }
    return null;
}
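Note how the BatchUpdateException branch recurses into getNextException(): the meaningful subclass (for example SQLIntegrityConstraintViolationException) is usually chained onto the batch exception rather than wrapped as its cause. A hedged sketch of exercising the classifier, assumed to run inside DBSynchronizer:

SQLException batchFailure = new java.sql.BatchUpdateException();
batchFailure.setNextException(
        new java.sql.SQLIntegrityConstraintViolationException("duplicate key"));
// resolves to IGNORE via the chained constraint-violation exception
SqlExceptionHandler handler = checkExceptionType(batchFailure);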

From source file:com.amalto.core.storage.hibernate.HibernateStorage.java

@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public synchronized void prepare(MetadataRepository repository, Set<Expression> optimizedExpressions,
        boolean force, boolean dropExistingData) {
    if (!force && isPrepared) {
        return; // No op operation
    }
    if (isPrepared) {
        close();
        internalInit();
    }
    if (dataSource == null) {
        throw new IllegalArgumentException("Datasource is not set."); //$NON-NLS-1$
    }
    // No support for data models including inheritance AND for g* XSD simple types AND fields that start with
    // X_TALEND_
    try {
        repository.accept(METADATA_CHECKER);
        userMetadataRepository = repository;
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred during unsupported features check.", e); //$NON-NLS-1$
    }
    // Loads additional types for staging area.
    if (storageType == StorageType.STAGING) {
        userMetadataRepository = repository.copy(); // See TMDM-6938: prevents staging types to appear in master
                                                    // storage.
        userMetadataRepository
                .load(MetadataRepositoryAdmin.class.getResourceAsStream("stagingInternalTypes.xsd")); //$NON-NLS-1$
    }
    // Create class loader for storage's dynamically created classes.
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    Class<? extends StorageClassLoader> clazz;
    try {
        try {
            clazz = (Class<? extends StorageClassLoader>) Class.forName(ALTERNATE_CLASS_LOADER);
        } catch (ClassNotFoundException e) {
            clazz = (Class<? extends StorageClassLoader>) Class.forName(CLASS_LOADER);
        }
        Constructor<? extends StorageClassLoader> constructor = clazz.getConstructor(ClassLoader.class,
                String.class, StorageType.class);
        storageClassLoader = constructor.newInstance(contextClassLoader, storageName, storageType);
        storageClassLoader.setDataSourceConfiguration(dataSource);
        storageClassLoader.generateHibernateConfig(); // Checks if configuration can be generated.
    } catch (Exception e) {
        throw new RuntimeException("Could not create storage class loader", e); //$NON-NLS-1$
    }
    if (dropExistingData) {
        LOGGER.info("Cleaning existing database content."); //$NON-NLS-1$
        StorageCleaner cleaner = new JDBCStorageCleaner(new FullTextIndexCleaner());
        cleaner.clean(this);
    } else {
        LOGGER.info("*NOT* cleaning existing database content."); //$NON-NLS-1$
    }
    if (autoPrepare) {
        LOGGER.info("Preparing database before schema generation."); //$NON-NLS-1$
        StorageInitializer initializer = new JDBCStorageInitializer();
        if (initializer.supportInitialization(this)) {
            if (!initializer.isInitialized(this)) {
                initializer.initialize(this);
            } else {
                LOGGER.info("Database is already prepared."); //$NON-NLS-1$
            }
        } else {
            LOGGER.info("Datasource is not configured for automatic initialization."); //$NON-NLS-1$
        }
    } else {
        LOGGER.info("*NOT* preparing database before schema generation."); //$NON-NLS-1$
    }
    try {
        Thread.currentThread().setContextClassLoader(storageClassLoader);
        // Mapping of data model types to RDBMS (i.e. 'flatten' representation of types).
        MetadataRepository internalRepository;
        try {
            InternalRepository typeEnhancer = getTypeEnhancer();
            internalRepository = userMetadataRepository.accept(typeEnhancer);
            mappingRepository = typeEnhancer.getMappings();
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during type mapping creation.", e); //$NON-NLS-1$
        }
        // Set fields to be indexed in database.
        Set<FieldMetadata> databaseIndexedFields = new HashSet<FieldMetadata>();
        switch (storageType) {
        case MASTER:
            // Adds indexes on user defined fields
            for (Expression optimizedExpression : optimizedExpressions) {
                Collection<FieldMetadata> indexedFields = RecommendedIndexes.get(optimizedExpression);
                for (FieldMetadata indexedField : indexedFields) {
                    // TMDM-5896: Don't index Composite Key fields
                    if (indexedField instanceof CompoundFieldMetadata) {
                        continue;
                    }
                    // TMDM-5311: Don't index TEXT fields
                    TypeMetadata indexedFieldType = indexedField.getType();
                    if (!isIndexable(indexedFieldType)) {
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Ignore index on field '" + indexedField.getName() //$NON-NLS-1$
                                    + "' because value is stored in TEXT."); //$NON-NLS-1$
                        }
                        continue;
                    }
                    // Go up the containment tree in case containing type is anonymous.
                    ComplexTypeMetadata containingType = indexedField.getContainingType().getEntity();
                    TypeMapping mapping = mappingRepository.getMappingFromUser(containingType);
                    FieldMetadata databaseField = mapping.getDatabase(indexedField);
                    if (databaseField == null) {
                        LOGGER.error("Could not index field '" + indexedField + "' (" + indexedField.getPath() //$NON-NLS-1$ //$NON-NLS-2$
                                + "), ignoring index."); //$NON-NLS-1$
                        continue;
                    } else if (!isIndexable(databaseField.getType())) {
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Ignore index on field '" + indexedField.getName() //$NON-NLS-1$
                                    + "' because value (in database mapping) is stored in TEXT."); //$NON-NLS-1$
                        }
                        continue; // Don't take into indexed fields long text fields
                    }
                    // Database specific behaviors
                    switch (dataSource.getDialectName()) {
                    case SQL_SERVER:
                        // TMDM-8144: Don't index field name on SQL Server when size > 900
                        String maxLengthStr = indexedField.getType()
                                .<String>getData(MetadataRepository.DATA_MAX_LENGTH);
                        if (maxLengthStr == null) { // go up the type inheritance tree to find max length annotation
                            TypeMetadata type = indexedField.getType();
                            while (!XMLConstants.W3C_XML_SCHEMA_NS_URI.equals(type.getNamespace())
                                    && !type.getSuperTypes().isEmpty()) {
                                type = type.getSuperTypes().iterator().next();
                                maxLengthStr = type.<String>getData(MetadataRepository.DATA_MAX_LENGTH);
                                if (maxLengthStr != null) {
                                    break;
                                }
                            }
                        }
                        if (maxLengthStr != null) {
                            Integer maxLength = Integer.parseInt(maxLengthStr);
                            if (maxLength > 900) {
                                LOGGER.warn("Skip index on field '" + indexedField.getPath() //$NON-NLS-1$
                                        + "' (too long value)."); //$NON-NLS-1$
                                continue;
                            }
                        }
                        break;
                    case H2:
                    case MYSQL:
                    case POSTGRES:
                    case DB2:
                    case ORACLE_10G:
                    default:
                        // Nothing to do for these databases
                        break;
                    }
                    databaseIndexedFields.add(databaseField);
                    if (!databaseField.getContainingType().isInstantiable()) {
                        Collection<ComplexTypeMetadata> roots = RecommendedIndexes
                                .getRoots(optimizedExpression);
                        for (ComplexTypeMetadata root : roots) {
                            List<FieldMetadata> path = StorageMetadataUtils.path(
                                    mappingRepository.getMappingFromUser(root).getDatabase(), databaseField);
                            if (path.size() > 1) {
                                databaseIndexedFields.addAll(path.subList(0, path.size() - 1));
                            } else {
                                LOGGER.warn("Failed to properly index field '" + databaseField + "'."); //$NON-NLS-1$ //$NON-NLS-2$
                            }
                        }
                    }
                }
            }
            break;
        case STAGING:
            if (!optimizedExpressions.isEmpty()) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Ignoring " + optimizedExpressions.size() //$NON-NLS-1$
                            + " to optimize (disabled on staging area)."); //$NON-NLS-1$
                }
            }
            // Adds "staging status" / "staging block key" / "staging task id" as indexed fields
            for (TypeMapping typeMapping : mappingRepository.getAllTypeMappings()) {
                ComplexTypeMetadata database = typeMapping.getDatabase();
                if (database.hasField(METADATA_STAGING_STATUS)) {
                    databaseIndexedFields.add(database.getField(METADATA_STAGING_STATUS));
                }
                if (database.hasField(METADATA_STAGING_BLOCK_KEY)) {
                    databaseIndexedFields.add(database.getField(METADATA_STAGING_BLOCK_KEY));
                }
                if (database.hasField(METADATA_TASK_ID)) {
                    databaseIndexedFields.add(database.getField(METADATA_TASK_ID));
                }
            }
            break;
        case SYSTEM: // Nothing to index on SYSTEM
            break;
        }
        // Don't add PKs or FKs to indexes when using H2
        if (dataSource.getDialectName() == RDBMSDataSource.DataSourceDialect.H2) {
            Iterator<FieldMetadata> indexedFields = databaseIndexedFields.iterator();
            while (indexedFields.hasNext()) {
                FieldMetadata field = indexedFields.next();
                if (field instanceof ReferenceFieldMetadata || field.isKey()) {
                    indexedFields.remove(); // H2 doesn't like indexes on PKs or FKs.
                }
            }
        }
        switch (dataSource.getDialectName()) {
        case ORACLE_10G:
            tableResolver = new OracleStorageTableResolver(databaseIndexedFields,
                    dataSource.getNameMaxLength());
            break;
        default:
            tableResolver = new StorageTableResolver(databaseIndexedFields, dataSource.getNameMaxLength());
        }
        storageClassLoader.setTableResolver(tableResolver);
        // Master, Staging and System share the same class creator.
        switch (storageType) {
        case MASTER:
        case STAGING:
        case SYSTEM:
            hibernateClassCreator = new ClassCreator(storageClassLoader);
            break;
        }
        // Create Hibernate classes (after some modifications to the types).
        try {
            internalRepository.accept(hibernateClassCreator);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during dynamic classes creation.", e); //$NON-NLS-1$
        }
        // Last step: configuration of Hibernate
        try {
            // Hibernate needs to have dynamic classes in context class loader during configuration.
            InputStream ehCacheConfig = storageClassLoader
                    .getResourceAsStream(StorageClassLoader.EHCACHE_XML_CONFIG);
            if (ehCacheConfig != null) {
                CacheManager.create(ehCacheConfig);
            }
            configuration.configure(StorageClassLoader.HIBERNATE_CONFIG);
            batchSize = Integer.parseInt(configuration.getProperty(Environment.STATEMENT_BATCH_SIZE));
            // Sets default schema for Oracle
            Properties properties = configuration.getProperties();
            if (dataSource.getDialectName() == RDBMSDataSource.DataSourceDialect.ORACLE_10G) {
                properties.setProperty(Environment.DEFAULT_SCHEMA, dataSource.getUserName());
            }
            // Logs DDL *before* initialization in case initialization fails (useful for debugging).
            if (LOGGER.isTraceEnabled()) {
                traceDDL();
            }
            // Customize schema generation according to datasource content.
            RDBMSDataSource.SchemaGeneration schemaGeneration = dataSource.getSchemaGeneration();
            List exceptions = Collections.emptyList();
            switch (schemaGeneration) {
            case CREATE:
                SchemaExport schemaExport = new SchemaExport(configuration);
                schemaExport.create(false, true);
                // Exceptions may happen during recreation (Hibernate may execute statements on tables that do
                // not exist): these exceptions are supposed to be harmless (but log them at DEBUG just in case).
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Exception(s) occurred during schema creation:"); //$NON-NLS-1$
                    for (Object exceptionObject : schemaExport.getExceptions()) {
                        LOGGER.debug(((Exception) exceptionObject).getMessage());
                    }
                }
                break;
            case VALIDATE:
                SchemaValidator schemaValidator = new SchemaValidator(configuration);
                schemaValidator.validate(); // This is supposed to throw exception on validation issue.
                break;
            case UPDATE:
                SchemaUpdate schemaUpdate = new SchemaUpdate(configuration);
                schemaUpdate.execute(false, true);
                exceptions = schemaUpdate.getExceptions();
                break;
            }
            // Throw an exception if the schema update encountered issue(s).
            if (!exceptions.isEmpty()) {
                StringBuilder sb = new StringBuilder();
                sb.append("Could not prepare database schema: "); //$NON-NLS-1$
                Iterator iterator = exceptions.iterator();
                while (iterator.hasNext()) {
                    Exception exception = (Exception) iterator.next();
                    if (exception instanceof SQLException) {
                        SQLException currentSQLException = (SQLException) exception;
                        while (currentSQLException != null) {
                            sb.append(currentSQLException.getMessage());
                            sb.append('\n');
                            currentSQLException = currentSQLException.getNextException();
                        }
                    } else if (exception != null) {
                        sb.append(exception.getMessage());
                    }
                    if (iterator.hasNext()) {
                        sb.append('\n');
                    }
                }
                throw new IllegalStateException(sb.toString());
            }
            // Initialize Hibernate
            Environment.verifyProperties(properties);
            ConfigurationHelper.resolvePlaceHolders(properties);
            ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(properties)
                    .build();
            factory = configuration.buildSessionFactory(serviceRegistry);
            MDMTransactionSessionContext.declareStorage(this, factory);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during Hibernate initialization.", e); //$NON-NLS-1$
        }
        // All set: set prepared flag to true.
        isPrepared = true;
        LOGGER.info("Storage '" + storageName + "' (" + storageType + ") is ready."); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    } catch (Throwable t) {
        try {
            // This prevents a PermGen OOME in case of multiple failures to start.
            close();
        } catch (Exception e) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Error occurred during clean up following failed prepare", e); //$NON-NLS-1$
            }
        }
        throw new RuntimeException("Could not prepare '" + storageName + "'.", t); //$NON-NLS-1$ //$NON-NLS-2$
    } finally {
        Thread.currentThread().setContextClassLoader(contextClassLoader);
    }
}

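The schema-preparation code above flattens each SQLException chain into a single message by following getNextException() links. The same pattern can be written as a minimal, self-contained sketch (the class and method names here are hypothetical, chosen for illustration):

import java.sql.SQLException;

public final class SQLExceptionChains {

    // Flattens an SQLException and everything chained to it via
    // setNextException() into one newline-separated message.
    public static String chainToString(SQLException head) {
        StringBuilder sb = new StringBuilder();
        for (SQLException current = head; current != null; current = current.getNextException()) {
            if (sb.length() > 0) {
                sb.append('\n');
            }
            sb.append(current.getMessage());
        }
        return sb.toString();
    }
}
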
From source file:gemlite.core.internal.db.DBSynchronizer.java

/**
 * Logs the exception, including stack traces, for info logging with
 * {@link #traceDBSynchronizer}, and returns an {@link SqlExceptionHandler}
 * for the given {@link SQLException}.
 */
protected SqlExceptionHandler handleSQLException(SQLException sqle, String format, Statement stmt,
        AsyncEvent event, String eventString, Logger logger, boolean logWarning) throws SQLException {
    SqlExceptionHandler handler = handleSQLException(sqle);

    if (event != null && this.numErrorTries > 0) {

        ErrorEvent ee = new ErrorEvent();
        ee.ev = event;
        ee.errortime = System.currentTimeMillis();
        Object[] tries = this.errorTriesMap.get(ee);

        if (tries != null) {
            Integer numTries = (Integer) tries[0];
            if (numTries >= this.numErrorTries) {
                // At this point, ignore this exception and move on to the
                // others in the batch.
                handler = SqlExceptionHandler.IGNORE;
                logWarning = false;
            }
            tries[0] = Integer.valueOf(numTries.intValue() + 1);
            tries[1] = sqle;
        } else {
            this.errorTriesMap.put(ee, new Object[] { 1, sqle });
        }
    }

    boolean skipLogging = false;
    if (event != null && (logWarning || logger.isDebugEnabled())) {
        if (eventString == null) {
            skipLogging = helper.skipFailureLogging(event);
            eventString = event.toString();
        }
    }
    if (!skipLogging) {
        if (logWarning) {
            if (logger.isWarnEnabled()) {
                helper.logFormat(logger, Level.WARNING, sqle, format, eventString, sqle);
                SQLException next = sqle.getNextException();
                if (next != null) {
                    helper.logFormat(logger, Level.WARNING, next, format, eventString, next);
                }
            }
        }
        if (logger.isDebugEnabled()) {
            if (logger.isWarnEnabled()) {
                String stmtStr = (stmt != null ? ("executing statement=" + stmt) : "preparing statement");
                helper.log(logger, Level.WARNING, sqle, "DBSynchronizer::" + "processEvents: Exception while "
                        + stmtStr + " for event=" + eventString);
                if (sqle.getNextException() != null) {
                    helper.log(logger, Level.WARNING, sqle.getNextException(),
                            "DBSynchronizer::processEvents: next exception");
                }
            }
        }
    }

    handler.execute(this);
    return handler;
}

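The warning branch above logs only the first chained exception. Since JDBC 4.0, SQLException also implements Iterable<Throwable>, whose iterator visits the exception itself, its causes, and everything linked through setNextException(). A sketch of a chain-wide logger built on that (the ChainLogging class is hypothetical; java.util.logging is used only to keep the example self-contained):

import java.sql.SQLException;
import java.util.logging.Level;
import java.util.logging.Logger;

public final class ChainLogging {

    private static final Logger LOGGER = Logger.getLogger(ChainLogging.class.getName());

    // Iterating an SQLException (JDBC 4.0+) visits the exception, its causes,
    // and all exceptions chained via setNextException(), so no manual
    // getNextException() loop is needed.
    public static void logChain(SQLException sqle) {
        for (Throwable t : sqle) {
            LOGGER.log(Level.WARNING, "Chained exception", t);
        }
    }
}
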
From source file:pl.umk.mat.zawodyweb.www.RequestBean.java

@HttpAction(name = "dellanguage", pattern = "del/{id}/language")
public String deleteLanguage(@Param(name = "id", encode = true) int id) {
    if (rolesBean.canEditAnyProblem()) {
        try {
            languagesDAO.deleteById(id);
            HibernateUtil.getSessionFactory().getCurrentSession().getTransaction().commit();
            HibernateUtil.getSessionFactory().getCurrentSession().beginTransaction();
        } catch (JDBCException e) {
            e.printStackTrace();
            SQLException ex = e.getSQLException();
            while ((ex = ex.getNextException()) != null) {
                ex.printStackTrace();
            }
        }

        return "/admin/listlanguages";
    } else {
        return "/error/404";
    }
}
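
The loop above assumes JDBCException.getSQLException() returns a non-null SQLException (which Hibernate provides in practice) and deliberately skips the head of the chain, since e.printStackTrace() already covers it. A defensive, standalone variant that prints the whole chain (the ChainDump class is hypothetical):

import java.sql.SQLException;

public final class ChainDump {

    // Prints the stack trace of every exception linked through
    // setNextException(), starting from (and including) the head.
    public static void dumpChain(SQLException head) {
        for (SQLException ex = head; ex != null; ex = ex.getNextException()) {
            ex.printStackTrace();
        }
    }
}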