Example usage for java.sql SQLException getSQLState

List of usage examples for java.sql SQLException getSQLState

Introduction

On this page you can find the example usage for java.sql SQLException getSQLState.

Prototype

public String getSQLState() 

Source Link

Document

Retrieves the SQLState for this SQLException object.

Usage

From source file:org.apache.hive.jdbc.TestJdbcDriver2.java

/**
 * Negative test for unsupported JDBC result set attributes.
 * The driver supports neither scroll-sensitive cursors nor updatable result
 * sets, so both createStatement variants must be rejected with SQLState
 * "HYC00" (optional feature not implemented).
 * @throws Exception
 */
@Test
public void testUnsupportedFetchTypes() throws Exception {
    assertCreateStatementRejected(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY,
            "createStatement with TYPE_SCROLL_SENSITIVE should fail");
    assertCreateStatementRejected(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE,
            "createStatement with CONCUR_UPDATABLE should fail");
}

/** Asserts that createStatement with the given attributes fails with SQLState "HYC00". */
private void assertCreateStatementRejected(int resultSetType, int resultSetConcurrency,
        String failureMessage) throws Exception {
    try {
        con.createStatement(resultSetType, resultSetConcurrency);
        fail(failureMessage);
    } catch (SQLException e) {
        assertEquals("HYC00", e.getSQLState().trim());
    }
}

From source file:org.apache.hive.jdbc.TestJdbcDriver2.java

/**
 * Verifies the SQLState reported for three common error classes:
 * syntax error (42000), table not found (42S02), and invalid column (42000).
 * @throws SQLException
 */
@Test
public void testErrorDiag() throws SQLException {
    Statement stmt = con.createStatement();
    // syntax error
    assertQueryFailsWithSqlState(stmt, "select from " + dataTypeTableName, "42000");
    // table not found
    assertQueryFailsWithSqlState(stmt, "select * from nonTable", "42S02");
    // invalid column
    assertQueryFailsWithSqlState(stmt, "select zzzz from " + dataTypeTableName, "42000");
}

/** Runs the query, expects it to fail, and checks the resulting SQLState. */
private static void assertQueryFailsWithSqlState(Statement stmt, String sql, String expectedSqlState) {
    try {
        stmt.executeQuery(sql);
        fail("SQLException is expected");
    } catch (SQLException e) {
        assertEquals(expectedSqlState, e.getSQLState());
    }
}

From source file:org.jumpmind.symmetric.service.impl.DataExtractorService.java

/**
 * Extracts each active outgoing batch for the target node and, when streaming
 * to file is enabled (or the mode is FOR_PAYLOAD_CLIENT), sends it as well.
 * Processing stops early when the per-sync byte threshold is reached or when
 * a batch has to be (re)scheduled for background extraction. On a runtime
 * failure, the current batch is re-read, its SQL diagnostics recorded, and it
 * is marked in error unless it was ignored or already OK.
 *
 * @param processInfo   progress tracker; updated with data counts, batch/load
 *                      ids and status as batches move through the pipeline
 * @param targetNode    node the batches are being extracted for
 * @param activeBatches batches to process, in order
 * @param dataWriter    writer the extracted/sent data is pushed to
 * @param mode          extraction mode; affects status transitions and whether
 *                      batches are transferred
 * @return the batches actually processed (possibly fewer than activeBatches
 *         when an early break occurred), or an empty list when there was
 *         nothing to do
 */
protected List<OutgoingBatch> extract(ProcessInfo processInfo, Node targetNode,
        List<OutgoingBatch> activeBatches, IDataWriter dataWriter, ExtractMode mode) {
    boolean streamToFileEnabled = parameterService.is(ParameterConstants.STREAM_TO_FILE_ENABLED);
    List<OutgoingBatch> processedBatches = new ArrayList<OutgoingBatch>(activeBatches.size());
    if (activeBatches.size() > 0) {
        Set<String> channelsProcessed = new HashSet<String>();
        long batchesSelectedAtMs = System.currentTimeMillis();
        OutgoingBatch currentBatch = null;
        try {

            long bytesSentCount = 0;
            int batchesSentCount = 0;
            long maxBytesToSync = parameterService.getLong(ParameterConstants.TRANSPORT_MAX_BYTES_TO_SYNC);

            for (int i = 0; i < activeBatches.size(); i++) {
                currentBatch = activeBatches.get(i);

                channelsProcessed.add(currentBatch.getChannelId());
                processInfo.setDataCount(currentBatch.getDataEventCount());
                processInfo.setCurrentBatchId(currentBatch.getBatchId());
                processInfo.setCurrentLoadId(currentBatch.getLoadId());

                // Batch status may have changed since selection; refresh if stale.
                currentBatch = requeryIfEnoughTimeHasPassed(batchesSelectedAtMs, currentBatch);

                if (currentBatch.isExtractJobFlag() && currentBatch.getStatus() != Status.IG) {
                    if (parameterService.is(ParameterConstants.INITIAL_LOAD_USE_EXTRACT_JOB)) {
                        if (currentBatch.getStatus() != Status.RQ && currentBatch.getStatus() != Status.IG
                                && !isPreviouslyExtracted(currentBatch)) {
                            /*
                             * the batch must have been purged. it needs to
                             * be re-extracted
                             */
                            log.info(
                                    "Batch {} is marked as ready but it has been deleted.  Rescheduling it for extraction",
                                    currentBatch.getNodeBatchId());
                            if (changeBatchStatus(Status.RQ, currentBatch, mode)) {
                                resetExtractRequest(currentBatch);
                            }
                            // Stop processing; the batch will be picked up by the extract job.
                            break;
                        } else if (currentBatch.getStatus() == Status.RQ) {
                            log.info(
                                    "Batch {} is not ready for delivery.  It is currently scheduled for extraction",
                                    currentBatch.getNodeBatchId());
                            break;
                        }
                    } else {
                        // Extract jobs are disabled; extract inline on the next pass.
                        currentBatch.setStatus(Status.NE);
                        currentBatch.setExtractJobFlag(false);
                    }
                } else {
                    processInfo.setStatus(ProcessInfo.Status.EXTRACTING);
                    currentBatch = extractOutgoingBatch(processInfo, targetNode, dataWriter, currentBatch,
                            streamToFileEnabled, true, mode);
                }

                // Transfer the batch when data was staged to file or the caller
                // is a payload client.
                if (streamToFileEnabled || mode == ExtractMode.FOR_PAYLOAD_CLIENT) {
                    processInfo.setStatus(ProcessInfo.Status.TRANSFERRING);
                    currentBatch = sendOutgoingBatch(processInfo, targetNode, currentBatch, dataWriter, mode);
                }

                processedBatches.add(currentBatch);

                if (currentBatch.getStatus() != Status.OK) {
                    currentBatch.setLoadCount(currentBatch.getLoadCount() + 1);
                    changeBatchStatus(Status.LD, currentBatch, mode);

                    bytesSentCount += currentBatch.getByteCount();
                    batchesSentCount++;

                    // Respect the per-sync byte cap; remaining batches wait for
                    // the next sync.
                    if (bytesSentCount >= maxBytesToSync && processedBatches.size() < activeBatches.size()) {
                        log.info(
                                "Reached the total byte threshold after {} of {} batches were extracted for node '{}'.  The remaining batches will be extracted on a subsequent sync",
                                new Object[] { batchesSentCount, activeBatches.size(),
                                        targetNode.getNodeId() });
                        break;
                    }
                }
            }

        } catch (RuntimeException e) {
            // Record error diagnostics on the batch that was in flight, if any.
            SQLException se = unwrapSqlException(e);
            if (currentBatch != null) {
                /* Reread batch in case the ignore flag has been set */
                currentBatch = outgoingBatchService.findOutgoingBatch(currentBatch.getBatchId(),
                        currentBatch.getNodeId());
                statisticManager.incrementDataExtractedErrors(currentBatch.getChannelId(), 1);
                if (se != null) {
                    // A SQL failure: capture state/code/message for the batch record.
                    currentBatch.setSqlState(se.getSQLState());
                    currentBatch.setSqlCode(se.getErrorCode());
                    currentBatch.setSqlMessage(se.getMessage());
                } else {
                    currentBatch.setSqlMessage(getRootMessage(e));
                }
                currentBatch.revertStatsOnError();
                if (currentBatch.getStatus() != Status.IG && currentBatch.getStatus() != Status.OK) {
                    currentBatch.setStatus(Status.ER);
                    currentBatch.setErrorFlag(true);
                }
                outgoingBatchService.updateOutgoingBatch(currentBatch);

                if (isStreamClosedByClient(e)) {
                    log.warn(
                            "Failed to transport batch {}.  The stream was closed by the client.  There is a good chance that a previously sent batch errored out and the stream was closed or there was a network error.  The error was: {}",
                            currentBatch, getRootMessage(e));
                } else {
                    if (e instanceof ProtocolException) {
                        // Staged data may be corrupt; discard it so it is re-extracted.
                        IStagedResource resource = getStagedResource(currentBatch);
                        if (resource != null) {
                            resource.delete();
                        }
                    }
                    log.error("Failed to extract batch {}", currentBatch, e);
                }
                processInfo.setStatus(ProcessInfo.Status.ERROR);
            } else {
                log.error("Could not log the outgoing batch status because the batch was null", e);
            }
        }

        // Next, we update the node channel controls to the
        // current timestamp
        Calendar now = Calendar.getInstance();

        for (String channelProcessed : channelsProcessed) {
            NodeChannel nodeChannel = configurationService.getNodeChannel(channelProcessed,
                    targetNode.getNodeId(), false);
            if (nodeChannel != null) {
                nodeChannel.setLastExtractTime(now.getTime());
                configurationService.updateLastExtractTime(nodeChannel);
            }
        }

        return processedBatches;
    } else {
        return Collections.emptyList();
    }
}

From source file:com.funambol.foundation.items.dao.PIMCalendarDAO.java

/**
 * Retrieves the calendar with the given UID for the current user, including
 * its recurrence exceptions.
 *
 * @param uid the calendar id as a numeric string (parsed with Long.parseLong)
 * @return the wrapped calendar produced by createCalendar/addPIMCalendarExceptions
 *         (presumably null when no row matches — TODO confirm against createCalendar)
 * @throws DAOException if any SQL, parsing, or mapping error occurs
 */
public CalendarWrapper getItem(String uid) throws DAOException {

    if (log.isTraceEnabled()) {
        log.trace("DAO start getItem " + uid);
    }

    Connection con = null;
    PreparedStatement ps = null;
    ResultSet rs = null;
    CalendarWrapper cw = null;

    try {
        // Looks up the data source when the first connection is created
        con = getUserDataSource().getRoutedConnection(userId);
        con.setReadOnly(true);

        // Load the base calendar row for (id, userId).
        ps = con.prepareStatement(SQL_GET_FNBL_PIM_CALENDAR_BY_ID_USERID);
        ps.setLong(1, Long.parseLong(uid));
        ps.setString(2, userId);
        rs = ps.executeQuery();

        cw = createCalendar(uid, rs);

        // Release the first statement/result set before reusing the variables.
        DBTools.close(null, ps, rs);

        // Load the calendar's recurrence exceptions.
        ps = con.prepareStatement(SQL_GET_FNBL_PIM_CALENDAR_EXCEPTION_BY_CALENDAR);
        ps.setLong(1, Long.parseLong(uid));
        rs = ps.executeQuery();

        try {
            cw = addPIMCalendarExceptions(cw, rs);
        } catch (SQLException sqle) {
            // FIX: pass sqle as the cause (it was previously dropped, losing
            // the original stack trace) while still propagating its SQLState.
            throw new SQLException("Error while adding PIM calendar " + "exceptions. " + sqle,
                    sqle.getSQLState(), sqle);
        }

    } catch (Exception e) {
        throw new DAOException("Error retrieving a calendar item: " + e, e);
    } finally {
        // Always release the connection and any open statement/result set.
        DBTools.close(con, ps, rs);
    }

    return cw;
}

From source file:mom.trd.opentheso.bdd.helper.TermHelper.java

/**
 *
 * @param conn/*from w w w. j  av  a 2s . c  om*/
 * @param term
 * @param idUser
 * @return idTerm
 */
private boolean addUSE(Connection conn, Term term, int idUser) {
    boolean status = false;
    //     Connection conn;
    Statement stmt;
    term.setLexical_value(new StringPlus().convertString(term.getLexical_value()));
    try {
        try {
            stmt = conn.createStatement();
            try {
                String query = "Insert into non_preferred_term " + "(id_term, lexical_value, lang, "
                        + "id_thesaurus, source, status, hiden)" + " values (" + "'" + term.getId_term() + "'"
                        + ",'" + term.getLexical_value() + "'" + ",'" + term.getLang() + "'" + ",'"
                        + term.getId_thesaurus() + "'" + ",'" + term.getSource() + "'" + ",'" + term.getStatus()
                        + "'" + "," + term.isHidden() + ")";

                stmt.executeUpdate(query);
                status = true;
            } finally {
                stmt.close();
            }
        } finally {
            //    conn.close();
        }
    } catch (SQLException sqle) {
        // Log exception
        if (!sqle.getSQLState().equalsIgnoreCase("23505"))
            status = false;
    }
    return status;
}

From source file:mom.trd.opentheso.bdd.helper.TermHelper.java

/**
 *
 * @param conn/*  ww w  . j  a  va  2  s  .  c  o  m*/
 * @param term
 * @param idUser
 * @param action
 * @return idTerm
 */
private boolean addUSEHistorique(Connection conn, Term term, int idUser, String action) {
    boolean status = false;
    //     Connection conn; 
    Statement stmt;
    term.setLexical_value(new StringPlus().convertString(term.getLexical_value()));
    try {
        try {
            stmt = conn.createStatement();
            try {
                String query = "Insert into non_preferred_term_historique " + "(id_term, lexical_value, lang, "
                        + "id_thesaurus, source, status, id_user, action)" + " values (" + "'"
                        + term.getId_term() + "'" + ",'" + term.getLexical_value() + "'" + ",'" + term.getLang()
                        + "'" + ",'" + term.getId_thesaurus() + "'" + ",'" + term.getSource() + "'" + ",'"
                        + term.getStatus() + "'" + ",'" + idUser + "'" + ",'" + action + "')";

                stmt.executeUpdate(query);
                status = true;
            } finally {
                stmt.close();
            }
        } finally {
            //    conn.close();
        }
    } catch (SQLException sqle) {
        // Log exception
        if (!sqle.getSQLState().equalsIgnoreCase("23505"))
            status = false;
    }
    return status;
}

From source file:mom.trd.opentheso.bdd.helper.TermHelper.java

/**
 * Cette fonction permet d'ajouter une traduction  un Terme
 *
 * @param conn/*from   w ww  .  jav a2  s. co  m*/
 * @param term
 * @param idUser
 * @return
 */
public boolean addTermTraduction(Connection conn, Term term, int idUser) {

    Statement stmt;
    term.setLexical_value(new StringPlus().convertString(term.getLexical_value()));
    try {
        // Get connection from pool
        //        conn = ds.getConnection();
        try {
            stmt = conn.createStatement();
            try {
                String query = "Insert into term " + "(id_term, lexical_value, lang, "
                        + "id_thesaurus, source, status,contributor, creator)" + " values (" + "'"
                        + term.getId_term() + "'" + ",'" + term.getLexical_value() + "'" + ",'" + term.getLang()
                        + "'" + ",'" + term.getId_thesaurus() + "'" + ",'" + term.getSource() + "'" + ",'"
                        + term.getStatus() + "'" + ", " + term.getContributor() + ", " + term.getCreator()
                        + ")";

                stmt.execute(query);
                addNewTermHistorique(conn, term, idUser);
            } finally {
                stmt.close();
            }
        } finally {
            //  conn.close();
        }
    } catch (SQLException sqle) {
        // Log exception
        if (!sqle.getSQLState().equalsIgnoreCase("23505"))
            return false;
    }

    return true;
}

From source file:mom.trd.opentheso.bdd.helper.TermHelper.java

/**
 * Splits a concept's synonym phrase into individual words and stores one row
 * per word in the "permuted" table (with ispreferredterm = false) for the
 * permuted-index search.
 * <p>
 * FIX: uses a parameterized {@code PreparedStatement} (reused across the
 * loop) instead of string-concatenated SQL, which was vulnerable to SQL
 * injection, and closes the connection/statement via try-with-resources.
 *
 * @param ds           pooled data source a connection is borrowed from
 * @param idConcept    concept id the words belong to
 * @param idGroup      group id
 * @param idThesaurus  thesaurus id
 * @param idLang       language code
 * @param lexicalValue full phrase; stored as original_value on every row
 */
public void splitConceptForNonPermuted(HikariDataSource ds, String idConcept, String idGroup,
        String idThesaurus, String idLang, String lexicalValue) {

    // Break the phrase into separate words for the permuted search index:
    // treat dashes, parentheses and slashes as word separators.
    lexicalValue = lexicalValue.replaceAll("-", " ");
    lexicalValue = lexicalValue.replaceAll("\\(", " ");
    lexicalValue = lexicalValue.replaceAll("\\)", " ");
    lexicalValue = lexicalValue.replaceAll("\\/", " ");

    lexicalValue = new StringPlus().convertString(lexicalValue.trim());

    String[] tabMots = lexicalValue.split(" ");

    String query = "Insert into permuted"
            + " (ord, id_concept, id_group, id_thesaurus,"
            + " id_lang, lexical_value, ispreferredterm, original_value)"
            + " values (?, ?, ?, ?, ?, ?, ?, ?)";

    try (Connection conn = ds.getConnection();
            java.sql.PreparedStatement stmt = conn.prepareStatement(query)) {
        int index = 1;
        for (String value : tabMots) {
            stmt.setInt(1, index++);                // word position within the phrase
            stmt.setString(2, idConcept);
            stmt.setString(3, idGroup);
            stmt.setString(4, idThesaurus);
            stmt.setString(5, idLang);
            stmt.setString(6, value);
            stmt.setBoolean(7, false);              // non-preferred term
            stmt.setString(8, lexicalValue);        // original full phrase
            stmt.executeUpdate();
        }
    } catch (SQLException sqle) {
        // Duplicate rows (SQLState 23505) are expected and silently ignored.
        if (!sqle.getSQLState().equalsIgnoreCase("23505")) {
            log.error("Error while adding values in table Permuted for Non_Preferred_term : " + idConcept,
                    sqle);
        }
    }
}

From source file:mom.trd.opentheso.bdd.helper.TermHelper.java

/**
 *
 * @param conn/*from w  w  w. j a  v a 2  s .c om*/
 * @param term
 * @param idUser
 * @return idTerm
 */
public String addNewTerm(Connection conn, Term term, int idUser) {
    String idTerm = null;
    //     Connection conn;
    Statement stmt;
    ResultSet resultSet;
    term.setLexical_value(new StringPlus().convertString(term.getLexical_value()));
    try {
        // Get connection from pool
        //   conn = ds.getConnection();
        try {
            stmt = conn.createStatement();
            try {
                String query = "select max(id) from term";
                stmt.executeQuery(query);
                resultSet = stmt.getResultSet();
                resultSet.next();
                int idTermNum = resultSet.getInt(1);
                idTermNum++;
                idTerm = "" + (idTermNum);
                // si le nouveau Id existe, on l'incrmente
                while (isIdOfTermExist(conn, idTerm, term.getId_thesaurus())) {
                    idTerm = "" + (++idTermNum);
                }
                term.setId_term(idTerm);
                /**
                 * Ajout des informations dans la table Concept
                 */
                query = "Insert into term " + "(id_term, lexical_value, lang, "
                        + "id_thesaurus, source, status, contributor, creator)" + " values (" + "'"
                        + term.getId_term() + "'" + ",'" + term.getLexical_value() + "'" + ",'" + term.getLang()
                        + "'" + ",'" + term.getId_thesaurus() + "'" + ",'" + term.getSource() + "'" + ",'"
                        + term.getStatus() + "'" + ", " + idUser + "" + ", " + idUser + ")";

                stmt.executeUpdate(query);
                addNewTermHistorique(conn, term, idUser);

            } finally {
                stmt.close();
            }
        } finally {
            //    conn.close();
        }
    } catch (SQLException sqle) {
        // Log exception
        if (!sqle.getSQLState().equalsIgnoreCase("23505"))
            idTerm = null;
    }

    return idTerm;
}

From source file:mom.trd.opentheso.bdd.helper.TermHelper.java

/**
 * Links a term to a concept as its preferred term.
 * <p>
 * FIX: uses a parameterized {@code PreparedStatement} instead of
 * string-concatenated SQL (SQL injection), and logs failures through the
 * class logger instead of {@code System.out.println}.
 *
 * @param conn      open JDBC connection (not closed here)
 * @param term      term supplying the term/thesaurus ids
 * @param idConcept concept to link the term to
 * @param idUser    user id (currently unused by this method)
 * @return true on success or duplicate key (SQLState 23505); false on any
 *         other SQL error
 */
public boolean addLinkTerm(Connection conn, Term term, String idConcept, int idUser) {

    String query = "Insert into preferred_term (id_concept, id_term, id_thesaurus)"
            + " values (?, ?, ?)";
    try (java.sql.PreparedStatement stmt = conn.prepareStatement(query)) {
        stmt.setString(1, idConcept);
        stmt.setString(2, term.getId_term());
        stmt.setString(3, term.getId_thesaurus());
        stmt.executeUpdate();
    } catch (SQLException sqle) {
        // The original printed every exception; keep that visibility but use
        // the logger. Duplicate links (23505) still count as success.
        log.error("Error while adding preferred term link for concept " + idConcept, sqle);
        if (!sqle.getSQLState().equalsIgnoreCase("23505")) {
            return false;
        }
    }
    return true;
}