Example usage for java.sql Connection rollback

List of usage examples for java.sql Connection rollback

Introduction

On this page you can find example usages of java.sql.Connection.rollback.

Prototype

void rollback() throws SQLException;

Source Link

Document

Undoes all changes made in the current transaction and releases any database locks currently held by this Connection object.

Usage

From source file:com.app.das.business.dao.SearchDAO.java

/**
 * ?    ?  ?  ?./*from   ww w  .  ja  v a  2 s. com*/
 * @param tapeLendingItemDOList
 * @param commonDO
 * @throws Exception 
 */
public void updateTapeLendingItems(TapeLendingDO tapeLendingDO, DASCommonDO commonDO) throws Exception {
    StringBuffer buf = new StringBuffer();
    buf.append("\n update DAS.TAPELENDMST_TBL set ");
    buf.append("\n    PURPOSE = ?,  ");
    buf.append("\n    U_PGM = ?,  ");
    buf.append("\n    MOD_DT = ?,  ");
    buf.append("\n    MODRID = ? ");
    buf.append("\n where LEND_APLN_NO = ? ");

    Connection con = null;
    PreparedStatement stmt = null;
    try {
        con = DBService.getInstance().getConnection();
        con.setAutoCommit(false);

        TapeLendingItemDO maxTapeLendingItemDO = getTapeLendingLendAplnNo(commonDO.getUserNo(),
                commonDO.getUserId());

        stmt = con.prepareStatement(buf.toString());

        //          ? .
        String toDateTime = CalendarUtil.getDateTime("yyyyMMddHHmmss");

        int index = 0;
        stmt.setString(++index, tapeLendingDO.getPurpose());
        stmt.setString(++index, tapeLendingDO.getUPgm());
        stmt.setString(++index, toDateTime);
        stmt.setString(++index, commonDO.getUserId());
        stmt.setString(++index, maxTapeLendingItemDO.getLendAplnNo());

        stmt.executeUpdate();

        int seq = maxTapeLendingItemDO.getNum();
        for (Iterator i = tapeLendingDO.getTapeLendingItemDOList().iterator(); i.hasNext();) {
            TapeLendingItemDO tapeLendingItemDO = (TapeLendingItemDO) i.next();
            if (!isThereLendingItem(tapeLendingItemDO.getReqNo())) {
                seq = seq + 1;

                tapeLendingItemDO.setLendAplnNo(maxTapeLendingItemDO.getLendAplnNo());
                tapeLendingItemDO.setNum(seq);

                insertTapeLendingItem(con, tapeLendingItemDO, toDateTime, commonDO);
            }
        }

        con.commit();

    }

    catch (Exception e) {
        logger.error(buf.toString());

        if (con != null) {
            try {
                con.rollback();
            } catch (SQLException e1) {
                // TODO ?? ?? catch ?
                e1.printStackTrace();
            }
        }

        throw e;
    } finally {
        release(null, stmt, con);
    }

}

From source file:com.che.software.testato.domain.dao.jdbc.impl.ElementDAO.java

/**
 * Creates a procedural diagram from a testCaseId, a set of elements and a
 * set of transitions. If activities have been reused or not to create this
 * diagram./*from   w  ww .j  av a2  s  .  co  m*/
 * 
 * @param testCaseId the test case id.
 * @param elements the set of elements.
 * @param transitions the set of transitions.
 * @throws ElementCreationDAOException if an error occurs during the
 *         creation.
 */
@Override
public void createDiagram(int testCaseId, List<ElementCreation> elements, List<TransitionCreation> transitions)
        throws ElementCreationDAOException {
    LOGGER.debug("createDiagram(" + testCaseId + ", " + elements.size() + " elements, " + transitions.size()
            + " transitions).");
    Connection connection = null;
    try {
        connection = getDataSource().getConnection();
        connection.setAutoCommit(false);
        for (ElementCreation element : elements) {
            Integer activityId = null, pointId = null;
            if (element.getType().equals(ElementCreationTypes.ACTIVITY)) {
                activityId = (Integer) getQueryRunner().query(connection,
                        "SELECT activity_id::int AS activityId FROM activity WHERE label = ? ",
                        new ScalarHandler("activityId"), new Object[] { element.getLabel() });
                if (null == activityId) {
                    getQueryRunner().update(connection,
                            "INSERT INTO activity(activity_id, global_description, label) VALUES(nextval('activity_id_seq'), NULL, ?) ",
                            new Object[] { element.getLabel() });
                    activityId = (Integer) getQueryRunner().query(connection,
                            "SELECT activity_id::int AS activityId FROM activity WHERE label = ? ",
                            new ScalarHandler("activityId"), new Object[] { element.getLabel() });
                }
            } else {
                getQueryRunner().update(connection,
                        "INSERT INTO point(point_id, point_type, label) VALUES(nextval('point_id_seq'), ?, ?) ",
                        new Object[] { element.getType().name(), element.getLabel() });
                pointId = (Integer) getQueryRunner().query(connection,
                        "SELECT MAX(point_id)::int AS pointId FROM point ", new ScalarHandler("pointId"));
            }
            getQueryRunner().update(connection,
                    "INSERT INTO element(element_id, point_id, activity_id, test_case_id) VALUES(nextval('element_id_seq'),"
                            + ((null != activityId) ? "NULL" : "?") + "," + ((null != pointId) ? "NULL" : "?")
                            + ",?) ",
                    (null != activityId) ? new Object[] { activityId, testCaseId }
                            : new Object[] { pointId, testCaseId });
        }
        List<Element> createdElements = getQueryRunner().query(connection,
                "SELECT element_id AS elementId, point_id AS pointId, activity_id AS activityId, test_case_id AS testCaseId, COALESCE(activity.label, point.label) AS label FROM element LEFT JOIN activity USING(activity_id) LEFT JOIN point USING(point_id) WHERE test_case_id = ? ",
                new BeanListHandler<Element>(Element.class), new Object[] { testCaseId });
        for (TransitionCreation transition : transitions) {
            boolean source = false, target = false;
            for (Element element : createdElements) {
                if (element.getLabel().equalsIgnoreCase(transition.getSource())) {
                    transition.setSourceId(element.getElementId());
                    source = true;
                }
                if (element.getLabel().equalsIgnoreCase(transition.getTarget())) {
                    transition.setTargetId(element.getElementId());
                    target = true;
                }
                if (source && target) {
                    break;
                }
            }
            getQueryRunner().update(connection,
                    "INSERT INTO transition(transition_id, target_element, source_element, test_case_id, label) VALUES(nextval('transition_id_seq'), ?, ?, ?, ?) ",
                    new Object[] { transition.getTargetId(), transition.getSourceId(), testCaseId,
                            (null != transition.getLabel()) ? transition.getLabel() : "" });
        }
        connection.commit();
    } catch (SQLException e) {
        try {
            connection.rollback();
        } catch (SQLException e1) {
            throw new ElementCreationDAOException(e1);
        }
        throw new ElementCreationDAOException(e);
    } finally {
        if (null != connection) {
            DbUtils.closeQuietly(connection);
        }
    }
}

From source file:com.che.software.testato.domain.dao.jdbc.impl.IterationDAO.java

/**
 * Creates the next iteration (both analytical and selective) for a given
 * prioritization.
 *
 * @author Clement HELIOU (clement.heliou@che-software.com).
 * @param prioritization the given prioritization.
 * @param scripts the scripts to use for this depth.
 * @since July, 2011.
 * @throws IterationCreationDAOException if an error occurs during the
 *         creation.
 */
@Override
public void createNextIteration(Prioritization prioritization, List<Script> scripts)
        throws IterationCreationDAOException {
    LOGGER.debug("createNextIteration(" + prioritization.getPrioritizationId() + ").");
    Connection connection = null;
    try {
        connection = getDataSource().getConnection();
        connection.setAutoCommit(false);
        getQueryRunner().update(connection,
                "INSERT INTO iteration(iteration_id, prioritization_id) VALUES(nextval('iteration_seq'), ?) ",
                new Object[] { prioritization.getPrioritizationId() });
        Integer createdIterationId = (Integer) getQueryRunner().query(connection,
                "SELECT MAX(iteration_id)::int AS iterationId FROM iteration ",
                new ScalarHandler("iterationId"));
        // One analytical assignment (plus its comparison matrix) per criterion type;
        // the original repeated this stanza four times verbatim.
        for (CriterionTypes criterion : new CriterionTypes[] { CriterionTypes.COST, CriterionTypes.FIT,
                CriterionTypes.RISK, CriterionTypes.VALUE }) {
            getQueryRunner().update(connection,
                    "INSERT INTO iteration_assignment(iteration_assignment_id, criterion_type, iteration_id, analytical_prioritization_status) VALUES(nextval('iteration_assignment_seq'), ?, ?, ?) ",
                    new Object[] { criterion.name(), createdIterationId, AssignmentStatus.NOT_ASSIGNED.name() });
            createComparisonMatrixItem(connection, scripts,
                    (Integer) getQueryRunner().query(connection,
                            "SELECT MAX(iteration_assignment_id)::int AS result FROM iteration_assignment ",
                            new ScalarHandler("result")));
        }
        // Selective assignment: no criterion type, no comparison matrix.
        getQueryRunner().update(connection,
                "INSERT INTO iteration_assignment(iteration_assignment_id, iteration_id, selective_prioritization_status) VALUES(nextval('iteration_assignment_seq'), ?, ?) ",
                new Object[] { createdIterationId, AssignmentStatus.NOT_ASSIGNED.name() });
        connection.commit();
    } catch (SQLException e) {
        // connection may still be null if getConnection() itself failed.
        if (null != connection) {
            try {
                connection.rollback();
            } catch (SQLException e1) {
                throw new IterationCreationDAOException(e1);
            }
        }
        throw new IterationCreationDAOException(e);
    } finally {
        if (null != connection) {
            DbUtils.closeQuietly(connection);
        }
    }
}

From source file:org.cretz.sbnstat.scrape.Scraper.java

@Override
public void run(Arguments args) {
    //build a connection
    Connection conn = null;
    try {
        conn = JdbcUtils.connectToMySqlDatabase(args.getDatabaseHost(), args.getDatabasePort(),
                args.getDatabaseName(), args.getDatabaseUser(), args.getDatabasePass());
        SbnStatDao dao = new SbnStatDao(conn);
        //load users
        Map<String, User> users = dao.getUsers();
        //create a context covering the requested [from, to] day range
        ScrapeContext context = new ScrapeContext(DateUtils.toBeginningOfDayCalendar(args.getFrom()),
                DateUtils.toEndOfDayCalendar(args.getTo()), dao.getPosts(users), users);
        logger.info("Scraping all posts and comments from {} to {}", context.getFrom().getTime(),
                context.getTo().getTime());
        //create cache if dir is there
        Cache cache = null;
        if (args.getCacheDir() != null) {
            cache = new Cache(new File(args.getCacheDir()));
        }
        //populate the fan posts, following "next page" urls until exhausted
        PostLoader postLoader = new PostLoader(context);
        String url = "http://www." + args.getDomain() + "/fanposts/recent";
        if (args.getStartPanPostPage() != null) {
            url += "/" + args.getStartPanPostPage();
        }
        do {
            logger.debug("Loading fanpost list from: {}", url);
            url = postLoader.populateFanPosts(loadUrl(url, cache));
        } while (url != null);
        //populate the fan shots (numeric paging)
        url = "http://www." + args.getDomain() + "/fanshots";
        int page = 1;
        if (args.getStartFanShotPage() != null) {
            url += "?page=" + args.getStartFanShotPage();
            page = args.getStartFanShotPage();
        }
        do {
            logger.debug("Loading fanshot list from: {}", url);
            if (postLoader.populateFanShots(loadUrl(url, cache))) {
                url = "http://www." + args.getDomain() + "/fanshots?page=" + (++page);
            } else {
                url = null;
            }
        } while (url != null);
        //populate the front page stuff, walking the yearly archives backwards
        int year = context.getTo().get(Calendar.YEAR) + 1;
        do {
            url = "http://www." + args.getDomain() + "/stories/archive/" + --year;
            logger.debug("Loading frontpage post list from: {}", url);
        } while (postLoader.populateFrontPage(loadUrl(url, cache), year));
        //persist all the users
        logger.debug("Initial user persist of {} users", context.getUsers().size());
        dao.persistUnpersistedUsers(context.getUsers());
        //go post by post, grab comments and persist
        CommentLoader commentLoader = new CommentLoader(context);
        logger.debug("Working {} posts", context.getPosts().size());
        for (Post post : context.getPosts().values()) {
            if (post.isCommentsLoaded()) {
                continue;
            }
            logger.debug("Getting comments from post {}", post.getUrl());
            //get comments
            List<Comment> comments = commentLoader.loadCommentsAndUpdatePost(loadUrl(post.getUrl(), cache),
                    post);
            post.setCommentsLoaded(true);
            //persist users, post and comments in one transaction per post
            try {
                //transaction start
                conn.setAutoCommit(false);
                try {
                    dao.persistUnpersistedUsers(context.getUsers());
                    //persist post if not persisted
                    if (post.getId() == 0) {
                        dao.persistPost(post);
                    }
                    //persist comments
                    logger.debug("Persisting {} comments", comments.size());
                    dao.persistComments(comments);
                    conn.commit();
                } catch (Exception e) {
                    conn.rollback();
                    //rethrow the original exception instead of wrapping it, so the
                    //real type and stack trace reach the warn log below
                    throw e;
                } finally {
                    //restore autocommit even when the transaction failed, so later
                    //iterations do not silently run inside a stale transaction
                    //(the original only restored it on the success path)
                    conn.setAutoCommit(true);
                }
            } catch (Exception e) {
                logger.warn("Couldn't persist post: {}", post.getUrl(), e);
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        JdbcUtils.closeQuietly(conn);
    }
}

From source file:mom.trd.opentheso.bdd.helper.ConceptHelper.java

/**
 * Adds a top concept with its label and relations. If the operation
 * fails, the changes are rolled back.
 *
 * @param ds the datasource
 * @param idParent the parent id (unused for a top concept)
 * @param concept the concept to add
 * @param term the term attached to the concept
 * @param urlSite base URL used when generating an ARK id
 * @param isArkActive whether an ARK id must be generated
 * @param idUser the acting user id
 * @return null if the term already exists or on error, otherwise the new concept id
 */
public String addTopConcept(HikariDataSource ds, String idParent, Concept concept, Term term, String urlSite,
        boolean isArkActive, int idUser) {

    Connection conn = null;

    try {
        conn = ds.getConnection();
        conn.setAutoCommit(false);

        TermHelper termHelper = new TermHelper();
        // Check that the term does not already exist before adding a concept.
        if (termHelper.isTermExist(ds, term.getLexical_value(), term.getId_thesaurus(), term.getLang())) {
            conn.close();
            return null;
        }

        concept.setTopConcept(true);
        String idConcept = addConceptInTable(conn, concept, idUser);
        if (idConcept == null) {
            conn.rollback();
            conn.close();
            return null;
        }

        String idTerm = termHelper.addTerm(conn, term, idConcept, idUser);
        if (idTerm == null) {
            conn.rollback();
            conn.close();
            return null;
        }
        term.setId_term(idTerm);

        // Fill the Permute table for this concept.
        termHelper.splitConceptForPermute(ds, idConcept,
                getGroupIdOfConcept(ds, idConcept, term.getId_thesaurus()), term.getId_thesaurus(),
                term.getLang(), term.getLexical_value());

        // Everything succeeded so far: generate the ARK code if enabled.
        if (isArkActive) {
            NodeMetaData nodeMetaData = new NodeMetaData();
            nodeMetaData.setCreator(term.getSource());
            nodeMetaData.setTitle(term.getLexical_value());
            nodeMetaData.setDcElementsList(new ArrayList<DcElement>());

            if (!addIdArk(conn, idConcept, concept.getIdThesaurus(), urlSite, nodeMetaData, idUser)) {
                conn.rollback();
                conn.close();
                return null;
            }
        }

        conn.commit();
        conn.close();
        return idConcept;

    } catch (SQLException ex) {
        Logger.getLogger(ConceptHelper.class.getName()).log(Level.SEVERE, null, ex);
        if (conn != null) {
            try {
                // Undo any partial insert before closing (the original closed the
                // connection without rolling back the open transaction).
                conn.rollback();
            } catch (SQLException ex1) {
                Logger.getLogger(ConceptHelper.class.getName()).log(Level.SEVERE, null, ex1);
            }
            try {
                conn.close();
            } catch (SQLException ex1) {
            }
        }
    }
    return null;
}

From source file:mom.trd.opentheso.bdd.helper.ConceptHelper.java

/**
 * Cette fonction permet d'ajouter un Concept  la table Concept, en
 * paramtre un objet Classe Concept/*from  w  w  w .java2  s . c o m*/
 *
 * @param ds
 * @param hierarchicalRelationship
 * @param idUser
 */
public void addAssociativeRelation(HikariDataSource ds, HierarchicalRelationship hierarchicalRelationship,
        int idUser) { // Role RT pour terme associs

    Connection conn;
    Statement stmt;

    try {
        // Get connection from pool
        conn = ds.getConnection();
        try {
            conn.setAutoCommit(false);
            stmt = conn.createStatement();
            try {
                if (!new RelationsHelper().addRelationHistorique(conn, hierarchicalRelationship.getIdConcept1(),
                        hierarchicalRelationship.getIdThesaurus(), hierarchicalRelationship.getIdConcept2(),
                        hierarchicalRelationship.getRole(), idUser, "ADD")) {
                    conn.rollback();
                    conn.close();
                    return;
                }

                if (!new RelationsHelper().addRelationHistorique(conn, hierarchicalRelationship.getIdConcept2(),
                        hierarchicalRelationship.getIdThesaurus(), hierarchicalRelationship.getIdConcept1(),
                        hierarchicalRelationship.getRole(), idUser, "ADD")) {
                    conn.rollback();
                    conn.close();
                    return;
                }

                String query = "Insert into hierarchical_relationship"
                        + "(id_concept1, id_thesaurus, role, id_concept2)" + " values (" + "'"
                        + hierarchicalRelationship.getIdConcept1() + "'" + ",'"
                        + hierarchicalRelationship.getIdThesaurus() + "'" + ",'"
                        + hierarchicalRelationship.getRole() + "'" + ",'"
                        + hierarchicalRelationship.getIdConcept2() + "')";

                stmt.executeUpdate(query);

                query = "Insert into hierarchical_relationship"
                        + "(id_concept1, id_thesaurus, role, id_concept2)" + " values (" + "'"
                        + hierarchicalRelationship.getIdConcept2() + "'" + ",'"
                        + hierarchicalRelationship.getIdThesaurus() + "'" + ",'"
                        + hierarchicalRelationship.getRole() + "'" + ",'"
                        + hierarchicalRelationship.getIdConcept1() + "')";
                stmt.executeUpdate(query);
                conn.commit();
            } finally {
                stmt.close();
            }
        } finally {
            conn.close();
        }
    } catch (SQLException sqle) {
        // Log exception
        //    if (!sqle.getMessage().contains("duplicate key value violates unique constraint")) {
        if (!sqle.getSQLState().equalsIgnoreCase("23505")) {
            log.error("Error while adding hierarchicalRelationship RT : "
                    + hierarchicalRelationship.getIdConcept1(), sqle);
        }
    }

}

From source file:com.app.das.business.dao.SearchDAO.java

/**
 * ?  ? ./*  w w  w  . jav  a2  s  .  co m*/
 * @param tapeLendingDO  Tape  ?  DataObject
 * @param commonDO 
 * @throws Exception 
 */
public void insertTapeLending(TapeLendingDO tapeLendingDO, DASCommonDO commonDO) throws Exception {
    StringBuffer buf = new StringBuffer();
    buf.append("\n insert into DAS.TAPELENDMST_TBL( ");
    buf.append("\n    LEND_APLN_NO, ");
    buf.append("\n    EMP_NO, ");
    buf.append("\n    AGNT,  ");
    buf.append("\n    APLN_DD, ");
    buf.append("\n    PURPOSE,  ");
    buf.append("\n    U_PGM,  ");
    buf.append("\n    REG_DT,  ");
    buf.append("\n    REGRID,  ");
    buf.append("\n    MOD_DT,  ");
    buf.append("\n    MODRID ");
    buf.append("\n ) ");
    buf.append("\n values(?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ");

    Connection con = null;
    PreparedStatement stmt = null;
    try {
        con = DBService.getInstance().getConnection();
        //logger.debug("######insertTapeLending######## con : " + con);
        con.setAutoCommit(false);

        stmt = con.prepareStatement(buf.toString());

        // ? .
        String toDateTime = CalendarUtil.getDateTime("yyyyMMddHHmmss");

        int index = 0;
        String nextLendAplnNo = getNextSquence(con, DASBusinessConstants.SequenceName.LENDING_APP_NAME);

        stmt.setString(++index, nextLendAplnNo);
        stmt.setString(++index, tapeLendingDO.getEmpNo());
        stmt.setString(++index, tapeLendingDO.getAgnt());
        stmt.setString(++index, tapeLendingDO.getAplnDd());
        stmt.setString(++index, tapeLendingDO.getPurpose());
        stmt.setString(++index, tapeLendingDO.getUPgm());
        stmt.setString(++index, toDateTime);
        stmt.setString(++index, commonDO.getUserId());
        stmt.setString(++index, toDateTime);
        stmt.setString(++index, commonDO.getUserId());

        stmt.executeUpdate();

        int seq = 0;
        for (Iterator i = tapeLendingDO.getTapeLendingItemDOList().iterator(); i.hasNext();) {
            seq = seq + 1;
            TapeLendingItemDO tapeLendingItemDO = (TapeLendingItemDO) i.next();
            tapeLendingItemDO.setLendAplnNo(nextLendAplnNo);
            tapeLendingItemDO.setNum(seq);

            insertTapeLendingItem(con, tapeLendingItemDO, toDateTime, commonDO);
        }

        con.commit();

    }

    catch (Exception e) {
        logger.error(buf.toString());

        if (con != null) {
            try {
                con.rollback();
            } catch (SQLException e1) {
                // TODO ?? ?? catch ?
                e1.printStackTrace();
            }
        }

        throw e;
    } finally {
        release(null, stmt, con);
    }

}

From source file:mom.trd.opentheso.bdd.helper.ConceptHelper.java

/**
 * Adds a complete concept to the base with its label and relations.
 * If the operation fails, the transaction is rolled back and null is
 * returned, leaving the database unmodified.
 *
 * @param ds the datasource
 * @param idParent id of the parent concept (linked with BT/NT relations)
 * @param concept the concept to add
 * @param term the term attached to the concept
 * @param urlSite base URL used when generating an ARK id
 * @param isArkActive whether an ARK id must be generated
 * @param idUser the acting user id
 * @return null if the term exists or on error, otherwise the new concept id
 */
public String addConcept(HikariDataSource ds, String idParent, Concept concept, Term term, String urlSite,
        boolean isArkActive, int idUser) {

    Connection conn = null;
    try {
        conn = ds.getConnection();
        conn.setAutoCommit(false);

        TermHelper termHelper = new TermHelper();
        // NOTE(review): unlike addTopConcept, no duplicate-term check is performed
        // here (the original had it commented out) — confirm this is intentional.
        concept.setTopConcept(false);

        String idConcept = addConceptInTable(conn, concept, idUser);
        if (idConcept == null) {
            conn.rollback();
            conn.close();
            return null;
        }

        String idTerm = termHelper.addTerm(conn, term, idConcept, idUser);
        if (idTerm == null) {
            conn.rollback();
            conn.close();
            return null;
        }
        term.setId_term(idTerm);

        // Add the hierarchical link parent -> child (NT)...
        HierarchicalRelationship hierarchicalRelationship = new HierarchicalRelationship();
        hierarchicalRelationship.setIdConcept1(idParent);
        hierarchicalRelationship.setIdConcept2(idConcept);
        hierarchicalRelationship.setIdThesaurus(concept.getIdThesaurus());
        hierarchicalRelationship.setRole("NT");

        if (!addLinkHierarchicalRelation(conn, hierarchicalRelationship, idUser)) {
            conn.rollback();
            conn.close();
            return null;
        }

        // ...and the reverse link child -> parent (BT).
        hierarchicalRelationship.setIdConcept1(idConcept);
        hierarchicalRelationship.setIdConcept2(idParent);
        hierarchicalRelationship.setIdThesaurus(concept.getIdThesaurus());
        hierarchicalRelationship.setRole("BT");

        if (!addLinkHierarchicalRelation(conn, hierarchicalRelationship, idUser)) {
            conn.rollback();
            conn.close();
            return null;
        }

        // Fill the Permute table for this concept.
        termHelper.splitConceptForPermute(ds, idConcept,
                getGroupIdOfConcept(ds, idConcept, term.getId_thesaurus()), term.getId_thesaurus(),
                term.getLang(), term.getLexical_value());

        // Everything succeeded so far: generate the ARK code if enabled.
        if (isArkActive) {
            NodeMetaData nodeMetaData = new NodeMetaData();
            nodeMetaData.setCreator(term.getSource());
            nodeMetaData.setTitle(term.getLexical_value());
            nodeMetaData.setDcElementsList(new ArrayList<DcElement>());

            if (!addIdArk(conn, idConcept, concept.getIdThesaurus(), urlSite, nodeMetaData, idUser)) {
                conn.rollback();
                conn.close();
                return null;
            }
        }

        conn.commit();
        conn.close();
        return idConcept;

    } catch (SQLException ex) {
        Logger.getLogger(ConceptHelper.class.getName()).log(Level.SEVERE, null, ex);
        if (conn != null) {
            try {
                // Undo any partial insert before closing (the original closed the
                // connection without rolling back the open transaction).
                conn.rollback();
            } catch (SQLException ex1) {
                Logger.getLogger(ConceptHelper.class.getName()).log(Level.SEVERE, null, ex1);
            }
            try {
                conn.close();
            } catch (SQLException ex1) {
            }
        }
    }
    return null;
}

From source file:org.apache.hadoop.hive.metastore.txn.TxnHandler.java

/**
 * For testing only, do not use./* w  w w.j a v  a  2  s.c o m*/
 */
@VisibleForTesting
public int numLocksInLockTable() throws SQLException, MetaException {
    Connection dbConn = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
        dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
        stmt = dbConn.createStatement();
        String s = "select count(*) from HIVE_LOCKS";
        LOG.debug("Going to execute query <" + s + ">");
        rs = stmt.executeQuery(s);
        rs.next();
        int rc = rs.getInt(1);
        // Necessary to clean up the transaction in the db.
        dbConn.rollback();
        return rc;
    } finally {
        close(rs, stmt, dbConn);
    }
}

From source file:es.juntadeandalucia.panelGestion.negocio.utiles.JDBCConnector.java

/**
 * This method executes a low level insert binding the data values and
 * data types (java.sql.Types) declared in the given column list.
 * This is a way to improve the performance of data insertion.
 *
 * @param sql escaped SQL to avoid SQL-I
 * @param line the line to insert
 * @param columns columns of the table
 * @return number of affected rows
 * @throws Exception exception thrown
 */
public int executeLineInsertLowLevel(String sql, String[] line, List<ColumnVO> columns) throws Exception {
    // SQLException captured during execution; rethrown only after the finally
    // block has rolled back and closed the connection.
    Exception error = null;

    int numRowsAffected = 0;

    if (columns.size() > 0) {
        Connection connection = null;
        PreparedStatement preparedStmnt = null;

        try {
            DataSource dataSource = poolDataSources.get(schemaId);
            connection = dataSource.getConnection();
            connection.setAutoCommit(false); // single-statement transaction, committed below
            preparedStmnt = connection.prepareStatement(sql);

            // X/Y values collected while iterating the columns; if both end up
            // non-null they are bound as a WKT point in the trailing parameter.
            String coordinateX = null;
            String coordinateY = null;
            int paramPosition = 1;
            for (ColumnVO column : columns) {
                Integer filePosition = column.getFilePosition();

                String dataValue;
                Integer dataType = column.getSqlType();

                if (column.isCoordinateX()) {
                    // X coordinate column: bound normally and also remembered.
                    dataValue = line[filePosition];
                    coordinateX = dataValue;
                    preparedStmnt.setObject(paramPosition, dataValue, dataType);
                } else if (column.isCoordinateY()) {
                    // Y coordinate column: bound normally and also remembered.
                    dataValue = line[filePosition];
                    coordinateY = dataValue;
                    preparedStmnt.setObject(paramPosition, dataValue, dataType);
                } else if (column.isFromCoordinates()) {
                    // Geometry derived from two other file positions: remember both
                    // values and skip binding here (note: paramPosition is NOT
                    // advanced, so this column consumes no statement parameter).
                    int coordXIndex = column.getFileCoordinateXPosition();
                    int coordYIndex = column.getFileCoordinateYPosition();
                    coordinateX = line[coordXIndex];
                    coordinateY = line[coordYIndex];
                    continue;
                } else if (dataType == Types.OTHER) { // it is a geometry
                    // ((org.postgresql.PGConnection)connection).addDataType(column.getName(),
                    // column.getTypeClass());
                    dataValue = line[filePosition];
                    preparedStmnt.setObject(paramPosition, dataValue);
                } else {
                    dataValue = line[filePosition];
                    if (StringUtils.isEmpty(dataValue)) {
                        // Empty cell maps to SQL NULL of the declared type.
                        preparedStmnt.setNull(paramPosition, dataType);
                    } else {
                        preparedStmnt.setObject(paramPosition, dataValue, dataType);
                    }
                }
                paramPosition++;
            }
            if ((coordinateX != null) && (coordinateY != null)) {
                // Bind the WKT point built from the collected coordinates as the
                // last parameter of the prepared statement.
                String pointWKT = Utils.getPointWKTFromCoordinates(coordinateX, coordinateY);
                preparedStmnt.setObject(paramPosition, pointWKT);
            }
            numRowsAffected = preparedStmnt.executeUpdate();

            connection.commit();
        } catch (SQLException e) {
            error = e;
        } finally {
            if (preparedStmnt != null) {
                try {
                    preparedStmnt.close();
                } catch (SQLException se2) {
                    log.warn("No se pudo cerrar el statment: ".concat(se2.getLocalizedMessage()));
                }
            }
            if (connection != null) {
                try {
                    if (error != null) {
                        // Undo the partial transaction before closing.
                        connection.rollback();
                    }
                } catch (SQLException se) {
                    log.warn("Se produjo un error al manejar la conexin: ".concat(se.getLocalizedMessage()));
                }
                try {
                    connection.close();
                } catch (SQLException se) {
                    log.warn("Se produjo un error al intentar cerrar la conexin: "
                            .concat(se.getLocalizedMessage()));
                }
            }
        }
        if (error != null) {
            throw error;
        }
    }
    return numRowsAffected;
}