Example usage for java.sql Connection setAutoCommit

List of usage examples for java.sql Connection setAutoCommit

Introduction

On this page you can find example usages of java.sql.Connection.setAutoCommit.

Prototype

void setAutoCommit(boolean autoCommit) throws SQLException;

Source Link

Document

Sets this connection's auto-commit mode to the given state.

Usage

From source file:edu.lternet.pasta.dml.database.SimpleDatabaseLoader.java

/**
 * Loads the entity's data rows into its database table inside a single
 * transaction: reads rows one at a time from the data reader, inserts each
 * via generated SQL, and commits at the end (rolling back on any failure).
 * Outcome is reported through the {@code success}, {@code completed} and
 * {@code exception} fields.
 */
public void run() {

    if (entity == null) {
        success = false;
        completed = true;
        return;
    }

    // Don't reload data if we already have it.
    if (doesDataExist(entity.getEntityIdentifier())) {
        return;
    }

    AttributeList attributeList = entity.getAttributeList();
    String tableName = entity.getDBTableName();

    String insertSQL = "";
    Vector rowVector = new Vector();
    Connection connection = null;

    try {
        rowVector = this.dataReader.getOneRowDataVector();
        connection = DataManager.getConnection();
        if (connection == null) {
            success = false;
            exception = new Exception("The connection to db is null");
            completed = true;
            return;
        }
        // Batch all inserts into one explicit transaction.
        connection.setAutoCommit(false);
        while (!rowVector.isEmpty()) {
            insertSQL = databaseAdapter.generateInsertSQL(attributeList, tableName, rowVector);
            if (insertSQL != null) {
                // Close each statement promptly — the original leaked one
                // PreparedStatement per row.
                PreparedStatement statement = connection.prepareStatement(insertSQL);
                try {
                    statement.execute();
                } finally {
                    statement.close();
                }
            }
            rowVector = this.dataReader.getOneRowDataVector();
        }

        connection.commit();
        success = true;
        // NOTE(review): 'completed' is not set on the success path here;
        // presumably the caller sets it — confirm against the enclosing class.
    } catch (Exception e) {
        log.error("problem while loading data into table.  Error message: " + e.getMessage());
        e.printStackTrace();
        log.error("SQL string to insert row:\n" + insertSQL);

        success = false;
        exception = e;

        // connection may still be null if getConnection() itself threw.
        if (connection != null) {
            try {
                connection.rollback();
            } catch (Exception ee) {
                System.err.println(ee.getMessage());
            }
        }
    } finally {
        // Restore auto-commit and return the connection to the pool; guard
        // against the null-connection case instead of relying on a caught NPE.
        if (connection != null) {
            try {
                connection.setAutoCommit(true);
            } catch (Exception ee) {
                log.error(ee.getMessage());
            }

            DataManager.returnConnection(connection);
        }
    }
}

From source file:com.anyuan.thomweboss.persistence.jdbcimpl.user.UserDaoJdbcImpl.java

/**
 * Persists the given user together with its dependent address, phone and
 * contact records, all within a single transaction.
 *
 * @param entity the user to save; its contact graph is saved first so the
 *               generated contact id can be referenced by the user row
 * @return true when the user row was inserted, false on failure
 */
@Override
public boolean save(User entity) {
    Connection conn = getConnection();
    boolean result = false;

    String userSql = "insert into t_user (f_username, f_nickname, f_loginname, f_password, "
            + "f_birthday, f_gender, f_createtime, f_logintime, f_roleid, f_contactid, "
            + "f_description) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";

    PreparedStatement preState = null;
    try {
        conn.setAutoCommit(false);
        Contact contact = entity.getContact();

        Address address = contact.getAddress();
        // NOTE(review): presumably saveAddress back-fills the generated id on
        // the Address bean via its setter — confirm against its implementation.
        saveAddress(conn, address);

        Phone phone = contact.getPhone();
        savePhone(conn, phone);

        saveContact(conn, contact);

        preState = conn.prepareStatement(userSql);
        preState.setObject(1, entity.getUsername());
        preState.setString(2, entity.getNickname());
        preState.setString(3, entity.getLoginname());
        preState.setString(4, entity.getPassword());
        preState.setObject(5, entity.getBirthday());
        preState.setObject(6, entity.getGender());
        preState.setObject(7, entity.getCreatetime());
        preState.setObject(8, entity.getLogintime());
        preState.setObject(9, null);
        preState.setObject(10, entity.getContact().getId());
        preState.setObject(11, entity.getDescription());
        // BUG FIX: execute() returns false for INSERT statements, so the
        // original reported failure even when the insert succeeded. Use the
        // update count instead.
        result = preState.executeUpdate() > 0;
        conn.commit();

    } catch (SQLException e) {
        e.printStackTrace();
        try {
            conn.rollback();
        } catch (SQLException e1) {
            e1.printStackTrace();
        }
    } finally {
        // Close the statement (leaked in the original), then the connection.
        if (preState != null) {
            try {
                preState.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        try {
            conn.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    return result;
}

From source file:com.google.enterprise.connector.salesforce.storetype.DBStore.java

/**
 * Stores one Base64-encoded document-list entry into the instance table,
 * keyed by a decimal "crawl_set" index derived from the checkpoint: entries
 * in the same checkpoint set increment the fractional part by .00001,
 * otherwise a new set starts at {@code checkpoint + ".00000"}.
 * Only MySQL is supported; other databases are silently skipped.
 *
 * @param checkpoint      the crawl checkpoint identifying the current set
 * @param str_store_entry the entry payload to store (Base64-encoded here)
 */
public void setDocList(String checkpoint, String str_store_entry) {

    DatabaseMetaData dbm = null;
    Connection connection = null;

    logger.log(Level.FINEST, "Setting doclist " + checkpoint);
    logger.log(Level.FINEST, "Setting store_entry " + str_store_entry);
    try {
        connection = ds.getConnection();
        connection.setAutoCommit(true);

        dbm = connection.getMetaData();

        // Base64-encode the payload before storing it.
        String encode_entry = new String(
                org.apache.commons.codec.binary.Base64.encodeBase64(str_store_entry.getBytes()));
        str_store_entry = encode_entry;

        if (dbm.getDatabaseProductName().equals("MySQL")) {
            // Find the most recent crawl_set value in the table.
            Statement statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            try {
                String select_stmt = "select crawl_set from " + this.instance_table
                        + " where crawl_set=(select max(crawl_set) from " + this.instance_table + ")";
                logger.log(Level.FINE, "Getting lastentryp in db: " + select_stmt);
                ResultSet rs = statement.executeQuery(select_stmt);
                try {
                    String last_entry_in_db = null;
                    boolean ret_rows = rs.first();
                    while (ret_rows) {
                        BigDecimal crawl_set = rs.getBigDecimal("crawl_set");
                        last_entry_in_db = crawl_set.toPlainString();
                        ret_rows = rs.next();
                    }

                    logger.log(Level.FINER, "Last_Entry_in_Database " + last_entry_in_db);

                    String indexValue;
                    if (last_entry_in_db != null && last_entry_in_db.startsWith(checkpoint)) {
                        // Same set: increment the fractional index.
                        BigDecimal bd = new BigDecimal(last_entry_in_db);
                        bd = bd.add(new BigDecimal(".00001"));
                        indexValue = bd.toPlainString();
                    } else {
                        // New (or empty) set: start at the 0th row.
                        indexValue = checkpoint + ".00000";
                    }
                    logger.log(Level.INFO, "Adding to DBStore. Index Value: " + indexValue);
                    insertStoreEntry(connection, indexValue, str_store_entry);
                } finally {
                    rs.close();
                }
            } finally {
                statement.close();
            }
        }
    } catch (Exception ex) {
        logger.log(Level.SEVERE, "Exception initializing context Datasource " + ex);
    } finally {
        // Always release the connection — the original leaked it on
        // non-MySQL databases and on every exception path.
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException ignored) {
                // nothing further we can do while cleaning up
            }
        }
    }
}

/**
 * Inserts one (crawl_set, crawl_data) row, compressing the payload in SQL.
 * Extracted from three identical inline copies in the original method.
 */
private void insertStoreEntry(Connection connection, String crawlSet, String data) throws SQLException {
    String insert_stmt = "insert into " + this.instance_table
            + " (crawl_set,crawl_data) values (?,COMPRESS(?))";
    PreparedStatement ps = connection.prepareStatement(insert_stmt);
    try {
        ps.setString(1, crawlSet);
        ps.setString(2, data);
        ps.executeUpdate();
    } finally {
        ps.close();
    }
}

From source file:com.amazonaws.services.kinesis.connectors.redshift.RedshiftManifestEmitter.java

/**
 * Copies the buffered S3 file references into Amazon Redshift via a manifest
 * file, using a single transaction for the dedup check, the file-table
 * inserts and the COPY itself.
 *
 * @param buffer buffer of S3 file names to load
 * @return an empty list on success (or when everything was already copied);
 *         the full record list when this attempt failed and should be retried
 * @throws IOException declared by the emitter interface
 */
@Override
public List<String> emit(final UnmodifiableBuffer<String> buffer) throws IOException {
    List<String> records = buffer.getRecords();
    Connection conn = null;

    String manifestFileName = getManifestFile(records);
    // Copy to Amazon Redshift using manifest file
    try {
        conn = DriverManager.getConnection(redshiftURL, loginProps);
        // Dedup check, insert and COPY must commit atomically.
        conn.setAutoCommit(false);
        List<String> deduplicatedRecords = checkForExistingFiles(conn, records);
        if (deduplicatedRecords.isEmpty()) {
            LOG.info("All the files in this set were already copied to Redshift.");
            // All of these files were already written
            rollbackAndCloseConnection(conn);
            records.clear();
            return Collections.emptyList();
        }

        if (deduplicatedRecords.size() != records.size()) {
            manifestFileName = getManifestFile(deduplicatedRecords);
        }
        // Write manifest file to Amazon S3
        try {
            writeManifestToS3(manifestFileName, records);
        } catch (Exception e) {
            LOG.error("Error writing file " + manifestFileName + " to S3. Failing this emit attempt.", e);
            // BUG FIX: the original returned here without rolling back or
            // closing the connection, leaking it on every S3 failure.
            rollbackAndCloseConnection(conn);
            return buffer.getRecords();
        }

        LOG.info("Inserting " + deduplicatedRecords.size() + " rows into the files table.");
        insertRecords(conn, deduplicatedRecords);
        LOG.info("Initiating Amazon Redshift manifest copy of " + deduplicatedRecords.size() + " files.");
        redshiftCopy(conn, manifestFileName);
        conn.commit();
        LOG.info("Successful Amazon Redshift manifest copy of " + getNumberOfCopiedRecords(conn)
                + " records from " + deduplicatedRecords.size() + " files using manifest s3://" + s3Bucket + "/"
                + getManifestFile(records));
        closeConnection(conn);
        return Collections.emptyList();
    } catch (SQLException | IOException e) {
        LOG.error("Error copying data from manifest file " + manifestFileName
                + " into Amazon Redshift. Failing this emit attempt.", e);
        rollbackAndCloseConnection(conn);
        return buffer.getRecords();
    } catch (Exception e) {
        LOG.error("Error copying data from manifest file " + manifestFileName
                + " into Redshift. Failing this emit attempt.", e);
        rollbackAndCloseConnection(conn);
        return buffer.getRecords();
    }
}

From source file:eionet.cr.dao.virtuoso.VirtuosoPostHarvestScriptDAO.java

/**
 * Moves the selected scripts of the given target one step up or down,
 * then rewrites every script's position so they stay contiguous from 1.
 * The whole reorder runs in one transaction and is rolled back on failure.
 *
 * @see eionet.cr.dao.PostHarvestScriptDAO#move(eionet.cr.dto.PostHarvestScriptDTO.TargetType, java.lang.String, java.util.Set,
 *      int)
 * @param targetType whether the scripts belong to a harvest SOURCE or a TYPE; may be null
 * @param targetUrl  the URL identifying that source/type; may be null
 * @param ids        ids of the scripts to move; null or empty is a no-op
 * @param direction  negative moves the scripts up (towards position 1),
 *                   positive moves them down; must not be 0
 * @throws DAOException wrapping any underlying exception (transaction rolled back)
 */
@Override
public void move(TargetType targetType, String targetUrl, Set<Integer> ids, int direction) throws DAOException {

    if (ids == null || ids.isEmpty()) {
        return;
    }

    if (direction == 0) {
        throw new IllegalArgumentException("Direction must not be 0!");
    }

    // Exactly one of sourceUrl/typeUrl is the target URL, depending on type.
    String sourceUrl = targetType != null && targetType.equals(TargetType.SOURCE) ? targetUrl : null;
    String typeUrl = targetType != null && targetType.equals(TargetType.TYPE) ? targetUrl : null;

    // Bind values for LIST_SQL; empty string stands in for "no target".
    ArrayList<Object> values = new ArrayList<Object>();
    values.add(sourceUrl == null ? "" : sourceUrl);
    values.add(typeUrl == null ? "" : typeUrl);

    Connection conn = null;
    try {
        conn = getSQLConnection();
        conn.setAutoCommit(false);

        // Load all scripts of this target, in position order.
        PostHarvestScriptDTOReader reader = new PostHarvestScriptDTOReader();
        SQLUtil.executeQuery(LIST_SQL, values, reader, conn);
        List<PostHarvestScriptDTO> scripts = reader.getResultList();

        // helper object for handling min, max positions and real count of scripts
        PostHarvestScriptSet scriptSet = new PostHarvestScriptSet(scripts);

        // If even one script is already at position 1 then moving up is not considered possible.
        // And conversely, if even one script is already at the last position, then moving down
        // is not considered possible either.
        boolean isMovingPossible = true;
        List<Integer> selectedPositions = new ArrayList<Integer>();
        for (PostHarvestScriptDTO script : scripts) {

            // we do this check only for scripts that have been selected
            if (ids.contains(script.getId())) {
                int position = script.getPosition();
                if ((direction < 0 && position == scriptSet.getMinPosition())
                        || (direction > 0 && position == scriptSet.getMaxPosition())) {
                    isMovingPossible = false;
                } else {
                    selectedPositions.add(position);
                }
            }
        }

        if (isMovingPossible) {

            if (direction < 0) {
                // Moving up: swap each selected script with its predecessor,
                // iterating in ascending position order so swaps don't collide.
                for (Integer selectedPosition : selectedPositions) {
                    PostHarvestScriptDTO scriptToMove = scriptSet.getScriptByPosition(selectedPosition);
                    int i = scripts.indexOf(scriptToMove);

                    scripts.set(i, scripts.get(i - 1));
                    scripts.set(i - 1, scriptToMove);
                }
            } else {
                // Moving down: iterate the selected positions in reverse so
                // each script swaps with its successor without collisions.
                for (int j = selectedPositions.size() - 1; j >= 0; j--) {
                    PostHarvestScriptDTO scriptToMove = scriptSet.getScriptByPosition(selectedPositions.get(j));
                    int i = scripts.indexOf(scriptToMove);

                    scripts.set(i, scripts.get(i + 1));
                    scripts.set(i + 1, scriptToMove);
                }
            }
        }

        // First shift all positions above the current maximum to avoid
        // unique-position collisions, then write the final 1..n positions.
        values.add(0, Integer.valueOf(scriptSet.getMaxPosition()));
        SQLUtil.executeUpdate(INCREASE_POSITIONS_SQL, values, conn);

        for (int i = 0; i < scripts.size(); i++) {

            values = new ArrayList<Object>();
            values.add(i + 1);
            values.add(Integer.valueOf(scripts.get(i).getId()));
            SQLUtil.executeUpdate(UPDATE_POSITION_SQL, values, conn);
        }

        conn.commit();
    } catch (Exception e) {
        SQLUtil.rollback(conn);
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(conn);
    }
}

From source file:de.codecentric.multitool.db.DBUnitLibrary.java

/**
 * Opens (or reuses) a named JDBC connection and caches it in the connection
 * map. An existing cached connection is reused only while it is still valid;
 * otherwise a fresh one is created and replaces it.
 *
 * @param dsName        logical name under which the connection is cached
 * @param connectString JDBC URL
 * @param dbUser        database user
 * @param dbPassword    database password
 * @throws Exception if the connection cannot be opened or validated
 */
public void openConnection(String dsName, String connectString, String dbUser, String dbPassword)
        throws Exception {

    if (!(connectionMap.containsKey(dsName) && connectionMap.get(dsName).isValid(0))) {

        // Close a stale, invalid connection before replacing it in the map —
        // the original simply overwrote (and leaked) it.
        Connection stale = connectionMap.get(dsName);
        if (stale != null) {
            try {
                stale.close();
            } catch (Exception ignored) {
                // connection was already broken; nothing more to do
            }
        }

        Connection connection = DriverManager.getConnection(connectString, dbUser, dbPassword);

        // MSSQL and MySQL both get explicit transaction control here; the
        // original had two identical branches.
        if (MSSQL_DBMS_TYPE.equals(curDbmsType) || MYSQL_DBMS_TYPE.equals(curDbmsType)) {
            connection.setAutoCommit(false);
        }
        connectionMap.put(dsName, connection);
    }
}

From source file:eionet.cr.dao.virtuoso.VirtuosoHarvestScriptDAO.java

/**
 * Deletes the harvest scripts with the given ids, batching all deletes into
 * a single transaction that is rolled back on any failure.
 *
 * @see eionet.cr.dao.HarvestScriptDAO#delete(eionet.cr.dto.HarvestScriptDTO)
 * @param ids ids of the scripts to delete; a null or empty list is a no-op
 * @throws DAOException wrapping any SQLException raised by the batch
 */
@Override
public void delete(List<Integer> ids) throws DAOException {

    // Nothing to delete.
    if (ids == null || ids.isEmpty()) {
        return;
    }

    Connection sqlConn = null;
    PreparedStatement batchStmt = null;
    try {
        sqlConn = getSQLConnection();
        sqlConn.setAutoCommit(false);

        // Queue one delete per id and execute them as a single batch.
        batchStmt = sqlConn.prepareStatement(DELETE_SQL);
        for (int i = 0; i < ids.size(); i++) {
            batchStmt.setInt(1, ids.get(i));
            batchStmt.addBatch();
        }
        batchStmt.executeBatch();

        sqlConn.commit();
    } catch (SQLException e) {
        SQLUtil.rollback(sqlConn);
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(batchStmt);
        SQLUtil.close(sqlConn);
    }
}

From source file:eionet.cr.dao.virtuoso.VirtuosoHarvestScriptDAO.java

/**
 * Toggles the active flag of the harvest scripts with the given ids, running
 * all updates as one batched transaction (rolled back on failure).
 *
 * @see eionet.cr.dao.HarvestScriptDAO#activateDeactivate(java.util.List)
 * @param ids ids of the scripts to toggle; a null or empty list is a no-op
 * @throws DAOException wrapping any SQLException raised by the batch
 */
@Override
public void activateDeactivate(List<Integer> ids) throws DAOException {

    // Nothing to toggle.
    if (ids == null || ids.isEmpty()) {
        return;
    }

    Connection sqlConn = null;
    PreparedStatement toggleStmt = null;
    try {
        sqlConn = getSQLConnection();
        sqlConn.setAutoCommit(false);

        // Queue one toggle per id, then run the whole batch at once.
        toggleStmt = sqlConn.prepareStatement(ACTIVATE_DEACTIVATE_SQL);
        for (int i = 0; i < ids.size(); i++) {
            toggleStmt.setInt(1, ids.get(i));
            toggleStmt.addBatch();
        }
        toggleStmt.executeBatch();

        sqlConn.commit();
    } catch (SQLException e) {
        SQLUtil.rollback(sqlConn);
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(toggleStmt);
        SQLUtil.close(sqlConn);
    }
}

From source file:fitmon.WorkoutData.java

/**
 * Returns up to five (date, total calories) rows for the given user, summing
 * workout calories per date.
 *
 * @param userId id of the user whose workouts are aggregated
 * @return list of two-element rows [date (String), calories (Double)];
 *         empty when the query fails (errors are printed, not rethrown)
 * @throws SQLException only if closing a JDBC resource fails
 */
public ArrayList<ArrayList> getTotalCaloriesBurned(int userId) throws SQLException {
    PreparedStatement st = null;
    Connection conn = null;
    ResultSet rs = null;
    ArrayList<ArrayList> calBurnedList = new ArrayList<ArrayList>();
    try {
        // Bind userId as a parameter instead of concatenating it into the SQL,
        // and execute the *prepared* statement (the original passed the raw
        // string to executeQuery, bypassing the preparation entirely).
        String query = "select date,sum(calories) from Workout where userId=?"
                + " group by date limit 5";
        Class.forName("com.mysql.jdbc.Driver");
        // SECURITY NOTE(review): credentials are hard-coded; move them to
        // configuration.
        conn = (Connection) DriverManager.getConnection("jdbc:mysql://localhost:3306/fitmon", "root",
                "april-23");
        st = conn.prepareStatement(query);
        st.setInt(1, userId);

        // Read-only query — no transaction needed (the original disabled
        // auto-commit and never committed).
        rs = st.executeQuery();
        while (rs.next()) {
            ArrayList calBurned = new ArrayList();
            calBurned.add(rs.getString("date"));
            calBurned.add(rs.getDouble("sum(calories)"));
            calBurnedList.add(calBurned);
        }

    } catch (ClassNotFoundException ce) {
        ce.printStackTrace();
    } catch (SQLException se) {
        se.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // Close in reverse acquisition order; the original leaked the
        // ResultSet and closed st/conn twice.
        if (rs != null) {
            rs.close();
        }

        if (st != null) {
            st.close();
        }

        if (conn != null) {
            conn.close();
        }

    }

    return calBurnedList;
}

From source file:org.zenoss.zep.dao.impl.ElapsedTime.java

/**
 * Optimizes the event tables. On MySQL, when the external Percona
 * pt-online-schema-change tool is configured and present, event_summary is
 * rebuilt through it to avoid the table lock taken by OPTIMIZE TABLE; all
 * remaining tables are optimized in-database (OPTIMIZE TABLE on MySQL,
 * VACUUM ANALYZE on PostgreSQL). Optimization timings for event_summary are
 * recorded and reported via SendOptimizationTimeEvent.
 *
 * @throws ZepException declared by the interface; SQL errors surface through
 *                      the template callback
 */
@Override
public void optimizeTables() throws ZepException {
    final DatabaseType dbType = databaseCompatibility.getDatabaseType();

    final String externalToolName = this.useExternalToolPath + "/pt-online-schema-change";
    final String tableToOptimize = "event_summary";
    // Use Percona's pt-online-schema-change (when available) to avoid locking
    // the table as a plain MySQL OPTIMIZE would.
    if (this.useExternalTool && dbType == DatabaseType.MYSQL
            && DaoUtils.executeCommand("ls " + externalToolName) == 0) {
        logger.info("Validating state of event_summary");
        this.validateEventSummaryState();
        logger.debug("Optimizing table: " + tableToOptimize + " via percona " + externalToolName);
        eventSummaryOptimizationTime.setStartTime();

        String externalToolCommandPrefix = externalToolName + " --alter \"ENGINE=Innodb\" D=" + this.dbname
                + ",t=";
        String externalToolCommandSuffix = "";
        if (System.getenv("USE_ZENDS") != null && Integer.parseInt(System.getenv("USE_ZENDS").trim()) == 1) {
            externalToolCommandSuffix = " --defaults-file=/opt/zends/etc/zends.cnf";
        }
        // SECURITY NOTE(review): the DB password is passed on the command
        // line, making it visible in the local process list — consider a
        // defaults file or environment variable instead.
        externalToolCommandSuffix += " " + this.externalToolOptions
                + " --alter-foreign-keys-method=drop_swap --host=" + this.hostname + " --port=" + this.port
                + " --user=" + this.username + " --password=" + this.password + " --execute";
        int return_code = DaoUtils
                .executeCommand(externalToolCommandPrefix + tableToOptimize + externalToolCommandSuffix);
        if (return_code != 0) {
            // Fixed missing spaces in the original log message.
            logger.error("External tool failed on: " + tableToOptimize + ". Therefore, table: "
                    + tableToOptimize + " will not be optimized.");
        } else {
            logger.debug(
                    "Successfully optimized table: " + tableToOptimize + " using percona " + externalToolName);
        }

        eventSummaryOptimizationTime.setEndTime();
        SendOptimizationTimeEvent(eventSummaryOptimizationTime, tableToOptimize, "percona");

        // event_summary was handled externally; don't optimize it again below.
        if (this.tablesToOptimize.contains(tableToOptimize)) {
            this.tablesToOptimize.remove(tableToOptimize);
        }
    } else {
        if (this.useExternalTool) {
            logger.warn(
                    "External tool not available. Table: " + tableToOptimize + " optimization may be slow.");
        }
        if (!this.tablesToOptimize.contains(tableToOptimize)) {
            this.tablesToOptimize.add(tableToOptimize);
        }
    }

    eventSummaryOptimizationTime.setStartTime(); // init so elapsedTime() == 0

    try {
        logger.debug("Optimizing tables: {}", this.tablesToOptimize);
        this.template.execute(new ConnectionCallback<Object>() {
            @Override
            public Object doInConnection(Connection con) throws SQLException, DataAccessException {
                Boolean currentAutoCommit = null;
                Statement statement = null;
                try {
                    // Remember the connection's auto-commit mode so it can be
                    // restored before the connection goes back to the pool.
                    currentAutoCommit = con.getAutoCommit();
                    con.setAutoCommit(true);
                    statement = con.createStatement();
                    for (String tableToOptimize : tablesToOptimize) {
                        logger.debug("Optimizing table: {}", tableToOptimize);
                        final String sql;
                        switch (dbType) {
                        case MYSQL:
                            sql = "OPTIMIZE TABLE " + tableToOptimize;
                            break;
                        case POSTGRESQL:
                            sql = "VACUUM ANALYZE " + tableToOptimize;
                            break;
                        default:
                            throw new IllegalStateException("Unsupported database type: " + dbType);
                        }
                        // BUG FIX: the original used '==' to compare strings,
                        // which tests reference identity, not content.
                        if ("event_summary".equals(tableToOptimize)) {
                            eventSummaryOptimizationTime.setStartTime();
                        }
                        statement.execute(sql);
                        if ("event_summary".equals(tableToOptimize)) {
                            eventSummaryOptimizationTime.setEndTime();
                        }
                        logger.debug("Completed optimizing table: {}", tableToOptimize);
                    }
                } finally {
                    JdbcUtils.closeStatement(statement);
                    if (currentAutoCommit != null) {
                        con.setAutoCommit(currentAutoCommit);
                    }
                }
                return null;
            }
        });
    } finally {
        logger.info("Validating state of event_summary");
        this.validateEventSummaryState();
    }

    if (eventSummaryOptimizationTime.getElapsedTime() > 0) {
        SendOptimizationTimeEvent(eventSummaryOptimizationTime, "event_summary", "");
    }

    logger.debug("Completed Optimizing tables: {}", tablesToOptimize);
}