Example usage for java.sql Connection setAutoCommit

List of usage examples for java.sql Connection setAutoCommit

Introduction

On this page you can find usage examples for java.sql Connection setAutoCommit.

Prototype

void setAutoCommit(boolean autoCommit) throws SQLException;

Source Link

Document

Sets this connection's auto-commit mode to the given state.

Usage

From source file:com.che.software.testato.domain.dao.jdbc.impl.ActionDAO.java

/**
 * Creates a map from an action plan id, a list of intentions and a list of
 * sections./* w  w w  .  j  a  v  a  2  s. co m*/
 * 
 * @author Clement HELIOU (clement.heliou@che-software.com).
 * @param actionPlanId the action plan id to be linked to the created
 *        actions.
 * @param intentions the intentions to add in the map.
 * @param sections the sections to add in the map.
 * @since July, 2011.
 * @throws ActionCreationDAOException if an error occurs during the
 *         creation.
 */
@Override
public void createMap(int actionPlanId, List<Intention> intentions, List<Section> sections)
        throws ActionCreationDAOException {
    LOGGER.debug("createMap(" + actionPlanId + "," + intentions.size() + " intentions," + sections.size()
            + " sections).");
    Connection connection = null;
    try {
        connection = getDataSource().getConnection();
        connection.setAutoCommit(false);
        List<Integer> createdIntentionsId = new ArrayList<Integer>();
        for (Intention intention : intentions) {
            getQueryRunner().update(connection,
                    "INSERT INTO intention(intention_id, label) VALUES(nextval('intention_seq'), ?) ",
                    new Object[] { intention.getLabel() });
            createdIntentionsId.add((Integer) getQueryRunner().query(connection,
                    "SELECT MAX(intention_id)::int AS intentionId FROM intention ",
                    new ScalarHandler("intentionId")));
        }
        List<Intention> createdIntentions = getQueryRunner().query(connection,
                "SELECT intention_id AS intentionId, label FROM intention WHERE intention_id IN("
                        + getInClauseFromIntentionsIds(createdIntentionsId) + ") ",
                new BeanListHandler<Intention>(Intention.class));
        LOGGER.debug(createdIntentions.size() + " intentions created with success...");
        for (Section section : sections) {
            boolean source = false, target = false;
            for (Intention intention : createdIntentions) {
                if (!source && intention.getLabel().equalsIgnoreCase(section.getSourceIntention())) {
                    section.setSourceIntentionId(intention.getIntentionId());
                    source = true;
                }
                if (!target && intention.getLabel().equalsIgnoreCase(section.getTargetIntention())) {
                    section.setTargetIntentionId(intention.getIntentionId());
                    target = true;
                }
                if (target && source) {
                    break;
                }
            }
            Integer actionId = (Integer) getQueryRunner().query(connection,
                    "SELECT action_id::int AS actionId FROM action WHERE action_plan_id = ? AND source_intention = ? AND target_intention = ? ",
                    new ScalarHandler("actionId"), new Object[] { actionPlanId, section.getSourceIntentionId(),
                            section.getTargetIntentionId() });
            if (null == actionId) {
                LOGGER.debug("Action creation...");
                getQueryRunner().update(connection,
                        "INSERT INTO action(action_id, target_intention, source_intention, action_plan_id) VALUES(nextval('action_seq'),?,?,?) ",
                        new Object[] { section.getTargetIntentionId(), section.getSourceIntentionId(),
                                actionPlanId });
                actionId = (Integer) getQueryRunner().query(connection,
                        "SELECT action_id::int AS actionId FROM action WHERE action_plan_id = ? AND source_intention = ? AND target_intention = ? ",
                        new ScalarHandler("actionId"), new Object[] { actionPlanId,
                                section.getSourceIntentionId(), section.getTargetIntentionId() });
            }
            getQueryRunner().update(connection,
                    "INSERT INTO item(item_id, label, iteration_max_number) VALUES(nextval('item_seq'), ?, ?) ",
                    new Object[] { section.getStrategyLabel(), section.getMaxIterationNumber() });
            Integer createdItemId = (Integer) getQueryRunner().query(connection,
                    "SELECT MAX(item_id)::int AS itemId FROM item ", new ScalarHandler("itemId"));
            getQueryRunner().update(connection,
                    (section.isExclusive())
                            ? "INSERT INTO action_exclusive_item(action_id, item_id) VALUES(?,?) "
                            : "INSERT INTO action_inclusive_item(action_id, item_id) VALUES(?,?) ",
                    new Object[] { actionId, createdItemId });
        }
        connection.commit();
    } catch (SQLException e) {
        try {
            connection.rollback();
        } catch (SQLException e1) {
            throw new ActionCreationDAOException(e1);
        }
        throw new ActionCreationDAOException(e);
    } finally {
        if (null != connection) {
            DbUtils.closeQuietly(connection);
        }
    }
}

From source file:com.cloudera.sqoop.manager.CubridManagerImportTest.java

public void setUpData(String tableName, boolean nullEntry) {
    // Configure connection options for the Cubrid test instance.
    SqoopOptions options = new SqoopOptions(CubridTestUtils.getConnectString(), tableName);
    options.setUsername(CubridTestUtils.getCurrentUser());
    options.setPassword(CubridTestUtils.getPassword());

    LOG.debug("Setting up another CubridImport test: " + CubridTestUtils.getConnectString());

    manager = new CubridManager(options);

    Connection conn = null;
    Statement stmt = null;

    try {
        conn = manager.getConnection();
        conn.setAutoCommit(false);
        stmt = conn.createStatement();

        // Recreate the table from scratch, then seed it with fixture rows.
        stmt.executeUpdate("DROP TABLE IF EXISTS " + tableName);
        String ddl = "CREATE TABLE " + tableName + " (" + manager.escapeColName("id")
                + " INT NOT NULL PRIMARY KEY, " + manager.escapeColName("name") + " VARCHAR(24) NOT NULL, "
                + manager.escapeColName("start_date") + " DATE, " + manager.escapeColName("Salary") + " FLOAT, "
                + manager.escapeColName("dept") + " VARCHAR(32));";
        stmt.executeUpdate(ddl);

        stmt.executeUpdate("INSERT INTO " + tableName + " VALUES(1,'Aaron','2009-05-14',1000000.00,'engineering');");
        stmt.executeUpdate("INSERT INTO " + tableName + " VALUES(2,'Bob','2009-04-20',400.00,'sales');");
        stmt.executeUpdate("INSERT INTO " + tableName + " VALUES(3,'Fred','2009-01-23',15.00,'marketing');");
        if (nullEntry) {
            // Optional row with NULLs to exercise null handling in imports.
            stmt.executeUpdate("INSERT INTO " + tableName + " VALUES(4,'Mike',NULL,NULL,NULL);");
        }

        conn.commit();
    } catch (SQLException sqlE) {
        LOG.error("Encountered SQL Exception: " + sqlE);
        sqlE.printStackTrace();
        fail("SQLException when running test setUp(): " + sqlE);
    } finally {
        // Close statement before connection; a close failure only warns.
        try {
            if (null != stmt) {
                stmt.close();
            }
            if (null != conn) {
                conn.close();
            }
        } catch (SQLException sqlE) {
            LOG.warn("Got SQLException when closing connection: " + sqlE);
        }
    }
}

From source file:mayoapp.migrations.V0300_1005__extract_image_metadata_retroactively.java

@Override
public void migrate(Connection connection) throws Exception {
    // Retroactively extract image dimensions for every stored attachment and
    // write them back into the attachment's JSON metadata column.
    ImageProcessor imageProcessor = new DefaultImageProcessor();
    ImageDimensionsMetadataExtractor extractor = new ImageDimensionsMetadataExtractor(imageProcessor);

    StatementContext context = new StatementContextStub();
    // NOTE(review): auto-commit is disabled but no commit() is issued in this
    // method — presumably the migration framework commits on success; confirm.
    connection.setAutoCommit(false);

    // Count the rows first so the paged scan below knows when to stop.
    // try-with-resources: the original leaked the ResultSet on exception.
    Integer count = 0;
    try (Statement countStatement = connection.createStatement();
            ResultSet res = countStatement.executeQuery(
                    "SELECT COUNT(*) FROM attachment JOIN entity on attachment.entity_id = entity.id")) {
        while (res.next()) {
            count = res.getInt(1);
        }
    }

    Integer i = 0;

    Map<UUID, Object> toSave = new HashMap<>();

    // Page through attachments 50 at a time to bound memory usage.
    for (int offset = 0; offset < count; offset += 50) {
        try (Statement queryStatement = connection.createStatement();
                ResultSet data = queryStatement.executeQuery(
                        "SELECT * from attachment JOIN entity on attachment.entity_id = entity.id LIMIT 50 OFFSET "
                                + offset)) {
            while (data.next()) {
                LoadedAttachmentMapper mapper = new LoadedAttachmentMapper();
                LoadedAttachment attachment = mapper.map(0, data, context);

                logger.info("Processing attachment " + i + " : " + attachment.getFilename());

                Optional<Map<String, Object>> metadata = extractor.extractMetadata(attachment);

                if (metadata.isPresent()) {
                    // Merge the new dimensions into a copy of the existing metadata.
                    Map<String, Map<String, Object>> meta = new HashMap<>(attachment.getMetadata());
                    meta.put("imageDimensions", metadata.get());
                    toSave.put(attachment.getId(), meta);
                }

                i++;
            }
        }
    }

    ObjectMapper mapper = new ObjectMapper();
    // try-with-resources: the original never closed this PreparedStatement.
    try (PreparedStatement statement = connection
            .prepareStatement("UPDATE attachment SET metadata = CAST (? AS json) WHERE entity_id =?")) {
        // Iterate entries directly instead of keySet() + get() lookups.
        for (Map.Entry<UUID, Object> entry : toSave.entrySet()) {
            statement.setObject(2, new PG_UUID(entry.getKey()));
            statement.setObject(1, mapper.writeValueAsString(entry.getValue()));
            statement.addBatch();
            // NOTE(review): 'i' is the stale scan counter here, not a batch index.
            logger.info("Adding image to batch " + i + " : " + entry.getKey().toString());
        }

        statement.executeBatch();
    }
}

From source file:com.che.software.testato.domain.dao.jdbc.impl.ScriptDAO.java

/**
 * Creates the scripts for a given hierarchy. Each script row is inserted,
 * its generated id retrieved, and its items created — all in one
 * transaction committed at the end.
 *
 * @author Clement HELIOU (clement.heliou@che-software.com).
 * @param hierarchyId the hierarchy id.
 * @param scripts the scripts to create.
 * @since July, 2011.
 * @throws ScriptCreationDAOException if an error occurs during the
 *         creation.
 */
@Override
public void createScriptsFromHierarchy(int hierarchyId, List<ScriptCreation> scripts)
        throws ScriptCreationDAOException {
    LOGGER.debug("createScriptsFromHierarchy(" + hierarchyId + "," + scripts.size() + " scripts).");
    Connection connection = null;
    try {
        connection = getDataSource().getConnection();
        connection.setAutoCommit(false);
        for (ScriptCreation script : scripts) {
            getQueryRunner().update(connection,
                    "INSERT INTO script(script_id, hierarchy_id, label, depth) VALUES(nextval('script_seq'),?,'', ?) ",
                    new Object[] { hierarchyId, 1 });
            // NOTE(review): MAX(script_id) assumes no concurrent script inserts.
            Integer createdScript = (Integer) getQueryRunner().query(connection,
                    "SELECT MAX(script_id)::int AS scriptId FROM script ", new ScalarHandler("scriptId"));
            createItems(connection, script.getScriptArrows(), createdScript, 1, hierarchyId);
        }
        connection.commit();
    } catch (SQLException e) {
        // Guard against NPE: if getConnection() itself threw, 'connection'
        // is null and the original unconditional rollback() masked the
        // root-cause SQLException with a NullPointerException.
        if (null != connection) {
            try {
                connection.rollback();
            } catch (SQLException e1) {
                throw new ScriptCreationDAOException(e1);
            }
        }
        throw new ScriptCreationDAOException(e);
    } finally {
        if (null != connection) {
            DbUtils.closeQuietly(connection);
        }
    }
}

From source file:azkaban.database.AzkabanDatabaseSetup.java

/**
 * Creates every table listed in {@code missingTables}, running the
 * "properties" table script first, and records the installed version for
 * each newly created table.
 *
 * @throws SQLException if a table script fails.
 * @throws IOException if a script cannot be read.
 */
private void createNewTables() throws SQLException, IOException {
    Connection conn = dataSource.getConnection();
    try {
        // Moved inside the try: if setAutoCommit() throws, the connection
        // would otherwise never be closed.
        conn.setAutoCommit(false);
        // NOTE(review): no explicit commit here — presumably runTableScripts
        // commits or the DDL auto-commits on this DB; confirm.
        // Make sure that properties table is created first.
        if (missingTables.contains("properties")) {
            runTableScripts(conn, "properties", version, dataSource.getDBType(), false);
        }
        for (String table : missingTables) {
            if (!table.equals("properties")) {
                runTableScripts(conn, table, version, dataSource.getDBType(), false);
                // update version as we have create a new table
                installedVersions.put(table, version);
            }
        }
    } finally {
        conn.close();
    }
}

From source file:com.concursive.connect.web.portal.PortletPreferencesServiceImpl.java

/**
 * Stores portlet preferences. Temporary windows (key prefixed "T") are kept
 * in memory only; persistent windows are cached and written through to the
 * database in a single transaction (delete-all then re-insert, since the
 * container does not report which preferences changed).
 */
public void store(PortletWindow portletWindow, PortletRequest request, InternalPortletPreference[] preferences)
        throws PortletContainerException {
    String key = getFormattedKey(portletWindow);
    if (key.startsWith("T")) {
        LOG.debug("Storing temporary portlet prefs");
        DashboardPortlet portlet = (DashboardPortlet) request.getAttribute("dashboardPortlet");
        LOG.debug("write------------------------------------");
        LOG.debug("portlet page name: " + portlet.getPageName());
        LOG.debug("portlet key: " + portlet.getWindowConfigId());
        LOG.debug("portlet formatted key: " + key);
        LOG.debug("-----------------------------------------");
        // Store them in an array for quick retrieval
        storage.put(key, clonePreferences(preferences));
    } else {
        // Store them in an array for quick retrieval
        storage.put(key, clonePreferences(preferences));
        // Store the preferences into the database -- container does not specify what changed
        // so delete them all
        int portletId = Integer.parseInt(key);
        Connection db = (Connection) request.getAttribute("connection");
        try {
            db.setAutoCommit(false);
            DashboardPortletPrefsList.delete(db, portletId);
            for (InternalPortletPreference thisPref : preferences) {
                DashboardPortletPrefs portletPrefs = new DashboardPortletPrefs();
                portletPrefs.setPortletId(portletId);
                portletPrefs.setName(thisPref.getName());
                portletPrefs.setValues(thisPref.getValues());
                portletPrefs.insert(db);
            }
            db.commit();
        } catch (Exception e) {
            try {
                db.rollback();
                e.printStackTrace(System.out);
                LOG.error("Preferences", e);
            } catch (Exception e2) {
                // Was swallowed silently; at least record the rollback failure.
                LOG.error("Preferences rollback failed", e2);
            }
        } finally {
            try {
                db.setAutoCommit(true);
            } catch (Exception e2) {
                // Was swallowed silently; record failure to restore auto-commit.
                LOG.error("Could not restore auto-commit", e2);
            }
        }
    }
}

From source file:dk.netarkivet.harvester.datamodel.extendedfield.ExtendedFieldDBDAO.java

@Override
public void delete(long aExtendedfieldId) throws IOFailure {
    ArgumentNotValid.checkNotNull(aExtendedfieldId, "aExtendedfieldId");

    Connection connection = HarvestDBConnection.get();
    PreparedStatement statement = null;
    try {
        connection.setAutoCommit(false);

        // Remove the field's values first, then the field definition itself.
        statement = connection.prepareStatement("DELETE FROM extendedfieldvalue WHERE extendedfield_id = ?");
        statement.setLong(1, aExtendedfieldId);
        statement.executeUpdate();
        statement.close();

        statement = connection.prepareStatement("DELETE FROM extendedfield WHERE extendedfield_id = ?");
        statement.setLong(1, aExtendedfieldId);
        statement.executeUpdate();

        connection.commit();

    } catch (SQLException e) {
        // Deliberate swallow-and-log: the finally block rolls back if needed.
        String message = "SQL error deleting extended fields for ID " + aExtendedfieldId + "\n"
                + ExceptionUtils.getSQLExceptionCause(e);
        log.warn(message, e);
    } finally {
        DBUtils.closeStatementIfOpen(statement);
        DBUtils.rollbackIfNeeded(connection, "delete extended field", aExtendedfieldId);
        HarvestDBConnection.release(connection);
    }

}

From source file:io.apiman.gateway.engine.jdbc.JdbcRegistry.java

/**
 * Publishes an API by replacing any existing row for the same
 * (org, id, version) triple in a single transaction.
 *
 * @see io.apiman.gateway.engine.IRegistry#publishApi(io.apiman.gateway.engine.beans.Api, io.apiman.gateway.engine.async.IAsyncResultHandler)
 */
@Override
public void publishApi(Api api, IAsyncResultHandler<Void> handler) {
    Connection conn = null;
    try {
        conn = ds.getConnection();
        conn.setAutoCommit(false);
        QueryRunner run = new QueryRunner();

        // First delete any record we might already have.
        run.update(conn, "DELETE FROM gw_apis WHERE org_id = ? AND id = ? AND version = ?", //$NON-NLS-1$
                api.getOrganizationId(), api.getApiId(), api.getVersion());

        // Now insert a row for the api.
        String bean = mapper.writeValueAsString(api);
        run.update(conn, "INSERT INTO gw_apis (org_id, id, version, bean) VALUES (?, ?, ?, ?)", //$NON-NLS-1$
                api.getOrganizationId(), api.getApiId(), api.getVersion(), bean);

        DbUtils.commitAndClose(conn);
        handler.handle(AsyncResultImpl.create((Void) null, Void.class));
    } catch (SQLException | JsonProcessingException e) {
        // The original leaked the connection on failure: commitAndClose() is
        // never reached when an exception is thrown. Roll back and close
        // quietly (handles a null conn) before reporting the error.
        DbUtils.rollbackAndCloseQuietly(conn);
        handler.handle(AsyncResultImpl.create(e));
    }
}

From source file:azkaban.database.AzkabanDatabaseSetup.java

/**
 * Runs the upgrade scripts for every table listed in {@code upgradeList},
 * applying the "properties" table's scripts first, in version order.
 *
 * @throws SQLException if an upgrade script fails.
 * @throws IOException if a script cannot be read.
 */
private void updateTables() throws SQLException, IOException {
    Connection conn = dataSource.getConnection();
    try {
        // Moved inside the try: if setAutoCommit() throws, the connection
        // would otherwise never be closed.
        conn.setAutoCommit(false);
        // Make sure that properties table is created first.
        if (upgradeList.containsKey("properties")) {
            for (String version : upgradeList.get("properties")) {
                runTableScripts(conn, "properties", version, dataSource.getDBType(), true);
            }
        }
        for (String table : upgradeList.keySet()) {
            if (!table.equals("properties")) {
                for (String version : upgradeList.get(table)) {
                    runTableScripts(conn, table, version, dataSource.getDBType(), true);
                }
            }
        }
    } finally {
        conn.close();
    }
}

From source file:com.cloudera.sqoop.manager.DirectMySQLExportTest.java

/**
 * Tests an authenticated export using mysqlimport: creates the target table
 * over an authenticated JDBC connection, writes a local datafile of records,
 * runs the export tool with username/password arguments, and verifies the
 * rows landed in the table.
 */
public void testAuthExport() throws IOException, SQLException {
    SqoopOptions options = new SqoopOptions(MySQLAuthTest.AUTH_CONNECT_STRING, getTableName());
    options.setUsername(MySQLAuthTest.AUTH_TEST_USER);
    options.setPassword(MySQLAuthTest.AUTH_TEST_PASS);

    manager = new DirectMySQLManager(options);

    Connection connection = null;
    Statement st = null;

    String tableName = getTableName();

    try {
        connection = manager.getConnection();
        connection.setAutoCommit(false);
        st = connection.createStatement();

        // create a target database table.
        st.executeUpdate("DROP TABLE IF EXISTS " + tableName);
        st.executeUpdate("CREATE TABLE " + tableName + " (" + "id INT NOT NULL PRIMARY KEY, "
                + "msg VARCHAR(24) NOT NULL)");
        // Commit the DDL so the export tool's own connection can see the table.
        connection.commit();

        // Write a file containing a record to export.
        Path tablePath = getTablePath();
        Path filePath = new Path(tablePath, "datafile");
        Configuration conf = new Configuration();
        // Force the local filesystem so the datafile is written to local disk.
        conf.set("fs.default.name", "file:///");

        FileSystem fs = FileSystem.get(conf);
        fs.mkdirs(tablePath);
        OutputStream os = fs.create(filePath);
        BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os));
        w.write(getRecordLine(0));
        w.write(getRecordLine(1));
        w.write(getRecordLine(2));
        w.close();
        os.close();

        // run the export and verify that the results are good.
        runExport(getArgv(true, 10, 10, "--username", MySQLAuthTest.AUTH_TEST_USER, "--password",
                MySQLAuthTest.AUTH_TEST_PASS, "--connect", MySQLAuthTest.AUTH_CONNECT_STRING));
        // Expect exactly the three records written above.
        verifyExport(3, connection);
    } catch (SQLException sqlE) {
        LOG.error("Encountered SQL Exception: " + sqlE);
        sqlE.printStackTrace();
        fail("SQLException when accessing target table. " + sqlE);
    } finally {
        // Close statement before connection; a close failure only warns.
        try {
            if (null != st) {
                st.close();
            }

            if (null != connection) {
                connection.close();
            }
        } catch (SQLException sqlE) {
            LOG.warn("Got SQLException when closing connection: " + sqlE);
        }
    }
}