Example usage for java.sql PreparedStatement addBatch

List of usage examples for java.sql PreparedStatement addBatch

Introduction

On this page you can find usage examples for java.sql PreparedStatement addBatch.

Prototype

void addBatch() throws SQLException;

Document

Adds a set of parameters to this PreparedStatement object's batch of commands.
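
As a minimal sketch before the real-world examples (the dataSource, USERS table, and column names here are illustrative, not taken from the sources below), the typical pattern binds parameters, queues each parameter set with addBatch, and sends them all in one round trip with executeBatch:

import java.sql.Connection;
import java.sql.PreparedStatement;
import javax.sql.DataSource;

public static void insertSampleRows(DataSource dataSource) throws Exception {
    try (Connection conn = dataSource.getConnection();
            PreparedStatement ps = conn.prepareStatement("INSERT INTO USERS (ID, NAME) VALUES (?, ?)")) {
        conn.setAutoCommit(false); // group the batch into a single transaction
        for (int i = 0; i < 3; i++) {
            ps.setInt(1, i);
            ps.setString(2, "user" + i);
            ps.addBatch(); // queue this parameter set; nothing is executed yet
        }
        ps.executeBatch(); // execute all queued parameter sets in one call
        conn.commit();
    }
}

executeBatch() returns an int[] of update counts, one entry per queued parameter set, and throws a BatchUpdateException if any command in the batch fails.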

Usage

From source file:las.DBConnector.java

/**
 * Load CSV test data into an SQL table.
 *
 * Example for clearFirst:
 * boolean check = DBConnector.checkDataExistedInTable("MEMBERS");
 * if (check) {
 *     DBConnector.loadCSVIntoTable("src/resources/members.csv", "MEMBERS", true);
 *     System.out.println("Test data inserted into MEMBERS table");
 * }
 *
 * Ignore createNewReader; it is only used by loadCSVIntoTable, so don't
 * modify it.
 *
 * A getter and setter are provided for Separator, so you can set your own
 * separator for your CSV file.
 *
 * @param csvFile    src/resources/xxx.csv (put your CSV file under this path)
 * @param tableName  TABLENAME (all in capital letters)
 * @param clearFirst true = delete existing rows from the table before
 *                   loading the CSV; false = load without clearing first
 * @throws java.lang.Exception
 */
public static void loadCSVIntoTable(String csvFile, String tableName, boolean clearFirst) throws Exception {

    CSVReader csvReader = null;
    if (null == DBConnector.conn) {
        throw new Exception("Not a valid connection.");
    }
    try {

        csvReader = DBConnector.getInstance().createNewReader(csvFile);

    } catch (ClassNotFoundException | SQLException | FileNotFoundException e) {
        e.printStackTrace();
        throw new Exception("Error occured while executing file. " + e.getMessage());
    }

    String[] headerRow = csvReader.readNext();

    if (null == headerRow) {
        throw new FileNotFoundException(
                "No columns defined in given CSV file. " + "Please check the CSV file format.");
    }

    String questionmarks = StringUtils.repeat("?,", headerRow.length);
    questionmarks = questionmarks.substring(0, questionmarks.length() - 1);

    String query = SQL_INSERT.replaceFirst(TABLE_REGEX, tableName);
    query = query.replaceFirst(KEYS_REGEX, StringUtils.join(headerRow, ","));
    query = query.replaceFirst(VALUES_REGEX, questionmarks);

    String[] nextLine;
    PreparedStatement ps = null;
    try {
        conn.setAutoCommit(false);
        ps = conn.prepareStatement(query);

        if (clearFirst) {
            //delete data from table before loading csv
            conn.createStatement().execute("DELETE FROM " + tableName);
        }

        final int batchSize = 1000;
        int count = 0;
        Date date = null;
        while ((nextLine = csvReader.readNext()) != null) {
            // nextLine cannot be null here; the loop condition guarantees it
            int index = 1;
            for (String string : nextLine) {
                date = DateUtil.convertToDate(string);
                if (null != date) {
                    ps.setDate(index++, new java.sql.Date(date.getTime()));
                } else {
                    ps.setString(index++, string);
                }
            }
            ps.addBatch();
            if (++count % batchSize == 0) {
                ps.executeBatch(); // flush a full batch
            }
        }
        ps.executeBatch(); // insert remaining records
        conn.commit();
    } catch (SQLException e) {
        conn.rollback();
        e.printStackTrace();
        throw new Exception("Error occured while loading data from file to database." + e.getMessage());
    } finally {
        if (null != ps) {
            ps.close();
        }

        csvReader.close();
    }
}

From source file:biblivre3.cataloging.bibliographic.IndexDAO.java

public final boolean insert(IndexTable table, List<IndexDTO> indexList) {
    if (indexList == null || indexList.isEmpty()) {
        return false;
    }

    Connection con = null;
    try {
        con = getDataSource().getConnection();
        StringBuilder sql = new StringBuilder();
        sql.append(" INSERT INTO ").append(table.getTableName());
        sql.append(" (index_word, record_serial) ");
        sql.append(" VALUES (?, ?);");

        PreparedStatement pst = con.prepareStatement(sql.toString());

        for (IndexDTO index : indexList) {
            pst.setString(1, StringUtils.substring(index.getWord(), 0, 511));
            pst.setInt(2, index.getRecordSerial());
            pst.addBatch();
        }

        pst.executeBatch();
        pst.close();
    } catch (BatchUpdateException bue) {
        log.error(bue.getNextException(), bue);
        throw new ExceptionUser("ERROR_BIBLIO_DAO_EXCEPTION");
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        throw new ExceptionUser("ERROR_BIBLIO_DAO_EXCEPTION");
    } finally {
        closeConnection(con);
    }
    return true;
}

From source file:com.useekm.indexing.postgis.IndexedStatement.java

public static void addNewBatch(PreparedStatement stat, IndexedStatement indexedStatement) throws SQLException {
    int idx = 1;
    if (indexedStatement.objectDate != null)
        stat.setDate(idx++, new java.sql.Date(indexedStatement.objectDate.getTime()));
    else
        stat.setDate(idx++, null);
    stat.setString(idx++, indexedStatement.objectLanguage);
    stat.setObject(idx++, indexedStatement.objectSpatial);
    stat.setString(idx++, indexedStatement.objectString);
    stat.setString(idx++, indexedStatement.objectTsVectorConfig);
    stat.setString(idx++, indexedStatement.objectType);
    stat.setBoolean(idx++, indexedStatement.objectUri);
    stat.setString(idx++, indexedStatement.predicate);
    stat.setString(idx++, indexedStatement.subject);
    stat.addBatch();
}

From source file:org.wso2.carbon.apimgt.migration.util.StatDBUtil.java

private static void updateResponseSummaryTable() {
    Connection connection = null;
    Statement statement = null;
    PreparedStatement preparedStatement = null;
    ResultSet resultSet = null;

    try {
        String sql = "SELECT CONTEXT, VERSION, API_VERSION FROM API_REQUEST_SUMMARY GROUP BY CONTEXT, VERSION, API_VERSION";

        connection = dataSource.getConnection();
        statement = connection.createStatement();
        connection.setAutoCommit(false);
        statement.setFetchSize(50);
        resultSet = statement.executeQuery(sql);

        preparedStatement = connection
                .prepareStatement("UPDATE API_RESPONSE_SUMMARY SET CONTEXT = concat(?, '/', ?) "
                        + "WHERE CONTEXT = ? AND API_VERSION = ?");
        while (resultSet.next()) {
            final String context = resultSet.getString("CONTEXT");
            final String version = resultSet.getString("VERSION");

            if (!context.endsWith('/' + version)) {
                preparedStatement.setString(1, context);
                preparedStatement.setString(2, version);
                preparedStatement.setString(3, context);
                preparedStatement.setString(4, resultSet.getString("API_VERSION"));
                preparedStatement.addBatch();
            }
        }
        preparedStatement.executeBatch();
        connection.commit();

    } catch (SQLException e) {
        log.error("SQLException when updating API_RESPONSE_SUMMARY table", e);
    } finally {
        try {
            if (preparedStatement != null)
                preparedStatement.close();
            if (statement != null)
                statement.close();
            if (resultSet != null)
                resultSet.close();
            if (connection != null)
                connection.close();
        } catch (SQLException e) {
            log.error("SQLException when closing resource", e);
        }
    }
}

From source file:de.langmi.spring.batch.examples.readers.jdbc.JdbcPagingItemReaderTests.java

/**
 * Setup Datasource and create table for test.
 *
 * @throws Exception
 */
@Before
public void setUp() throws Exception {
    // DataSource setup using Apache Commons DBCP
    dataSource = new BasicDataSource();
    dataSource.setDriverClassName("org.hsqldb.jdbcDriver");
    dataSource.setUrl("jdbc:hsqldb:mem:testdb");
    dataSource.setUsername("sa");
    dataSource.setPassword("");

    // drop table if exists
    Connection conn = dataSource.getConnection();
    Statement st = conn.createStatement();
    st.execute(DROP_TEST_TABLE);
    conn.commit();
    st.close();
    conn.close();

    // create table
    conn = dataSource.getConnection();
    st = conn.createStatement();
    st.execute(CREATE_TEST_TABLE);
    conn.commit();
    st.close();
    conn.close();

    // fill with values
    conn = dataSource.getConnection();
    // prevent auto commit for batching
    conn.setAutoCommit(false);
    PreparedStatement ps = conn.prepareStatement(INSERT);
    // fill with values
    for (int i = 0; i < EXPECTED_COUNT; i++) {
        ps.setString(1, String.valueOf(i));
        ps.addBatch();
    }
    ps.executeBatch();
    conn.commit();
    ps.close();
    conn.close();
}

From source file:henu.dao.impl.CaclDaoImpl.java

@Override
public boolean recordCaclUser(long cid, long users[]) {

    int result = 0;
    try {
        String sql = "insert into uc (cid, uid) values(?,?)";
        SqlDB.getConnection().setAutoCommit(false);
        PreparedStatement ps = SqlDB.executePreparedStatement(sql);

        for (int i = 0; i < users.length; i++) {
            ps.setLong(1, cid);
            ps.setLong(2, users[i]);
            ps.addBatch();
        }

        result = ps.executeBatch().length;
        ps.clearBatch();
        SqlDB.close();
    } catch (SQLException ex) {
        ex.printStackTrace(); // at minimum report the failure instead of swallowing it
    }

    return result == users.length;
}

From source file:de.langmi.spring.batch.examples.readers.support.CompositeCursorItemReaderTest.java

/**
 * Create a table and fill with some test data.
 *
 * @param dataSource
 * @throws Exception 
 */
private void createTableWithTestData(final DataSource dataSource) throws Exception {
    // create table
    Connection conn = dataSource.getConnection();
    Statement st = conn.createStatement();
    st.execute(CREATE_TEST_TABLE);
    conn.commit();
    st.close();
    conn.close();

    // fill with values
    conn = dataSource.getConnection();
    // prevent auto commit for batching
    conn.setAutoCommit(false);
    PreparedStatement ps = conn.prepareStatement(INSERT);
    // fill with values
    for (int i = 0; i < EXPECTED_COUNT; i++) {
        ps.setString(1, String.valueOf(i));
        ps.addBatch();
    }
    ps.executeBatch();
    conn.commit();
    ps.close();
    conn.close();
}

From source file:de.is24.infrastructure.gridfs.http.metadata.generation.PrimaryDbGenerator.java

private void writeDependency(PreparedStatement ps, int pkgKey, List<YumPackageFormatEntry> dependencies)
        throws SQLException {
    for (YumPackageFormatEntry dependency : dependencies) {
        int c = fillStatementForYumPackageFormatEntry(ps, dependency, 1);
        ps.setInt(c, pkgKey);
        ps.addBatch();
    }
    if (!dependencies.isEmpty()) {
        ps.executeBatch();
    }
}

From source file:org.schedoscope.metascope.tasks.repository.mysql.impl.TableDependencyEntityMySQLRepository.java

@Override
public void insertOrUpdate(Connection connection, List<TableDependencyEntity> tableDependencies) {
    String insertDependencySql = "insert into table_dependency_entity ("
            + JDBCUtil.getDatabaseColumnsForClass(TableDependencyEntity.class) + ") values ("
            + JDBCUtil.getValuesCountForClass(TableDependencyEntity.class) + ") " + "on duplicate key update "
            + MySQLUtil.getOnDuplicateKeyString(TableDependencyEntity.class);
    PreparedStatement stmt = null;
    try {
        int batch = 0;
        connection.setAutoCommit(false);
        stmt = connection.prepareStatement(insertDependencySql);
        for (TableDependencyEntity dependency : tableDependencies) {
            stmt.setString(1, dependency.getFqdn());
            stmt.setString(2, dependency.getDependencyFqdn());
            stmt.addBatch();
            batch++;
            if (batch % 1024 == 0) {
                stmt.executeBatch();
            }
        }
        stmt.executeBatch();
        connection.commit();
        connection.setAutoCommit(true);
    } catch (SQLException e) {
        LOG.error("Could not save table dependency", e);
    } finally {
        DbUtils.closeQuietly(stmt);
    }
}

From source file:mayoapp.migrations.V0300_1005__extract_image_metadata_retroactively.java

@Override
public void migrate(Connection connection) throws Exception {
    ImageProcessor imageProcessor = new DefaultImageProcessor();
    ImageDimensionsMetadataExtractor extractor = new ImageDimensionsMetadataExtractor(imageProcessor);

    StatementContext context = new StatementContextStub();
    connection.setAutoCommit(false);
    Statement countStatement = connection.createStatement();

    Integer count = 0;
    ResultSet res = countStatement
            .executeQuery("SELECT COUNT(*) FROM attachment JOIN entity on attachment.entity_id = entity.id"); //WHERE parent_id is not null
    while (res.next()) {
        count = res.getInt(1);
    }
    countStatement.close();

    Integer i = 0;

    Map<UUID, Object> toSave = new HashMap<>();

    for (int offset = 0; offset < count; offset += 50) {
        Statement queryStatement = connection.createStatement();
        ResultSet data = queryStatement.executeQuery(
                "SELECT * from attachment JOIN entity on attachment.entity_id = entity.id LIMIT 50 OFFSET "
                        + offset);

        while (data.next()) {
            LoadedAttachmentMapper mapper = new LoadedAttachmentMapper();
            LoadedAttachment attachment = mapper.map(0, data, context);

            logger.info("Processing attachment " + i + " : " + attachment.getFilename());

            Optional<Map<String, Object>> metadata = extractor.extractMetadata(attachment);

            if (metadata.isPresent()) {
                Map<String, Map<String, Object>> meta = new HashMap<>(attachment.getMetadata());
                meta.put("imageDimensions", metadata.get());
                toSave.put(attachment.getId(), meta);
            }

            i++;
        }

        queryStatement.close();
    }

    ObjectMapper mapper = new ObjectMapper();
    PreparedStatement statement = connection
            .prepareStatement("UPDATE attachment SET metadata = CAST (? AS json) WHERE entity_id =?");

    for (UUID attachment : toSave.keySet()) {
        statement.setObject(2, new PG_UUID(attachment));
        statement.setObject(1, mapper.writeValueAsString(toSave.get(attachment)));
        statement.addBatch();
        logger.info("Adding image to batch " + i + " : " + attachment.toString());
    }

    statement.executeBatch();
}