Example usage for java.sql PreparedStatement addBatch

Introduction

This page shows example usages of the java.sql PreparedStatement addBatch method, collected from real projects.

Prototype

void addBatch() throws SQLException;

Document

Adds a set of parameters to this PreparedStatement object's batch of commands.
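
A typical use sets the parameters for one row, calls addBatch() to queue that parameter set, repeats for each row, and finally sends everything with executeBatch(). Below is a minimal, self-contained sketch; the in-memory H2 connection URL and the PEOPLE table are assumptions made for illustration, not part of the API.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class AddBatchExample {
    public static void main(String[] args) throws SQLException {
        // Hypothetical in-memory H2 database and table, for illustration only.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            conn.createStatement().execute("CREATE TABLE PEOPLE (NAME VARCHAR(64), AGE INT)");
            conn.setAutoCommit(false);

            try (PreparedStatement ps = conn.prepareStatement("INSERT INTO PEOPLE (NAME, AGE) VALUES (?, ?)")) {
                String[] names = { "alice", "bob", "carol" };
                int[] ages = { 30, 25, 41 };
                for (int i = 0; i < names.length; i++) {
                    ps.setString(1, names[i]);
                    ps.setInt(2, ages[i]);
                    ps.addBatch(); // copies the current parameter set into the batch
                }
                int[] counts = ps.executeBatch(); // one round trip for all queued rows
                conn.commit();
                System.out.println("Inserted " + counts.length + " rows");
            }
        }
    }
}

executeBatch() returns one update count per queued parameter set. As the examples below show, production code usually flushes the batch every N rows and once more after the loop to insert the remainder.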

Usage

From source file:com.pactera.edg.am.metamanager.extractor.dao.helper.ExtractorLogHelper.java

public Object doInPreparedStatement(PreparedStatement ps) throws SQLException, DataAccessException {
    String taskInstanceId = AdapterExtractorContext.getInstance().getTaskInstanceId();

    for (ExtractorLog extractorLog : extractorLogs) {
        ExtractorLogLevel level = extractorLog.getLevel();
        if (!TaskInstance.STATE_IMPORTED_ERROR
                .equals(AdapterExtractorContext.getInstance().getReturnStatus())) {
            // Escalate the task's return status to match the most severe log level seen.
            if (level == ExtractorLogLevel.ERROR) {
                AdapterExtractorContext.getInstance().setReturnStatus(TaskInstance.STATE_IMPORTED_ERROR);
            } else if (level == ExtractorLogLevel.WARN) {
                AdapterExtractorContext.getInstance().setReturnStatus(TaskInstance.STATE_IMPORTED_WARN);
            }
        }
        if (extractorLog.getMessage() == null) {
            continue;
        }

        ps.setString(1, taskInstanceId);
        ps.setString(2, String.valueOf(level.ordinal()));
        ps.setLong(3, extractorLog.getLogTime());
        // Truncate the message if it exceeds the maximum allowed log size.
        boolean msgTooLong = extractorLog.getMessage().length() > AdapterExtractorContext.getInstance()
                .getMaxLogSize();
        String message = msgTooLong
                ? extractorLog.getMessage().substring(0, AdapterExtractorContext.getInstance().getMaxLogSize())
                : extractorLog.getMessage();
        ps.setString(4, message);
        ps.setString(5, extractorLog.getLogType());

        ps.addBatch();
        ps.clearParameters();
    }

    ps.executeBatch();
    ps.clearBatch();

    return null;
}
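
Note that addBatch() copies the current parameter values into the batch, so the clearParameters() call after each addBatch() above is defensive rather than required.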

From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java

@Override
public void moveNotifications() throws ArchivalDAOException {
    Statement stmt = null;
    PreparedStatement stmt2 = null;
    Statement stmt3 = null;
    ResultSet rs = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        String sql = "SELECT * FROM DM_NOTIFICATION WHERE OPERATION_ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt = this.createMemoryEfficientStatement(conn);
        rs = stmt.executeQuery(sql);

        //            ArchivalDestinationDAOFactory.beginTransaction();
        Connection conn2 = ArchivalDestinationDAOFactory.getConnection();

        sql = "INSERT INTO DM_NOTIFICATION_ARCH VALUES(?, ?, ?, ?, ?, ?, ?)";
        stmt2 = conn2.prepareStatement(sql);

        int count = 0;
        while (rs.next()) {
            stmt2.setInt(1, rs.getInt("NOTIFICATION_ID"));
            stmt2.setInt(2, rs.getInt("DEVICE_ID"));
            stmt2.setInt(3, rs.getInt("OPERATION_ID"));
            stmt2.setInt(4, rs.getInt("TENANT_ID"));
            stmt2.setString(5, rs.getString("STATUS"));
            stmt2.setString(6, rs.getString("DESCRIPTION"));
            stmt2.setTimestamp(7, this.currentTimestamp);
            stmt2.addBatch();

            if (++count % batchSize == 0) {
                stmt2.executeBatch();
            }
        }
        stmt2.executeBatch();
        //            ArchivalDestinationDAOFactory.commitTransaction();
        if (log.isDebugEnabled()) {
            log.debug(count + " [NOTIFICATIONS] Records copied to the archival table. Starting deletion");
        }
        sql = "DELETE FROM DM_NOTIFICATION" + "  WHERE OPERATION_ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt3 = conn.createStatement();
        int affected = stmt3.executeUpdate(sql);
        if (log.isDebugEnabled()) {
            log.debug(affected + " Rows deleted");
        }
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while moving notifications ", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt, rs);
        ArchivalDAOUtil.cleanupResources(stmt2);
        ArchivalDAOUtil.cleanupResources(stmt3);
    }
}
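
Executing the batch every batchSize rows, as above, keeps the driver's batch buffer bounded while copying large tables; the extra executeBatch() after the loop flushes whatever remains.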

From source file:org.owasp.dependencycheck.data.nvdcve.CveDB.java

/**
 * Updates the vulnerability within the database. If the vulnerability does
 * not exist it will be added.
 *
 * @param vuln the vulnerability to add to the database
 * @throws DatabaseException is thrown if the database encounters an error during the update
 */
public synchronized void updateVulnerability(Vulnerability vuln) throws DatabaseException {
    clearCache();
    ResultSet rs = null;
    try {
        int vulnerabilityId = 0;
        final PreparedStatement selectVulnerabilityId = getPreparedStatement(SELECT_VULNERABILITY_ID);
        selectVulnerabilityId.setString(1, vuln.getName());
        rs = selectVulnerabilityId.executeQuery();
        if (rs.next()) {
            vulnerabilityId = rs.getInt(1);
            // First delete any existing vulnerability info; we don't know what was
            // updated, so deleting and reinserting is slower but simpler.
            final PreparedStatement deleteReference = getPreparedStatement(DELETE_REFERENCE);
            deleteReference.setInt(1, vulnerabilityId);
            deleteReference.execute();

            final PreparedStatement deleteSoftware = getPreparedStatement(DELETE_SOFTWARE);
            deleteSoftware.setInt(1, vulnerabilityId);
            deleteSoftware.execute();
        }

        DBUtils.closeResultSet(rs);

        if (vulnerabilityId != 0) {
            if (vuln.getDescription().contains("** REJECT **")) {
                final PreparedStatement deleteVulnerability = getPreparedStatement(DELETE_VULNERABILITY);
                deleteVulnerability.setInt(1, vulnerabilityId);
                deleteVulnerability.executeUpdate();
            } else {
                final PreparedStatement updateVulnerability = getPreparedStatement(UPDATE_VULNERABILITY);
                updateVulnerability.setString(1, vuln.getDescription());
                updateVulnerability.setString(2, vuln.getCwe());
                updateVulnerability.setFloat(3, vuln.getCvssScore());
                updateVulnerability.setString(4, vuln.getCvssAccessVector());
                updateVulnerability.setString(5, vuln.getCvssAccessComplexity());
                updateVulnerability.setString(6, vuln.getCvssAuthentication());
                updateVulnerability.setString(7, vuln.getCvssConfidentialityImpact());
                updateVulnerability.setString(8, vuln.getCvssIntegrityImpact());
                updateVulnerability.setString(9, vuln.getCvssAvailabilityImpact());
                updateVulnerability.setInt(10, vulnerabilityId);
                updateVulnerability.executeUpdate();
            }
        } else {
            final PreparedStatement insertVulnerability = getPreparedStatement(INSERT_VULNERABILITY);
            insertVulnerability.setString(1, vuln.getName());
            insertVulnerability.setString(2, vuln.getDescription());
            insertVulnerability.setString(3, vuln.getCwe());
            insertVulnerability.setFloat(4, vuln.getCvssScore());
            insertVulnerability.setString(5, vuln.getCvssAccessVector());
            insertVulnerability.setString(6, vuln.getCvssAccessComplexity());
            insertVulnerability.setString(7, vuln.getCvssAuthentication());
            insertVulnerability.setString(8, vuln.getCvssConfidentialityImpact());
            insertVulnerability.setString(9, vuln.getCvssIntegrityImpact());
            insertVulnerability.setString(10, vuln.getCvssAvailabilityImpact());
            insertVulnerability.execute();
            try {
                rs = insertVulnerability.getGeneratedKeys();
                rs.next();
                vulnerabilityId = rs.getInt(1);
            } catch (SQLException ex) {
                final String msg = String.format("Unable to retrieve id for new vulnerability for '%s'",
                        vuln.getName());
                throw new DatabaseException(msg, ex);
            } finally {
                DBUtils.closeResultSet(rs);
            }
        }

        PreparedStatement insertReference = getPreparedStatement(INSERT_REFERENCE);
        int countReferences = 0;
        for (Reference r : vuln.getReferences()) {
            insertReference.setInt(1, vulnerabilityId);
            insertReference.setString(2, r.getName());
            insertReference.setString(3, r.getUrl());
            insertReference.setString(4, r.getSource());
            if (isBatchInsertEnabled()) {
                insertReference.addBatch();
                countReferences++;
                if (countReferences % getBatchSize() == 0) {
                    insertReference.executeBatch();
                    insertReference = getPreparedStatement(INSERT_REFERENCE);
                    LOGGER.trace(getLogForBatchInserts(countReferences,
                            "Completed %s batch inserts to references table: %s"));
                    countReferences = 0;
                } else if (countReferences == vuln.getReferences().size()) {
                    if (LOGGER.isTraceEnabled()) {
                        LOGGER.trace(getLogForBatchInserts(countReferences,
                                "Completed %s batch inserts to reference table: %s"));
                    }
                    insertReference.executeBatch();
                    countReferences = 0;
                }
            } else {
                insertReference.execute();
            }
        }

        PreparedStatement insertSoftware = getPreparedStatement(INSERT_SOFTWARE);
        int countSoftware = 0;
        for (VulnerableSoftware vulnerableSoftware : vuln.getVulnerableSoftware()) {
            int cpeProductId = 0;
            final PreparedStatement selectCpeId = getPreparedStatement(SELECT_CPE_ID);
            selectCpeId.setString(1, vulnerableSoftware.getName());
            try {
                rs = selectCpeId.executeQuery();
                if (rs.next()) {
                    cpeProductId = rs.getInt(1);
                }
            } catch (SQLException ex) {
                throw new DatabaseException(
                        "Unable to get primary key for new cpe: " + vulnerableSoftware.getName(), ex);
            } finally {
                DBUtils.closeResultSet(rs);
            }

            if (cpeProductId == 0) {
                final PreparedStatement insertCpe = getPreparedStatement(INSERT_CPE);
                insertCpe.setString(1, vulnerableSoftware.getName());
                insertCpe.setString(2, vulnerableSoftware.getVendor());
                insertCpe.setString(3, vulnerableSoftware.getProduct());
                insertCpe.executeUpdate();
                cpeProductId = DBUtils.getGeneratedKey(insertCpe);
            }
            if (cpeProductId == 0) {
                throw new DatabaseException("Unable to retrieve cpeProductId - no data returned");
            }

            insertSoftware.setInt(1, vulnerabilityId);
            insertSoftware.setInt(2, cpeProductId);

            if (vulnerableSoftware.getPreviousVersion() == null) {
                insertSoftware.setNull(3, java.sql.Types.VARCHAR);
            } else {
                insertSoftware.setString(3, vulnerableSoftware.getPreviousVersion());
            }
            if (isBatchInsertEnabled()) {
                insertSoftware.addBatch();
                countSoftware++;
                if (countSoftware % getBatchSize() == 0) {
                    executeBatch(vuln, vulnerableSoftware, insertSoftware);
                    insertSoftware = getPreparedStatement(INSERT_SOFTWARE);
                    LOGGER.trace(getLogForBatchInserts(countSoftware,
                            "Completed %s batch inserts software table: %s"));
                    countSoftware = 0;
                } else if (countSoftware == vuln.getVulnerableSoftware().size()) {
                    if (LOGGER.isTraceEnabled()) {
                        LOGGER.trace(getLogForBatchInserts(countSoftware,
                                "Completed %s batch inserts software table: %s"));
                    }
                    executeBatch(vuln, vulnerableSoftware, insertSoftware);
                    countSoftware = 0;
                }
            } else {
                try {
                    insertSoftware.execute();
                } catch (SQLException ex) {
                    if (ex.getMessage().contains("Duplicate entry")) {
                        final String msg = String.format("Duplicate software key identified in '%s:%s'",
                                vuln.getName(), vuln.getName());
                        LOGGER.info(msg, ex);
                    } else {
                        throw ex;
                    }
                }
            }
        }
    } catch (SQLException ex) {
        final String msg = String.format("Error updating '%s'", vuln.getName());
        LOGGER.debug(msg, ex);
        throw new DatabaseException(msg, ex);
    } finally {
        DBUtils.closeResultSet(rs);
    }
}

From source file:org.wso2.carbon.appmgt.migration.client.MigrationClientImpl.java

private void migrateMobileAppRatings(Map<String, Float> appRating, String tenantDomain)
        throws APPMMigrationException {
    Connection connection = null;
    PreparedStatement statement = null;

    try {
        if (log.isDebugEnabled()) {
            log.debug("Executing: " + Constants.INSERT_SOCIAL_CACHE);
        }
        connection = getConnection(Constants.SOCIAL_DB_NAME);
        statement = connection.prepareStatement(Constants.INSERT_SOCIAL_CACHE);
        for (Map.Entry<String, Float> entry : appRating.entrySet()) {
            statement.setString(1, entry.getKey());
            Float rating = entry.getValue();
            statement.setInt(2, rating.intValue());
            statement.setInt(3, 1);
            statement.setDouble(4, rating.doubleValue());
            statement.setString(5, tenantDomain);
            statement.addBatch();
        }
        statement.executeBatch();
    } catch (SQLException e) {
        handleException("Error occurred while migrating mobile application ratings for tenant " + tenantDomain,
                e);
    } catch (DataSourceException e) {
        handleException("Error occurred while obtaining datasource connection for " + Constants.SOCIAL_DB_NAME
                + " during mobile application ratings migration for tenant " + tenantDomain, e);
    } finally {
        closeConnection(connection);
        if (statement != null) {
            try {
                statement.close();
            } catch (SQLException e) {
                handleException(
                        "Error occurred while closing prepared statement for Mobile app Social Cache update "
                                + "for tenant " + tenantDomain,
                        e);
            }
        }
    }

}

From source file:broadwick.data.readers.DataFileReader.java

/**
 * Perform the insertion into the database.
 * @param connection      the connection to the database.
 * @param tableName       the name of the table into which the data will be put.
 * @param insertString    the command used to insert a row into the database.
 * @param dataFile        the [CSV] file that contained the data.
 * @param dateFormat      the format of the date in the file.
 * @param insertedColInfo a map of column name to column in the data file.
 * @param dateFields      a collection of columns in the csv file that contains date fields.
 * @return the number of rows inserted.
 */
protected final int insert(final Connection connection, final String tableName, final String insertString,
        final String dataFile, final String dateFormat, final Map<String, Integer> insertedColInfo,
        final Collection<Integer> dateFields) {

    int inserted = 0;
    try {
        // Now do the insertion.
        log.trace("Inserting into {} via {}", tableName, insertString);
        PreparedStatement pstmt = connection.prepareStatement(insertString);
        log.trace("Prepared statement = {}", pstmt.toString());

        try (FileInput instance = new FileInput(dataFile, ",")) {
            final StopWatch sw = new StopWatch();
            sw.start();
            List<String> data = instance.readLine();
            while (data != null && !data.isEmpty()) {
                int parameterIndex = 1;
                for (Map.Entry<String, Integer> entry : insertedColInfo.entrySet()) {
                    if (entry.getValue() == -1) {
                        pstmt.setObject(parameterIndex, null);
                    } else {
                        final String value = data.get(entry.getValue() - 1);
                        if (dateFields.contains(entry.getValue())) {
                            int dateField = Integer.MAX_VALUE;
                            if (value != null && !value.isEmpty()) {
                                dateField = BroadwickConstants.getDate(value, dateFormat);
                            }
                            pstmt.setObject(parameterIndex, dateField);
                        } else {
                            pstmt.setObject(parameterIndex, value);
                        }
                    }
                    parameterIndex++;
                }
                pstmt.addBatch();
                try {
                    // Execute the single-row batch immediately so a failure can be
                    // attributed to the current line of the file.
                    pstmt.executeBatch();
                    inserted++;
                } catch (SQLException ex) {
                    if ("23505".equals(ex.getSQLState())) {
                        // Ignore rows that duplicate data already in the database.
                        log.warn("Duplicate data found for {}: continuing despite errors: {}", data.get(0),
                                ex.getLocalizedMessage());
                        log.trace("{}", Throwables.getStackTraceAsString(ex));
                    } else {
                        throw ex;
                    }
                }
                if (inserted > 0 && inserted % 250000 == 0) {
                    log.trace("Inserted {} rows in {}", inserted, sw.toString());
                    connection.commit();
                    pstmt.close();
                    pstmt = connection.prepareStatement(insertString);
                }

                data = instance.readLine();
            }
            connection.commit();

        } catch (IOException ex) {
            log.error("IO error : {}", ex.getLocalizedMessage());
            log.trace("{}", Throwables.getStackTraceAsString(ex));
        } catch (SQLException ex) {
            log.error("SQL Error : {}", ex.getLocalizedMessage());
            log.trace("{}", Throwables.getStackTraceAsString(ex));
            throw ex;
        } finally {
            pstmt.close();
        }
    } catch (SQLException ex) {
        log.error("{}", ex.getLocalizedMessage());
        log.trace("{}", Throwables.getStackTraceAsString(ex));
        throw new BroadwickException(ex);
    }

    return inserted;
}

From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java

@Override
public void moveEnrolmentMappings() throws ArchivalDAOException {
    Statement stmt = null;
    PreparedStatement stmt2 = null;
    Statement stmt3 = null;
    ResultSet rs = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        String sql = "SELECT * FROM DM_ENROLMENT_OP_MAPPING WHERE OPERATION_ID IN "
                + "(SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt = this.createMemoryEfficientStatement(conn);
        rs = stmt.executeQuery(sql);

        Connection conn2 = ArchivalDestinationDAOFactory.getConnection();

        sql = "INSERT INTO DM_ENROLMENT_OP_MAPPING_ARCH VALUES(?, ?, ?, ?, ?, ?, ?,?)";
        stmt2 = conn2.prepareStatement(sql);

        int count = 0;
        while (rs.next()) {
            stmt2.setInt(1, rs.getInt("ID"));
            stmt2.setInt(2, rs.getInt("ENROLMENT_ID"));
            stmt2.setInt(3, rs.getInt("OPERATION_ID"));
            stmt2.setString(4, rs.getString("STATUS"));
            stmt2.setString(5, rs.getString("PUSH_NOTIFICATION_STATUS"));
            stmt2.setInt(6, rs.getInt("CREATED_TIMESTAMP"));
            stmt2.setInt(7, rs.getInt("UPDATED_TIMESTAMP"));
            stmt2.setTimestamp(8, this.currentTimestamp);
            stmt2.addBatch();

            if (++count % batchSize == 0) {
                stmt2.executeBatch();
                if (log.isDebugEnabled()) {
                    log.debug("Executing batch " + count);
                }
            }
        }
        stmt2.executeBatch();
        if (log.isDebugEnabled()) {
            log.debug(
                    count + " [ENROLMENT_OP_MAPPING] Records copied to the archival table. Starting deletion");
        }
        sql = "DELETE FROM DM_ENROLMENT_OP_MAPPING WHERE OPERATION_ID IN ("
                + "SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt3 = conn.createStatement();
        int affected = stmt3.executeUpdate(sql);
        if (log.isDebugEnabled()) {
            log.debug(affected + " Rows deleted");
        }
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while moving enrolment mappings", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt, rs);
        ArchivalDAOUtil.cleanupResources(stmt2);
        ArchivalDAOUtil.cleanupResources(stmt3);
    }
}

From source file:com.archivas.clienttools.arcutils.utils.database.ManagedJobSchema.java

public void markFilesReadyToProcess(Collection<ArcProcessFile> filesReadyToProcess) throws DatabaseException {
    synchronized (DatabaseResourceManager.DB_LOCK) {
        PooledDbConnection conn = null;
        try {
            conn = connPool.getConnection();
            conn.setAutoCommit(false);
            PreparedStatement stmt = conn.prepareStatement(MARK_FILES_READY_TO_PROCESS_STMT_NAME,
                    markFilesReadyToProcessSql);
            long sizeToAdd = 0;
            for (ArcProcessFile file : filesReadyToProcess) {
                long fileSize = file.getSourceFile().getSize();
                sizeToAdd += fileSize;
                stmt.clearParameters();
                stmt.setLong(1, fileSize);
                stmt.setLong(2, file.getDatabaseRecordId());
                stmt.addBatch();
            }
            stmt.executeBatch();

            // now update overall job stats to reflect these changes
            ManagedJobsSchema.getInstance().updateTotalFilesStats(conn, jobId, filesReadyToProcess.size(),
                    sizeToAdd);

            conn.commit();
        } catch (Exception e) {
            rollback(conn);
            throw new DatabaseException(
                    DBUtils.getErrorMessage("An error occurred marking files ready to process", e), e);
        } finally {
            connPool.returnConnection(conn);
        }
    }
}
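
Here the batch runs inside an explicit transaction (setAutoCommit(false) ... commit()), so the per-file updates and the job-level statistics either both persist or are rolled back together.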

From source file:org.wso2.carbon.apimgt.migration.client.MigrateFrom110to200.java

private void updateIdnConsumerApps(PreparedStatement consumerAppsInsert,
        ConsumerAppsTableDTO consumerAppsTableDTO) throws SQLException {
    consumerAppsInsert.setString(1, consumerAppsTableDTO.getConsumerKey().getDecryptedConsumerKey());
    consumerAppsInsert.setString(2, consumerAppsTableDTO.getConsumerSecret());
    consumerAppsInsert.setString(3, consumerAppsTableDTO.getUsername());
    consumerAppsInsert.setInt(4, consumerAppsTableDTO.getTenantID());
    consumerAppsInsert.setString(5, consumerAppsTableDTO.getAppName());
    consumerAppsInsert.setString(6, consumerAppsTableDTO.getOauthVersion());
    consumerAppsInsert.setString(7, consumerAppsTableDTO.getCallbackURL());
    consumerAppsInsert.setString(8, consumerAppsTableDTO.getGrantTypes());
    consumerAppsInsert.addBatch();
}
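
This helper only stages a row with addBatch(); the caller is expected to invoke executeBatch() on consumerAppsInsert once all ConsumerAppsTableDTO rows have been added.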

From source file:datawarehouse.CSVLoader.java

/**
 * Parse CSV file using OpenCSV library and load in given database table.
 *
 * @param csvFile Input CSV file
 * @param tableName Database table name to import data
 * @param truncateBeforeLoad Truncate the table before inserting new
 * records.
 * @throws Exception
 */
public void loadCSV(String csvFile, String tableName, boolean truncateBeforeLoad) throws Exception {

    CSVReader csvReader = null;
    if (null == this.connection) {
        throw new Exception("Not a valid connection.");
    }
    try {

        csvReader = new CSVReader(new FileReader(csvFile), this.seprator);

    } catch (Exception e) {
        e.printStackTrace();
        throw new Exception("Error occured while executing file. " + e.getMessage());
    }

    String[] headerRow = csvReader.readNext();

    if (null == headerRow) {
        throw new FileNotFoundException(
                "No columns defined in given CSV file. Please check the CSV file format.");
    }

    String questionmarks = StringUtils.repeat("?,", headerRow.length);
    questionmarks = (String) questionmarks.subSequence(0, questionmarks.length() - 1);

    String query = SQL_INSERT.replaceFirst(TABLE_REGEX, tableName);
    query = query.replaceFirst(KEYS_REGEX, StringUtils.join(headerRow, ","));
    query = query.replaceFirst(VALUES_REGEX, questionmarks);

    System.out.println("Query: " + query);

    String[] nextLine;
    Connection con = null;
    PreparedStatement ps = null;
    try {
        con = this.connection;
        con.setAutoCommit(false);
        ps = con.prepareStatement(query);

        if (truncateBeforeLoad) {
            //delete data from table before loading csv
            con.createStatement().execute("DELETE FROM " + tableName);
        }

        final int batchSize = 1000;
        int count = 0;
        while ((nextLine = csvReader.readNext()) != null) {
            if (null != nextLine) {
                int index = 1;
                for (String string : nextLine) {
                    try {
                        // Dates use pattern dd.MM.yyyy (MM = month, not mm = minutes);
                        // strict parsing avoids misreading numeric strings as dates.
                        DateFormat format = new SimpleDateFormat("dd.MM.yyyy");
                        format.setLenient(false);
                        Date date = format.parse(string);
                        ps.setDate(index++, new java.sql.Date(date.getTime()));
                    } catch (ParseException | SQLException e) {
                        try {
                            // Next, try a decimal value (comma as decimal separator).
                            Double income = parseDouble(string.replace(",", "."));
                            ps.setDouble(index++, income);
                        } catch (NumberFormatException | SQLException err) {
                            // Neither a date nor a number: store as plain text.
                            ps.setString(index++, string);
                        }
                    }
                }
                ps.addBatch();
            }
            if (++count % batchSize == 0) {
                ps.executeBatch();
            }
        }
        ps.executeBatch(); // insert remaining records
        con.commit();
    } catch (Exception e) {
        con.rollback();
        e.printStackTrace();
        throw new Exception("Error occured while loading data from file to database." + e.getMessage());
    } finally {
        if (null != ps) {
            ps.close();
        }
        if (null != con) {
            con.close();
        }

        csvReader.close();
    }
}

From source file:HSqlManager.java

private static void commonInitialize(int bps, Connection connection) throws SQLException, IOException {
    String base = new File("").getAbsolutePath();
    CSV.makeDirectory(new File(base + "/PhageData"));
    INSTANCE = ImportPhagelist.getInstance();
    INSTANCE.parseAllPhages(bps);
    written = true;
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    stat.execute("SET FILES LOG FALSE\n");
    PreparedStatement st = db.prepareStatement("Insert INTO Primerdb.Primers"
            + "(Bp,Sequence, CommonP, UniqueP, Picked, Strain, Cluster)" + " Values(?,?,true,false,false,?,?)");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);
    }
    // PreparedStatement is not thread-safe, so iterate the strains serially.
    phages.stream().map(x -> x[0]).collect(Collectors.toSet()).forEach(x -> {
        phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]).collect(Collectors.toSet()).forEach(z -> {
            try {
                List<String> clustphages = phages.stream().filter(a -> a[0].equals(x) && a[1].equals(z))
                        .map(a -> a[2]).collect(Collectors.toList());
                Set<String> primers = Collections.synchronizedSet(CSV
                        .readCSV(base + "/PhageData/" + Integer.toString(bps) + clustphages.get(0) + ".csv"));
                clustphages.remove(0);
                clustphages.parallelStream().forEach(phage -> {
                    primers.retainAll(
                            CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv"));
                });
                int i = 0;
                for (CharSequence a : primers) {
                    try {
                        // Stage a common-primer row for this strain and cluster.
                        st.setInt(1, bps);
                        st.setString(2, a.toString());
                        st.setString(3, x);
                        st.setString(4, z);
                        st.addBatch();
                    } catch (SQLException e) {
                        e.printStackTrace();
                        System.out.println("Error occurred at " + x + " " + z);
                    }
                    i++;
                    if (i == 1000) {
                        i = 0;
                        st.executeBatch();
                        db.commit();
                    }
                }
                if (i > 0) {
                    st.executeBatch();
                    db.commit();
                }
            } catch (SQLException e) {
                e.printStackTrace();
                System.out.println("Error occurred at " + x + " " + z);
            }
        });
    });
    stat.execute("SET FILES LOG TRUE\n");
    st.close();
    stat.close();
    System.out.println("Common Updated");
}