Example usage for java.sql PreparedStatement addBatch

List of usage examples for java.sql PreparedStatement addBatch

Introduction

In this page you can find the example usage for java.sql PreparedStatement addBatch.

Prototype

void addBatch() throws SQLException;

Source Link

Document

Adds a set of parameters to this PreparedStatement object's batch of commands.

Usage

From source file:com.oltpbenchmark.benchmarks.seats.SEATSLoader.java

/**
 * /*from w ww .j a  v a  2s. c om*/
 * @param catalog_tbl
 */
public void loadTable(Table catalog_tbl, Iterable<Object[]> iterable, int batch_size) {
    // Special Case: Airport Locations
    final boolean is_airport = catalog_tbl.getName().equals(SEATSConstants.TABLENAME_AIRPORT);

    if (LOG.isDebugEnabled())
        LOG.debug(String.format("Generating new records for table %s [batchSize=%d]", catalog_tbl.getName(),
                batch_size));
    final List<Column> columns = catalog_tbl.getColumns();

    // Check whether we have any special mappings that we need to maintain
    Map<Integer, Integer> code_2_id = new HashMap<Integer, Integer>();
    Map<Integer, Map<String, Long>> mapping_columns = new HashMap<Integer, Map<String, Long>>();
    for (int col_code_idx = 0, cnt = columns.size(); col_code_idx < cnt; col_code_idx++) {
        Column catalog_col = columns.get(col_code_idx);
        String col_name = catalog_col.getName();

        // Code Column -> Id Column Mapping
        // Check to see whether this table has columns that we need to map their
        // code values to tuple ids
        String col_id_name = this.profile.code_columns.get(col_name);
        if (col_id_name != null) {
            Column catalog_id_col = catalog_tbl.getColumnByName(col_id_name);
            assert (catalog_id_col != null) : "The id column " + catalog_tbl.getName() + "." + col_id_name
                    + " is missing";
            int col_id_idx = catalog_tbl.getColumnIndex(catalog_id_col);
            code_2_id.put(col_code_idx, col_id_idx);
        }

        // Foreign Key Column to Code->Id Mapping
        // If this columns references a foreign key that is used in the Code->Id mapping
        // that we generating above, then we need to know when we should change the 
        // column value from a code to the id stored in our lookup table
        if (this.profile.fkey_value_xref.containsKey(col_name)) {
            String col_fkey_name = this.profile.fkey_value_xref.get(col_name);
            mapping_columns.put(col_code_idx, this.profile.code_id_xref.get(col_fkey_name));
        }
    } // FOR

    int row_idx = 0;
    int row_batch = 0;

    try {
        String insert_sql = SQLUtil.getInsertSQL(catalog_tbl);
        PreparedStatement insert_stmt = this.conn.prepareStatement(insert_sql);
        int sqlTypes[] = catalog_tbl.getColumnTypes();

        for (Object tuple[] : iterable) {
            assert (tuple[0] != null) : "The primary key for " + catalog_tbl.getName() + " is null";

            // AIRPORT 
            if (is_airport) {
                // Skip any airport that does not have flights
                int col_code_idx = catalog_tbl.getColumnByName("AP_CODE").getIndex();
                if (profile.hasFlights((String) tuple[col_code_idx]) == false) {
                    if (LOG.isTraceEnabled())
                        LOG.trace(String.format("Skipping AIRPORT '%s' because it does not have any flights",
                                tuple[col_code_idx]));
                    continue;
                }

                // Update the row # so that it matches what we're actually loading
                int col_id_idx = catalog_tbl.getColumnByName("AP_ID").getIndex();
                tuple[col_id_idx] = (long) (row_idx + 1);

                // Store Locations
                int col_lat_idx = catalog_tbl.getColumnByName("AP_LATITUDE").getIndex();
                int col_lon_idx = catalog_tbl.getColumnByName("AP_LONGITUDE").getIndex();
                Pair<Double, Double> coords = Pair.of((Double) tuple[col_lat_idx], (Double) tuple[col_lon_idx]);
                if (coords.first == null || coords.second == null) {
                    LOG.error(Arrays.toString(tuple));
                }
                assert (coords.first != null) : String.format("Unexpected null latitude for airport '%s' [%d]",
                        tuple[col_code_idx], col_lat_idx);
                assert (coords.second != null) : String.format(
                        "Unexpected null longitude for airport '%s' [%d]", tuple[col_code_idx], col_lon_idx);
                this.airport_locations.put(tuple[col_code_idx].toString(), coords);
                if (LOG.isTraceEnabled())
                    LOG.trace(String.format("Storing location for '%s': %s", tuple[col_code_idx], coords));
            }

            // Code Column -> Id Column
            for (int col_code_idx : code_2_id.keySet()) {
                assert (tuple[col_code_idx] != null) : String.format(
                        "The value of the code column at '%d' is null for %s\n%s", col_code_idx,
                        catalog_tbl.getName(), Arrays.toString(tuple));
                String code = tuple[col_code_idx].toString().trim();
                if (code.length() > 0) {
                    Column from_column = columns.get(col_code_idx);
                    assert (from_column != null);
                    Column to_column = columns.get(code_2_id.get(col_code_idx));
                    assert (to_column != null) : String.format("Invalid column %s.%s", catalog_tbl.getName(),
                            code_2_id.get(col_code_idx));
                    long id = (Long) tuple[code_2_id.get(col_code_idx)];
                    if (LOG.isTraceEnabled())
                        LOG.trace(String.format("Mapping %s '%s' -> %s '%d'", from_column.fullName(), code,
                                to_column.fullName(), id));
                    this.profile.code_id_xref.get(to_column.getName()).put(code, id);
                }
            } // FOR

            // Foreign Key Code -> Foreign Key Id
            for (int col_code_idx : mapping_columns.keySet()) {
                Column catalog_col = columns.get(col_code_idx);
                assert (tuple[col_code_idx] != null || catalog_col.isNullable()) : String.format(
                        "The code %s column at '%d' is null for %s id=%s\n%s", catalog_col.fullName(),
                        col_code_idx, catalog_tbl.getName(), tuple[0], Arrays.toString(tuple));
                if (tuple[col_code_idx] != null) {
                    String code = tuple[col_code_idx].toString();
                    tuple[col_code_idx] = mapping_columns.get(col_code_idx).get(code);
                    if (LOG.isTraceEnabled())
                        LOG.trace(String.format("Mapped %s '%s' -> %s '%s'", catalog_col.fullName(), code,
                                catalog_col.getForeignKey().fullName(), tuple[col_code_idx]));
                }
            } // FOR

            for (int i = 0; i < tuple.length; i++) {
                try {
                    if (tuple[i] != null) {
                        insert_stmt.setObject(i + 1, tuple[i]);
                    } else {
                        insert_stmt.setNull(i + 1, sqlTypes[i]);
                    }
                } catch (SQLDataException ex) {
                    LOG.error("INVALID " + catalog_tbl.getName() + " TUPLE: " + Arrays.toString(tuple));
                    throw new RuntimeException("Failed to set value for " + catalog_tbl.getColumn(i).fullName(),
                            ex);
                }
            } // FOR
            insert_stmt.addBatch();
            row_idx++;

            if (++row_batch >= batch_size) {
                LOG.debug(String.format("Loading %s batch [total=%d]", catalog_tbl.getName(), row_idx));
                insert_stmt.executeBatch();
                conn.commit();
                insert_stmt.clearBatch();
                row_batch = 0;
            }

        } // FOR

        if (row_batch > 0) {
            insert_stmt.executeBatch();
            conn.commit();
        }
        insert_stmt.close();
    } catch (Exception ex) {
        throw new RuntimeException("Failed to load table " + catalog_tbl.getName(), ex);
    }

    if (is_airport)
        assert (this.profile.getAirportCount() == row_idx) : String.format("%d != %d",
                profile.getAirportCount(), row_idx);

    // Record the number of tuples that we loaded for this table in the profile
    if (catalog_tbl.getName().equals(SEATSConstants.TABLENAME_RESERVATION)) {
        this.profile.num_reservations = row_idx + 1;
    }

    LOG.info(String.format("Finished loading all %d tuples for %s [%d / %d]", row_idx, catalog_tbl.getName(),
            this.finished.incrementAndGet(), this.getCatalog().getTableCount()));
    return;
}

From source file:com.flexive.ejb.beans.structure.TypeEngineBean.java

/**
 * Update an existing type
 *
 * @param type the type to update
 * @return id of the type
 * @throws FxApplicationException on errors
 */
private long update(FxTypeEdit type) throws FxApplicationException {
    // Fast path: nothing to do if the editable type reports no changes
    if (!type.isChanged())
        return type.getId();

    final UserTicket ticket = FxContext.getUserTicket();
    FxPermissionUtils.checkRole(ticket, Role.StructureManagement);
    final FxEnvironment environment = CacheAdmin.getEnvironment();

    if (StringUtils.isEmpty(type.getName()))
        throw new FxInvalidParameterException("NAME", "ex.structure.update.nameMissing");

    //security checks start
    if (!ticket.mayEditACL(type.getACL().getId(), 0))
        throw new FxNoAccessException("ex.acl.noAccess.edit", type.getACL().getName());
    //security checks end

    boolean needReload = false; //full reload only needed if assignments have changed
    Connection con = null;
    // Single PreparedStatement handle that is re-created for each conditional
    // update below; the "if (ps != null) ps.close();" guards before each
    // prepareStatement() prevent leaking the previous statement.
    PreparedStatement ps = null;
    // Snapshot of the persisted type, used to diff against the edited one
    FxType orgType = environment.getType(type.getId());

    StringBuilder sql = new StringBuilder(500);

    try {
        con = Database.getDbConnection();
        long instanceCount = -1; //number of instances, lazily fetched on first use
        //start name change
        if (!orgType.getName().equals(type.getName())) {
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET NAME=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setString(1, type.getName());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            //update all xpaths affected
            ps.close();
            sql.setLength(0);
            // Rewrites the leading "<oldName>/" prefix of assignment XPaths;
            // the regexp-like predicate anchors the match at the start (^)
            sql.append("UPDATE ").append(TBL_STRUCT_ASSIGNMENTS)
                    .append(" SET XPATH=REPLACE(XPATH, ?, ?) WHERE TYPEDEF=? AND ")
                    .append(StorageManager.getRegExpLikeOperator("XPATH", "?"));
            ps = con.prepareStatement(sql.toString());
            ps.setString(1, orgType.getName() + "/");
            ps.setString(2, type.getName() + "/");
            ps.setLong(3, type.getId());
            ps.setString(4, "^" + orgType.getName() + "/");
            int changed = ps.executeUpdate();
            if (changed > 0)
                needReload = true;
            htracker.track(orgType, "history.type.update.name", orgType.getName(), type.getName(), type.getId(),
                    changed);
        }
        //end name change

        //start description change
        if (!orgType.getLabel().equals(type.getLabel())) {
            Database.storeFxString(type.getLabel(), con, TBL_STRUCT_TYPES, "DESCRIPTION", "ID", type.getId());
            htracker.track(orgType, "history.type.update.description", orgType.getLabel(), type.getLabel());
        }
        //end description change

        //start type mode changes
        if (type.getMode().getId() != orgType.getMode().getId()) {
            if (instanceCount < 0)
                instanceCount = getInstanceCount(con, type.getId());
            //allow relation => content (removing all relation specific entries) but content => relation requires 0 instances!
            if ((type.getMode() == TypeMode.Relation && orgType.getMode() == TypeMode.Content
                    && instanceCount > 0) || orgType.getMode() == TypeMode.Preload
                    || type.getMode() == TypeMode.Preload)
                throw new FxUpdateException("ex.structure.type.typeMode.notUpgradeable", orgType.getMode(),
                        type.getMode(), orgType.getName());
            if (type.getMode() == TypeMode.Content) {
                if (type.getRelations().size() > 0) {
                    //TODO: remove relation mappings
                    throw new FxUpdateException("ex.notImplemented", "Remove all relation mappings for type");
                }
                if (instanceCount > 0) {
                    //TODO: remove all relation specific entries for existing contents
                    throw new FxUpdateException("ex.notImplemented",
                            "Remove all relation specific entries for existing contents");
                }
            }
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET TYPE_MODE=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setLong(1, type.getMode().getId());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.typeMode", orgType.getMode(), type.getMode());
        }
        //end type mode changes

        //start relation changes
        if (type.getAddedRelations().size() > 0) {
            sql.setLength(0);
            sql.append("INSERT INTO ").append(TBL_STRUCT_TYPERELATIONS)
                    .append(" (TYPEDEF,TYPESRC,TYPEDST,MAXSRC,MAXDST)VALUES(?,?,?,?,?)");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            // Parameter 1 (TYPEDEF) is constant for every batched row; JDBC
            // parameter values persist across addBatch() calls, so it only
            // needs to be set once outside the loop.
            ps.setLong(1, type.getId());
            for (FxTypeRelation rel : type.getAddedRelations()) {
                if (rel.getSource().isRelation())
                    throw new FxInvalidParameterException("ex.structure.type.relation.wrongTarget",
                            type.getName(), rel.getSource().getName());
                if (rel.getDestination().isRelation())
                    throw new FxInvalidParameterException("ex.structure.type.relation.wrongTarget",
                            type.getName(), rel.getDestination().getName());
                ps.setLong(2, rel.getSource().getId());
                ps.setLong(3, rel.getDestination().getId());
                ps.setLong(4, rel.getMaxSource());
                ps.setLong(5, rel.getMaxDestination());
                ps.addBatch();
                htracker.track(type, "history.type.update.relation.add", type.getName(),
                        rel.getSource().getName(), rel.getMaxSource(), rel.getDestination().getName(),
                        rel.getMaxDestination());
            }
            ps.executeBatch();
        }
        if (type.getUpdatedRelations().size() > 0) {
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPERELATIONS).
            //                  1        2               3             4             5
                    append(" SET MAXSRC=?,MAXDST=? WHERE TYPEDEF=? AND TYPESRC=? AND TYPEDST=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            // TYPEDEF (param 3) is constant across the batch, set once
            ps.setLong(3, type.getId());
            for (FxTypeRelation rel : type.getUpdatedRelations()) {
                if (rel.getSource().isRelation())
                    throw new FxInvalidParameterException("ex.structure.type.relation.wrongTarget",
                            type.getName(), rel.getSource().getName());
                if (rel.getDestination().isRelation())
                    throw new FxInvalidParameterException("ex.structure.type.relation.wrongTarget",
                            type.getName(), rel.getDestination().getName());
                //TODO: check if maxSource/maxDestination is not violated if > 0
                ps.setLong(4, rel.getSource().getId());
                ps.setLong(5, rel.getDestination().getId());
                ps.setLong(1, rel.getMaxSource());
                ps.setLong(2, rel.getMaxDestination());
                ps.addBatch();
                htracker.track(type, "history.type.update.relation.update", type.getName(),
                        rel.getSource().getName(), rel.getMaxSource(), rel.getDestination().getName(),
                        rel.getMaxDestination());
            }
            ps.executeBatch();
        }
        if (type.getRemovedRelations().size() > 0) {
            sql.setLength(0);
            sql.append("DELETE FROM ").append(TBL_STRUCT_TYPERELATIONS).
            //                     1            2              3
                    append(" WHERE TYPEDEF=? AND TYPESRC=? AND TYPEDST=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            // TYPEDEF (param 1) is constant across the batch, set once
            ps.setLong(1, type.getId());
            for (FxTypeRelation rel : type.getRemovedRelations()) {
                if (rel.getSource().isRelation())
                    throw new FxInvalidParameterException("ex.structure.type.relation.wrongTarget",
                            type.getName(), rel.getSource().getName());
                if (rel.getDestination().isRelation())
                    throw new FxInvalidParameterException("ex.structure.type.relation.wrongTarget",
                            type.getName(), rel.getDestination().getName());
                int[] rels = getRelationCount(con, type.getId(), rel.getSource().getId(),
                        rel.getDestination().getId());
                // Existing relation instances either block the removal or are
                // purged first, depending on the edit's removal flag
                if (!type.isRemoveInstancesWithRelationTypes() && rels[0] > 0)
                    throw new FxRemoveException("ex.structure.type.relation.relationsExist", type.getName(),
                            rel.getSource().getName(), rel.getDestination().getName(), rels[0]);
                else if (type.isRemoveInstancesWithRelationTypes() && rels[0] > 0) {
                    removeRelationEntries(con, type.getId(), rel.getSource().getId(),
                            rel.getDestination().getId());
                }
                ps.setLong(2, rel.getSource().getId());
                ps.setLong(3, rel.getDestination().getId());
                ps.addBatch();
                htracker.track(type, "history.type.update.relation.remove", type.getName(),
                        rel.getSource().getName(), rel.getMaxSource(), rel.getDestination().getName(),
                        rel.getMaxDestination());
            }
            ps.executeBatch();
        }
        //end relation changes

        //start ACL changes
        if (!type.getACL().equals(orgType.getACL())) {
            if (type.getACL().getCategory() != ACLCategory.STRUCTURE)
                throw new FxInvalidParameterException("ACL", "ex.acl.category.invalid",
                        type.getACL().getCategory(), ACLCategory.STRUCTURE);
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET ACL=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setLong(1, type.getACL().getId());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.acl", orgType.getACL(), type.getACL());
        }
        //end ACL changes

        //start default instance ACL changes
        if (type.hasDefaultInstanceACL() != orgType.hasDefaultInstanceACL()
                || !type.getDefaultInstanceACL().equals(orgType.getDefaultInstanceACL())) {
            if (type.hasDefaultInstanceACL()
                    && type.getDefaultInstanceACL().getCategory() != ACLCategory.INSTANCE)
                throw new FxInvalidParameterException("DEFACL", "ex.acl.category.invalid",
                        type.getDefaultInstanceACL().getCategory(), ACLCategory.INSTANCE);
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET DEFACL=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            if (type.hasDefaultInstanceACL())
                ps.setLong(1, type.getDefaultInstanceACL().getId());
            else
                ps.setNull(1, java.sql.Types.INTEGER);
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.defacl",
                    orgType.hasDefaultInstanceACL() ? orgType.getACL() : "-",
                    type.hasDefaultInstanceACL() ? type.getDefaultInstanceACL() : "-");
        }
        //end default instance ACL changes

        //start Workflow changes
        if (!type.getWorkflow().equals(orgType.getWorkflow())) {

            if (instanceCount < 0)
                instanceCount = getInstanceCount(con, type.getId());
            if (instanceCount > 0) {
                //Workflow can not be changed with existing instances -> there is no way to reliably
                //map steps of one workflow to steps of another (even not using stepdefinitions since they
                //can be used multiple times). A possible solution would be to provide a mapping when changing
                //workflows but this should be to seldom the case to bother implementing it
                throw new FxUpdateException("ex.notImplemented", "Workflow changes with existing instance");
            }
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET WORKFLOW=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setLong(1, type.getWorkflow().getId());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.workflow", orgType.getWorkflow(), type.getWorkflow());
        }
        //end Workflow changes

        //start Category changes
        if (!type.getCategory().equals(orgType.getCategory())) {
            if (!ticket.isGlobalSupervisor())
                throw new FxUpdateException("ex.structure.type.category.notSupervisor", orgType.getCategory(),
                        type.getCategory(), orgType.getName());
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET CATEGORY=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setInt(1, type.getCategory().getId());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
        }
        //end Category changes

        //start language mode changes
        if (!type.getLanguage().equals(orgType.getLanguage())) {
            if (instanceCount < 0)
                instanceCount = getInstanceCount(con, type.getId());
            // Language mode may only change when no instances exist or the
            // old mode is upgradeable to the new one
            if (instanceCount <= 0 || orgType.getLanguage().isUpgradeable(type.getLanguage())) {
                sql.setLength(0);
                sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET LANG_MODE=? WHERE ID=?");
                if (ps != null)
                    ps.close();
                ps = con.prepareStatement(sql.toString());
                ps.setInt(1, type.getLanguage().getId());
                ps.setLong(2, type.getId());
                ps.executeUpdate();
                htracker.track(type, "history.type.update.languageMode", orgType.getLanguage().name(),
                        type.getLanguage().name());
            } else
                throw new FxUpdateException("ex.structure.type.languageMode.notUpgradeable",
                        orgType.getLanguage(), type.getLanguage(), orgType.getName());
        }
        //end language mode changes

        //start state changes
        if (type.getState().getId() != orgType.getState().getId()) {
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET TYPE_STATE=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setInt(1, type.getState().getId());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.state", orgType.getState().name(),
                    type.getState().name());
        }
        //end state changes

        //start permission changes
        if (type.getBitCodedPermissions() != orgType.getBitCodedPermissions()) {
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET SECURITY_MODE=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setInt(1, type.getBitCodedPermissions());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.perm",
                    FxPermissionUtils.toString(orgType.getBitCodedPermissions()),
                    FxPermissionUtils.toString(type.getBitCodedPermissions()));
        }
        //end permission changes

        //start multiple ACL setting changes
        if (type.isMultipleContentACLs() != orgType.isMultipleContentACLs()) {
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET MULTIPLE_CONTENT_ACLS=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setBoolean(1, type.isMultipleContentACLs());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.multipleContentACLs", orgType.isMultipleContentACLs(),
                    type.isMultipleContentACLs());
        }
        //end multiple ACL setting changes

        //start isIncludedInSupertypeQueries setting changes
        if (type.isIncludedInSupertypeQueries() != orgType.isIncludedInSupertypeQueries()) {
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET INSUPERTYPEQUERY=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setBoolean(1, type.isIncludedInSupertypeQueries());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.inSupertypeQueries",
                    orgType.isIncludedInSupertypeQueries(), type.isIncludedInSupertypeQueries());
        }
        //end isIncludedInSupertypeQueries setting changes

        //start history track/age changes
        if (type.isTrackHistory() != orgType.isTrackHistory()
                || type.getHistoryAge() != orgType.getHistoryAge()) {
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES)
                    .append(" SET TRACKHISTORY=?, HISTORY_AGE=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setBoolean(1, type.isTrackHistory());
            ps.setLong(2, type.getHistoryAge());
            ps.setLong(3, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.history", orgType.isTrackHistory(), type.isTrackHistory(),
                    orgType.getHistoryAge(), type.getHistoryAge());
        }
        //end history track/age changes

        //start max.ver changes
        if (type.getMaxVersions() != orgType.getMaxVersions()) {
            //TODO: remove any versions that would exceed this count
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET MAX_VERSIONS=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setLong(1, type.getMaxVersions());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.maxVersions", orgType.getMaxVersions(),
                    type.getMaxVersions());
        }
        //end max.ver changes

        //start isAutoVersion setting changes
        if (type.isAutoVersion() != orgType.isAutoVersion()) {
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET AUTO_VERSION=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setBoolean(1, type.isAutoVersion());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.isAutoVersion", orgType.isAutoVersion(),
                    type.isAutoVersion());
        }
        //end isAutoVersion setting changes

        //start max source relations changes
        if (type.isRelation() && type.getMaxRelSource() != orgType.getMaxRelSource()) {
            //TODO: check if the new condition is not violated by existing data
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET REL_TOTAL_MAXSRC=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setLong(1, type.getMaxRelSource());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.maxRelSource", orgType.getMaxRelSource(),
                    type.getMaxRelSource());
        }
        //end max source relations changes

        //start max destination relations changes
        if (type.isRelation() && type.getMaxRelDestination() != orgType.getMaxRelDestination()) {
            //TODO: check if the new condition is not violated by existing data
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET REL_TOTAL_MAXDST=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            ps.setLong(1, type.getMaxRelDestination());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            htracker.track(type, "history.type.update.maxRelDest", orgType.getMaxRelDestination(),
                    type.getMaxRelDestination());
        }
        //end max destination relations changes

        //start icon
        if (!type.getIcon().equals(orgType.getIcon())) {
            sql.setLength(0);
            sql.append("UPDATE ").append(TBL_STRUCT_TYPES).append(" SET ICON_REF=? WHERE ID=?");
            if (ps != null)
                ps.close();
            ps = con.prepareStatement(sql.toString());
            if (type.getIcon().isEmpty())
                ps.setNull(1, java.sql.Types.INTEGER);
            else
                ps.setLong(1, type.getIcon().getDefaultTranslation().getId());
            ps.setLong(2, type.getId());
            ps.executeUpdate();
            needReload = true;
            // NOTE(review): unlike the guarded setNull above, this tracking
            // call dereferences both icons' default translations
            // unconditionally — confirm getDefaultTranslation() is safe for
            // an empty icon, otherwise this can fail when either side is empty
            htracker.track(type, "history.type.update.icon", orgType.getIcon().getDefaultTranslation().getId(),
                    type.getIcon().getDefaultTranslation().getId());
        }
        //end icon

        // structure option changes
        boolean optionsChanged = updateTypeOptions(con, type, orgType);
        // check if any type options must be propagated to derived types
        if (type.getDerivedTypes().size() > 0) {
            final List<FxStructureOption> inherit = new ArrayList<FxStructureOption>(type.getOptions().size());
            for (FxStructureOption o : type.getOptions()) {
                if (o.getIsInherited()) {
                    inherit.add(o);
                }
            }
            if (inherit.size() > 0) {
                for (FxType derived : type.getDerivedTypes()) {
                    updateDerivedTypeOptions(con, derived, inherit);
                }
            }
        }

        //sync back to cache
        try {
            if (needReload)
                StructureLoader.reload(con);
            else {
                StructureLoader.updateType(FxContext.get().getDivisionId(), loadType(con, type.getId()));
                // reload any derived types if type options have changed
                if (optionsChanged && type.getDerivedTypes().size() > 0) {
                    for (FxType derivedType : type.getDerivedTypes()) {
                        StructureLoader.updateType(FxContext.get().getDivisionId(),
                                loadType(con, derivedType.getId()));
                    }
                }
            }
        } catch (FxLoadException e) {
            throw new FxUpdateException(e);
        } catch (FxCacheException e) {
            // DB changes succeeded but the cache could not be updated; log
            // fatally instead of rolling back
            LOG.fatal(e.getMessage(), e);
        }
    } catch (SQLException e) {
        // Roll back the container-managed transaction on any SQL failure
        EJBUtils.rollback(ctx);
        throw new FxUpdateException(LOG, e, "ex.db.sqlError", e.getMessage());
    } finally {
        // Releases both the connection and the last prepared statement
        Database.closeObjects(TypeEngineBean.class, con, ps);
    }
    return type.getId();
}

From source file:org.seasar.dbflute.logic.replaceschema.loaddata.impl.DfDelimiterDataWriterImpl.java

/**
 * Reads the delimiter data file ({@code _fileName}) and registers every record
 * into the table derived from the file name.
 * <p>
 * Flow: the first line defines the columns, every following line is one record
 * (a record may span several physical lines when a value contains a line break).
 * Records are registered by batch update (flushed every 100000 rows) unless
 * batch update is suppressed for the data directory.
 *
 * @param resultInfo Result holder for warnings (e.g. column-count mismatches)
 *                   and not-found columns. (NotNull)
 * @throws IOException When the data file cannot be read.
 */
public void writeData(DfDelimiterDataResultInfo resultInfo) throws IOException {
    _log.info("/= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = ");
    _log.info("writeData(" + _fileName + ")");
    _log.info("= = = = = = =/");
    FileInputStream fis = null;
    InputStreamReader ir = null;
    BufferedReader br = null;

    final String dataDirectory = Srl.substringLastFront(_fileName, "/");
    final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory);
    // table name = file name without directory, extension, and an optional
    // "prefix-" ordering marker (e.g. "10-MEMBER.tsv" -> "MEMBER")
    final String tableDbName;
    {
        String tmp = _fileName.substring(_fileName.lastIndexOf("/") + 1, _fileName.lastIndexOf("."));
        if (tmp.indexOf("-") >= 0) {
            tmp = tmp.substring(tmp.indexOf("-") + "-".length());
        }
        tableDbName = tmp;
    }
    final Map<String, DfColumnMeta> columnInfoMap = getColumnMetaMap(tableDbName);
    if (columnInfoMap.isEmpty()) {
        throwTableNotFoundException(_fileName, tableDbName);
    }

    // process before handling table
    beforeHandlingTable(tableDbName, columnInfoMap);

    String lineString = null;
    String preContinueString = null; // carries a partial value whose line break continues on the next line
    String executedSql = null; // kept outside the loop for exception messages
    final List<String> columnNameList = new ArrayList<String>();
    final List<String> additionalColumnList = new ArrayList<String>();
    final List<String> valueList = new ArrayList<String>();

    final File dataFile = new File(_fileName);
    Connection conn = null;
    PreparedStatement ps = null; // prepared lazily on the first record; reused for all rows
    try {
        fis = new FileInputStream(dataFile);
        ir = new InputStreamReader(fis, _encoding);
        br = new BufferedReader(ir);

        FirstLineInfo firstLineInfo = null;
        int loopIndex = -1;
        int rowNumber = 0;
        int addedBatchSize = 0;
        while (true) {
            ++loopIndex;

            lineString = br.readLine();
            if (lineString == null) {
                break;
            }

            // /- - - - - - - - - - - - - - - - - - - - - -
            // initialize column definition from first line
            // - - - - - - - - - -/
            if (loopIndex == 0) {
                firstLineInfo = getFirstLineInfo(_delimiter, lineString);
                columnNameList.addAll(firstLineInfo.getColumnNameList());
                if (columnNameList.isEmpty()) {
                    throwDelimiterDataColumnDefNotFoundException(_fileName, tableDbName);
                }
                // default-value columns not present in the file are appended as extra columns
                final StringSet columnSet = StringSet.createAsFlexible();
                columnSet.addAll(columnNameList);
                for (String defaultColumn : _defaultValueMap.keySet()) {
                    if (columnSet.contains(defaultColumn)) {
                        continue;
                    }
                    additionalColumnList.add(defaultColumn);
                }
                columnNameList.addAll(additionalColumnList);
                continue;
            }

            // /- - - - - - - - - - - - - - -
            // analyze values in line strings
            // - - - - - - - - - -/
            lineString = filterLineString(lineString);
            {
                // re-attach a value continued from the previous physical line
                if (preContinueString != null && !preContinueString.equals("")) {
                    lineString = preContinueString + "\n" + lineString;
                }
                final ValueLineInfo valueLineInfo = arrangeValueList(lineString, _delimiter);
                final List<String> ls = valueLineInfo.getValueList();
                if (valueLineInfo.isContinueNextLine()) {
                    // last value is incomplete: stash it and read the next physical line
                    preContinueString = ls.remove(ls.size() - 1);
                    valueList.addAll(ls);
                    continue;
                }
                valueList.addAll(ls);
            }
            // *one record is prepared here

            // /- - - - - - - - - - - - - -
            // check definition differences
            // - - - - - - - - - -/
            if (isDifferentColumnValueCount(firstLineInfo, valueList)) {
                // malformed record: warn and skip rather than abort the whole load
                String msg = "The count of values wasn't correct:";
                msg = msg + " column=" + firstLineInfo.getColumnNameList().size();
                msg = msg + " value=" + valueList.size();
                msg = msg + " -> " + valueList;
                resultInfo.registerWarningFile(_fileName, msg);

                // clear temporary variables
                valueList.clear();
                preContinueString = null;
                continue;
            }
            // *valid record is prepared here
            ++rowNumber;

            // /- - - - - - - - - - - - - - - -
            // process registration to database
            // - - - - - - - - - -/
            final DfDelimiterDataWriteSqlBuilder sqlBuilder = new DfDelimiterDataWriteSqlBuilder();
            sqlBuilder.setTableDbName(tableDbName);
            sqlBuilder.setColumnInfoMap(columnInfoMap);
            sqlBuilder.setColumnNameList(columnNameList);
            sqlBuilder.setValueList(valueList);
            sqlBuilder.setNotFoundColumnMap(resultInfo.getNotFoundColumnMap());
            sqlBuilder.setConvertValueMap(_convertValueMap);
            sqlBuilder.setDefaultValueMap(_defaultValueMap);
            sqlBuilder.setBindTypeProvider(new DfColumnBindTypeProvider() {
                public Class<?> provideBindType(String tableName, DfColumnMeta columnMeta) {
                    return getBindType(tableName, columnMeta);
                }
            });
            // connection and statement are created once (first record) and reused
            if (conn == null) {
                conn = _dataSource.getConnection();
            }
            if (ps == null) {
                executedSql = sqlBuilder.buildSql();
                ps = conn.prepareStatement(executedSql);
            }
            final Map<String, Object> columnValueMap = sqlBuilder.setupParameter();
            handleLoggingInsert(tableDbName, columnNameList, columnValueMap, loggingInsertType, rowNumber);

            int bindCount = 1;
            final Set<Entry<String, Object>> entrySet = columnValueMap.entrySet();
            for (Entry<String, Object> entry : entrySet) {
                final String columnName = entry.getKey();
                final Object obj = entry.getValue();

                // /- - - - - - - - - - - - - - - - - -
                // process Null (against Null Headache)
                // - - - - - - - - - -/
                if (processNull(tableDbName, columnName, obj, ps, bindCount, columnInfoMap)) {
                    bindCount++;
                    continue;
                }

                // /- - - - - - - - - - - - - - -
                // process NotNull and NotString
                // - - - - - - - - - -/
                // If the value is not null and the value has the own type except string,
                // It registers the value to statement by the type.
                if (processNotNullNotString(tableDbName, columnName, obj, conn, ps, bindCount, columnInfoMap)) {
                    bindCount++;
                    continue;
                }

                // /- - - - - - - - - - - - - - - - - -
                // process NotNull and StringExpression
                // - - - - - - - - - -/
                final String value = (String) obj;
                processNotNullString(dataFile, tableDbName, columnName, value, conn, ps, bindCount,
                        columnInfoMap);
                bindCount++;
            }
            if (isMergedSuppressBatchUpdate(dataDirectory)) {
                ps.execute();
            } else {
                ps.addBatch();
                ++addedBatchSize;
                if (addedBatchSize == 100000) {
                    // this is supported in only delimiter data writer
                    // because delimiter data can treat large data
                    ps.executeBatch(); // to avoid OutOfMemory
                    ps.clearBatch(); // for next batch
                    addedBatchSize = 0;
                }
            }
            // *one record is finished here

            // clear temporary variables
            // if an exception occurs from execute() or addBatch(),
            // this valueList is to be information for debug
            valueList.clear();
            preContinueString = null;
        }
        // flush the remaining (partial) batch
        if (ps != null && addedBatchSize > 0) {
            ps.executeBatch();
        }
        noticeLoadedRowSize(tableDbName, rowNumber);
        checkImplicitClassification(dataFile, tableDbName, columnNameList, conn);
    } catch (FileNotFoundException e) {
        throw e;
    } catch (IOException e) {
        throw e;
    } catch (SQLException e) {
        // batch failures often carry the root cause in getNextException()
        final SQLException nextEx = e.getNextException();
        if (nextEx != null && !e.equals(nextEx)) { // focus on next exception
            _log.warn("*Failed to register: " + e.getMessage());
            String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, nextEx);
            throw new DfDelimiterDataRegistrationFailureException(msg, nextEx); // switch!
        } else {
            String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, e);
            throw new DfDelimiterDataRegistrationFailureException(msg, e);
        }
    } catch (RuntimeException e) {
        String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, e);
        throw new DfDelimiterDataRegistrationFailureException(msg, e);
    } finally {
        try {
            if (fis != null) {
                fis.close();
            }
            if (ir != null) {
                ir.close();
            }
            if (br != null) {
                br.close();
            }
        } catch (java.io.IOException ignored) {
            _log.warn("File-close threw the exception: ", ignored);
        }
        if (ps != null) {
            try {
                ps.close();
            } catch (SQLException ignored) {
                _log.info("Statement.close() threw the exception!", ignored);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("Connection.close() threw the exception!", ignored);
            }
        }
        // process after (finally) handling table
        finallyHandlingTable(tableDbName, columnInfoMap);
    }
}

From source file:com.att.pirates.controller.ProjectController.java

/**
 * Reconciles AppProjectArtifactOwners rows for one artifact: inserts rows the
 * user newly selected, updates due dates on existing rows when changed, deletes
 * rows flagged for removal, then synchronizes the artifact's due date across
 * all applications of the project. All statements run in a single transaction;
 * any failure rolls the whole transaction back (errors are logged, not rethrown
 * -- best-effort behavior preserved from the original implementation).
 *
 * @param existingOwners     owners currently stored for the artifact
 * @param userSelectedOwners owners as selected by the user
 * @param artifactName       artifact being processed (used for logging / the IST guard)
 * @param dueDateForArtifact new due date in MM/dd/yyyy format
 * @param UUID               id of the user performing the change
 * @param impactId           impact type; 4 + ISTExec is a no-op by business rule
 */
private void processProjectAppOwnersByArtifactName(List<ProjectAppOwnerModel> existingOwners,
        List<ProjectAppOwnerModel> userSelectedOwners, String artifactName, String dueDateForArtifact,
        String UUID, int impactId) {
    if (impactId == 4 && artifactName.equalsIgnoreCase(PiratesConstants.ISTExec)) {
        return;
    }

    Connection con = null;
    PreparedStatement updateRow = null;
    PreparedStatement deleteRow = null;
    PreparedStatement insertRow = null;
    PreparedStatement syncRow = null;
    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    String today = format.format(Calendar.getInstance().getTime());

    String insertSQL = " INSERT INTO [AppProjectArtifactOwners] " + "           ([UUID] "
            + "           ,[DueDate] " + "           ,[ArtifactName] " + "           ,[PRISMId] "
            + "           ,[ApplicationName] " + "           ,[ModuleId] " + "           ,[IsPrimaryOwner] "
            + "           ,[MileStoneId] " + "           ,[DateCreated] " + "           ,[UpdatedByUUID] "
            + "           ,[SystemNote]) " + "     VALUES " + "           ( ?,?,?,?,?,?,?,?,?,?,?)  ";

    String updateSQL = " UPDATE AppProjectArtifactOwners " + "   SET DueDate = ? " + "      ,UpdatedByUUID = ? "
            + "      ,SystemNote = ? " + "       WHERE  "
            + "   UUID = ? and ArtifactName = ? and PRISMId = ? and ApplicationName = ? and ModuleId = ? and MileStoneId = ? ";

    String deleteSQL = " DELETE FROM AppProjectArtifactOwners " + "       WHERE  "
            + "   UUID = ? and ArtifactName = ? and PRISMId = ? and ApplicationName = ? and ModuleId = ? and MileStoneId = ? ";

    // we need to make sure duedates for each artifact for all applications in this project
    // to be in sync
    String syncSQL = " Update a " + " Set a.duedate = t.duedate " + " from AppProjectArtifactOwners a "
            + " join " + " ( " + " select prismid, applicationname, artifactname, duedate "
            + " from AppProjectArtifactOwners " + " Where artifactname = ? and prismid = ? "
            + " And applicationname = ? " + " and isprimaryowner = 1 " + " ) t "
            + " on a.prismid=t.prismid and a.artifactname=t.artifactname "
            + " and a.applicationname <> t.applicationname " + " where a.isprimaryowner = 1 ";

    try {
        // artifacts touched by any insert/update; drives the final due-date sync
        List<String> artifactNames = new ArrayList<String>();
        String prismId = "";
        String applicationName = "";

        con = DBUtility.getDBConnection();
        con.setAutoCommit(false);
        insertRow = con.prepareStatement(insertSQL);
        // inserts
        boolean isAdd = false;
        for (ProjectAppOwnerModel o : userSelectedOwners) {
            if (o.getIsNew()) {
                if (!isAdd)
                    isAdd = true;
                logger.error(msgHeader + " userSelectedOwners " + artifactName + " " + o.getUUID()
                        + " is being added..");
                insertRow.setString(1, o.getUUID());
                insertRow.setString(2, o.getDueDate());
                insertRow.setString(3, o.getArtifactName());
                insertRow.setString(4, o.getPrismId());
                insertRow.setString(5, o.getApplicationName());
                insertRow.setInt(6, Integer.valueOf(o.getModuleId()));
                insertRow.setInt(7, 1); // inserted rows are always primary owners
                insertRow.setInt(8, Integer.valueOf(o.getMileStoneId()));
                insertRow.setString(9, today);
                insertRow.setString(10, o.getUpdatedByUUID());
                insertRow.setString(11,
                        "Record created by " + o.getUpdatedByUUID() + " on " + o.getDateCreated());
                insertRow.addBatch();

                // update artifactNames list
                if (!artifactNames.contains(o.getArtifactName())) {
                    artifactNames.add(o.getArtifactName());
                }
                if (prismId.isEmpty()) {
                    prismId = o.getPrismId();
                }
                if (applicationName.isEmpty()) {
                    applicationName = o.getApplicationName();
                }
            }
        }
        if (isAdd)
            insertRow.executeBatch();
        if (!isAdd)
            logger.error(msgHeader + " processProjectAppOwnersByArtifactName.. nothing to insert");

        // updates: only rows whose due date actually changed are touched
        boolean isUpdate = false;
        updateRow = con.prepareStatement(updateSQL);
        for (ProjectAppOwnerModel o : existingOwners) {
            if (o.getIsNew()) {
                // new rows were handled by the insert pass above
            } else {
                SimpleDateFormat yFormat = new SimpleDateFormat("MM/dd/yyyy");
                Date userSelected = yFormat.parse(dueDateForArtifact);
                Date existing = yFormat.parse(o.getDueDate());
                if (existing.compareTo(userSelected) == 0) {
                    logger.error(msgHeader + " new duedate: " + dueDateForArtifact
                            + " is the same as existing duedate " + o.getDueDate()
                            + " , nothing to do here...");
                } else {
                    if (!isUpdate)
                        isUpdate = true;
                    logger.error(msgHeader + " existingOwners " + artifactName + " " + o.getUUID()
                            + " is being updated..");
                    updateRow.setString(1, dueDateForArtifact);
                    updateRow.setString(2, UUID);
                    updateRow.setString(3, "Record updated by " + UUID + " on " + today);
                    updateRow.setString(4, o.getUUID());
                    updateRow.setString(5, o.getArtifactName());
                    updateRow.setString(6, o.getPrismId());
                    updateRow.setString(7, o.getApplicationName());
                    updateRow.setInt(8, Integer.valueOf(o.getModuleId()));
                    updateRow.setInt(9, Integer.valueOf(o.getMileStoneId()));
                    updateRow.addBatch();

                    // update artifactNames list
                    if (!artifactNames.contains(o.getArtifactName())) {
                        artifactNames.add(o.getArtifactName());
                    }
                    if (prismId.isEmpty()) {
                        prismId = o.getPrismId();
                    }
                    if (applicationName.isEmpty()) {
                        applicationName = o.getApplicationName();
                    }
                }
            }
        }
        if (isUpdate)
            updateRow.executeBatch();
        if (!isUpdate)
            logger.error(msgHeader + " processProjectAppOwnersByArtifactName.. nothing to update");

        // deletes: existing owners flagged IsNew are the ones to remove
        // (NOTE(review): IsNew on an *existing* owner marking deletion looks inverted --
        // preserved as-is; confirm against the model's population logic)
        boolean isDelete = false;
        deleteRow = con.prepareStatement(deleteSQL);
        for (ProjectAppOwnerModel o : existingOwners) {
            if (o.getIsNew()) {
                if (!isDelete)
                    isDelete = true;
                logger.error(msgHeader + " existingOwners " + artifactName + " " + o.getUUID()
                        + " is being deleted..");
                deleteRow.setString(1, o.getUUID());
                deleteRow.setString(2, o.getArtifactName());
                deleteRow.setString(3, o.getPrismId());
                deleteRow.setString(4, o.getApplicationName());
                deleteRow.setInt(5, Integer.valueOf(o.getModuleId()));
                deleteRow.setInt(6, Integer.valueOf(o.getMileStoneId()));

                deleteRow.addBatch();
            }
        }
        if (isDelete)
            deleteRow.executeBatch();
        if (!isDelete)
            logger.error(msgHeader + " processProjectAppOwnersByArtifactName.. nothing to delete");

        if (isAdd || isUpdate || isDelete) {
            // sync up same artifact, same project different application's due dates
            if (!artifactNames.isEmpty()) {
                syncRow = con.prepareStatement(syncSQL);
                for (String a : artifactNames) {
                    logger.error("Setting syncup parameters.. artifactname: " + a + ", prismId: " + prismId
                            + ", applicationName: " + applicationName);
                    syncRow.setString(1, a);
                    syncRow.setString(2, prismId);
                    syncRow.setString(3, applicationName);
                    syncRow.addBatch();
                }
                syncRow.executeBatch();
            }

            con.commit();
        } else {
            logger.error(msgHeader + " processProjectAppOwnersByArtifactName.. nothing to commit");
        }

    } catch (SQLException e) {
        if (con != null) {
            try {
                logger.error(e.getMessage()
                        + ", processProjectAppOwnersByArtifactName.. Transaction is being rolled back.. "
                        + e.getMessage());
                con.rollback();
            } catch (SQLException excep) {
                logger.error(excep.getMessage()
                        + ", processProjectAppOwnersByArtifactName.. Transaction is being rolled back.."
                        + excep.getMessage());
                try {
                    con.rollback();
                } catch (SQLException logOrIgnore) {
                    // rollback already failed once; nothing further to do
                }
            }
        }
    } catch (Exception ex) {
        logger.error(ex + ", processProjectAppOwnersByArtifactName.. Transaction is being rolled back.."
                + ex.getMessage());
        // FIX: guard against NPE when the connection was never obtained
        if (con != null) {
            try {
                con.rollback();
            } catch (SQLException logOrIgnore) {
                // best-effort rollback; ignore
            }
        }
    } finally {
        if (updateRow != null) {
            try {
                updateRow.close();
            } catch (SQLException e) {
                // ignore close failure
            }
        }
        if (deleteRow != null) {
            try {
                deleteRow.close();
            } catch (SQLException e) {
                // ignore close failure
            }
        }
        if (insertRow != null) {
            try {
                insertRow.close();
            } catch (SQLException e) {
                // ignore close failure
            }
        }
        // FIX: syncRow was previously leaked -- close it like the other statements
        if (syncRow != null) {
            try {
                syncRow.close();
            } catch (SQLException e) {
                // ignore close failure
            }
        }
        if (con != null) {
            try {
                con.setAutoCommit(true);
                con.close();
            } catch (SQLException logOrIgnore) {
                // ignore close failure
            }
        }
    }
}

From source file:org.wso2.carbon.appmgt.impl.dao.AppMDAO.java

/**
 * Save XACML policies, policy group wise.
 *
 * @param policyGroupId      Policy Group Id
 * @param objPartialMappings XACML policy partial ids (each element is a boxed
 *                           {@code Double} whose integer value is the partial id)
 * @param conn               sql connection (owned by the caller; not closed here)
 * @throws SQLException if an error occurs while saving data to DB
 */
private static void savePolicyPartialMappings(int policyGroupId, Object[] objPartialMappings, Connection conn)
        throws SQLException {
    String query = "INSERT INTO APM_POLICY_GRP_PARTIAL_MAPPING(POLICY_GRP_ID, POLICY_PARTIAL_ID) "
            + "VALUES(?,?) ";
    PreparedStatement preparedStatement = null;

    try {
        preparedStatement = conn.prepareStatement(query);

        for (int i = 0; i < objPartialMappings.length; i++) {
            preparedStatement.setInt(1, policyGroupId);
            // elements arrive as Double; narrow to the int partial id
            preparedStatement.setInt(2, ((Double) (objPartialMappings[i])).intValue());
            preparedStatement.addBatch();
        }
        preparedStatement.executeBatch();
    } catch (SQLException e) {
        // FIX: log the array contents (Arrays.toString) instead of the array's
        // identity hashcode, which the old concatenation produced
        log.error("SQL Error while executing the query to save policy partial mappings: " + query
                + " : (Policy Group Id:" + policyGroupId + ", Policy Partial Mappings:"
                + java.util.Arrays.toString(objPartialMappings) + ")", e);
        /* A single SQL connection passed from the parent function is used here, so the error
        is logged and the SQLException rethrown; the connection is disposed by the parent. */
        throw e;
    } finally {
        APIMgtDBUtil.closeAllConnections(preparedStatement, null, null);
    }
}

From source file:org.ramadda.geodata.cdmdata.PointDatabaseTypeHandler.java

/**
 * _more_/*from   w  w w. j av a2s  . c om*/
 *
 *
 * @param request _more_
 * @param entry _more_
 * @param metadata _more_
 * @param fdp _more_
 * @param connection _more_
 * @param newEntry _more_
 *
 * @throws Exception _more_
 */
private void insertData(Request request, Entry entry, List<PointDataMetadata> metadata, FeatureDatasetPoint fdp,
        Connection connection, boolean newEntry) throws Exception {

    String tableName = getTableName(entry);
    String[] ARRAY = new String[metadata.size()];
    for (PointDataMetadata pdm : metadata) {
        ARRAY[pdm.getColumnNumber()] = pdm.getColumnName();
    }
    String insertString = SqlUtil.makeInsert(tableName, SqlUtil.commaNoDot(ARRAY),
            SqlUtil.getQuestionMarks(ARRAY.length));

    double north = 0, south = 0, east = 0, west = 0;

    long minTime = (newEntry ? Long.MAX_VALUE : entry.getStartDate());
    long maxTime = (newEntry ? Long.MIN_VALUE : entry.getEndDate());
    PreparedStatement insertStmt = connection.prepareStatement(insertString);
    Object[] values = new Object[metadata.size()];
    int cnt = 0;
    int batchCnt = 0;
    GregorianCalendar calendar = new GregorianCalendar(RepositoryUtil.TIMEZONE_DEFAULT);
    boolean didone = false;

    Hashtable properties = getProperties(entry);
    int baseId = Misc.getProperty(properties, PROP_ID, 0);
    int totalCnt = Misc.getProperty(properties, PROP_CNT, 0);
    long t1 = System.currentTimeMillis();

    long tt1 = System.currentTimeMillis();
    //        for(int i=0;i<200;i++) {

    PointFeatureIterator pfi = CdmDataOutputHandler.getPointIterator(fdp);
    while (pfi.hasNext()) {
        PointFeature po = (PointFeature) pfi.next();
        ucar.unidata.geoloc.EarthLocation el = po.getLocation();
        if (el == null) {
            continue;
        }

        double lat = el.getLatitude();
        double lon = el.getLongitude();
        double alt = el.getAltitude();
        Date time = po.getNominalTimeAsDate();

        long tmpTime = time.getTime();
        if (tmpTime < minTime) {
            minTime = tmpTime;
        }
        if (tmpTime > maxTime) {
            maxTime = tmpTime;
        }

        if (didone) {
            north = Math.max(north, lat);
            south = Math.min(south, lat);
            west = Math.min(west, lon);
            east = Math.max(east, lon);
        } else {
            north = (newEntry ? lat : entry.hasNorth() ? entry.getNorth() : lat);
            south = (newEntry ? lat : entry.hasSouth() ? entry.getSouth() : lat);
            east = (newEntry ? lon : entry.hasEast() ? entry.getEast() : lon);
            west = (newEntry ? lon : entry.hasWest() ? entry.getWest() : lon);
        }
        didone = true;

        calendar.setTime(time);
        StructureData structure = po.getData();
        boolean hadAnyNumericValues = false;
        boolean hadGoodNumericValue = false;
        /*
        if(totalCnt<5) {
        StructureMembers.Member member =
            structure.findMember("altitude");
        if(member!=null) {
            double d = structure.convertScalarFloat(member);
        } else {
            System.err.println("no member");
                
        }
        }
        */

        for (PointDataMetadata pdm : metadata) {
            Object value;
            if (COL_ID.equals(pdm.getColumnName())) {
                value = new Integer(baseId);
                baseId++;
            } else if (COL_LATITUDE.equals(pdm.getColumnName())) {
                value = new Double(checkWriteValue(lat));
            } else if (COL_LONGITUDE.equals(pdm.getColumnName())) {
                value = new Double(checkWriteValue(lon));
            } else if (COL_ALTITUDE.equals(pdm.getColumnName())) {
                value = new Double(checkWriteValue(alt));
            } else if (COL_DATE.equals(pdm.getColumnName())) {
                value = time;
            } else if (COL_HOUR.equals(pdm.getColumnName())) {
                value = new Integer(calendar.get(GregorianCalendar.HOUR));
            } else if (COL_MONTH.equals(pdm.getColumnName())) {
                value = new Integer(calendar.get(GregorianCalendar.MONTH));
            } else {
                StructureMembers.Member member = structure.findMember((String) pdm.shortName);
                if (pdm.isString()) {
                    value = structure.getScalarString(member);
                    if (value == null) {
                        value = "";
                    }
                    value = value.toString().trim();
                } else {
                    double d = structure.convertScalarFloat(member);
                    hadAnyNumericValues = true;
                    if (d == d) {
                        hadGoodNumericValue = true;
                    }
                    value = new Double(checkWriteValue(d));
                }
            }
            values[pdm.getColumnNumber()] = value;
        }
        if (hadAnyNumericValues && !hadGoodNumericValue) {
            continue;
        }
        totalCnt++;
        getDatabaseManager().setValues(insertStmt, values);
        insertStmt.addBatch();
        batchCnt++;
        if (batchCnt > 100) {
            insertStmt.executeBatch();
            batchCnt = 0;
        }
        if (((++cnt) % 5000) == 0) {
            System.err.println("added " + cnt + " observations " + (System.currentTimeMillis() - tt1));
        }
    }

    //        }

    if (batchCnt > 0) {
        insertStmt.executeBatch();
    }
    insertStmt.close();

    long t2 = System.currentTimeMillis();
    System.err.println("inserted " + cnt + " observations in " + (t2 - t1) + "ms");

    properties.put(PROP_CNT, totalCnt + "");
    properties.put(PROP_ID, baseId + "");
    setProperties(entry, properties);

    if (didone) {
        entry.setWest(west);
        entry.setEast(east);
        entry.setNorth(north);
        entry.setSouth(south);
    }

    if (minTime != Long.MAX_VALUE) {
        entry.setStartDate(minTime);
        entry.setEndDate(maxTime);
    }

}

From source file:com.emr.utilities.CSVLoader.java

/**
* Parse CSV file using OpenCSV library and load in
* given database table.
* @param csvFile {@link String} Input CSV file
* @param tableName {@link String} Database table name to import data
* @param truncateBeforeLoad {@link boolean} Truncate the table before inserting
*          new records.
* @param destinationColumns {@link String[]} Array containing the destination columns
*/
public void loadCSV(String csvFile, String tableName, boolean truncateBeforeLoad, String[] destinationColumns,
        List columnsToBeMapped) throws Exception {
    CSVReader csvReader = null;
    if (null == this.connection) {
        throw new Exception("Not a valid connection.");
    }
    try {

        csvReader = new CSVReader(new FileReader(csvFile), this.seprator);

    } catch (Exception e) {
        String stacktrace = org.apache.commons.lang3.exception.ExceptionUtils.getStackTrace(e);
        JOptionPane.showMessageDialog(null, "Error occured while executing file. Error Details: " + stacktrace,
                "File Error", JOptionPane.ERROR_MESSAGE);
        throw new Exception("Error occured while executing file. " + stacktrace);
    }
    String[] headerRow = csvReader.readNext();

    if (null == headerRow) {
        throw new FileNotFoundException(
                "No columns defined in given CSV file." + "Please check the CSV file format.");
    }
    //Get indices of columns to be mapped
    List mapColumnsIndices = new ArrayList();
    for (Object o : columnsToBeMapped) {
        String column = (String) o;
        column = column.substring(column.lastIndexOf(".") + 1, column.length());
        int i;

        for (i = 0; i < headerRow.length; i++) {

            if (headerRow[i].equals(column)) {
                mapColumnsIndices.add(i);
            }
        }
    }

    String questionmarks = StringUtils.repeat("?,", headerRow.length);
    questionmarks = (String) questionmarks.subSequence(0, questionmarks.length() - 1);

    String query = SQL_INSERT.replaceFirst(TABLE_REGEX, tableName);
    query = query.replaceFirst(KEYS_REGEX, StringUtils.join(destinationColumns, ","));
    query = query.replaceFirst(VALUES_REGEX, questionmarks);

    String log_query = query.substring(0, query.indexOf("VALUES("));

    String[] nextLine;
    Connection con = null;
    PreparedStatement ps = null;
    PreparedStatement ps2 = null;
    PreparedStatement reader = null;
    ResultSet rs = null;
    try {
        con = this.connection;
        con.setAutoCommit(false);
        ps = con.prepareStatement(query);

        File file = new File("sqlite/db");
        if (!file.exists()) {
            file.createNewFile();
        }
        db = new SQLiteConnection(file);
        db.open(true);

        //if destination table==person, also add an entry in the table person_identifier
        //get column indices for the person_id and uuid columns
        int person_id_column_index = -1;
        int uuid_column_index = -1;
        int maxLength = 100;
        int firstname_index = -1;
        int middlename_index = -1;
        int lastname_index = -1;
        int clanname_index = -1;
        int othername_index = -1;
        if (tableName.equals("person")) {
            int i;
            ps2 = con.prepareStatement(
                    "insert ignore into person_identifier(person_id,identifier_type_id,identifier) values(?,?,?)");
            for (i = 0; i < headerRow.length; i++) {
                if (headerRow[i].equals("person_id")) {
                    person_id_column_index = i;
                }
                if (headerRow[i].equals("uuid")) {
                    uuid_column_index = i;
                }
                /*if(headerRow[i].equals("first_name")){
                    System.out.println("Found firstname index: " + i);
                    firstname_index=i;
                }
                if(headerRow[i].equals("middle_name")){
                    System.out.println("Found firstname index: " + i);
                    middlename_index=i;
                }
                if(headerRow[i].equals("last_name")){
                    System.out.println("Found firstname index: " + i);
                    lastname_index=i;
                }
                if(headerRow[i].equals("clan_name")){
                    System.out.println("Found firstname index: " + i);
                    clanname_index=i;
                }
                if(headerRow[i].equals("other_name")){
                    System.out.println("Found firstname index: " + i);
                    othername_index=i;
                }*/
            }
        }

        if (truncateBeforeLoad) {
            //delete data from table before loading csv
            try (Statement stmnt = con.createStatement()) {
                stmnt.execute("DELETE FROM " + tableName);
                stmnt.close();
            }
        }
        if (tableName.equals("person")) {
            try (Statement stmt2 = con.createStatement()) {
                stmt2.execute(
                        "ALTER TABLE person CHANGE COLUMN first_name first_name VARCHAR(50) NULL DEFAULT NULL AFTER person_guid,CHANGE COLUMN middle_name middle_name VARCHAR(50) NULL DEFAULT NULL AFTER first_name,CHANGE COLUMN last_name last_name VARCHAR(50) NULL DEFAULT NULL AFTER middle_name;");
                stmt2.close();
            }
        }
        final int batchSize = 1000;
        int count = 0;
        Date date = null;

        while ((nextLine = csvReader.readNext()) != null) {

            if (null != nextLine) {
                int index = 1;
                int person_id = -1;
                String uuid = "";
                int identifier_type_id = 3;
                if (tableName.equals("person")) {
                    reader = con.prepareStatement(
                            "select identifier_type_id from identifier_type where identifier_type_name='UUID'");
                    rs = reader.executeQuery();
                    if (!rs.isBeforeFirst()) {
                        //no uuid row
                        //insert it
                        Integer numero = 0;
                        Statement stmt = con.createStatement();
                        numero = stmt.executeUpdate(
                                "insert into identifier_type(identifier_type_id,identifier_type_name) values(50,'UUID')",
                                Statement.RETURN_GENERATED_KEYS);
                        ResultSet rs2 = stmt.getGeneratedKeys();
                        if (rs2.next()) {
                            identifier_type_id = rs2.getInt(1);
                        }
                        rs2.close();
                        stmt.close();
                    } else {
                        while (rs.next()) {
                            identifier_type_id = rs.getInt("identifier_type_id");
                        }
                    }

                }
                int counter = 1;
                String temp_log = log_query + "VALUES("; //string to be logged

                for (String string : nextLine) {
                    //if current index is in the list of columns to be mapped, we apply that mapping
                    for (Object o : mapColumnsIndices) {
                        int i = (int) o;
                        if (index == (i + 1)) {
                            //apply mapping to this column
                            string = applyDataMapping(string);
                        }
                    }
                    if (tableName.equals("person")) {
                        //get person_id and uuid

                        if (index == (person_id_column_index + 1)) {
                            person_id = Integer.parseInt(string);
                        }

                        if (index == (uuid_column_index + 1)) {
                            uuid = string;
                        }

                    }
                    //check if string is a date
                    if (string.matches("\\d{2}-[a-zA-Z]{3}-\\d{4} \\d{2}:\\d{2}:\\d{2}")
                            || string.matches("\\d{2}-[a-zA-Z]{3}-\\d{4}")) {
                        java.sql.Date dt = formatDate(string);
                        temp_log = temp_log + "'" + dt.toString() + "'";
                        ps.setDate(index++, dt);
                    } else {
                        if ("".equals(string)) {
                            temp_log = temp_log + "''";
                            ps.setNull(index++, Types.NULL);
                        } else {
                            temp_log = temp_log + "'" + string + "'";
                            ps.setString(index++, string);
                        }

                    }
                    if (counter < headerRow.length) {
                        temp_log = temp_log + ",";
                    } else {
                        temp_log = temp_log + ");";
                        System.out.println(temp_log);
                    }
                    counter++;
                }
                if (tableName.equals("person")) {
                    if (!"".equals(uuid) && person_id != -1) {
                        ps2.setInt(1, person_id);
                        ps2.setInt(2, identifier_type_id);
                        ps2.setString(3, uuid);

                        ps2.addBatch();
                    }
                }

                ps.addBatch();
            }
            if (++count % batchSize == 0) {
                ps.executeBatch();
                if (tableName.equals("person")) {
                    ps2.executeBatch();
                }
            }
        }
        ps.executeBatch(); // insert remaining records
        if (tableName.equals("person")) {
            ps2.executeBatch();
        }

        con.commit();
    } catch (Exception e) {
        if (con != null)
            con.rollback();
        if (db != null)
            db.dispose();
        String stacktrace = org.apache.commons.lang3.exception.ExceptionUtils.getStackTrace(e);
        JOptionPane.showMessageDialog(null, "Error occured while executing file. Error Details: " + stacktrace,
                "File Error", JOptionPane.ERROR_MESSAGE);
        throw new Exception("Error occured while executing file. " + stacktrace);
    } finally {
        if (null != reader)
            reader.close();
        if (null != ps)
            ps.close();
        if (null != ps2)
            ps2.close();
        if (null != con)
            con.close();

        csvReader.close();
    }
}

From source file:com.zimbra.cs.db.DbMailItem.java

/**
 * Moves the given messages into {@code folder}, batching the UPDATEs against
 * the mail-item table. For each message that actually changes folder, the
 * previous folder is appended to its {@code prev_folders} history column
 * (entries ordered by modseq ascending, e.g. "100:2;200:101;300:5", capped at
 * the server-configured maximum). When IMAP tracking is enabled, non-null
 * imap_ids are reset to 0 so they get renumbered.
 *
 * @param msgs   messages to move; a null or empty list is a no-op
 * @param folder destination folder
 * @throws ServiceException wrapping any SQLException from the batch update
 */
public static void setFolder(List<Message> msgs, Folder folder) throws ServiceException {
    if (msgs == null || msgs.isEmpty()) {
        return;
    }
    Mailbox mbox = folder.getMailbox();
    DbConnection conn = mbox.getOperationConnection();
    PreparedStatement stmt = null;
    try {
        // (A naming-conflict pre-check used to live here; messages cannot
        // currently have names, so it stays disabled.)
        final int batchSize = 500;
        int batched = 0;
        String imapRenumber = mbox.isTrackingImap()
                ? ", imap_id = CASE WHEN imap_id IS NULL THEN NULL ELSE 0 END"
                : "";
        stmt = conn.prepareStatement("UPDATE " + getMailItemTableName(folder)
                + " SET folder_id = ?, prev_folders=?, mod_metadata = ?, change_date = ?" + imapRenumber
                + " WHERE " + IN_THIS_MAILBOX_AND + "id = ?");
        int modseq = mbox.getOperationChangeID();
        for (Message msg : msgs) {
            UnderlyingData ud = msg.getUnderlyingData();
            int pos = 1;
            stmt.setInt(pos++, folder.getId());
            if (msg.getFolderId() != folder.getId()) {
                // Record the folder we are leaving in the history string.
                String prevFolders = ud.getPrevFolders();
                if (StringUtil.isNullOrEmpty(prevFolders)) {
                    prevFolders = modseq + ":" + ud.folderId;
                } else {
                    String[] entries = prevFolders.split(";");
                    int maxCount = mbox.getAccount().getServer().getPrevFoldersToTrackMax();
                    if (entries.length < maxCount) {
                        prevFolders += ";" + modseq + ":" + ud.folderId;
                    } else {
                        // At capacity: drop the oldest entry, append the newest.
                        String[] shifted = new String[maxCount];
                        System.arraycopy(entries, 1, shifted, 0, maxCount - 1);
                        shifted[maxCount - 1] = modseq + ":" + ud.folderId;
                        prevFolders = StringUtil.join(";", shifted);
                    }
                }
                stmt.setString(pos++, prevFolders);
                ud.setPrevFolders(prevFolders);
            } else {
                // Folder unchanged: rewrite the existing history untouched.
                stmt.setString(pos++, ud.getPrevFolders());
            }
            stmt.setInt(pos++, modseq);
            stmt.setInt(pos++, mbox.getOperationTimestamp());
            pos = setMailboxId(stmt, mbox, pos);
            stmt.setInt(pos++, msg.getId());
            stmt.addBatch();
            // Flush periodically so the batch never grows unbounded.
            if (++batched % batchSize == 0) {
                stmt.executeBatch();
            }
        }
        stmt.executeBatch();
        stmt.close();
        stmt = null;
    } catch (SQLException e) {
        // (A duplicate-row translation used to live here; names are disabled,
        // so every failure is surfaced uniformly.)
        throw ServiceException.FAILURE("writing new folder data for messages", e);
    } finally {
        DbPool.closeStatement(stmt);
    }
}

From source file:com.flexive.core.storage.genericSQL.GenericTreeStorageSpreaded.java

/**
 * Rewrites the LFT/RGT boundary values of the (nested-set style) tree rooted
 * at {@code nodeId}, re-spreading them with fresh spacing, optionally opening
 * a gap for an insert and/or copying the subtree into {@code destMode}.
 * <p>
 * If the node's space cannot be optimized (and optimization is not disabled),
 * the method recurses upward via the node's parent. In create mode each
 * visited node is INSERTed into the destination table (with new or kept ids,
 * tracked via a parent stack); otherwise the source rows are UPDATEd in
 * place. Updates are batched and flushed every 10000 rows.
 *
 * @param con                      open database connection
 * @param seq                      sequencer used to allocate new node ids in create mode
 * @param sourceMode               tree mode (table) read from
 * @param destMode                 tree mode (table) written to in create mode
 * @param nodeId                   root of the subtree to reorganize
 * @param includeNodeId            if true, the node itself is rewritten as well
 * @param overrideSpacing          spacing to use instead of the node's default (see body)
 * @param overrideLeft             left boundary to start from instead of the node's own
 * @param insertParent             parent node of a pending insert (bounds sanity check)
 * @param insertPosition           requested insert position (normalized to >= 0)
 * @param insertSpace              size of the gap to open for the pending insert
 * @param insertBoundaries         [0] = boundary after which the gap is injected
 * @param depthDelta               value added to every node's depth
 * @param destinationNode          parent id the copied subtree hangs under (create mode)
 * @param createMode               copy into destMode instead of updating in place
 * @param createKeepIds            keep original ids when copying
 * @param disableSpaceOptimization never recurse upward to gain space
 * @return id of the first node created in create mode, or -1
 * @throws FxTreeException on SQL errors, duplicate keys, or boundary violations
 */
protected long _reorganizeSpace(Connection con, SequencerEngine seq, FxTreeMode sourceMode, FxTreeMode destMode,
        long nodeId, boolean includeNodeId, BigInteger overrideSpacing, BigInteger overrideLeft,
        FxTreeNodeInfo insertParent, int insertPosition, BigInteger insertSpace, BigInteger insertBoundaries[],
        int depthDelta, Long destinationNode, boolean createMode, boolean createKeepIds,
        boolean disableSpaceOptimization) throws FxTreeException {
    long firstCreatedNodeId = -1;
    FxTreeNodeInfoSpreaded nodeInfo;
    try {
        nodeInfo = (FxTreeNodeInfoSpreaded) getTreeNodeInfo(con, sourceMode, nodeId);
    } catch (Exception e) {
        // Node lookup failed - treated as "nothing to reorganize".
        return -1;
    }

    if (!nodeInfo.isSpaceOptimizable() && !disableSpaceOptimization) {
        // The root node cannot be optimized any further ... so all we can do is fail :-/
        // This should never really happen
        if (nodeId == ROOT_NODE) {
            return -1;
        }
        // Not enough room at this level: retry one level up the tree.
        return _reorganizeSpace(con, seq, sourceMode, destMode, nodeInfo.getParentId(), includeNodeId,
                overrideSpacing, overrideLeft, insertParent, insertPosition, insertSpace, insertBoundaries,
                depthDelta, destinationNode, createMode, createKeepIds, false);
    }

    BigInteger spacing = nodeInfo.getDefaultSpacing();
    if (overrideSpacing != null && (overrideSpacing.compareTo(spacing) < 0 || overrideLeft != null)) {
        // override spacing unless it is greater OR overrideLeft is specified (in that case we
        // have to use the spacing for valid tree ranges)  
        spacing = overrideSpacing;
    } else {
        if (spacing.compareTo(GO_UP) < 0 && !createMode && !disableSpaceOptimization) {
            // Default spacing too small to be worth rewriting here - go up.
            return _reorganizeSpace(con, seq, sourceMode, destMode, nodeInfo.getParentId(), includeNodeId,
                    overrideSpacing, overrideLeft, insertParent, insertPosition, insertSpace, insertBoundaries,
                    depthDelta, destinationNode, createMode, createKeepIds, false);
        }
    }

    if (insertBoundaries != null && insertPosition == -1) {
        insertPosition = 0; // insertPosition cannot be negative
    }

    Statement stmt = null;
    PreparedStatement ps = null;
    ResultSet rs;
    BigInteger left = overrideLeft == null ? nodeInfo.getLeft() : overrideLeft;
    BigInteger right = null;
    String includeNode = includeNodeId ? "=" : "";
    long counter = 0;
    long newId = -1;
    try {
        final long start = System.currentTimeMillis();
        String createProps = createMode ? ",PARENT,REF,NAME,TEMPLATE" : "";
        String sql = " SELECT ID," + StorageManager.getIfFunction( // compute total child count only when the node has children
                "CHILDCOUNT = 0", "0",
                "(SELECT COUNT(*) FROM " + getTable(sourceMode) + " WHERE LFT > NODE.LFT AND RGT < NODE.RGT)") +
        // 3           4             5   6
                ", CHILDCOUNT, LFT AS LFTORD,RGT,DEPTH" + createProps
                + " FROM (SELECT ID,CHILDCOUNT,LFT,RGT,DEPTH" + createProps + " FROM " + getTable(sourceMode)
                + " WHERE " + "LFT>" + includeNode + nodeInfo.getLeft() + " AND LFT<" + includeNode
                + nodeInfo.getRight() + ") NODE " + "ORDER BY LFTORD ASC";
        stmt = con.createStatement();
        rs = stmt.executeQuery(sql);
        if (createMode) {
            //                                                                 1  2      3     4     5   6        7   8
            ps = con.prepareStatement(
                    "INSERT INTO " + getTable(destMode) + " (ID,PARENT,DEPTH,DIRTY,REF,TEMPLATE,LFT,RGT," +
                    //9           10    11
                            "CHILDCOUNT,NAME,MODIFIED_AT) " + "VALUES (?,?,?,?,?,?,?,?,?,?,?)");
        } else {
            ps = con.prepareStatement("UPDATE " + getTable(sourceMode) + " SET LFT=?,RGT=?,DEPTH=? WHERE ID=?");
        }
        long id;
        int total_childs;
        int direct_childs;
        BigInteger nextLeft;
        int lastDepth = nodeInfo.getDepth() + (includeNodeId ? 0 : 1);
        int depth;
        BigInteger _rgt;
        BigInteger _lft;
        Long ref = null;
        String data = null;
        String name = "";

        // In create mode, track the current parent id per depth level.
        Stack<Long> currentParent = null;
        if (createMode) {
            currentParent = new Stack<Long>();
            currentParent.push(destinationNode);
        }

        // Walk the subtree in LFT order, assigning fresh boundaries.
        while (rs.next()) {
            id = rs.getLong(1);
            total_childs = rs.getInt(2);
            direct_childs = rs.getInt(3);
            _lft = getNodeBounds(rs, 4);
            _rgt = getNodeBounds(rs, 5);
            depth = rs.getInt(6);
            if (createMode) {
                // Reading these properties is slow, only do it when needed
                ref = rs.getLong(8);
                if (rs.wasNull())
                    ref = null;
                name = rs.getString(9);
                data = rs.getString(10);
                if (rs.wasNull())
                    data = null;
            }
            left = left.add(spacing).add(BigInteger.ONE);

            // Handle depth differences
            if (lastDepth - depth > 0) {
                BigInteger depthDifference = spacing.add(BigInteger.ONE);
                left = left.add(depthDifference.multiply(BigInteger.valueOf(lastDepth - depth)));
            }
            if (createMode) {
                if (lastDepth < depth) {
                    currentParent.push(newId);
                } else if (lastDepth > depth) {
                    for (int p = 0; p < (lastDepth - depth); p++)
                        currentParent.pop();
                }
            }

            right = left.add(spacing).add(BigInteger.ONE);

            // add child space if needed
            if (total_childs > 0) {
                BigInteger childSpace = spacing.multiply(BigInteger.valueOf(total_childs * 2));
                childSpace = childSpace.add(BigInteger.valueOf((total_childs * 2) - 1));
                right = right.add(childSpace);
                nextLeft = left;
            } else {
                nextLeft = right;
            }

            if (insertBoundaries != null) {
                // insert gap at requested position
                // If we're past the gap, keep adding the insert space to left/right because the added
                // space is never "injected" into the loop, i.e. without adding it the left/right boundaries of
                // nodes after the gap would be too far to the left.
                if (_lft.compareTo(insertBoundaries[0]) > 0) {
                    left = left.add(insertSpace);
                }
                if (_rgt.compareTo(insertBoundaries[0]) > 0) {
                    right = right.add(insertSpace);
                }
            }

            // sanity checks
            if (left.compareTo(right) >= 0) {
                throw new FxTreeException(LOG, "ex.tree.reorganize.failed", counter, left, right,
                        "left greater than right");
            }
            if (insertParent != null && right.compareTo((BigInteger) insertParent.getRight()) > 0) {
                throw new FxTreeException(LOG, "ex.tree.reorganize.failed", counter, left, right,
                        "wrote past parent node bounds");
            }

            // Update the node
            if (createMode) {
                newId = createKeepIds ? id : seq.getId(destMode.getSequencer());
                if (firstCreatedNodeId == -1)
                    firstCreatedNodeId = newId;

                // Create the main entry
                ps.setLong(1, newId);
                ps.setLong(2, currentParent.peek());
                ps.setLong(3, depth + depthDelta);
                ps.setBoolean(4, destMode != FxTreeMode.Live); //only flag non-live tree's dirty
                if (ref == null) {
                    ps.setNull(5, java.sql.Types.NUMERIC);
                } else {
                    ps.setLong(5, ref);
                }
                if (data == null) {
                    ps.setNull(6, java.sql.Types.VARCHAR);
                } else {
                    ps.setString(6, data);
                }
                setNodeBounds(ps, 7, left);
                setNodeBounds(ps, 8, right);
                ps.setInt(9, direct_childs);
                ps.setString(10, name);
                ps.setLong(11, System.currentTimeMillis());
                ps.addBatch();
            } else {
                setNodeBounds(ps, 1, left);
                setNodeBounds(ps, 2, right);
                ps.setInt(3, depth + depthDelta);
                ps.setLong(4, id);
                ps.addBatch();
            }

            // Prepare variables for the next node
            left = nextLeft;
            lastDepth = depth;
            counter++;

            // Execute batch every 10000 items to avoid out of memory
            if (counter % 10000 == 0) {
                ps.executeBatch();
                ps.clearBatch();
            }
        }
        rs.close();
        stmt.close();
        stmt = null;
        ps.executeBatch();

        if (LOG.isDebugEnabled()) {
            final long time = System.currentTimeMillis() - start;

            LOG.debug("Tree reorganization of " + counter + " items completed in " + time + " ms (spaceLen="
                    + spacing + ")");
        }
        return firstCreatedNodeId;
    } catch (FxApplicationException e) {
        throw e instanceof FxTreeException ? (FxTreeException) e : new FxTreeException(e);
    } catch (SQLException e) {
        String next = "";
        if (e.getNextException() != null)
            next = " next:" + e.getNextException().getMessage();
        if (StorageManager.isDuplicateKeyViolation(e))
            throw new FxTreeException(LOG, e, "ex.tree.reorganize.duplicateKey");
        throw new FxTreeException(LOG, e, "ex.tree.reorganize.failed", counter, left, right,
                e.getMessage() + next);
    } catch (Exception e) {
        throw new FxTreeException(e);
    } finally {
        try {
            if (stmt != null)
                stmt.close();
        } catch (Throwable t) {
            /*ignore*/}
        try {
            if (ps != null)
                ps.close();
        } catch (Throwable t) {
            /*ignore*/}
    }
}

From source file:com.pari.nm.utils.db.InventoryDBHelper.java

/**
 * Inserts all SNMP scalar values for a device into the SNMP MIB table as a
 * single JDBC batch, committed in one transaction. Errors are logged (and the
 * transaction rolled back), not propagated to the caller.
 *
 * @param deviceId    id of the device the scalars belong to
 * @param snmpScalars map from scalar OID to its value (serialized via toXml())
 * @param type        type discriminator stored alongside each row
 */
public static void insertSnmpScalarsInBatch(int deviceId, Map<String, ISnmpScalar> snmpScalars, String type) {
    String insQuery = DBHelperConstants.INSERT_SNMP_MIB_QUERY;
    Connection con = null;
    PreparedStatement ps = null;
    try {
        con = DBHelper.getConnection();
        con.setAutoCommit(false);
        ps = con.prepareStatement(insQuery);
        for (Map.Entry<String, ISnmpScalar> entry : snmpScalars.entrySet()) {
            String xmlValue = entry.getValue().toXml();
            ps.setInt(1, deviceId);
            ps.setString(2, entry.getKey());
            // There's no table oid for scalar values
            ps.setString(3, null);
            ps.setString(6, null);
            // TODO: Do we need to compress data before storing?
            if (ps instanceof OraclePreparedStatement) {
                ((OraclePreparedStatement) ps).setStringForClob(4, xmlValue);
            } else {
                logger.debug("PS is not OraclePreparedStatement, inserting as regular string");
                ps.setString(4, xmlValue);
            }
            ps.setString(5, type);
            ps.addBatch();
        }
        ps.executeBatch();
        con.commit();
    } catch (SQLException sqlex) {
        logger.error("Error while inserting rows to database", sqlex);
        try {
            if (con != null) {
                con.rollback();
            }
        } catch (SQLException ex) {
            logger.error("Error while calling rollback on db conn", ex);
        }
    } catch (Exception ex) {
        logger.error("Error while inserting snmp data in batch", ex);
    } finally {
        try {
            if (con != null) {
                con.setAutoCommit(true);
            }
        } catch (SQLException sqlex) {
            logger.error("Error while calling setAutoCommit", sqlex);
        }
        try {
            // FIX: ps was closed unconditionally before; if getConnection()
            // or prepareStatement() failed, the resulting NPE escaped the
            // finally block and skipped DBHelper.releaseConnection(con).
            if (ps != null) {
                ps.close();
            }
        } catch (SQLException sqlex) {
            logger.error("Error while closing ps", sqlex);
        }
        DBHelper.releaseConnection(con);
    }
}