Example usage for java.sql ResultSet TYPE_SCROLL_INSENSITIVE


Introduction

On this page you can find example usage for java.sql ResultSet TYPE_SCROLL_INSENSITIVE.

Prototype

int TYPE_SCROLL_INSENSITIVE

Document

The constant indicating the type for a ResultSet object that is scrollable but generally not sensitive to changes to the data that underlies the ResultSet.
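Before the usage examples, here is a minimal, self-contained sketch of the pattern they all share: the constant is passed to Connection.createStatement (or prepareStatement) so that the returned ResultSet can be scrolled in both directions. The JDBC URL, table, and column names below are placeholders for illustration only, not taken from the examples.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ScrollInsensitiveExample {
    public static void main(String[] args) throws SQLException {
        // Placeholder connection settings; replace with your own database.
        String url = "jdbc:mysql://localhost:3306/mydb?user=me&password=secret";

        try (Connection conn = DriverManager.getConnection(url);
             // TYPE_SCROLL_INSENSITIVE: the cursor may move forward and backward,
             // and the ResultSet does not reflect changes made to the underlying
             // data after it was opened.
             Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                     ResultSet.CONCUR_READ_ONLY);
             ResultSet rs = stmt.executeQuery("SELECT id, name FROM person")) {

            if (rs.last()) {                 // jump to the last row...
                System.out.println("Row count: " + rs.getRow());
                rs.beforeFirst();            // ...then rewind and iterate normally
            }
            while (rs.next()) {
                System.out.println(rs.getInt("id") + "  " + rs.getString("name"));
            }
        }
    }
}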

Usage

From source file:edu.ku.brc.specify.conversion.BasicSQLUtils.java

/**
 * @param connection
 * @param tableName
 * @param mySwitch
 * @param currentServerType
 */
public static void setIdentityInserCommandForSQLServer(final Connection connection, final String tableName,
        final String mySwitch, final SERVERTYPE currentServerType) {
    //REQUIRED FOR SQL SERVER IN ORDER TO PROGRAMMATICALLY SET DEFAULT VALUES
    if (currentServerType == SERVERTYPE.MS_SQLServer) {
        try {
            Statement cntStmt = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            String str = "SET IDENTITY_INSERT " + tableName + " " + mySwitch;
            cntStmt.execute(str);
            str = "SET QUOTED_IDENTIFIER OFF";
            cntStmt.execute(str);
            cntStmt.close();

        } catch (SQLException ex) {
            edu.ku.brc.af.core.UsageTracker.incrSQLUsageCount();
            edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(BasicSQLUtils.class, ex);
            log.error(ex);
            ex.printStackTrace();
            System.exit(0);
        }
    }
}

From source file:edu.ku.brc.specify.conversion.ConvertVerifier.java

/**
 * Verifies the converted agent records by comparing each old/new agent pair field by field
 * and logging any mismatches.
 */
private void verifyAgents() {

    try {
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        ResultSet rs = stmt.executeQuery("SELECT OldID, NewID FROM agent_AgentID");
        while (rs.next()) {
            int oldId = rs.getInt(1);
            int newId = rs.getInt(2);

            newSQL = "SELECT a.AgentType, a.LastName, a.MiddleInitial, a.FirstName, "
                    + "adr.Phone1, adr.Phone2, adr.Address, adr.City, adr.State, adr.PostalCode, adr.Country "
                    + "FROM agent AS a Left Join address AS adr ON a.AgentID = adr.AgentID WHERE a.AgentID = "
                    + newId + " ORDER BY adr.Phone1, adr.Address, adr.City, adr.State, adr.PostalCode";

            oldSQL = "SELECT a.AgentType, IF (a.LastName IS null OR LENGTH(a.LastName) = 0, a.Name, a.LastName), a.MiddleInitial, a.FirstName,"
                    + "aa.Phone1, aa.Phone2 ,adr.Address, adr.City, adr.State ,adr.Postalcode, adr.Country FROM agent AS a "
                    + "Left Join agentaddress AS aa ON a.AgentID = aa.AgentID "
                    + "Left Join address AS adr ON aa.AddressID = adr.AddressID WHERE a.AgentID = " + oldId
                    + " ORDER BY aa.Phone1, adr.Address, adr.City, adr.State ,adr.Postalcode";

            //log.info(newSQL);
            //log.info(oldSQL);
            getResultSets(oldSQL, newSQL);

            while (true) {
                boolean hasOldRec = oldDBRS.next();
                boolean hasNewRec = newDBRS.next();

                if (!hasOldRec || !hasNewRec) {
                    break;
                }

                for (int i = 1; i <= newDBRS.getMetaData().getColumnCount(); i++) {
                    String newStr = newDBRS.getString(i);
                    String oldStr = oldDBRS.getString(i);
                    if (!compareStr(oldStr, newStr)) {
                        String fldName = newDBRS.getMetaData().getColumnName(i);
                        String oldNewIdStr = oldId + " / " + newId;
                        String msg = " Fields " + fldName + " don't match. [" + oldStr + "][" + newStr + "]";
                        tblWriter.logErrors(oldNewIdStr, msg);
                        log.error(oldNewIdStr + msg);
                    }
                }
            }
        }
        rs.close();
        stmt.close();

        oldDBRS.close();
        newDBRS.close();

    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:edu.ku.brc.specify.conversion.BasicSQLUtils.java

public static void removeForeignKeyConstraints(final Connection connection, final String tableName,
        final SERVERTYPE currentServerType) {
    try {
        if (currentServerType == SERVERTYPE.MS_SQLServer) {
            Statement cntStmt = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            String str = "ALTER TABLE " + tableName + " NOCHECK CONSTRAINT ALL";
            cntStmt.execute(str);
            cntStmt.close();

        } else {
            Statement cntStmt = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            String str = "SET FOREIGN_KEY_CHECKS = 0";
            cntStmt.execute(str);
            cntStmt.close();

        }
    } catch (SQLException ex) {
        edu.ku.brc.af.core.UsageTracker.incrSQLUsageCount();
        edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(BasicSQLUtils.class, ex);
        log.error("Error encountered trying to turn off foreign key constraints on database");
        log.error(ex);
    }
}

From source file:edu.ku.brc.specify.conversion.BasicSQLUtils.java

/**
 * @param connection
 * @param currentServerType
 */
public static void removeForeignKeyConstraints(final Connection connection,
        final SERVERTYPE currentServerType) {
    try {
        if (currentServerType == SERVERTYPE.MS_SQLServer) {
            List<String> myTables = getTableNames(connection);
            for (Iterator<String> i = myTables.iterator(); i.hasNext();) {
                String s = i.next();

                Statement cntStmt = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                        ResultSet.CONCUR_READ_ONLY);
                String str = "ALTER TABLE " + s + " NOCHECK CONSTRAINT ALL";
                cntStmt.execute(str);
                cntStmt.close();
            }
        } else {
            Statement cntStmt = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            String str = "SET FOREIGN_KEY_CHECKS = 0";
            cntStmt.execute(str);
            cntStmt.close();
        }
    } catch (SQLException ex) {
        edu.ku.brc.af.core.UsageTracker.incrSQLUsageCount();
        edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(BasicSQLUtils.class, ex);
        log.error(ex);
    }
}

From source file:ProcessRequest.java

public ResultSet executeQuery(String query) {
    Connection conn = null;
    Statement stmt = null;
    ResultSet rs = null;
    System.out.println("query: " + query);
    try {
        Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver");
        if (mSQLPort != null && mSQLHost != null && mDbName != null && mDbPass != null && mDbUser != null
                && !mSQLPort.equals("") && !mSQLHost.equals("") && !mDbName.equals("") && !mDbPass.equals("")
                && !mDbUser.equals("")) {
            conn = DriverManager.getConnection("jdbc:sqlserver://" + mSQLHost + ":" + mSQLPort
                    + ";databaseName=" + mDbName + ";user=" + mDbUser + ";password=" + mDbPass + ";");
            //System.out.println("DB connect: jdbc:sqlserver://"+mSQLHost+":"+mSQLPort+";databaseName=TMSV73;user=sa;password=keith;");
        } else {
            throw new IllegalArgumentException(
                    "config file error.  something was missing and a default value wasn't used.  this is a programmer error.  please contact support.");
        }
        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(query);
        //NOTE: if we close this immediately, we will lose the ResultSet.  the Statement will close itself anyway
        //      after the garbage collector notices it has no references.
        //stmt.close();
    } catch (Exception e) {
        System.out.println(e.toString());
        e.printStackTrace();
        return null;
    }
    return rs;
}

From source file:org.LexGrid.util.sql.lgTables.SQLTableUtilities.java

/**
 * Runs an SQL SELECT statement with the supplied attributes and where clause.
 *
 * @param tableName
 * @param attributeNames
 * @param whereClause
 * @param dbType
 * @return the ResultSet produced by the query
 * @throws SQLException
 */
public ResultSet extractDataFromDB(String tableName, Map attributeNames, String whereClause, String dbType)
        throws SQLException {

    StringBuffer stmt = new StringBuffer();
    PreparedStatement prepStmt = null;
    ResultSet resultSet = null;

    stmt.append("SELECT ");

    for (int i = 0; i < attributeNames.size(); i++) {
        stmt.append(attributeNames.get("" + (i + 1)) + ",");
    }

    stmt = stmt.deleteCharAt(stmt.length() - 1);

    stmt.append(" FROM ");
    stmt.append(tablePrefix_ + tableName);

    if (whereClause != null && !whereClause.equals("")) {
        stmt.append(" WHERE ");
        stmt.append(whereClause);
    }

    log.debug("************ SELECT QUERY ************");
    log.debug(stmt.toString());
    log.debug("**************************************");

    try {
        String statement = new GenericSQLModifier(dbType, false).modifySQL(stmt.toString());

        prepStmt = sqlConnection_.prepareStatement(statement, ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        resultSet = prepStmt.executeQuery();
    } catch (Exception e) {
        log.error("Exception @ extractDataFromDB: " + e.getMessage());
    } finally {
        // prepStmt.close();
    }

    return resultSet;
}

From source file:edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * Looks up all the current Permit Types and uses them, instead of the usystable
 */
@SuppressWarnings("unchecked")
public void createPermitTypePickList() {
    /*
     * try { Statement stmt = oldDBConn.createStatement(); String sqlStr = "select count(Type)
     * from (select distinct Type from permit where Type is not null) as t";
     * 
     * log.info(sqlStr);
     * 
     * boolean useField = false; ResultSet rs = stmt.executeQuery(sqlStr); } catch (SQLException
     * e) { e.printStackTrace(); log.error(e); }
     */

    Session localSession = HibernateUtil.getCurrentSession();
    PickList pl = new PickList();
    pl.initialize();
    Set<PickListItemIFace> items = pl.getItems();

    try {
        pl.setName("Permit");
        pl.setSizeLimit(-1);

        HibernateUtil.beginTransaction();
        localSession.saveOrUpdate(pl);
        HibernateUtil.commitTransaction();

    } catch (Exception ex) {
        log.error("******* " + ex);
        HibernateUtil.rollbackTransaction();
        throw new RuntimeException("Couldn't create PickList for [Permit]");
    }

    try {
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);
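        // Note: a fetch size of Integer.MIN_VALUE is the MySQL Connector/J hint for streaming
        // rows one at a time; the driver documents it for forward-only, read-only statements,
        // so its effect here alongside TYPE_SCROLL_INSENSITIVE is driver-dependent.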

        String sqlStr = "select distinct Type from permit where Type is not null";

        log.info(sqlStr);

        ResultSet rs = stmt.executeQuery(sqlStr);

        // check for no records which is OK
        if (!rs.first()) {
            return;
        }

        int count = 0;
        do {
            String typeStr = rs.getString(1);
            if (typeStr != null) {
                log.info("Permit Type[" + typeStr + "]");
                PickListItem pli = new PickListItem();
                pli.initialize();
                pli.setTitle(typeStr);
                pli.setValue(typeStr);
                pli.setTimestampCreated(now);
                items.add(pli);
                pli.setPickList(pl);
                count++;

            }
        } while (rs.next());

        log.info("Processed Permit Types " + count + " records.");

        HibernateUtil.beginTransaction();

        localSession.saveOrUpdate(pl);

        HibernateUtil.commitTransaction();

    } catch (SQLException e) {
        e.printStackTrace();
        log.error(e);
        throw new RuntimeException(e);
    }

}

From source file:edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * Converts an old USYS table to a PickList.
 * @param usysTableName old table name
 * @param pickListName new picklist name
 * @return true on success, false on failure
 */
@SuppressWarnings("unchecked")
public boolean convertUSYSToPicklist(final Session localSession, final Collection collection,
        final String usysTableName, final String pickListName) {
    List<FieldMetaData> fieldMetaData = getFieldMetaDataFromSchema(oldDBConn, usysTableName);

    int ifaceInx = -1;
    int dataInx = -1;
    int fieldSetInx = -1;
    int i = 0;
    for (FieldMetaData md : fieldMetaData) {
        if (ifaceInx == -1 && md.getName().equals("InterfaceID")) {
            ifaceInx = i + 1;

        } else if (fieldSetInx == -1 && md.getName().equals("FieldSetSubTypeID")) {
            fieldSetInx = i + 1;

        } else if (dataInx == -1 && md.getType().toLowerCase().indexOf("varchar") > -1) {
            dataInx = i + 1;
        }
        i++;
    }

    if (ifaceInx == -1 || dataInx == -1 || fieldSetInx == -1) {
        throw new RuntimeException("Couldn't decypher USYS table ifaceInx[" + ifaceInx + "] dataInx[" + dataInx
                + "] fieldSetInx[" + fieldSetInx + "]");
    }

    PickList pl = new PickList();
    pl.initialize();

    try {
        pl.setName(pickListName);

        if (pickListName.equals("PrepType")) {
            pl.setReadOnly(true);
            pl.setSizeLimit(-1);
            pl.setIsSystem(true);
            pl.setTableName("preptype");
            pl.setType((byte) 1);

        } else {
            pl.setReadOnly(false);
            pl.setSizeLimit(-1);
        }
        pl.setCollection(collection);
        collection.getPickLists().add(pl);

        Transaction trans = localSession.beginTransaction();
        localSession.saveOrUpdate(pl);
        localSession.saveOrUpdate(collection);
        trans.commit();
        localSession.flush();

    } catch (Exception ex) {
        log.error("******* " + ex);
        HibernateUtil.rollbackTransaction();
        throw new RuntimeException("Couldn't create PickList for [" + usysTableName + "]");
    }

    try {
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);
        String sqlStr = "select * from " + usysTableName + " where InterfaceID is not null";

        log.info(sqlStr);

        boolean useField = false;
        ResultSet rs = stmt.executeQuery(sqlStr);

        // check for no records which is OK
        if (!rs.first()) {
            return true;
        }

        do {
            Object fieldObj = rs.getObject(fieldSetInx);
            if (fieldObj != null) {
                useField = true;
                break;
            }
        } while (rs.next());

        Hashtable<String, String> values = new Hashtable<String, String>();

        //log.info("Using FieldSetSubTypeID " + useField);
        rs.first();
        int count = 0;
        do {
            if (!useField || rs.getObject(fieldSetInx) != null) {
                String val = rs.getString(dataInx);
                String lowerStr = val.toLowerCase();
                if (values.get(lowerStr) == null) {
                    //log.info("[" + val + "]");
                    pl.addItem(val, val);
                    values.put(lowerStr, val);
                    count++;
                } else {
                    log.info("Discarding duplicate picklist value[" + val + "]");
                }
            }
        } while (rs.next());

        log.info("Processed " + usysTableName + "  " + count + " records.");

        Transaction trans = localSession.beginTransaction();

        localSession.saveOrUpdate(pl);

        trans.commit();

        localSession.flush();

        return true;

    } catch (SQLException e) {
        e.printStackTrace();
        log.error(e);
        throw new RuntimeException(e);

    }
}

From source file:edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * Convert all the biological attributes to Collection Object Attributes. Each old record may
 * end up being multiple records in the new schema. This will first figure out which columns in
 * the old schema were used and only map those columns to the new database.<br>
 * <br>
 * It also will use the old name if there is not mapping for it. The old name is converted from
 * lower/upper case to be space separated where each part of the name starts with a capital
 * letter.
 * 
 * @param discipline the Discipline
 * @param colToNameMap a map of old names to new names
 * @param typeMap a map for changing the type of the data (meaning an old value may be a boolean
 *            stored in a float)
 * @return true for success
 */
public boolean convertBiologicalAttrs(final Discipline discipline,
        @SuppressWarnings("unused") final Map<String, String> colToNameMap, final Map<String, Short> typeMap) {
    AttributeIFace.FieldType[] attrTypes = { AttributeIFace.FieldType.IntegerType,
            AttributeIFace.FieldType.FloatType, AttributeIFace.FieldType.DoubleType,
            AttributeIFace.FieldType.BooleanType, AttributeIFace.FieldType.StringType,
            // AttributeIFace.FieldType.MemoType
    };

    Session localSession = HibernateUtil.getCurrentSession();

    deleteAllRecordsFromTable(newDBConn, "collectionobjectattr", BasicSQLUtils.myDestinationServerType);
    deleteAllRecordsFromTable(newDBConn, "attributedef", BasicSQLUtils.myDestinationServerType);

    try {
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);

        // grab the field and their type from the old schema
        List<FieldMetaData> oldFieldMetaData = new ArrayList<FieldMetaData>();
        Map<String, FieldMetaData> oldFieldMetaDataMap = getFieldMetaDataFromSchemaHash(oldDBConn,
                "biologicalobjectattributes");

        // create maps to figure which columns where used
        List<String> columnsInUse = new ArrayList<String>();
        Map<String, AttributeDef> attrDefs = new Hashtable<String, AttributeDef>();

        List<Integer> counts = new ArrayList<Integer>();

        int totalCount = 0;

        for (FieldMetaData md : oldFieldMetaData) {
            // Skip these fields
            if (md.getName().indexOf("ID") == -1 && md.getName().indexOf("Timestamp") == -1
                    && md.getName().indexOf("LastEditedBy") == -1) {
                oldFieldMetaDataMap.put(md.getName(), md); // add to map for later

                // log.info(convertColumnName(md.getName())+" "+ md.getType());
                String sqlStr = "select count(" + md.getName() + ") from biologicalobjectattributes where "
                        + md.getName() + " is not null";
                ResultSet rs = stmt.executeQuery(sqlStr);
                if (rs.first() && rs.getInt(1) > 0) {
                    int rowCount = rs.getInt(1);
                    totalCount += rowCount;
                    counts.add(rowCount);

                    log.info(md.getName() + " has " + rowCount + " rows of values");

                    columnsInUse.add(md.getName());
                    AttributeDef attrDef = new AttributeDef();

                    String newName = convertColumnName(md.getName());
                    attrDef.setFieldName(newName);
                    log.debug("mapping[" + newName + "][" + md.getName() + "]");

                    // newNameToOldNameMap.put(newName, md.getName());

                    short dataType = -1;
                    if (typeMap != null) {
                        Short type = typeMap.get(md.getName());
                        if (type != null) {  // only use the override when one exists (a null Short would NPE on unboxing)
                            dataType = type;
                        }
                    }

                    if (dataType == -1) {
                        dataType = getDataType(md.getName(), md.getType()).getType();
                    }

                    attrDef.setDataType(dataType);
                    attrDef.setDiscipline(discipline);
                    attrDef.setTableType(GenericDBConversion.TableType.CollectionObject.getType());
                    attrDef.setTimestampCreated(now);

                    attrDefs.put(md.getName(), attrDef);

                    try {
                        HibernateUtil.beginTransaction();
                        localSession.save(attrDef);
                        HibernateUtil.commitTransaction();

                    } catch (Exception e) {
                        log.error("******* " + e);
                        HibernateUtil.rollbackTransaction();
                        throw new RuntimeException(e);
                    }

                }
                rs.close();
            }
        } // for
        log.info("Total Number of Attrs: " + totalCount);

        // Now that we know which columns are being used we can start the conversion process

        log.info("biologicalobjectattributes columns in use: " + columnsInUse.size());
        if (columnsInUse.size() > 0) {
            int inx = 0;
            StringBuilder str = new StringBuilder("select BiologicalObjectAttributesID");
            for (String name : columnsInUse) {
                str.append(", ");
                str.append(name);
                inx++;
            }

            str.append(" from biologicalobjectattributes order by BiologicalObjectAttributesID");
            log.info("sql: " + str.toString());
            ResultSet rs = stmt.executeQuery(str.toString());

            int[] countVerify = new int[counts.size()];
            for (int i = 0; i < countVerify.length; i++) {
                countVerify[i] = 0;
            }
            boolean useHibernate = false;
            StringBuilder strBufInner = new StringBuilder();
            int recordCount = 0;
            while (rs.next()) {

                if (useHibernate) {
                    Criteria criteria = localSession.createCriteria(CollectionObject.class);
                    criteria.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
                    criteria.add(Restrictions.eq("collectionObjectId", rs.getInt(1)));
                    List<?> list = criteria.list();
                    if (list.size() == 0) {
                        log.error("**** Can't find the CollectionObject " + rs.getInt(1));
                    } else {
                        CollectionObject colObj = (CollectionObject) list.get(0);

                        inx = 2; // skip the first column (the ID)
                        for (String name : columnsInUse) {
                            AttributeDef attrDef = attrDefs.get(name); // the needed
                                                                       // AttributeDef by name
                            FieldMetaData md = oldFieldMetaDataMap.get(name);

                            // Create the new Collection Object Attribute
                            CollectionObjectAttr colObjAttr = new CollectionObjectAttr();
                            colObjAttr.setCollectionObject(colObj);
                            colObjAttr.setDefinition(attrDef);
                            colObjAttr.setTimestampCreated(now);

                            // String oldName = newNameToOldNameMap.get(attrDef.getFieldName());
                            // log.debug("["+attrDef.getFieldName()+"]["+oldName+"]");

                            // log.debug(inx+" "+attrTypes[attrDef.getDataType()]+"
                            // "+md.getName()+" "+md.getType());
                            setData(rs, inx, attrTypes[attrDef.getDataType()], md, colObjAttr);

                            HibernateUtil.beginTransaction();
                            localSession.save(colObjAttr);
                            HibernateUtil.commitTransaction();

                            inx++;
                            if (recordCount % 2000 == 0) {
                                log.info("CollectionObjectAttr Records Processed: " + recordCount);
                            }
                            recordCount++;
                        } // for
                          // log.info("Done - CollectionObjectAttr Records Processed:
                          // "+recordCount);
                    }
                } else {
                    inx = 2; // skip the first column (the ID)
                    for (String name : columnsInUse) {
                        AttributeDef attrDef = attrDefs.get(name); // the needed AttributeDef
                                                                   // by name
                        FieldMetaData md = oldFieldMetaDataMap.get(name);

                        if (rs.getObject(inx) != null) {
                            Integer newRecId = (Integer) getMappedId(rs.getInt(1), "biologicalobjectattributes",
                                    "BiologicalObjectAttributesID");

                            Object data = getData(rs, inx, attrTypes[attrDef.getDataType()], md);
                            boolean isStr = data instanceof String;

                            countVerify[inx - 2]++;

                            strBufInner.setLength(0);
                            strBufInner.append("INSERT INTO collectionobjectattr VALUES (");
                            strBufInner.append("NULL");// Integer.toString(recordCount));
                            strBufInner.append(",");
                            strBufInner.append(getStrValue(isStr ? data : null));
                            strBufInner.append(",");
                            strBufInner.append(getStrValue(isStr ? null : data));
                            strBufInner.append(",");
                            strBufInner.append(getStrValue(now));
                            strBufInner.append(",");
                            strBufInner.append(getStrValue(now));
                            strBufInner.append(",");
                            strBufInner.append(newRecId.intValue());
                            strBufInner.append(",");
                            strBufInner.append(getStrValue(attrDef.getAttributeDefId()));
                            strBufInner.append(")");

                            try {
                                Statement updateStatement = newDBConn.createStatement();
                                // updateStatement.executeUpdate("SET FOREIGN_KEY_CHECKS = 0");
                                removeForeignKeyConstraints(newDBConn, BasicSQLUtils.myDestinationServerType);
                                if (false) {
                                    log.debug(strBufInner.toString());
                                }
                                updateStatement.executeUpdate(strBufInner.toString());
                                updateStatement.clearBatch();
                                updateStatement.close();
                                updateStatement = null;

                            } catch (SQLException e) {
                                log.error(strBufInner.toString());
                                log.error("Count: " + recordCount);
                                e.printStackTrace();
                                log.error(e);
                                throw new RuntimeException(e);
                            }

                            if (recordCount % 2000 == 0) {
                                log.info("CollectionObjectAttr Records Processed: " + recordCount);
                            }
                            recordCount++;
                        }
                        inx++;
                    } // for
                } // if
            } // while
            rs.close();
            stmt.close();

            log.info("Count Verification:");
            for (int i = 0; i < counts.size(); i++) {
                log.info(columnsInUse.get(i) + " [" + counts.get(i) + "][" + countVerify[i] + "] "
                        + (counts.get(i) - countVerify[i]));
            }
        }

    } catch (SQLException e) {
        e.printStackTrace();
        log.error(e);
        throw new RuntimeException(e);
    }
    return true;
}

From source file:edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * Converts the old Loan records into the new schema's Loan or Gift table, depending on
 * whether loans or gifts are being processed.
 * @return true if no errors
 */
public boolean convertLoanRecords(final boolean doingGifts) {
    String newTableName = doingGifts ? "gift" : "loan";
    setIdentityInsertONCommandForSQLServer(newDBConn, newTableName, BasicSQLUtils.myDestinationServerType);

    deleteAllRecordsFromTable(newDBConn, newTableName, BasicSQLUtils.myDestinationServerType); // automatically closes the connection

    if (getNumRecords(oldDBConn, "loan") == 0) {
        return true;
    }

    String[] ignoredFields = { "SpecialConditions", "AddressOfRecordID", "DateReceived", "ReceivedComments",
            "PurposeOfLoan", "OverdueNotiSetDate", "IsFinancialResponsibility", "Version", "CreatedByAgentID",
            "IsFinancialResponsibility", "SrcTaxonomy", "SrcGeography", "CollectionMemberID", "PurposeOfGift",
            "IsFinancialResponsibility", "SpecialConditions", "ReceivedComments", "AddressOfRecordID" };

    Hashtable<String, Boolean> fieldToSkip = new Hashtable<String, Boolean>();
    for (String nm : ignoredFields) {
        fieldToSkip.put(nm, true);
    }

    IdTableMapper loanIdMapper = (IdTableMapper) idMapperMgr.get(newTableName,
            doingGifts ? "GiftID" : "LoanID");
    try {
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);
        StringBuilder str = new StringBuilder();

        List<String> oldFieldNames = getFieldNamesFromSchema(oldDBConn, "loan");

        StringBuilder sql = new StringBuilder("SELECT ");
        sql.append(buildSelectFieldList(oldFieldNames, "loan"));
        sql.append(" FROM loan WHERE loan.Category = ");
        sql.append(doingGifts ? "1" : "0");
        sql.append(" ORDER BY loan.LoanID");
        log.info(sql);

        List<FieldMetaData> newFieldMetaData = getFieldMetaDataFromSchema(newDBConn, newTableName);
        log.info("Number of Fields in New " + newTableName + " " + newFieldMetaData.size());
        String sqlStr = sql.toString();

        if (doingGifts && loanIdMapper == null) {
            StringBuilder mapSQL = new StringBuilder("SELECT LoanID FROM loan WHERE loan.Category = ");
            mapSQL.append(doingGifts ? "1" : "0");
            mapSQL.append(" ORDER BY loan.LoanID");
            log.info(mapSQL.toString());

            BasicSQLUtils.deleteAllRecordsFromTable(oldDBConn, "gift_GiftID",
                    BasicSQLUtils.myDestinationServerType);
            loanIdMapper = new IdTableMapper(newTableName, "GiftID", mapSQL.toString(), false, false);
            idMapperMgr.addMapper(loanIdMapper);
            loanIdMapper.mapAllIdsWithSQL();
        }

        Map<String, Integer> oldNameIndex = new Hashtable<String, Integer>();
        int inx = 1;
        for (String name : oldFieldNames) {
            oldNameIndex.put(name, inx++);
        }

        Map<String, String> colNewToOldMap = doingGifts
                ? createFieldNameMap(new String[] { "GiftNumber", "LoanNumber", "GiftDate", "LoanDate",
                        "IsCurrent", "Current", "IsClosed", "Closed" })
                : createFieldNameMap(new String[] { "IsCurrent", "Current", "IsClosed", "Closed", });

        log.info(sqlStr);
        ResultSet rs = stmt.executeQuery(sqlStr);

        if (hasFrame) {
            if (rs.last()) {
                setProcess(0, rs.getRow());
                rs.first();

            } else {
                rs.close();
                stmt.close();
                return true;
            }
        } else {
            if (!rs.first()) {
                rs.close();
                stmt.close();
                return true;
            }
        }

        PartialDateConv partialDateConv = new PartialDateConv();

        int lastEditedByInx = oldNameIndex.get("LastEditedBy");

        int count = 0;
        do {
            partialDateConv.nullAll();

            str.setLength(0);
            StringBuffer fieldList = new StringBuffer();
            fieldList.append("( ");
            for (int i = 0; i < newFieldMetaData.size(); i++) {
                if ((i > 0) && (i < newFieldMetaData.size())) {
                    fieldList.append(", ");
                }
                String newFieldName = newFieldMetaData.get(i).getName();
                fieldList.append(newFieldName);
            }

            fieldList.append(")");

            str.append("INSERT INTO " + newTableName + " " + fieldList + " VALUES (");
            for (int i = 0; i < newFieldMetaData.size(); i++) {
                if (i > 0) {
                    str.append(", ");
                }

                String newFieldName = newFieldMetaData.get(i).getName();

                if (i == 0) {
                    Integer oldID = rs.getInt(1);
                    Integer newID = loanIdMapper.get(oldID);
                    if (newID != null) {
                        str.append(getStrValue(newID));
                    } else {
                        log.error(newTableName + " Old/New ID problem [" + oldID + "][" + newID + "]");
                    }

                } else if (newFieldName.equals("Version")) // User/Security changes
                {
                    str.append("0");

                } else if (newFieldName.equals("CreatedByAgentID")) // User/Security changes
                {
                    str.append(getCreatorAgentId(null));

                } else if (newFieldName.equals("ModifiedByAgentID")) // User/Security changes
                {
                    String lastEditedByStr = rs.getString(lastEditedByInx);
                    str.append(getModifiedByAgentId(lastEditedByStr));

                } else if (fieldToSkip.get(newFieldName) != null) {
                    str.append("NULL");

                } else if (newFieldName.equals("DisciplineID")) // User/Security changes
                {
                    str.append(curDisciplineID);

                } else if (newFieldName.equals("DivisionID")) // User/Security changes
                {
                    str.append(curDivisionID);

                } else {
                    Integer index = null;
                    String oldMappedColName = colNewToOldMap.get(newFieldName);
                    if (oldMappedColName != null) {
                        index = oldNameIndex.get(oldMappedColName);

                    } else {
                        index = oldNameIndex.get(newFieldName);
                        oldMappedColName = newFieldName;
                    }

                    Object data;
                    if (index == null) {
                        String msg = "convertLoanRecords - Couldn't find new field name[" + newFieldName
                                + "] in old field name in index Map";
                        log.warn(msg);
                        //                            stmt.close();
                        //                            throw new RuntimeException(msg);
                        data = null;
                    } else {

                        data = rs.getObject(index);
                    }
                    if (data != null) {
                        int idInx = newFieldName.lastIndexOf("ID");
                        if (idMapperMgr != null && idInx > -1) {
                            IdMapperIFace idMapper = idMapperMgr.get("loan", oldMappedColName);
                            if (idMapper != null) {
                                data = idMapper.get(rs.getInt(index));
                            } else {
                                log.error("No Map for [" + "loan" + "][" + oldMappedColName + "]");
                            }
                        }
                    }

                    // hack for ??bug?? found in Sp5 that inserted null values in
                    // timestampmodified field of determination table?
                    BasicSQLUtils.fixTimestamps(newFieldName, newFieldMetaData.get(i).getType(), data, str);
                }
            }
            str.append(")");

            if (hasFrame) {
                if (count % 500 == 0) {
                    setProcess(count);
                }

            } else {
                if (count % 2000 == 0) {
                    log.info("Loan/Gifts Records: " + count);
                }
            }

            try {
                //log.debug(str.toString());
                Statement updateStatement = newDBConn.createStatement();
                // updateStatement.executeUpdate("SET FOREIGN_KEY_CHECKS = 0");
                updateStatement.executeUpdate(str.toString());
                updateStatement.clearBatch();
                updateStatement.close();
                updateStatement = null;

            } catch (SQLException e) {
                log.error("Count: " + count);
                log.error("Exception on insert: " + str.toString());
                e.printStackTrace();
                log.error(e);
                rs.close();
                stmt.close();
                throw new RuntimeException(e);
            }

            count++;
            // if (count > 10) break;
        } while (rs.next());

        if (hasFrame) {
            setProcess(count);
        } else {
            log.info("Processed Loan/Gift " + count + " records.");
        }
        rs.close();

        stmt.close();

    } catch (SQLException e) {
        e.printStackTrace();
        log.error(e);
        throw new RuntimeException(e);
    }

    setIdentityInsertOFFCommandForSQLServer(newDBConn, "determination", BasicSQLUtils.myDestinationServerType);

    return true;
}