Example usage for java.sql Statement setFetchSize

Introduction

On this page you can find usage examples for java.sql Statement setFetchSize.

Prototype

void setFetchSize(int rows) throws SQLException;

Document

Gives the JDBC driver a hint as to the number of rows that should be fetched from the database when more rows are needed for ResultSet objects generated by this Statement.
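
For orientation, here is a minimal, self-contained sketch of how the hint is typically used; the connection URL, credentials, and table name are hypothetical placeholders:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class FetchSizeSketch {
    public static void main(String[] args) throws SQLException {
        // Hypothetical connection URL and credentials.
        try (Connection conn = DriverManager.getConnection("jdbc:mysql://localhost/testdb", "user", "pw");
                Statement stmt = conn.createStatement()) {
            // Hint: fetch roughly 100 rows per round trip; the driver may ignore or adjust this.
            stmt.setFetchSize(100);
            try (ResultSet rs = stmt.executeQuery("SELECT WidgetID FROM widget")) { // hypothetical table
                while (rs.next()) {
                    System.out.println(rs.getInt(1));
                }
            }
        }
    }
}

Because the value is only a hint, the driver is free to fetch more or fewer rows at a time; a fetch size of zero tells the driver to use its own default.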

Usage

From source file: edu.ku.brc.specify.conversion.SpecifyDBConverter.java

/**
 * @param oldDBConn
 * @param newDBConn
 */
public void doSetDisciplineIntoCEs(final Connection oldDBConn, final Connection newDBConn) {
    //ProgressFrame frame = conversion.getFrame();

    //IdMapperMgr.getInstance().setDBs(oldDBConn, newDBConn);
    IdMapperIFace ceMapper = IdMapperMgr.getInstance().addTableMapper("collectingevent", "CollectingEventID",
            false);

    HashMap<Integer, Integer> catSeriesToDisciplineHash = new HashMap<Integer, Integer>();
    for (CollectionInfo ci : CollectionInfo.getCollectionInfoList()) {
        catSeriesToDisciplineHash.put(ci.getCatSeriesId(), ci.getDisciplineId());
    }

    //catSeriesToDisciplineHash.put(0, 3);
    //catSeriesToDisciplineHash.put(-568842536, 7);

    String sql = "SELECT csd.CatalogSeriesID, ce.CollectingEventID FROM catalogseriesdefinition AS csd "
            + "Inner Join collectingevent AS ce ON csd.ObjectTypeID = ce.BiologicalObjectTypeCollectedID";

    PreparedStatement pStmt = null;
    Statement stmt = null;
    try {
        pStmt = newDBConn
                .prepareStatement("UPDATE collectingevent SET DisciplineID=? WHERE CollectingEventID=?");
        int totalCnt = BasicSQLUtils.getNumRecords(oldDBConn, "collectingevent");

        stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);
        ResultSet rs = stmt.executeQuery(sql);
        if (frame != null) {
            frame.setProcess(0, totalCnt);
            frame.setDesc("Setting Discipline Ids in CollectingEvents");
        }

        int count = 0;
        while (rs.next()) {
            int catSerId = rs.getInt(1);
            int id = rs.getInt(2);

            Integer dispId = catSeriesToDisciplineHash.get(catSerId);
            if (dispId != null) {
                Integer newId = ceMapper.get(id);
                if (newId != null) {
                    pStmt.setInt(1, dispId);
                    pStmt.setInt(2, newId);
                    pStmt.executeUpdate();

                } else {
                    System.err.println(String.format("Unable to map oldId %d", id));
                }
            } else {
                System.err.println(String.format("Unable to map Cat Series %d to a discipline", catSerId));
            }

            count++;
            if (count % 1000 == 0) {
                if (frame != null) {
                    frame.setProcess(count);
                } else {
                    log.info(String.format("CE Records: %d / %d", count, totalCnt));
                }
            }
        }
        rs.close();

    } catch (Exception ex) {
        ex.printStackTrace();

    } finally {
        try {
            if (pStmt != null)
                pStmt.close();
            if (stmt != null)
                stmt.close();

        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}
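
A note on the fetch size used above: Integer.MIN_VALUE is not a literal row count. With MySQL Connector/J it is the documented signal for streaming results one row at a time instead of buffering the entire result set in memory, and the driver expects a forward-only, read-only statement for it. A minimal fragment, assuming an open Connection conn to a MySQL database:

    Statement stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    stmt.setFetchSize(Integer.MIN_VALUE); // stream rows one at a time instead of buffering them all

The converters in these examples create TYPE_SCROLL_INSENSITIVE statements instead, so whether streaming actually takes effect depends on the Connector/J version in use.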

From source file: edu.ku.brc.specify.conversion.SpecifyDBConverter.java

/**
 * @param oldDBConn
 * @param newDBConn
 */
public void doSetDisciplineIntoLocalities(final Connection oldDBConn, final Connection newDBConn) {
    TableWriter tblWriter = convLogger.getWriter("LocalityDisciplines.html",
            "Setting Discipline into Localities");
    setTblWriter(tblWriter);
    IdHashMapper.setTblWriter(tblWriter);

    IdMapperMgr.getInstance().setDBs(oldDBConn, newDBConn);
    IdMapperIFace ceMapper = IdMapperMgr.getInstance().addTableMapper("locality", "LocalityID", false);

    HashMap<Integer, Integer> catSeriesToDisciplineHash = new HashMap<Integer, Integer>();
    for (CollectionInfo ci : CollectionInfo.getCollectionInfoList()) {
        catSeriesToDisciplineHash.put(ci.getCatSeriesId(), ci.getDisciplineId());
    }

    catSeriesToDisciplineHash.put(0, 3);
    catSeriesToDisciplineHash.put(-568842536, 7);

    String sql = " SELECT l.LocalityName, l.LocalityID FROM locality AS l Left Join collectingevent AS ce ON l.LocalityID = ce.LocalityID WHERE ce.CollectingEventID IS NULL";
    Vector<Object[]> rows = BasicSQLUtils.query(oldDBConn, sql);
    tblWriter.println(String.format("Unused Localities in the Sp5 database: %d<br>", rows.size()));
    if (rows.size() > 0) {
        tblWriter.startTable();
        tblWriter.logHdr("Id", "Locality Name");
        for (Object[] row : rows) {
            tblWriter.logObjRow(row);
        }
        tblWriter.endTable();
    }

    HashSet<Integer> sharedLocDifObjTypeSet = new HashSet<Integer>();
    int numSharedLocalitiesDifObjTypes = 0;

    // Find the Localities that are being shared.
    sql = " SELECT * FROM (SELECT l.LocalityID, COUNT(l.LocalityID) cnt, l.LocalityName FROM locality AS l Left Join collectingevent AS ce ON l.LocalityID = ce.LocalityID WHERE ce.CollectingEventID IS NOT NULL GROUP BY l.LocalityID) T1 WHERE cnt > 1";
    rows = BasicSQLUtils.query(oldDBConn, sql);
    tblWriter.println(String.format("Localities being Shared: %d<br>", rows.size()));
    tblWriter.println("Shared Localities with different ObjectTypes<br>");
    if (rows.size() > 0) {
        tblWriter.startTable();
        tblWriter.logHdr("Id", "Count", "Locality Name");
        for (Object[] row : rows) {
            Integer localityId = (Integer) row[0];
            sql = String.format(
                    "SELECT COUNT(*) FROM (SELECT ce.BiologicalObjectTypeCollectedID, COUNT(ce.BiologicalObjectTypeCollectedID) "
                            + "FROM locality AS l Left Join collectingevent AS ce ON l.LocalityID = ce.LocalityID "
                            + "WHERE l.LocalityID = %d GROUP BY ce.BiologicalObjectTypeCollectedID) T1",
                    localityId);
            int count = BasicSQLUtils.getCountAsInt(oldDBConn, sql);
            if (count > 1) {
                tblWriter.logObjRow(row);
                numSharedLocalitiesDifObjTypes++;
                sharedLocDifObjTypeSet.add(localityId);
            }
        }
        tblWriter.endTable();
    }
    tblWriter.println(String.format("Number of Shared Localities with different ObjectTypes: %d<br>",
            numSharedLocaltiesDifObjTypes));

    sql = "SELECT csd.CatalogSeriesID, l.LocalityID FROM locality AS l Left Join collectingevent AS ce ON l.LocalityID = ce.LocalityID "
            + "Inner Join catalogseriesdefinition AS csd ON ce.BiologicalObjectTypeCollectedID = csd.ObjectTypeID WHERE ce.CollectingEventID IS NOT NULL "
            + "GROUP BY l.LocalityID";

    PreparedStatement pStmt = null;
    Statement stmt = null;
    try {
        pStmt = newDBConn.prepareStatement("UPDATE locality SET DisciplineID=? WHERE LocalityID=?");
        int totalCnt = BasicSQLUtils.getNumRecords(oldDBConn, "locality");

        stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);
        ResultSet rs = stmt.executeQuery(sql);
        if (frame != null) {
            frame.setProcess(0, totalCnt);
            frame.setDesc("Setting Discipline Ids in Locality");
        }

        int count = 0;
        while (rs.next()) {
            int catSerId = rs.getInt(1);
            int id = rs.getInt(2);

            if (sharedLocDifObjTypeSet.contains(id)) {
                continue;
            }

            Integer dispId = catSeriesToDisciplineHash.get(catSerId);
            if (dispId != null) {
                Integer newId = ceMapper.get(id);
                if (newId != null) {
                    pStmt.setInt(1, dispId);
                    pStmt.setInt(2, newId);
                    pStmt.executeUpdate();

                } else {
                    System.err.println(String.format("Unable to map oldId %d", id));
                }
            } else {
                System.err.println(String.format("Unable to map Cat Series %d to a discipline", catSerId));
            }

            count++;
            if (count % 1000 == 0) {
                if (frame != null) {
                    frame.setProcess(count);
                } else {
                    log.info(String.format("CE Records: %d / %d", count, totalCnt));
                }
            }
        }
        rs.close();

    } catch (Exception ex) {
        ex.printStackTrace();

    } finally {
        try {
            if (pStmt != null)
                pStmt.close();
            if (stmt != null)
                stmt.close();

        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}

From source file: edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * Walks the old GTP records and creates a GTP tree def and items based on the ranks and rank
 * names found in the old records
 * 
 * @return the new tree def
 * @throws SQLException on any error while contacting the old database
 */
public GeologicTimePeriodTreeDef convertGTPDefAndItems(final boolean isPaleo) throws SQLException {
    deleteAllRecordsFromTable("geologictimeperiodtreedef", BasicSQLUtils.myDestinationServerType);
    deleteAllRecordsFromTable("geologictimeperiodtreedefitem", BasicSQLUtils.myDestinationServerType);
    log.info("Inferring geologic time period definition from old records");
    int count = 0;

    // get all of the old records
    String sql = "SELECT RankCode, RankName from geologictimeperiod";
    Statement statement = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
            ResultSet.CONCUR_READ_ONLY);
    statement.setFetchSize(Integer.MIN_VALUE);

    ResultSet oldGtpRecords = statement.executeQuery(sql);

    Session localSession = HibernateUtil.getCurrentSession();
    HibernateUtil.beginTransaction();

    GeologicTimePeriodTreeDef def = new GeologicTimePeriodTreeDef();
    def.initialize();
    def.setName("Inferred Geologic Time Period Definition");
    def.setRemarks("");
    def.setFullNameDirection(TreeDefIface.REVERSE);
    localSession.save(def);

    Vector<GeologicTimePeriodTreeDefItem> newItems = new Vector<GeologicTimePeriodTreeDefItem>();

    GeologicTimePeriodTreeDefItem rootItem = addGtpDefItem(0, "Time Root", def);
    rootItem.setIsEnforced(true);
    rootItem.setIsInFullName(false);
    rootItem.setFullNameSeparator(", ");
    localSession.save(rootItem);
    newItems.add(rootItem);
    ++count;

    if (isPaleo) {
        while (oldGtpRecords.next()) {
            // we're modifying the rank since the originals were 1,2,3,...
            // to make them 100, 200, 300, ... (more like the other trees)
            Integer rankCode = oldGtpRecords.getInt(1) * 100;
            String rankName = oldGtpRecords.getString(2);
            GeologicTimePeriodTreeDefItem newItem = addGtpDefItem(rankCode, rankName, def);
            if (newItem != null) {
                newItem.setFullNameSeparator(", ");
                localSession.save(newItem);
                newItems.add(newItem);
            }
            if (++count % 1000 == 0) {
                log.info(count + " geologic time period records processed");
            }
        }
    }

    // sort the vector to put them in parent/child order
    Comparator<GeologicTimePeriodTreeDefItem> itemComparator = new Comparator<GeologicTimePeriodTreeDefItem>() {
        public int compare(GeologicTimePeriodTreeDefItem o1, GeologicTimePeriodTreeDefItem o2) {
            return o1.getRankId().compareTo(o2.getRankId());
        }
    };
    Collections.sort(newItems, itemComparator);

    // set the parent/child pointers
    for (int i = 0; i < newItems.size() - 1; ++i) {
        GeologicTimePeriodTreeDefItem parent = newItems.get(i);
        GeologicTimePeriodTreeDefItem child = newItems.get(i + 1);
        parent.setChild(child);
        child.setParent(parent);
    }

    HibernateUtil.commitTransaction();

    log.info("Finished inferring GTP tree definition and items");
    return def;
}

From source file: edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * Looks up all the current Permit Types and uses them, instead of the usystable
 */
@SuppressWarnings("unchecked")
public void createPermitTypePickList() {
    /*
     * try { Statement stmt = oldDBConn.createStatement(); String sqlStr = "select count(Type)
     * from (select distinct Type from permit where Type is not null) as t";
     * 
     * log.info(sqlStr);
     * 
     * boolean useField = false; ResultSet rs = stmt.executeQuery(sqlStr); } catch (SQLException
     * e) { e.printStackTrace(); log.error(e); }
     */

    Session localSession = HibernateUtil.getCurrentSession();
    PickList pl = new PickList();
    pl.initialize();
    Set<PickListItemIFace> items = pl.getItems();

    try {
        pl.setName("Permit");
        pl.setSizeLimit(-1);

        HibernateUtil.beginTransaction();
        localSession.saveOrUpdate(pl);
        HibernateUtil.commitTransaction();

    } catch (Exception ex) {
        log.error("******* " + ex);
        HibernateUtil.rollbackTransaction();
        throw new RuntimeException("Couldn't create PickList for [Permit]");
    }

    try {
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);

        String sqlStr = "select distinct Type from permit where Type is not null";

        log.info(sqlStr);

        ResultSet rs = stmt.executeQuery(sqlStr);

        // check for no records which is OK
        if (!rs.first()) {
            return;
        }

        int count = 0;
        do {
            String typeStr = rs.getString(1);
            if (typeStr != null) {
                log.info("Permit Type[" + typeStr + "]");
                PickListItem pli = new PickListItem();
                pli.initialize();
                pli.setTitle(typeStr);
                pli.setValue(typeStr);
                pli.setTimestampCreated(now);
                items.add(pli);
                pli.setPickList(pl);
                count++;

            }
        } while (rs.next());

        log.info("Processed Permit Types " + count + " records.");

        HibernateUtil.beginTransaction();

        localSession.saveOrUpdate(pl);

        HibernateUtil.commitTransaction();

    } catch (SQLException e) {
        e.printStackTrace();
        log.error(e);
        throw new RuntimeException(e);
    }

}

From source file: edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * Converts an old USYS table to a PickList.
 * @param usysTableName old table name
 * @param pickListName new picklist name
 * @return true on success, false on failure
 */
@SuppressWarnings("unchecked")
public boolean convertUSYSToPicklist(final Session localSession, final Collection collection,
        final String usysTableName, final String pickListName) {
    List<FieldMetaData> fieldMetaData = getFieldMetaDataFromSchema(oldDBConn, usysTableName);

    int ifaceInx = -1;
    int dataInx = -1;
    int fieldSetInx = -1;
    int i = 0;
    for (FieldMetaData md : fieldMetaData) {
        if (ifaceInx == -1 && md.getName().equals("InterfaceID")) {
            ifaceInx = i + 1;

        } else if (fieldSetInx == -1 && md.getName().equals("FieldSetSubTypeID")) {
            fieldSetInx = i + 1;

        } else if (dataInx == -1 && md.getType().toLowerCase().indexOf("varchar") > -1) {
            dataInx = i + 1;
        }
        i++;
    }

    if (ifaceInx == -1 || dataInx == -1 || fieldSetInx == -1) {
        throw new RuntimeException("Couldn't decypher USYS table ifaceInx[" + ifaceInx + "] dataInx[" + dataInx
                + "] fieldSetInx[" + fieldSetInx + "]");
    }

    PickList pl = new PickList();
    pl.initialize();

    try {
        pl.setName(pickListName);

        if (pickListName.equals("PrepType")) {
            pl.setReadOnly(true);
            pl.setSizeLimit(-1);
            pl.setIsSystem(true);
            pl.setTableName("preptype");
            pl.setType((byte) 1);

        } else {
            pl.setReadOnly(false);
            pl.setSizeLimit(-1);
        }
        pl.setCollection(collection);
        collection.getPickLists().add(pl);

        Transaction trans = localSession.beginTransaction();
        localSession.saveOrUpdate(pl);
        localSession.saveOrUpdate(collection);
        trans.commit();
        localSession.flush();

    } catch (Exception ex) {
        log.error("******* " + ex);
        HibernateUtil.rollbackTransaction();
        throw new RuntimeException("Couldn't create PickList for [" + usysTableName + "]");
    }

    try {
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);
        String sqlStr = "select * from " + usysTableName + " where InterfaceID is not null";

        log.info(sqlStr);

        boolean useField = false;
        ResultSet rs = stmt.executeQuery(sqlStr);

        // check for no records which is OK
        if (!rs.first()) {
            return true;
        }

        do {
            Object fieldObj = rs.getObject(fieldSetInx);
            if (fieldObj != null) {
                useField = true;
                break;
            }
        } while (rs.next());

        Hashtable<String, String> values = new Hashtable<String, String>();

        //log.info("Using FieldSetSubTypeID " + useField);
        rs.first();
        int count = 0;
        do {
            if (!useField || rs.getObject(fieldSetInx) != null) {
                String val = rs.getString(dataInx);
                String lowerStr = val.toLowerCase();
                if (values.get(lowerStr) == null) {
                    //log.info("[" + val + "]");
                    pl.addItem(val, val);
                    values.put(lowerStr, val);
                    count++;
                } else {
                    log.info("Discarding duplicate picklist value[" + val + "]");
                }
            }
        } while (rs.next());

        log.info("Processed " + usysTableName + "  " + count + " records.");

        Transaction trans = localSession.beginTransaction();

        localSession.saveOrUpdate(pl);

        trans.commit();

        localSession.flush();

        return true;

    } catch (SQLException e) {
        e.printStackTrace();
        log.error(e);
        throw new RuntimeException(e);

    }
}

From source file: edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * @param treeDef
 * @throws SQLException
 */
public void convertGeography(final GeographyTreeDef treeDef, final String dispName, final boolean firstTime)
        throws SQLException {
    TableWriter tblWriter = convLogger.getWriter("Geography" + (dispName != null ? dispName : "") + ".html",
            "Geography");
    setTblWriter(tblWriter);

    IdHashMapper.setTblWriter(tblWriter);

    if (firstTime) {
        // empty out any pre-existing records
        deleteAllRecordsFromTable(newDBConn, "geography", BasicSQLUtils.myDestinationServerType);
    }

    IdTableMapper geoIdMapper = (IdTableMapper) IdMapperMgr.getInstance().get("geography", "GeographyID");
    if (geoIdMapper == null) {
        // create an ID mapper for the geography table (mainly for use in converting localities)
        geoIdMapper = IdMapperMgr.getInstance().addTableMapper("geography", "GeographyID");
    } else {
        geoIdMapper.clearRecords();
    }

    Hashtable<Integer, Geography> oldIdToGeoMap = new Hashtable<Integer, Geography>();

    // get a Hibernate session for saving the new records
    Session localSession = HibernateUtil.getCurrentSession();
    HibernateUtil.beginTransaction();

    // get all of the old records
    String sql = "SELECT GeographyID,ContinentOrOcean,Country,State,County,LastEditedBy FROM geography";
    Statement statement = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
            ResultSet.CONCUR_READ_ONLY);
    statement.setFetchSize(Integer.MIN_VALUE);

    ResultSet oldGeoRecords = statement.executeQuery(sql);

    fixGeography("ContinentOrOcean");
    fixGeography("Country");
    fixGeography("State");
    fixGeography("County");

    if (hasFrame) {
        if (oldGeoRecords.last()) {
            setProcess(0, oldGeoRecords.getRow());
            oldGeoRecords.first();
        }
    } else {
        oldGeoRecords.first();
    }

    // setup the root Geography record (planet Earth)
    Geography planetEarth = new Geography();
    planetEarth.initialize();
    planetEarth.setName("Earth");
    planetEarth.setCommonName("Earth");
    planetEarth.setRankId(0);
    planetEarth.setDefinition(treeDef);
    for (GeographyTreeDefItem defItem : treeDef.getTreeDefItems()) {
        if (defItem.getRankId() == 0) {
            planetEarth.setDefinitionItem(defItem);
            break;
        }
    }
    GeographyTreeDefItem defItem = treeDef.getDefItemByRank(0);
    planetEarth.setDefinitionItem(defItem);

    int counter = 0;
    // for each old record, convert the record
    do {
        if (counter % 500 == 0) {
            if (hasFrame) {
                setProcess(counter);

            } else {
                log.info("Converted " + counter + " geography records");
            }
        }

        // grab the important data fields from the old record
        int oldId = oldGeoRecords.getInt(1);
        String cont = fixSize(tblWriter, oldId, "continent", oldGeoRecords.getString(2), 64);
        String country = fixSize(tblWriter, oldId, "country", oldGeoRecords.getString(3), 64);
        String state = fixSize(tblWriter, oldId, "state", oldGeoRecords.getString(4), 64);
        String county = fixSize(tblWriter, oldId, "county", oldGeoRecords.getString(5), 64);
        String lastEditedBy = oldGeoRecords.getString(6);

        Integer agtId = getCreatorAgentId(lastEditedBy);
        Agent createdByAgent = getCreatedByAgent(localSession, agtId);
        Agent modifiedByAgent = getAgentObj(localSession, getCurAgentModifierID());

        /*cont    = isNotEmpty(county)  && cont.equals("null")    ? null : cont;
        country = isNotEmpty(country) && country.equals("null") ? null : country;
        state   = isNotEmpty(state)   && state.equals("null")   ? null : state;
        county  = isNotEmpty(county)  && county.equals("null")  ? null : county;
        */

        if (isEmpty(cont) && isEmpty(country) && isEmpty(state) && isEmpty(county)) {
            //String msg = "For Record Id["+oldId+"] Continent, Country, State and County are all null.";
            //log.error(msg);
            //tblWriter.logError(msg);

            cont = "Undefined";
            country = "Undefined";
            state = "Undefined";
            county = "Undefined";

        } else if (isEmpty(cont) && isEmpty(country) && isEmpty(state)) {
            //String msg = "For Record Id["+oldId+"] Continent, Country and State are all null.";
            //log.error(msg);
            //tblWriter.logError(msg);

            cont = "Undefined";
            country = "Undefined";
            state = "Undefined";

        } else if (isEmpty(cont) && isEmpty(country)) {
            //String msg = "For Record Id["+oldId+"] Country is null.";
            //log.error(msg);
            //tblWriter.logError(msg);

            cont = "Undefined";
            country = "Undefined";

        } else if (isEmpty(cont)) {
            //String msg = "For Record Id["+oldId+"] Country is null.";
            //log.error(msg);
            //tblWriter.logError(msg);

            cont = "Undefined";
        }

        // create a new Geography object from the old data
        List<Geography> newGeos = convertOldGeoRecord(cont, country, state, county, createdByAgent,
                modifiedByAgent, planetEarth);
        if (newGeos.size() > 0) {
            Geography lowestLevel = newGeos.get(newGeos.size() - 1);

            oldIdToGeoMap.put(oldId, lowestLevel);
        }

        counter++;

    } while (oldGeoRecords.next());

    if (hasFrame) {
        setProcess(counter);

    } else {
        log.info("Converted " + counter + " geography records");
    }

    TreeHelper.fixFullnameForNodeAndDescendants(planetEarth);
    planetEarth.setNodeNumber(1);
    fixNodeNumbersFromRoot(planetEarth);

    localSession.save(planetEarth);

    HibernateUtil.commitTransaction();
    log.info("Converted " + counter + " geography records");

    if (shouldCreateMapTables) {
        // add all of the ID mappings
        for (Integer oldId : oldIdToGeoMap.keySet()) {
            Geography geo = oldIdToGeoMap.get(oldId);
            geoIdMapper.put(oldId, geo.getId());
        }
    }

    if (firstTime) {
        // set up Geography foreign key mapping for locality
        idMapperMgr.mapForeignKey("Locality", "GeographyID", "Geography", "GeographyID");
    }
}

From source file: edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * @param tblWriter
 * @param treeDef
 * @param isPaleo
 * @throws SQLException
 */
public void convertGTP(final TableWriter tblWriter, final GeologicTimePeriodTreeDef treeDef,
        final boolean isPaleo) throws SQLException {
    deleteAllRecordsFromTable("geologictimeperiod", BasicSQLUtils.myDestinationServerType);

    log.info("Converting old geologic time period records");
    int count = 0;

    // create an ID mapper for the geologictimeperiod table
    IdTableMapper gtpIdMapper = IdMapperMgr.getInstance().addTableMapper("geologictimeperiod",
            "GeologicTimePeriodID");
    Hashtable<Integer, GeologicTimePeriod> oldIdToGTPMap = new Hashtable<Integer, GeologicTimePeriod>();

    //        String    sql = "SELECT g.GeologicTimePeriodID,g.RankCode,g.Name,g.Standard,g.Remarks,g.TimestampModified,g.TimestampCreated,p1.Age as Upper," +
    //                     "p1.AgeUncertainty as UpperUncertainty,p2.Age as Lower,p2.AgeUncertainty as LowerUncertainty FROM geologictimeperiod g, " +
    //                     "geologictimeboundary p1, geologictimeboundary p2 WHERE g.UpperBoundaryID=p1.GeologicTimeBoundaryID AND " +
    //                     "g.LowerBoundaryID=p2.GeologicTimeBoundaryID ORDER BY Lower DESC, RankCode";
    String sql = "SELECT g.GeologicTimePeriodID,g.RankCode,g.Name,g.Standard,g.Remarks,g.TimestampModified,g.TimestampCreated,gb1.Age as Upper,"
            + "gb1.AgeUncertainty as UpperUncertainty,gb2.Age as Lower,gb2.AgeUncertainty as LowerUncertainty FROM geologictimeperiod g "
            + "LEFT OUTER JOIN geologictimeboundary gb1 ON g.UpperBoundaryID = gb1.GeologicTimeBoundaryID "
            + "LEFT OUTER JOIN geologictimeboundary gb2 ON g.LowerBoundaryID = gb2.GeologicTimeBoundaryID "
            + "ORDER BY Lower DESC, RankCode";
    Statement statement = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
            ResultSet.CONCUR_READ_ONLY);
    statement.setFetchSize(Integer.MIN_VALUE);
    ResultSet rs = statement.executeQuery(sql);

    Session localSession = HibernateUtil.getCurrentSession();
    HibernateUtil.beginTransaction();

    Vector<GeologicTimePeriod> newItems = new Vector<GeologicTimePeriod>();

    GeologicTimePeriod allTime = new GeologicTimePeriod();
    allTime.initialize();
    allTime.setDefinition(treeDef);
    GeologicTimePeriodTreeDefItem rootDefItem = treeDef.getDefItemByRank(0);
    allTime.setDefinitionItem(rootDefItem);
    allTime.setRankId(0);
    allTime.setName("Time");
    allTime.setFullName("Time");
    allTime.setStartPeriod(100000f);
    allTime.setEndPeriod(0f);
    allTime.setEndUncertainty(0f);
    allTime.setTimestampCreated(now);
    ++count;
    newItems.add(allTime);

    ArrayList<GeologicTimePeriod> needsPlaceHolderList = new ArrayList<GeologicTimePeriod>();

    boolean needsTbl = true;

    if (isPaleo) {
        while (rs.next()) {
            Integer id = rs.getInt(1);
            Integer rank = rs.getInt(2) * 100;
            String name = rs.getString(3);
            String std = rs.getString(4);
            String rem = rs.getString(5);
            Date modTDate = rs.getDate(6);
            Date creTDate = rs.getDate(7);
            Timestamp modT = (modTDate != null) ? new Timestamp(modTDate.getTime()) : null;
            Timestamp creT = (creTDate != null) ? new Timestamp(creTDate.getTime()) : null;
            Float upper = (Float) rs.getObject(8);
            Float uError = (Float) rs.getObject(9);
            Float lower = (Float) rs.getObject(10);
            Float lError = (Float) rs.getObject(11);

            if (isEmpty(name)) {
                if (needsTbl) {
                    tblWriter.startTable();
                    tblWriter.logHdr("ID", "Rank Name", "Name", "Reason");
                    needsTbl = false;
                }
                tblWriter.log(id.toString(), rank.toString(), name, "Name is null, Name set to 'XXXX'");
                log.error("The Name is empty (or null) for GTP ID[" + id + "]  Rank[" + rank + "]");
                name = "XXXX";
            }

            if (modT == null && creT == null) {
                creT = now;
                modT = now;

            } else if (modT == null && creT != null) {
                modT = new Timestamp(creT.getTime());

            } else if (modT != null && creT == null) {
                creT = new Timestamp(modT.getTime());
            }
            // else (neither are null, so do nothing)

            GeologicTimePeriodTreeDefItem defItem = rank != null ? treeDef.getDefItemByRank(rank) : null;

            GeologicTimePeriod gtp = new GeologicTimePeriod();
            gtp.initialize();
            gtp.setName(name);
            gtp.setFullName(name);
            gtp.setDefinitionItem(defItem);
            gtp.setRankId(rank);
            gtp.setDefinition(treeDef);
            gtp.setStartPeriod(lower);
            gtp.setStartUncertainty(lError);
            gtp.setEndPeriod(upper);
            gtp.setEndUncertainty(uError);
            gtp.setStandard(std);
            gtp.setRemarks(rem);
            gtp.setTimestampCreated(creT);
            gtp.setTimestampModified(modT);

            if (lower == null || upper == null || rank == null) {
                needsPlaceHolderList.add(gtp);
                log.debug("PlaceHold Old ID: " + id);
            } else {
                newItems.add(gtp);
            }

            oldIdToGTPMap.put(id, gtp);

            if (++count % 500 == 0) {
                log.info(count + " geologic time period records converted");
            }
        }

        // now we need to fix the parent/pointers
        for (int i = 0; i < newItems.size(); ++i) {
            GeologicTimePeriod gtp = newItems.get(i);
            for (int j = 0; j < newItems.size(); ++j) {
                GeologicTimePeriod child = newItems.get(j);
                if (isParentChildPair(gtp, child)) {
                    gtp.addChild(child);
                }
            }
        }

        if (needsPlaceHolderList.size() > 0) {
            int rank = 100;
            for (GeologicTimePeriodTreeDefItem di : treeDef.getTreeDefItems()) {
                System.out.println(di.getName() + " -> " + di.getRankId());
            }
            GeologicTimePeriodTreeDefItem defItem = treeDef.getDefItemByRank(rank);

            GeologicTimePeriod gtp = new GeologicTimePeriod();
            gtp.initialize();
            gtp.setName("Placeholder");
            gtp.setFullName("Placeholder");
            gtp.setDefinitionItem(defItem);
            gtp.setRankId(rank);
            gtp.setDefinition(treeDef);
            gtp.setStartPeriod(0.0f);
            gtp.setStartUncertainty(0.0f);
            gtp.setEndPeriod(0.0f);
            gtp.setEndUncertainty(0.0f);
            gtp.setStandard(null);
            gtp.setRemarks(null);
            gtp.setTimestampCreated(now);
            gtp.setTimestampModified(now);
            allTime.addChild(gtp);

            rank = 200;
            defItem = treeDef.getDefItemByRank(rank);

            for (GeologicTimePeriod gtpPH : needsPlaceHolderList) {
                gtpPH.setDefinition(treeDef);
                gtpPH.setDefinitionItem(defItem);
                gtpPH.setRankId(rank);
                gtpPH.setStartPeriod(0.0f);
                gtpPH.setStartUncertainty(0.0f);
                gtpPH.setEndPeriod(0.0f);
                gtpPH.setEndUncertainty(0.0f);
                gtp.addChild(gtpPH);
            }
        }

        TreeHelper.fixFullnameForNodeAndDescendants(allTime);
    }

    // fix node number, child node number stuff
    allTime.setNodeNumber(1);
    fixNodeNumbersFromRoot(allTime);
    localSession.save(allTime);

    HibernateUtil.commitTransaction();

    if (shouldCreateMapTables) {
        // add all of the ID mappings
        for (Integer oldId : oldIdToGTPMap.keySet()) {
            if (oldId != null) {
                GeologicTimePeriod gtp = oldIdToGTPMap.get(oldId);
                if (gtp != null) {
                    if (gtp.getId() != null) {
                        gtpIdMapper.put(oldId, gtp.getId());
                    } else {
                        log.debug("GTP id is null: " + gtp.getName());
                    }
                } else {
                    log.debug("GTP missing in hash for Old ID: " + oldId);
                }
            } else {
                log.debug("Old ID in Hash is null: " + oldId);
            }
        }
    }

    // set up geologictimeperiod foreign key mapping for stratigraphy
    IdMapperMgr.getInstance().mapForeignKey("Stratigraphy", "GeologicTimePeriodID", "GeologicTimePeriod",
            "GeologicTimePeriodID");

    log.info(count + " geologic time period records converted");

    if (!needsTbl) {
        tblWriter.endTable();
    }
}

From source file: edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * @param tableName
 */
protected void convertLocalityExtraInfo(final String tableName, final boolean isGeoCoordDetail) {
    removeForeignKeyConstraints(newDBConn, BasicSQLUtils.myDestinationServerType);

    String capName = capitalize(tableName);
    TableWriter tblWriter = convLogger.getWriter(capName + ".html", capName);
    setTblWriter(tblWriter);
    IdHashMapper.setTblWriter(tblWriter);

    setDesc("Converting " + capName);

    List<String> localityDetailNamesTmp = getFieldNamesFromSchema(newDBConn, tableName);

    List<String> localityDetailNames = new ArrayList<String>();
    Hashtable<String, Boolean> nameHash = new Hashtable<String, Boolean>();

    for (String fieldName : localityDetailNamesTmp) {
        localityDetailNames.add(fieldName);
        nameHash.put(fieldName, true);
        System.out.println("[" + fieldName + "]");
    }

    String fieldList = buildSelectFieldList(localityDetailNames, null);
    log.info(fieldList);

    IdMapperIFace locIdMapper = idMapperMgr.get("locality", "LocalityID");
    IdMapperIFace agtIdMapper = idMapperMgr.get("agent", "AgentID");

    Statement updateStatement = null;
    try {
        updateStatement = newDBConn.createStatement();

        Hashtable<String, Boolean> usedFieldHash = new Hashtable<String, Boolean>();

        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);
        Integer countRows = getCount(
                "select count(LocalityID) from locality,geography where locality.GeographyID = geography.GeographyID");
        if (countRows != null) {
            frame.setProcess(0, countRows);
        }

        ResultSet rs = stmt.executeQuery(
                "select locality.*,geography.* from locality LEFT JOIN geography on locality.GeographyID = geography.GeographyID ");

        StringBuilder colSQL = new StringBuilder();
        StringBuilder valuesSQL = new StringBuilder();

        int rows = 0;
        while (rs.next()) {
            usedFieldHash.clear();
            valuesSQL.setLength(0);

            boolean hasData = false;
            ResultSetMetaData metaData = rs.getMetaData();
            int cols = metaData.getColumnCount();
            for (int i = 1; i <= cols; i++) {
                String colName = metaData.getColumnName(i); // Old Column Name

                if (colName.equals("GeoRefDetBy")) {
                    colName = "AgentID";
                }

                if ((nameHash.get(colName) == null || usedFieldHash.get(colName) != null)
                        && !colName.startsWith("Range")) {
                    if (rows == 0) {
                        log.debug("Skipping[" + colName + "]");
                    }
                    continue;
                }

                usedFieldHash.put(colName, true);

                if (rows == 0) {
                    System.err.println("[" + colName + "]");

                    if (colName.equals("Range")) {
                        if (!isGeoCoordDetail) {
                            if (colSQL.length() > 0)
                                colSQL.append(",");
                            colSQL.append("RangeDesc");
                        }

                    } else if (isGeoCoordDetail) {
                        if (!colName.equals("RangeDirection")) {
                            if (colSQL.length() > 0)
                                colSQL.append(",");
                            colSQL.append(colName);
                        }

                    } else {
                        if (colSQL.length() > 0)
                            colSQL.append(",");
                        colSQL.append(colName);
                    }
                }

                String value;
                if (colName.equals("LocalityID")) {
                    Integer oldId = rs.getInt(i);
                    Integer newId = locIdMapper.get(oldId);
                    if (newId != null) {
                        value = Integer.toString(newId);
                    } else {
                        String msg = "Couldn't map LocalityId oldId[" + rs.getInt(i) + "]";
                        log.error(msg);
                        tblWriter.logError(msg);
                        value = "NULL";
                    }

                } else if (isGeoCoordDetail && colName.equals("GeoRefDetDate")) {
                    Integer dateInt = rs.getInt(i);
                    value = getStrValue(dateInt, "date");

                } else if (colName.startsWith("YesNo")) {
                    // use getObject so a SQL NULL maps to a null Integer (getInt would return 0)
                    Integer bool = (Integer) rs.getObject(i);
                    if (bool == null) {
                        value = "NULL";

                    } else if (bool == 0) {
                        value = "0";
                    } else {
                        value = "1";
                    }
                } else if (isGeoCoordDetail && colName.equals("AgentID")) {
                    Integer agentID = (Integer) rs.getObject(i);
                    if (agentID != null) {
                        Integer newID = agtIdMapper.get(agentID);
                        if (newID != null) {
                            value = newID.toString();
                        } else {
                            String msg = "Couldn't map GeoRefDetBY (Agent) oldId[" + agentID + "]";
                            log.error(msg);
                            tblWriter.logError(msg);
                            value = "NULL";
                        }
                    } else {
                        value = "NULL";
                    }

                } else if (colName.equals("Range") || colName.equals("RangeDirection")) {
                    if (!isGeoCoordDetail) {
                        String range = rs.getString(i);
                        range = escapeStringLiterals(range);
                        if (range != null) {
                            hasData = true;
                            value = "'" + range + "'";
                        } else {
                            value = "NULL";
                        }
                    } else {
                        value = null;
                    }
                } else {
                    Object obj = rs.getObject(i);
                    if (obj != null && !colName.equals("TimestampCreated")
                            && !colName.equals("TimestampModified")) {
                        hasData = true;
                    }
                    /*if (obj instanceof String)
                    {
                    String str = (String)obj;
                    int inx = str.indexOf('\'');
                    if (inx > -1)
                    {
                        obj = escapeStringLiterals(str);
                    }
                    }*/
                    value = getStrValue(obj);
                }
                // log.debug(colName+" ["+value+"]");

                if (value != null) {
                    if (valuesSQL.length() > 0) {
                        valuesSQL.append(",");
                    }
                    valuesSQL.append(value);
                }
            }

            if (hasData) {
                String insertSQL = "INSERT INTO " + tableName + " (" + colSQL.toString()
                        + ", Version, CreatedByAgentID, ModifiedByAgentID) " + " VALUES(" + valuesSQL.toString()
                        + ", 0, " + getCreatorAgentId(null) + "," + getModifiedByAgentId(null) + ")";

                /*if (true)
                {
                log.info(insertSQL);
                }*/
                try {
                    updateStatement.executeUpdate(insertSQL);
                    updateStatement.clearBatch();

                } catch (Exception ex) {
                    System.out.println("isGeoCoordDetail: " + isGeoCoordDetail);
                    System.out.println(insertSQL);
                    ex.printStackTrace();
                }
            }
            rows++;
            if (rows % 500 == 0) {
                frame.setProcess(rows);
            }
        }

        rs.close();
        stmt.close();

    } catch (Exception ex) {
        ex.printStackTrace();

    } finally {
        try {
            updateStatement.close();
        } catch (SQLException ex) {
            ex.printStackTrace();
        }
    }

}

From source file: edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * Convert all the biological attributes to Collection Object Attributes. Each old record may
 * end up being multiple records in the new schema. This will first figure out which columns in
 * the old schema were used and only map those columns to the new database.<br>
 * <br>
 * It also will use the old name if there is no mapping for it. The old name is converted from
 * lower/upper case to be space separated where each part of the name starts with a capital
 * letter.
 * 
 * @param discipline the Discipline
 * @param colToNameMap a map for old names to new names
 * @param typeMap a map for changing the type of the data (meaning an old value may be a boolean
 *            stored in a float)
 * @return true for success
 */
public boolean convertBiologicalAttrs(final Discipline discipline,
        @SuppressWarnings("unused") final Map<String, String> colToNameMap, final Map<String, Short> typeMap) {
    AttributeIFace.FieldType[] attrTypes = { AttributeIFace.FieldType.IntegerType,
            AttributeIFace.FieldType.FloatType, AttributeIFace.FieldType.DoubleType,
            AttributeIFace.FieldType.BooleanType, AttributeIFace.FieldType.StringType,
            // AttributeIFace.FieldType.MemoType
    };

    Session localSession = HibernateUtil.getCurrentSession();

    deleteAllRecordsFromTable(newDBConn, "collectionobjectattr", BasicSQLUtils.myDestinationServerType);
    deleteAllRecordsFromTable(newDBConn, "attributedef", BasicSQLUtils.myDestinationServerType);

    try {
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);

        // grab the fields and their types from the old schema
        Map<String, FieldMetaData> oldFieldMetaDataMap = getFieldMetaDataFromSchemaHash(oldDBConn,
                "biologicalobjectattributes");
        List<FieldMetaData> oldFieldMetaData = new ArrayList<FieldMetaData>(oldFieldMetaDataMap.values());

        // create maps to figure out which columns were used
        List<String> columnsInUse = new ArrayList<String>();
        Map<String, AttributeDef> attrDefs = new Hashtable<String, AttributeDef>();

        List<Integer> counts = new ArrayList<Integer>();

        int totalCount = 0;

        for (FieldMetaData md : oldFieldMetaData) {
            // Skip these fields
            if (md.getName().indexOf("ID") == -1 && md.getName().indexOf("Timestamp") == -1
                    && md.getName().indexOf("LastEditedBy") == -1) {
                oldFieldMetaDataMap.put(md.getName(), md); // add to map for later

                // log.info(convertColumnName(md.getName())+" "+ md.getType());
                String sqlStr = "select count(" + md.getName() + ") from biologicalobjectattributes where "
                        + md.getName() + " is not null";
                ResultSet rs = stmt.executeQuery(sqlStr);
                if (rs.first() && rs.getInt(1) > 0) {
                    int rowCount = rs.getInt(1);
                    totalCount += rowCount;
                    counts.add(rowCount);

                    log.info(md.getName() + " has " + rowCount + " rows of values");

                    columnsInUse.add(md.getName());
                    AttributeDef attrDef = new AttributeDef();

                    String newName = convertColumnName(md.getName());
                    attrDef.setFieldName(newName);
                    log.debug("mapping[" + newName + "][" + md.getName() + "]");

                    // newNameToOldNameMap.put(newName, md.getName());

                    short dataType = -1;
                    if (typeMap != null) {
                        Short type = typeMap.get(md.getName());
                        if (type != null) {
                            dataType = type;
                        }
                    }

                    if (dataType == -1) {
                        dataType = getDataType(md.getName(), md.getType()).getType();
                    }

                    attrDef.setDataType(dataType);
                    attrDef.setDiscipline(discipline);
                    attrDef.setTableType(GenericDBConversion.TableType.CollectionObject.getType());
                    attrDef.setTimestampCreated(now);

                    attrDefs.put(md.getName(), attrDef);

                    try {
                        HibernateUtil.beginTransaction();
                        localSession.save(attrDef);
                        HibernateUtil.commitTransaction();

                    } catch (Exception e) {
                        log.error("******* " + e);
                        HibernateUtil.rollbackTransaction();
                        throw new RuntimeException(e);
                    }

                }
                rs.close();
            }
        } // for
        log.info("Total Number of Attrs: " + totalCount);

        // Now that we know which columns are being used we can start the conversion process

        log.info("biologicalobjectattributes columns in use: " + columnsInUse.size());
        if (columnsInUse.size() > 0) {
            int inx = 0;
            StringBuilder str = new StringBuilder("select BiologicalObjectAttributesID");
            for (String name : columnsInUse) {
                str.append(", ");
                str.append(name);
                inx++;
            }

            str.append(" from biologicalobjectattributes order by BiologicalObjectAttributesID");
            log.info("sql: " + str.toString());
            ResultSet rs = stmt.executeQuery(str.toString());

            int[] countVerify = new int[counts.size()];
            for (int i = 0; i < countVerify.length; i++) {
                countVerify[i] = 0;
            }
            boolean useHibernate = false;
            StringBuilder strBufInner = new StringBuilder();
            int recordCount = 0;
            while (rs.next()) {

                if (useHibernate) {
                    Criteria criteria = localSession.createCriteria(CollectionObject.class);
                    criteria.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
                    criteria.add(Restrictions.eq("collectionObjectId", rs.getInt(1)));
                    List<?> list = criteria.list();
                    if (list.size() == 0) {
                        log.error("**** Can't find the CollectionObject " + rs.getInt(1));
                    } else {
                        CollectionObject colObj = (CollectionObject) list.get(0);

                        inx = 2; // skip the first column (the ID)
                        for (String name : columnsInUse) {
                            AttributeDef attrDef = attrDefs.get(name); // the needed
                                                                       // AttributeDef by name
                            FieldMetaData md = oldFieldMetaDataMap.get(name);

                            // Create the new Collection Object Attribute
                            CollectionObjectAttr colObjAttr = new CollectionObjectAttr();
                            colObjAttr.setCollectionObject(colObj);
                            colObjAttr.setDefinition(attrDef);
                            colObjAttr.setTimestampCreated(now);

                            // String oldName = newNameToOldNameMap.get(attrDef.getFieldName());
                            // log.debug("["+attrDef.getFieldName()+"]["+oldName+"]");

                            // log.debug(inx+" "+attrTypes[attrDef.getDataType()]+"
                            // "+md.getName()+" "+md.getType());
                            setData(rs, inx, attrTypes[attrDef.getDataType()], md, colObjAttr);

                            HibernateUtil.beginTransaction();
                            localSession.save(colObjAttr);
                            HibernateUtil.commitTransaction();

                            inx++;
                            if (recordCount % 2000 == 0) {
                                log.info("CollectionObjectAttr Records Processed: " + recordCount);
                            }
                            recordCount++;
                        } // for
                          // log.info("Done - CollectionObjectAttr Records Processed:
                          // "+recordCount);
                    }
                } else {
                    inx = 2; // skip the first column (the ID)
                    for (String name : columnsInUse) {
                        AttributeDef attrDef = attrDefs.get(name); // the needed AttributeDef
                                                                   // by name
                        FieldMetaData md = oldFieldMetaDataMap.get(name);

                        if (rs.getObject(inx) != null) {
                            Integer newRecId = (Integer) getMappedId(rs.getInt(1), "biologicalobjectattributes",
                                    "BiologicalObjectAttributesID");

                            Object data = getData(rs, inx, attrTypes[attrDef.getDataType()], md);
                            boolean isStr = data instanceof String;

                            countVerify[inx - 2]++;

                            strBufInner.setLength(0);
                            strBufInner.append("INSERT INTO collectionobjectattr VALUES (");
                            strBufInner.append("NULL");// Integer.toString(recordCount));
                            strBufInner.append(",");
                            strBufInner.append(getStrValue(isStr ? data : null));
                            strBufInner.append(",");
                            strBufInner.append(getStrValue(isStr ? null : data));
                            strBufInner.append(",");
                            strBufInner.append(getStrValue(now));
                            strBufInner.append(",");
                            strBufInner.append(getStrValue(now));
                            strBufInner.append(",");
                            strBufInner.append(newRecId.intValue());
                            strBufInner.append(",");
                            strBufInner.append(getStrValue(attrDef.getAttributeDefId()));
                            strBufInner.append(")");

                            try {
                                Statement updateStatement = newDBConn.createStatement();
                                // updateStatement.executeUpdate("SET FOREIGN_KEY_CHECKS = 0");
                                removeForeignKeyConstraints(newDBConn, BasicSQLUtils.myDestinationServerType);
                                if (false) {
                                    log.debug(strBufInner.toString());
                                }
                                updateStatement.executeUpdate(strBufInner.toString());
                                updateStatement.clearBatch();
                                updateStatement.close();
                                updateStatement = null;

                            } catch (SQLException e) {
                                log.error(strBufInner.toString());
                                log.error("Count: " + recordCount);
                                e.printStackTrace();
                                log.error(e);
                                throw new RuntimeException(e);
                            }

                            if (recordCount % 2000 == 0) {
                                log.info("CollectionObjectAttr Records Processed: " + recordCount);
                            }
                            recordCount++;
                        }
                        inx++;
                    } // for
                } // if
            } // while
            rs.close();
            stmt.close();

            log.info("Count Verification:");
            for (int i = 0; i < counts.size(); i++) {
                log.info(columnsInUse.get(i) + " [" + counts.get(i) + "][" + countVerify[i] + "] "
                        + (counts.get(i) - countVerify[i]));
            }
        }

    } catch (SQLException e) {
        e.printStackTrace();
        log.error(e);
        throw new RuntimeException(e);
    }
    return true;
}

From source file: edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * Converts the old Loan records into the new schema loan or gift table,
 * depending on each record's Category flag.
 * @return true if no errors
 */
public boolean convertLoanRecords(final boolean doingGifts) {
    String newTableName = doingGifts ? "gift" : "loan";
    setIdentityInsertONCommandForSQLServer(newDBConn, newTableName, BasicSQLUtils.myDestinationServerType);

    deleteAllRecordsFromTable(newDBConn, newTableName, BasicSQLUtils.myDestinationServerType); // automatically closes the connection

    if (getNumRecords(oldDBConn, "loan") == 0) {
        return true;
    }

    String[] ignoredFields = { "SpecialConditions", "AddressOfRecordID", "DateReceived", "ReceivedComments",
            "PurposeOfLoan", "OverdueNotiSetDate", "IsFinancialResponsibility", "Version", "CreatedByAgentID",
            "IsFinancialResponsibility", "SrcTaxonomy", "SrcGeography", "CollectionMemberID", "PurposeOfGift",
            "IsFinancialResponsibility", "SpecialConditions", "ReceivedComments", "AddressOfRecordID" };

    Hashtable<String, Boolean> fieldToSkip = new Hashtable<String, Boolean>();
    for (String nm : ignoredFields) {
        fieldToSkip.put(nm, true);
    }

    IdTableMapper loanIdMapper = (IdTableMapper) idMapperMgr.get(newTableName,
            doingGifts ? "GiftID" : "LoanID");
    try {
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);
        StringBuilder str = new StringBuilder();

        List<String> oldFieldNames = getFieldNamesFromSchema(oldDBConn, "loan");

        StringBuilder sql = new StringBuilder("SELECT ");
        sql.append(buildSelectFieldList(oldFieldNames, "loan"));
        sql.append(" FROM loan WHERE loan.Category = ");
        sql.append(doingGifts ? "1" : "0");
        sql.append(" ORDER BY loan.LoanID");
        log.info(sql);

        List<FieldMetaData> newFieldMetaData = getFieldMetaDataFromSchema(newDBConn, newTableName);
        log.info("Number of Fields in New " + newTableName + " " + newFieldMetaData.size());
        String sqlStr = sql.toString();

        // Gifts live in the Sp5 loan table (Category = 1), so build a GiftID
        // mapper on the fly if one was not already registered
        if (doingGifts && loanIdMapper == null) {
            StringBuilder mapSQL = new StringBuilder("SELECT LoanID FROM loan WHERE loan.Category = ");
            mapSQL.append(doingGifts ? "1" : "0");
            mapSQL.append(" ORDER BY loan.LoanID");
            log.info(mapSQL.toString());

            BasicSQLUtils.deleteAllRecordsFromTable(oldDBConn, "gift_GiftID",
                    BasicSQLUtils.myDestinationServerType);
            loanIdMapper = new IdTableMapper(newTableName, "GiftID", mapSQL.toString(), false, false);
            idMapperMgr.addMapper(loanIdMapper);
            loanIdMapper.mapAllIdsWithSQL();
        }

        Map<String, Integer> oldNameIndex = new Hashtable<String, Integer>();
        int inx = 1;
        for (String name : oldFieldNames) {
            oldNameIndex.put(name, inx++);
        }

        // Pairs of (new column name, old column name) for columns renamed in the new schema
        Map<String, String> colNewToOldMap = doingGifts
                ? createFieldNameMap(new String[] { "GiftNumber", "LoanNumber", "GiftDate", "LoanDate",
                        "IsCurrent", "Current", "IsClosed", "Closed" })
                : createFieldNameMap(new String[] { "IsCurrent", "Current", "IsClosed", "Closed" });

        log.info(sqlStr);
        ResultSet rs = stmt.executeQuery(sqlStr);

        if (hasFrame) {
            if (rs.last()) {
                setProcess(0, rs.getRow());
                rs.first();

            } else {
                rs.close();
                stmt.close();
                return true;
            }
        } else {
            if (!rs.first()) {
                rs.close();
                stmt.close();
                return true;
            }
        }

        PartialDateConv partialDateConv = new PartialDateConv();

        int lastEditedByInx = oldNameIndex.get("LastEditedBy");

        int count = 0;
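        // Walk the old loan rows, assembling and executing one INSERT per row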
        do {
            partialDateConv.nullAll();

            str.setLength(0);
            // Build the parenthesized column list for the INSERT statement
            StringBuilder fieldList = new StringBuilder("( ");
            for (int i = 0; i < newFieldMetaData.size(); i++) {
                if (i > 0) {
                    fieldList.append(", ");
                }
                fieldList.append(newFieldMetaData.get(i).getName());
            }
            fieldList.append(")");

            str.append("INSERT INTO " + newTableName + " " + fieldList + " VALUES (");
            for (int i = 0; i < newFieldMetaData.size(); i++) {
                if (i > 0) {
                    str.append(", ");
                }

                String newFieldName = newFieldMetaData.get(i).getName();

                if (i == 0) {
                    Integer oldID = rs.getInt(1);
                    Integer newID = loanIdMapper.get(oldID);
                    if (newID != null) {
                        str.append(getStrValue(newID));
                    } else {
                        log.error(newTableName + " Old/New ID problem [" + oldID + "][" + newID + "]");
                    }

                } else if (newFieldName.equals("Version")) // User/Security changes
                {
                    str.append("0");

                } else if (newFieldName.equals("CreatedByAgentID")) // User/Security changes
                {
                    str.append(getCreatorAgentId(null));

                } else if (newFieldName.equals("ModifiedByAgentID")) // User/Security changes
                {
                    String lastEditedByStr = rs.getString(lastEditedByInx);
                    str.append(getModifiedByAgentId(lastEditedByStr));

                } else if (fieldToSkip.get(newFieldName) != null) {
                    str.append("NULL");

                } else if (newFieldName.equals("DisciplineID")) // User/Security changes
                {
                    str.append(curDisciplineID);

                } else if (newFieldName.equals("DivisionID")) // User/Security changes
                {
                    str.append(curDivisionID);

                } else {
                    Integer index = null;
                    String oldMappedColName = colNewToOldMap.get(newFieldName);
                    if (oldMappedColName != null) {
                        index = oldNameIndex.get(oldMappedColName);

                    } else {
                        index = oldNameIndex.get(newFieldName);
                        oldMappedColName = newFieldName;
                    }

                    Object data;
                    if (index == null) {
                        String msg = "convertLoanRecords - Couldn't find a matching old-schema column for new field ["
                                + newFieldName + "]";
                        log.warn(msg);
                        // stmt.close();
                        // throw new RuntimeException(msg);
                        data = null;
                    } else {
                        data = rs.getObject(index);
                    }
                    if (data != null) {
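                        // Column names containing "ID" are treated as foreign keys:
                        // translate the old key value to the new one via its ID mapper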
                        int idInx = newFieldName.lastIndexOf("ID");
                        if (idMapperMgr != null && idInx > -1) {
                            IdMapperIFace idMapper = idMapperMgr.get("loan", oldMappedColName);
                            if (idMapper != null) {
                                data = idMapper.get(rs.getInt(index));
                            } else {
                                log.error("No Map for [" + "loan" + "][" + oldMappedColName + "]");
                            }
                        }
                    }

                    // Workaround for an apparent Sp5 bug that left null values in the
                    // TimestampModified column (seen in the determination table)
                    BasicSQLUtils.fixTimestamps(newFieldName, newFieldMetaData.get(i).getType(), data, str);
                }
            }
            str.append(")");

            if (hasFrame) {
                if (count % 500 == 0) {
                    setProcess(count);
                }

            } else {
                if (count % 2000 == 0) {
                    log.info("Loan/Gifts Records: " + count);
                }
            }

            try {
                //log.debug(str.toString());
                Statement updateStatement = newDBConn.createStatement();
                // updateStatement.executeUpdate("SET FOREIGN_KEY_CHECKS = 0");
                updateStatement.executeUpdate(str.toString());
                updateStatement.close();

            } catch (SQLException e) {
                log.error("Count: " + count);
                log.error("Exception on insert: " + str.toString());
                e.printStackTrace();
                log.error(e);
                rs.close();
                stmt.close();
                throw new RuntimeException(e);
            }

            count++;
            // if (count > 10) break;
        } while (rs.next());

        if (hasFrame) {
            setProcess(count);
        } else {
            log.info("Processed Loan/Gift " + count + " records.");
        }
        rs.close();

        stmt.close();

    } catch (SQLException e) {
        e.printStackTrace();
        log.error(e);
        throw new RuntimeException(e);
    }

    // Turn IDENTITY_INSERT back off for the same table it was turned on for above
    setIdentityInsertOFFCommandForSQLServer(newDBConn, newTableName, BasicSQLUtils.myDestinationServerType);

    return true;
}
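
Note that the insert loop above opens and closes a new Statement for every row. When the INSERT strings are collected first, a single reused statement with JDBC batching usually performs better; a minimal sketch under that assumption (runBatched and insertSql are illustrative names, not part of the converter):

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;

public class BatchInsertSketch {
    /** Executes prebuilt INSERT statements in batches of 500 on one reused Statement. */
    static void runBatched(Connection conn, List<String> insertSql) throws SQLException {
        try (Statement stmt = conn.createStatement()) {
            int pending = 0;
            for (String sql : insertSql) {
                stmt.addBatch(sql);
                if (++pending == 500) { // flush a full batch
                    stmt.executeBatch();
                    pending = 0;
                }
            }
            if (pending > 0) {
                stmt.executeBatch(); // flush the remainder
            }
        }
    }
}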