Example usage for java.util TreeSet add

List of usage examples for java.util TreeSet add

Introduction

On this page you can find example usage for the java.util.TreeSet add method.

Prototype

public boolean add(E e) 

Document

Adds the specified element to this set if it is not already present.
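
The boolean return value reports whether the set changed: true when the element was inserted, false when an equal element was already there. A minimal, self-contained illustration of that contract (class and variable names are invented for the demo):

import java.util.TreeSet;

public class TreeSetAddDemo {
    public static void main(String[] args) {
        TreeSet<String> names = new TreeSet<String>();
        System.out.println(names.add("beta"));  // true  - "beta" was not present
        System.out.println(names.add("alpha")); // true
        System.out.println(names.add("beta"));  // false - duplicate, set left unchanged
        System.out.println(names);              // [alpha, beta] - iteration order is sorted
    }
}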

Usage

From source file:com.eucalyptus.objectstorage.WalrusManager.java

public ListVersionsResponseType listVersions(ListVersionsType request) throws EucalyptusCloudException {
    ListVersionsResponseType reply = (ListVersionsResponseType) request.getReply();

    EntityWrapper<BucketInfo> db = EntityWrapper.get(BucketInfo.class);

    try {
        String bucketName = request.getBucket();
        BucketInfo bucketInfo = new BucketInfo(bucketName);
        bucketInfo.setHidden(false);
        List<BucketInfo> bucketList = db.queryEscape(bucketInfo);

        Context ctx = Contexts.lookup();
        Account account = ctx.getAccount();

        int maxKeys = -1;
        String maxKeysString = request.getMaxKeys();
        if (maxKeysString != null) {
            maxKeys = Integer.parseInt(maxKeysString);
            if (maxKeys < 0) {
                throw new InvalidArgumentException("max-keys",
                        "Argument max-keys must be an integer between 0 and " + Integer.MAX_VALUE);
            }
        } else {
            maxKeys = WalrusProperties.MAX_KEYS;
        }

        if (bucketList.size() > 0) {
            BucketInfo bucket = bucketList.get(0);
            BucketLogData logData = bucket.getLoggingEnabled() ? request.getLogData() : null;

            if (ctx.hasAdministrativePrivileges() || (bucket.canRead(account.getAccountNumber())
                    && (bucket.isGlobalRead() || Lookups.checkPrivilege(PolicySpec.S3_LISTBUCKETVERSIONS,
                            PolicySpec.VENDOR_S3, PolicySpec.S3_RESOURCE_BUCKET, bucketName, null)))) {

                if (logData != null) {
                    updateLogData(bucket, logData);
                    reply.setLogData(logData);
                }

                if (Contexts.lookup().hasAdministrativePrivileges()) {
                    try {
                        if (bucketHasSnapshots(bucketName)) {
                            db.rollback();
                            throw new NoSuchBucketException(bucketName);
                        }
                    } catch (Exception e) {
                        db.rollback();
                        throw new EucalyptusCloudException(e);
                    }
                }

                String prefix = request.getPrefix();
                String keyMarker = request.getKeyMarker();
                String versionMarker = request.getVersionIdMarker();
                String delimiter = request.getDelimiter();

                reply.setName(bucketName);
                reply.setIsTruncated(false);
                reply.setPrefix(prefix);
                reply.setMaxKeys(maxKeys);
                reply.setDelimiter(delimiter);
                reply.setKeyMarker(keyMarker);
                reply.setVersionIdMarker(versionMarker);

                if (bucket.isVersioningDisabled()) {
                    db.commit();
                    return reply;
                }

                if (maxKeys == 0) {
                    // No keys requested, so just return
                    reply.setKeyEntries(new ArrayList<KeyEntry>());
                    reply.setCommonPrefixesList(new ArrayList<CommonPrefixesEntry>());
                    db.commit();
                    return reply;
                }

                final int queryStrideSize = maxKeys + 1;
                EntityWrapper<ObjectInfo> dbObject = db.recast(ObjectInfo.class);

                ObjectInfo searchObj = new ObjectInfo();
                searchObj.setBucketName(bucketName);

                Criteria objCriteria = dbObject.createCriteria(ObjectInfo.class);
                objCriteria.add(Example.create(searchObj));
                objCriteria.addOrder(Order.asc("objectKey"));
                objCriteria.addOrder(Order.desc("lastModified"));
                objCriteria.setMaxResults(queryStrideSize); // add one to, hopefully, indicate truncation in one call

                // Ensure these aren't null
                keyMarker = (Strings.isNullOrEmpty(keyMarker) ? "" : keyMarker);
                prefix = (Strings.isNullOrEmpty(prefix) ? "" : prefix);
                versionMarker = (Strings.isNullOrEmpty(versionMarker) ? "" : versionMarker);

                if (!Strings.isNullOrEmpty(keyMarker)) {
                    if (!Strings.isNullOrEmpty(versionMarker)) {
                        Date resumeDate = null;
                        try {
                            ObjectInfo markerObj = new ObjectInfo();
                            markerObj.setBucketName(bucketName);
                            markerObj.setVersionId(versionMarker);
                            markerObj.setObjectKey(keyMarker);
                            ObjectInfo lastFromPrevObj = dbObject.uniqueResultEscape(markerObj);
                            if (lastFromPrevObj != null && lastFromPrevObj.getLastModified() != null) {
                                resumeDate = lastFromPrevObj.getLastModified();
                            } else {
                                dbObject.rollback();
                                throw new NoSuchEntityException("VersionIDMarker " + versionMarker
                                        + " does not match an existing object version");
                            }
                        } catch (TransactionException e) {
                            LOG.error(e);
                            dbObject.rollback();
                            throw new EucalyptusCloudException(
                                    "Next-Key-Marker or Next-Version-Id marker invalid");
                        }
                        // The result set should be exclusive of the key with the key-marker version-id-marker pair. Look for keys that lexicographically
                        // follow the version-id-marker for a given key-marker and also the keys that follow the key-marker.
                        objCriteria.add(Restrictions.or(
                                Restrictions.and(Restrictions.eq("objectKey", keyMarker),
                                        Restrictions.lt("lastModified", resumeDate)),
                                Restrictions.gt("objectKey", keyMarker)));
                    } else {
                        // The result set should be exclusive of the key-marker. key-marker could be a common prefix from a previous response. Look for keys
                        // that lexicographically follow the key-marker and don't contain the key-marker as the prefix.
                        objCriteria.add(Restrictions.gt("objectKey", keyMarker));
                    }
                }

                if (!Strings.isNullOrEmpty(prefix)) {
                    objCriteria.add(Restrictions.like("objectKey", prefix, MatchMode.START));
                } else {
                    prefix = ""; // ensure not null has already been set in the reply, so this is safe
                }

                List<ObjectInfo> objectInfos = null;
                int resultKeyCount = 0;
                ArrayList<KeyEntry> keyEntries = new ArrayList<KeyEntry>();
                String nextKeyMarker = null;
                String nextVersionIdMarker = null;
                TreeSet<String> commonPrefixes = new TreeSet<String>();
                int firstResult = -1;

                // Iterate over result sets of size maxkeys + 1
                do {
                    // Start listing from the 0th element and increment the first element to be listed by the query size
                    objCriteria.setFirstResult(queryStrideSize * (++firstResult));
                    objectInfos = (List<ObjectInfo>) objCriteria.list();

                    if (objectInfos.size() > 0) {

                        for (ObjectInfo objectInfo : objectInfos) {
                            String objectKey = objectInfo.getObjectKey();

                            // Check if it will get aggregated as a commonprefix
                            if (!Strings.isNullOrEmpty(delimiter)) {
                                String[] parts = objectKey.substring(prefix.length()).split(delimiter);
                                if (parts.length > 1) {
                                    String prefixString = prefix + parts[0] + delimiter;
                                    if (!StringUtils.equals(prefixString, keyMarker)
                                            && !commonPrefixes.contains(prefixString)) {
                                        if (resultKeyCount == maxKeys) {
                                            // This is a new record, so we know we're truncating if this is true
                                            reply.setNextKeyMarker(nextKeyMarker);
                                            reply.setNextVersionIdMarker(nextVersionIdMarker);
                                            reply.setIsTruncated(true);
                                            resultKeyCount++;
                                            break;
                                        }

                                        commonPrefixes.add(prefixString);
                                        resultKeyCount++; // count the unique commonprefix as a single return entry

                                        // If max keys have been collected, set the next-key-marker. It might be needed for the response if the list is
                                        // truncated
                                        // If the common prefixes hit the limit set by max-keys, next-key-marker is the last common prefix and there is no
                                        // version-id-marker
                                        if (resultKeyCount == maxKeys) {
                                            nextKeyMarker = prefixString;
                                            nextVersionIdMarker = null;
                                        }
                                    }
                                    continue;
                                }
                            }

                            if (resultKeyCount == maxKeys) {
                                // This is a new (non-commonprefix) record, so we know we're truncating
                                reply.setNextKeyMarker(nextKeyMarker);
                                reply.setNextVersionIdMarker(nextVersionIdMarker);
                                reply.setIsTruncated(true);
                                resultKeyCount++;
                                break;
                            }

                            // This is either a version entry or a delete marker
                            KeyEntry keyEntry = null;
                            if (!objectInfo.getDeleted()) {
                                keyEntry = new VersionEntry();
                                ((VersionEntry) keyEntry).setEtag(objectInfo.getEtag());
                                ((VersionEntry) keyEntry).setSize(objectInfo.getSize());
                                ((VersionEntry) keyEntry).setStorageClass(objectInfo.getStorageClass());
                            } else {
                                keyEntry = new DeleteMarkerEntry();
                            }
                            keyEntry.setKey(objectKey);
                            keyEntry.setVersionId(objectInfo.getVersionId());
                            keyEntry.setIsLatest(objectInfo.getLast());
                            keyEntry.setLastModified(DateUtils.format(objectInfo.getLastModified().getTime(),
                                    DateUtils.ALT_ISO8601_DATE_PATTERN));
                            try {
                                Account ownerAccount = Accounts.lookupAccountById(objectInfo.getOwnerId());
                                keyEntry.setOwner(new CanonicalUserType(ownerAccount.getCanonicalId(),
                                        ownerAccount.getName()));
                            } catch (AuthException e) {
                                db.rollback();
                                throw new AccessDeniedException("Bucket", bucketName, logData);
                            }
                            keyEntries.add(keyEntry);

                            resultKeyCount++;

                            // If max keys have been collected, set the next- markers. They might be needed for the response if the list is truncated
                            if (resultKeyCount == maxKeys) {
                                nextKeyMarker = objectKey;
                                nextVersionIdMarker = objectInfo.getVersionId();
                            }
                        }
                    }
                    if (resultKeyCount <= maxKeys && objectInfos.size() <= maxKeys) {
                        break;
                    }
                } while (resultKeyCount <= maxKeys);

                reply.setKeyEntries(keyEntries);

                // Prefixes are already sorted, add them to the proper data structures and populate the reply
                if (!commonPrefixes.isEmpty()) {
                    ArrayList<CommonPrefixesEntry> commonPrefixesList = new ArrayList<CommonPrefixesEntry>();
                    for (String prefixEntry : commonPrefixes) {
                        commonPrefixesList.add(new CommonPrefixesEntry().add(new PrefixEntry(prefixEntry)));
                    }
                    reply.setCommonPrefixesList(commonPrefixesList);
                }
            } else {
                db.rollback();
                throw new AccessDeniedException("Bucket", bucketName, logData);
            }
        } else {
            db.rollback();
            throw new NoSuchBucketException(bucketName);
        }
        db.commit();
        return reply;
    } finally {
        if (db.isActive()) {
            db.rollback();
        }
    }
}
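
In the listing above, TreeSet.add is what keeps the commonPrefixes collection free of duplicates and in lexicographic order before it is copied into the reply. A stripped-down sketch of just that aggregation step (the keys, prefix, and class name are hypothetical, not taken from the Walrus code):

import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;

public class CommonPrefixSketch {
    public static void main(String[] args) {
        String prefix = "photos/";
        String delimiter = "/";
        List<String> objectKeys = Arrays.asList(
                "photos/2019/a.jpg", "photos/2019/b.jpg", "photos/2020/c.jpg", "photos/readme.txt");

        TreeSet<String> commonPrefixes = new TreeSet<String>();
        for (String objectKey : objectKeys) {
            String[] parts = objectKey.substring(prefix.length()).split(delimiter);
            if (parts.length > 1) {
                // Roll everything below the first delimiter into one common prefix;
                // add() silently ignores the duplicate "photos/2019/" the second time around.
                commonPrefixes.add(prefix + parts[0] + delimiter);
            }
        }
        System.out.println(commonPrefixes); // [photos/2019/, photos/2020/]
    }
}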

From source file:com.eucalyptus.walrus.WalrusFSManager.java

@Override
public ListVersionsResponseType listVersions(ListVersionsType request) throws WalrusException {
    ListVersionsResponseType reply = (ListVersionsResponseType) request.getReply();
    EntityWrapper<BucketInfo> db = EntityWrapper.get(BucketInfo.class);

    try {
        String bucketName = request.getBucket();
        BucketInfo bucketInfo = new BucketInfo(bucketName);
        bucketInfo.setHidden(false);
        List<BucketInfo> bucketList = db.queryEscape(bucketInfo);

        Context ctx = Contexts.lookup();
        Account account = ctx.getAccount();

        int maxKeys = -1;
        String maxKeysString = request.getMaxKeys();
        if (maxKeysString != null) {
            maxKeys = Integer.parseInt(maxKeysString);
            if (maxKeys < 0) {
                throw new InvalidArgumentException("max-keys",
                        "Argument max-keys must be an integer between 0 and " + Integer.MAX_VALUE);
            }
        } else {
            maxKeys = WalrusProperties.MAX_KEYS;
        }

        if (bucketList.size() > 0) {
            BucketInfo bucket = bucketList.get(0);
            BucketLogData logData = bucket.getLoggingEnabled() ? request.getLogData() : null;

            if (logData != null) {
                updateLogData(bucket, logData);
                reply.setLogData(logData);
            }

            String prefix = request.getPrefix();
            String keyMarker = request.getKeyMarker();
            String versionMarker = request.getVersionIdMarker();
            String delimiter = request.getDelimiter();

            reply.setName(bucketName);
            reply.setIsTruncated(false);
            reply.setPrefix(prefix);
            reply.setMaxKeys(maxKeys);
            reply.setDelimiter(delimiter);
            reply.setKeyMarker(keyMarker);
            reply.setVersionIdMarker(versionMarker);

            if (bucket.isVersioningDisabled()) {
                db.commit();
                return reply;
            }

            if (maxKeys == 0) {
                // No keys requested, so just return
                reply.setKeyEntries(new ArrayList<KeyEntry>());
                reply.setCommonPrefixesList(new ArrayList<CommonPrefixesEntry>());
                db.commit();
                return reply;
            }

            final int queryStrideSize = maxKeys + 1;
            EntityWrapper<ObjectInfo> dbObject = db.recast(ObjectInfo.class);

            ObjectInfo searchObj = new ObjectInfo();
            searchObj.setBucketName(bucketName);

            Criteria objCriteria = dbObject.createCriteria(ObjectInfo.class);
            objCriteria.add(Example.create(searchObj));
            objCriteria.addOrder(Order.asc("objectKey"));
            objCriteria.addOrder(Order.desc("lastModified"));
            objCriteria.setMaxResults(queryStrideSize); // add one to, hopefully, indicate truncation in one call

            // Ensure these aren't null
            keyMarker = (Strings.isNullOrEmpty(keyMarker) ? "" : keyMarker);
            prefix = (Strings.isNullOrEmpty(prefix) ? "" : prefix);
            versionMarker = (Strings.isNullOrEmpty(versionMarker) ? "" : versionMarker);

            if (!Strings.isNullOrEmpty(keyMarker)) {
                if (!Strings.isNullOrEmpty(versionMarker)) {
                    Date resumeDate = null;
                    try {
                        ObjectInfo markerObj = new ObjectInfo();
                        markerObj.setBucketName(bucketName);
                        markerObj.setVersionId(versionMarker);
                        markerObj.setObjectKey(keyMarker);
                        ObjectInfo lastFromPrevObj = dbObject.uniqueResultEscape(markerObj);
                        if (lastFromPrevObj != null && lastFromPrevObj.getLastModified() != null) {
                            resumeDate = lastFromPrevObj.getLastModified();
                        } else {
                            dbObject.rollback();
                            throw new NoSuchEntityException("VersionIDMarker " + versionMarker
                                    + " does not match an existing object version");
                        }
                    } catch (TransactionException e) {
                        LOG.error(e);
                        dbObject.rollback();
                        throw new InternalErrorException("Next-Key-Marker or Next-Version-Id marker invalid");
                    }
                    // The result set should be exclusive of the key with the key-marker version-id-marker pair. Look for keys that lexicographically
                    // follow the version-id-marker for a given key-marker and also the keys that follow the key-marker.
                    objCriteria.add(Restrictions.or(
                            Restrictions.and(Restrictions.eq("objectKey", keyMarker),
                                    Restrictions.lt("lastModified", resumeDate)),
                            Restrictions.gt("objectKey", keyMarker)));
                } else {
                    // The result set should be exclusive of the key-marker. key-marker could be a common prefix from a previous response. Look for keys
                    // that lexicographically follow the key-marker and don't contain the key-marker as the prefix.
                    objCriteria.add(Restrictions.gt("objectKey", keyMarker));
                }
            }

            if (!Strings.isNullOrEmpty(prefix)) {
                objCriteria.add(Restrictions.like("objectKey", prefix, MatchMode.START));
            } else {
                prefix = ""; // ensure not null has already been set in the reply, so this is safe
            }

            List<ObjectInfo> objectInfos = null;
            int resultKeyCount = 0;
            ArrayList<KeyEntry> keyEntries = new ArrayList<KeyEntry>();
            String nextKeyMarker = null;
            String nextVersionIdMarker = null;
            TreeSet<String> commonPrefixes = new TreeSet<String>();
            int firstResult = -1;

            // Iterate over result sets of size maxkeys + 1
            do {
                // Start listing from the 0th element and increment the first element to be listed by the query size
                objCriteria.setFirstResult(queryStrideSize * (++firstResult));
                objectInfos = (List<ObjectInfo>) objCriteria.list();

                if (objectInfos.size() > 0) {

                    for (ObjectInfo objectInfo : objectInfos) {
                        String objectKey = objectInfo.getObjectKey();

                        // Check if it will get aggregated as a commonprefix
                        if (!Strings.isNullOrEmpty(delimiter)) {
                            String[] parts = objectKey.substring(prefix.length()).split(delimiter);
                            if (parts.length > 1) {
                                String prefixString = prefix + parts[0] + delimiter;
                                if (!StringUtils.equals(prefixString, keyMarker)
                                        && !commonPrefixes.contains(prefixString)) {
                                    if (resultKeyCount == maxKeys) {
                                        // This is a new record, so we know we're truncating if this is true
                                        reply.setNextKeyMarker(nextKeyMarker);
                                        reply.setNextVersionIdMarker(nextVersionIdMarker);
                                        reply.setIsTruncated(true);
                                        resultKeyCount++;
                                        break;
                                    }

                                    commonPrefixes.add(prefixString);
                                    resultKeyCount++; // count the unique commonprefix as a single return entry

                                    // If max keys have been collected, set the next-key-marker. It might be needed for the response if the list is
                                    // truncated
                                    // If the common prefixes hit the limit set by max-keys, next-key-marker is the last common prefix and there is no
                                    // version-id-marker
                                    if (resultKeyCount == maxKeys) {
                                        nextKeyMarker = prefixString;
                                        nextVersionIdMarker = null;
                                    }
                                }
                                continue;
                            }
                        }

                        if (resultKeyCount == maxKeys) {
                            // This is a new (non-commonprefix) record, so we know we're truncating
                            reply.setNextKeyMarker(nextKeyMarker);
                            reply.setNextVersionIdMarker(nextVersionIdMarker);
                            reply.setIsTruncated(true);
                            resultKeyCount++;
                            break;
                        }

                        // This is either a version entry or a delete marker
                        KeyEntry keyEntry = null;
                        if (!objectInfo.getDeleted()) {
                            keyEntry = new VersionEntry();
                            ((VersionEntry) keyEntry).setEtag(objectInfo.getEtag());
                            ((VersionEntry) keyEntry).setSize(objectInfo.getSize());
                            ((VersionEntry) keyEntry).setStorageClass(objectInfo.getStorageClass());
                        } else {
                            keyEntry = new DeleteMarkerEntry();
                        }
                        keyEntry.setKey(objectKey);
                        keyEntry.setVersionId(objectInfo.getVersionId());
                        keyEntry.setIsLatest(objectInfo.getLast());
                        keyEntry.setLastModified(
                                DateFormatter.dateToListingFormattedString(objectInfo.getLastModified()));
                        try {
                            Account ownerAccount = Accounts.lookupAccountById(objectInfo.getOwnerId());
                            keyEntry.setOwner(
                                    new CanonicalUser(ownerAccount.getCanonicalId(), ownerAccount.getName()));
                        } catch (AuthException e) {
                            db.rollback();
                            throw new AccessDeniedException("Bucket", bucketName, logData);
                        }
                        keyEntries.add(keyEntry);

                        resultKeyCount++;

                        // If max keys have been collected, set the next- markers. They might be needed for the response if the list is truncated
                        if (resultKeyCount == maxKeys) {
                            nextKeyMarker = objectKey;
                            nextVersionIdMarker = objectInfo.getVersionId();
                        }
                    }
                }
                if (resultKeyCount <= maxKeys && objectInfos.size() <= maxKeys) {
                    break;
                }
            } while (resultKeyCount <= maxKeys);

            reply.setKeyEntries(keyEntries);

            // Prefixes are already sorted, add them to the proper data structures and populate the reply
            if (!commonPrefixes.isEmpty()) {
                ArrayList<CommonPrefixesEntry> commonPrefixesList = new ArrayList<CommonPrefixesEntry>();
                for (String prefixEntry : commonPrefixes) {
                    commonPrefixesList.add(new CommonPrefixesEntry(prefixEntry));
                }
                reply.setCommonPrefixesList(commonPrefixesList);
            }
        } else {
            db.rollback();
            throw new NoSuchBucketException(bucketName);
        }
        db.commit();
        return reply;
    } finally {
        if (db.isActive()) {
            db.rollback();
        }
    }
}

From source file:library.Form_Library.java

private void fillData(File file) {
    Workbook workbook = null;
    try {
        try {
            workbook = Workbook.getWorkbook(file);
        } catch (IOException ex) {
            Logger.getLogger(Form_Library.class.getName()).log(Level.SEVERE, null, ex);
        }
        Sheet sheet = workbook.getSheet(0);

        ArrayList<Integer> trung = new ArrayList<Integer>();
        TreeSet<Integer> kotrung = new TreeSet<Integer>();
        for (int j = 1; j < sheet.getRows(); j++) {
            if (!sheet.getCell(0, j).getContents().startsWith("BK-")) {
                break;
            }
            for (int k = 0; k < dmBook.getRowCount(); k++) {
                if (sheet.getCell(0, j).getContents().equalsIgnoreCase(dmBook.getValueAt(k, 0).toString())) {
                    dmBook.setValueAt(Integer.parseInt((Integer.parseInt(dmBook.getValueAt(k, 7).toString())
                            + Integer.parseInt(sheet.getCell(7, j).getContents())) + ""), k, 7);
                    BookList.updateQuantity(Integer.parseInt(dmBook.getValueAt(k, 7).toString()),
                            dmBook.getValueAt(k, 0).toString());
                    trung.add(j);
                    System.out.println("====" + j);
                    break;
                } else {
                    System.out.println("====" + j);
                    kotrung.add(j);
                }
            }

        }
        kotrung.removeAll(trung);
        for (Integer integer : trung) {
            System.out.println(integer);
        }
        for (Integer integer : kotrung) {
            System.out.println(integer);
        }
        for (Integer j : kotrung) {
            if (!sheet.getCell(0, j).getContents().startsWith("BK-")) {
                break;
            }
            String BookID = "";
            String bookName = "", authorID = "", publisherID = "", supplierID = "", categoryID = "", shelf = "",
                    image = "";
            int price = 0, quantity = 0, rowNum = 0, colNum = 0;
            Vector<String> d = new Vector<String>();
            for (int i = 0; i < sheet.getColumns(); i++) {
                Cell cell = sheet.getCell(i, j);

                if (i == 0) {
                    BookID += sheet.getCell(i, j).getContents();
                } else if (i == 1) {
                    bookName += sheet.getCell(i, j).getContents();
                } else if (i == 2) {
                    authorID += sheet.getCell(i, j).getContents();
                } else if (i == 3) {
                    publisherID += sheet.getCell(i, j).getContents();
                } else if (i == 4) {
                    supplierID += sheet.getCell(i, j).getContents();
                } else if (i == 5) {
                    categoryID += sheet.getCell(i, j).getContents();
                } else if (i == 6) {
                    price = Integer.parseInt(sheet.getCell(i, j).getContents());
                } else if (i == 7) {
                    quantity = Integer.parseInt(sheet.getCell(i, j).getContents());
                } else if (i == 8) {
                    shelf += sheet.getCell(i, j).getContents();
                } else if (i == 9) {
                    rowNum = Integer.parseInt(sheet.getCell(i, j).getContents());
                } else if (i == 10) {
                    colNum = Integer.parseInt(sheet.getCell(i, j).getContents());
                } else if (i == 11) {
                    image += sheet.getCell(i, j).getContents();
                }

                d.add(cell.getContents());
                System.out.println(cell.getContents());
            }
            Book book = new Book(BookID, bookName, authorID, publisherID, supplierID, categoryID, price,
                    quantity, shelf, rowNum, colNum, image);
            BookList.add(book);
            System.out.println(d.get(0));
            d.add("\n");
            dmBook.addRow(d);
        }

    } catch (BiffException e) {
        e.printStackTrace();
    }
}
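
Here TreeSet<Integer> keeps the spreadsheet row indices unique and sorted while the inner loop adds the same row repeatedly, and removeAll then discards the rows that matched an existing book. A minimal sketch of that bookkeeping with invented row numbers (not the actual spreadsheet data):

import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;

public class RowIndexSketch {
    public static void main(String[] args) {
        List<Integer> matchedRows = new ArrayList<Integer>(); // rows that already exist ("trung")
        TreeSet<Integer> newRows = new TreeSet<Integer>();    // candidate new rows ("kotrung")

        // Simulate scanning rows 1..4 against an existing catalog: rows 2 and 4 match.
        for (int row = 1; row <= 4; row++) {
            boolean matches = (row % 2 == 0);
            if (matches) {
                matchedRows.add(row);
            }
            newRows.add(row); // duplicates from repeated inner-loop passes collapse automatically
        }

        newRows.removeAll(matchedRows);
        System.out.println(newRows); // [1, 3] - only the genuinely new rows, in ascending order
    }
}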

From source file:com.microsoft.tfs.core.clients.versioncontrol.VersionControlClient.java

/**
 * For the passed array of server paths, work out the list of Team Projects
 * associated with those items then query the server for the Checkin Note
 * Field Definitions for those projects.
 *
 * @param serverPaths
 *        String array containing paths to server items for which the calling
 *        method requires the checkin note field definitions.
 * @return SortedSet of unique CheckinNoteFieldDefinition objects in
 *         display order.
 */
public final SortedSet<CheckinNoteFieldDefinition> queryCheckinNoteFieldDefinitionsForServerPaths(
        final String[] serverPaths) {
    Check.notNullOrEmpty(serverPaths, "serverPaths"); //$NON-NLS-1$

    // Calculate unique list of Team Projects from serverPaths.
    final String[] projects = ServerPath.getTeamProjects(serverPaths);

    final CheckinNoteFieldDefinition[] definitions = getWebServiceLayer().queryCheckinNoteDefinition(projects);

    final TreeSet<CheckinNoteFieldDefinition> unique = new TreeSet<CheckinNoteFieldDefinition>();

    if (definitions != null) {
        for (final CheckinNoteFieldDefinition definition : definitions) {
            unique.add(definition);
        }
    }

    return unique;
}
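
TreeSet.add decides whether an element is "already present" via compareTo (or the Comparator supplied at construction), so this works because CheckinNoteFieldDefinition presumably defines its own ordering; definitions that compare equal are collapsed into a single entry. The same dedupe-and-sort idiom with plain strings (class and method names are made up):

import java.util.SortedSet;
import java.util.TreeSet;

public class UniqueSortedSketch {
    public static SortedSet<String> uniqueSorted(String[] values) {
        TreeSet<String> unique = new TreeSet<String>();
        if (values != null) {
            for (String value : values) {
                unique.add(value); // add() skips values that compare equal to an existing element
            }
        }
        return unique;
    }

    public static void main(String[] args) {
        System.out.println(uniqueSorted(new String[] { "ProjB", "ProjA", "ProjB" })); // [ProjA, ProjB]
    }
}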

From source file:com.knowgate.dfs.FileSystem.java

/**
 * Download an HTML page and all its referenced files into a ZIP
 * @param sBasePath String Base path for page and its referenced files
 * @param sFilePath String File path from sBasePath
 * @param oOutStrm OutputStream where ZIP is written
 * @param sDefaultEncoding Character encoding of file to be downloaded
 * @throws IOException
 * @since 7.0
 */
public void downloadhtmlpage(String sBasePath, String sFilePath, OutputStream oOutStrm, String sDefaultEncoding)
        throws IOException {

    if (DebugFile.trace) {
        DebugFile.writeln("Begin FileSystem.downloadhtmlpage(" + sBasePath + "," + sFilePath
                + ",[OutputStream]," + sDefaultEncoding + ")");
        DebugFile.incIdent();
    }

    String sEncoding = sDefaultEncoding;
    String sBaseHref = "";
    boolean bAutoDetectEncoding = (sDefaultEncoding == null);
    TreeSet<String> oFiles = new TreeSet<String>();
    TreeSet<String> oEntries = new TreeSet<String>();
    Perl5Matcher oMatcher = new Perl5Matcher();
    Perl5Matcher oReplacer = new Perl5Matcher();
    Perl5Compiler oCompiler = new Perl5Compiler();

    if (sDefaultEncoding == null)
        sDefaultEncoding = "ASCII";

    try {
        String sHtml = readfilestr(sBasePath + sFilePath, sDefaultEncoding);

        if (null == sHtml) {
            if (DebugFile.trace) {
                DebugFile.writeln("Could not read file " + sBasePath + sFilePath);
                DebugFile.decIdent();
            }
            // Fail regardless of tracing; otherwise a null page would reach the code below
            throw new IOException("Could not read file " + sBasePath + sFilePath);
        }

        if (DebugFile.trace) {
            DebugFile.writeln(
                    String.valueOf(sHtml.length()) + " characters read from file " + sBasePath + sFilePath);
        }

        if (bAutoDetectEncoding) {
            if (oMatcher.contains(sHtml, oCompiler.compile(
                    "<meta\\x20+http-equiv=(\"|')?Content-Type(\"|')?\\x20+content=(\"|')?text/html;\\x20+charset=(\\w|-){3,32}(\"|')?>",
                    Perl5Compiler.CASE_INSENSITIVE_MASK))) {
                if (DebugFile.trace)
                    DebugFile.writeln("<meta http-equiv> tag found");
                String sHttpEquiv = oMatcher.getMatch().toString();
                int iCharset = Gadgets.indexOfIgnoreCase(sHttpEquiv, "charset=");
                if (iCharset > 0) {
                    int iQuoute = sHttpEquiv.indexOf('"', iCharset);
                    if (iQuoute < 0)
                        iQuoute = sHttpEquiv.indexOf((char) 39, iCharset);
                    if (iQuoute < 0) {
                        bAutoDetectEncoding = true;
                    } else {
                        sEncoding = sHttpEquiv.substring(iCharset + 8, iQuoute);
                        if (DebugFile.trace)
                            DebugFile.writeln("setting charset encoding to " + sEncoding);
                        bAutoDetectEncoding = false;
                        try {
                            byte[] aTest = new String("Test").getBytes(sEncoding);
                        } catch (UnsupportedEncodingException uex) {
                            bAutoDetectEncoding = true;
                        }
                    }
                } else {
                    bAutoDetectEncoding = true;
                }
            } else {
                bAutoDetectEncoding = true;
            }
        }

        if (bAutoDetectEncoding) {
            if (DebugFile.trace)
                DebugFile.writeln("Autodetecting encoding");
            ByteArrayInputStream oHtmlStrm = new ByteArrayInputStream(sHtml.getBytes(sDefaultEncoding));
            sEncoding = new CharacterSetDetector().detect(oHtmlStrm, sDefaultEncoding);
            oHtmlStrm.close();
            if (DebugFile.trace)
                DebugFile.writeln("Encoding set to " + sEncoding);
        }

        Pattern oPattern = oCompiler.compile("<base(\\x20)+href=(\"|')?([^'\"\\r\\n]+)(\"|')?(\\x20)*/?>",
                Perl5Compiler.CASE_INSENSITIVE_MASK);
        if (oMatcher.contains(sHtml, oPattern)) {
            sBaseHref = Gadgets.chomp(oMatcher.getMatch().group(3), "/");
            if (DebugFile.trace)
                DebugFile.writeln("<base href=" + sBaseHref + ">");
        }

        PatternMatcherInput oMatchInput = new PatternMatcherInput(sHtml);
        oPattern = oCompiler.compile(
                "\\x20(src=|background=|background-image:url\\x28)(\"|')?([^'\"\\r\\n]+)(\"|')?(\\x20|\\x29|/|>)",
                Perl5Compiler.CASE_INSENSITIVE_MASK);
        StringSubstitution oSrcSubs = new StringSubstitution();
        int nMatches = 0;
        while (oMatcher.contains(oMatchInput, oPattern)) {
            nMatches++;
            String sMatch = oMatcher.getMatch().toString();
            String sAttr = oMatcher.getMatch().group(1);
            String sQuo = oMatcher.getMatch().group(2);
            if (sQuo == null)
                sQuo = "";
            String sSrc = oMatcher.getMatch().group(3);
            if (DebugFile.trace)
                DebugFile.writeln("Source file found at " + sSrc);
            String sEnd = oMatcher.getMatch().group(5);
            if (!oFiles.contains(sSrc))
                oFiles.add(sSrc);
            String sFilename = sSrc.substring(sSrc.replace('\\', '/').lastIndexOf('/') + 1);
            if (DebugFile.trace)
                DebugFile.writeln("StringSubstitution.setSubstitution(" + sMatch + " replace with "
                        + sMatch.substring(0, sAttr.length() + 1) + sQuo + sFilename + sQuo + sEnd + ")");
            oSrcSubs.setSubstitution(sMatch.substring(0, sAttr.length() + 1) + sQuo + sFilename + sQuo + sEnd);
            sHtml = Util.substitute(oReplacer, oCompiler.compile(sMatch), oSrcSubs, sHtml, Util.SUBSTITUTE_ALL);
        } //wend

        oMatchInput = new PatternMatcherInput(sHtml);
        oPattern = oCompiler.compile(
                "<link\\x20+(rel=(\"|')?stylesheet(\"|')?\\x20+)?(type=(\"|')?text/css(\"|')?\\x20+)?href=(\"|')?([^'\"\\r\\n]+)(\"|')?");
        while (oMatcher.contains(oMatchInput, oPattern)) {
            nMatches++;
            String sMatch = oMatcher.getMatch().toString();
            String sSrc = oMatcher.getMatch().group(8);
            String sFilename = sSrc.substring(sSrc.replace('\\', '/').lastIndexOf('/') + 1);
            if (!oFiles.contains(sSrc))
                oFiles.add(sSrc);
            if (DebugFile.trace)
                DebugFile.writeln("StringSubstitution.setSubstitution(" + sMatch + " replace with "
                        + Gadgets.replace(sMatch, sSrc, sFilename) + ")");
            oSrcSubs.setSubstitution(Gadgets.replace(sMatch, sSrc, sFilename));
            sHtml = Util.substitute(oReplacer, oCompiler.compile(sMatch), oSrcSubs, sHtml);
        } // wend     

        if (DebugFile.trace) {
            DebugFile.writeln(String.valueOf(nMatches) + " matches found");
            DebugFile.write("\n" + sHtml + "\n");
        }

        ZipOutputStream oZOut = new ZipOutputStream(oOutStrm);
        String sLocalName = sFilePath.substring(sFilePath.replace('\\', '/').lastIndexOf('/') + 1);
        int iDot = sLocalName.lastIndexOf('.');
        if (iDot > 0)
            sLocalName = Gadgets.ASCIIEncode(sLocalName.substring(0, iDot)).toLowerCase() + ".html";
        else
            sLocalName = Gadgets.ASCIIEncode(sLocalName).toLowerCase();
        oEntries.add(sLocalName);
        if (DebugFile.trace)
            DebugFile.writeln("Putting entry " + sLocalName + " into ZIP");
        oZOut.putNextEntry(new ZipEntry(sLocalName));
        StringBufferInputStream oHtml = new StringBufferInputStream(sHtml);
        new StreamPipe().between(oHtml, oZOut);
        oHtml.close();
        oZOut.closeEntry();

        for (String sName : oFiles) {
            String sZipEntryName = sName.substring(sName.replace('\\', '/').lastIndexOf('/') + 1);
            if (!oEntries.contains(sZipEntryName)) {
                oEntries.add(sZipEntryName);
                if (DebugFile.trace)
                    DebugFile.writeln("Putting entry " + sZipEntryName + " into ZIP");
                oZOut.putNextEntry(new ZipEntry(sZipEntryName));
                if (sName.startsWith("http://") || sName.startsWith("https://") || sName.startsWith("file://")
                        || sBaseHref.length() > 0) {
                    try {
                        new StreamPipe().between(new ByteArrayInputStream(readfilebin(sBaseHref + sName)),
                                oZOut);
                    } catch (IOException ioe) {
                        if (DebugFile.trace) {
                            DebugFile.decIdent();
                            DebugFile.writeln("Could not download file " + sName);
                        }
                    }
                } else {
                    try {
                        byte[] aFile = readfilebin(
                                sBasePath + (sName.startsWith("/") ? sName.substring(1) : sName));
                        if (null != aFile) {
                            if (aFile.length > 0)
                                new StreamPipe().between(new ByteArrayInputStream(aFile), oZOut);
                        } else {
                            DebugFile.writeln("Could not find file " + sBasePath
                                    + (sName.startsWith("/") ? sName.substring(1) : sName));
                        }
                    } catch (IOException ioe) {
                        if (DebugFile.trace) {
                            DebugFile.decIdent();
                            DebugFile.writeln("Could not download file " + sBasePath
                                    + (sName.startsWith("/") ? sName.substring(1) : sName));
                        }
                    }
                }
                oZOut.closeEntry();
            } // fi (sName!=sLocalName)
        } // next
        oZOut.close();

    } catch (MalformedPatternException mpe) {

    } catch (FTPException ftpe) {

    }

    if (DebugFile.trace) {
        DebugFile.decIdent();
        DebugFile.writeln("End FileSystem.downloadhtmlpage()");
    }
}
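
The loop above guards each ZIP entry with oEntries.contains(...) before calling oEntries.add(...). Since add() itself returns false for an element that is already present, the same dedupe can be expressed with the return value alone; a small sketch of that variant (the file names are invented):

import java.util.TreeSet;

public class ZipEntryNameDedup {
    public static void main(String[] args) {
        TreeSet<String> entryNames = new TreeSet<String>();
        String[] referenced = { "logo.png", "css/site.css", "img/logo.png" };

        for (String path : referenced) {
            String entryName = path.substring(path.replace('\\', '/').lastIndexOf('/') + 1);
            // add() returns false when the name is already taken, so no separate contains() check is needed
            if (entryNames.add(entryName)) {
                System.out.println("would add ZIP entry: " + entryName);
            } else {
                System.out.println("skipping duplicate entry name: " + entryName);
            }
        }
    }
}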

From source file:edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * @param treeDef
 * @throws SQLException
 */
public void convertLithoStratCustom(final LithoStratTreeDef treeDef, final LithoStrat earth,
        final TableWriter tblWriter, final String srcTableName, final boolean doMapGTPIds) throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;
    String s = "";
    try {
        // get a Hibernate session for saving the new records
        Session localSession = HibernateUtil.getCurrentSession();
        HibernateUtil.beginTransaction();

        int count = BasicSQLUtils.getCountAsInt(oldDBConn, "SELECT COUNT(*) FROM " + srcTableName);
        if (count < 1)
            return;

        if (hasFrame) {
            setProcess(0, count);
        }

        // create an ID mapper for the geography table (mainly for use in converting localities)
        IdHashMapper lithoStratIdMapper = IdMapperMgr.getInstance().addHashMapper("stratigraphy_StratigraphyID",
                true);
        if (lithoStratIdMapper == null) {
            UIRegistry.showError("The lithoStratIdMapper was null.");
            return;
        }

        IdTableMapper gtpIdMapper = IdMapperMgr.getInstance().addTableMapper("geologictimeperiod",
                "GeologicTimePeriodID", null, false);
        if (doMapGTPIds) {
            gtpIdMapper.clearRecords();
            gtpIdMapper.mapAllIds();
        }

        Hashtable<Integer, Integer> stratGTPIdHash = new Hashtable<Integer, Integer>();
        //Hashtable<Integer, Integer> newCEIdToNewStratIdHash = new Hashtable<Integer, Integer>();

        // stratigraphy2 goes here.
        IdHashMapper newCEIdToNewStratIdHash = IdMapperMgr.getInstance()
                .addHashMapper("stratigraphy_StratigraphyID_2", true);
        newCEIdToNewStratIdHash.setShowLogErrors(false);

        IdMapperIFace ceMapper = IdMapperMgr.getInstance().get("collectingevent", "CollectingEventID");
        if (ceMapper == null) {
            ceMapper = IdMapperMgr.getInstance().addTableMapper("collectingevent", "CollectingEventID", null,
                    false);
        }
        // get all of the old records
        //            String sql  = String.format("SELECT s.StratigraphyID, s.SuperGroup, s.Group, s.Formation, s.Member, s.Bed, Remarks, " +
        //                                      "Text1, Text2, Number1, Number2, YesNo1, YesNo2, GeologicTimePeriodID FROM %s s " +
        //                                       "WHERE s.SuperGroup IS NOT NULL OR s.Group IS NOT NULL OR s.Formation IS NOT NULL OR " +
        //                                       "s.Member IS NOT NULL OR s.Bed IS NOT NULL ORDER BY s.StratigraphyID", srcTableName);
        String sql = String.format(
                "SELECT s.StratigraphyID, s.SuperGroup, s.Group, s.Formation, s.Member, s.Bed, Remarks, "
                        + "Text1, Text2, Number1, Number2, YesNo1, YesNo2, GeologicTimePeriodID FROM %s s "
                        + "ORDER BY s.StratigraphyID",
                srcTableName);

        stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);
        rs = stmt.executeQuery(sql);

        int stratsWithNoGTP = 0;
        int stratsWithNoMappedGTP = 0;
        int missingCEMapping = 0;

        int lithoCnt = 0;

        int counter = 0;
        // for each old record, convert the record
        while (rs.next()) {
            if (counter % 500 == 0) {
                if (hasFrame) {
                    setProcess(counter);

                } else {
                    log.info("Converted " + counter + " Stratigraphy records");
                }
            }

            // grab the important data fields from the old record
            int oldStratId = rs.getInt(1); // This is a one-to-one with CollectingEvent
            String superGroup = rs.getString(2);
            String lithoGroup = rs.getString(3);
            String formation = rs.getString(4);
            String member = rs.getString(5);
            String bed = rs.getString(6);
            String remarks = escapeStringLiterals(rs.getString(7));
            String text1 = escapeStringLiterals(rs.getString(8));
            String text2 = escapeStringLiterals(rs.getString(9));
            Double number1 = rs.getObject(10) != null ? rs.getDouble(10) : null;
            Double number2 = rs.getObject(11) != null ? rs.getDouble(11) : null;
            Boolean yesNo1 = rs.getObject(12) != null ? rs.getBoolean(12) : null;
            Boolean yesNo2 = rs.getObject(13) != null ? rs.getBoolean(13) : null;
            Integer oldGTPId = rs.getObject(14) != null ? rs.getInt(14) : null;

            // Check to see if there is any Litho information OR an GTP Id
            // If both are missing then skip the record.
            boolean hasLithoFields = isNotEmpty(superGroup) || isNotEmpty(lithoGroup) || isNotEmpty(formation)
                    || isNotEmpty(member);
            if (!hasLithoFields && oldGTPId == null) {
                continue;
            }

            Integer gtpId = null;
            if (doMapGTPIds) {
                if (oldGTPId != null) {
                    gtpId = oldGTPId;
                }
            } else {
                gtpId = oldStratId;
            }

            if (gtpId != null) {
                Integer mappedGTPId = gtpIdMapper.get(gtpId);
                if (mappedGTPId == null) {
                    // Log the original (pre-mapping) GTP id; the mapped value would print as null here
                    tblWriter.logError("Old GTPID[" + gtpId
                            + "] in the Strat record could not be mapped for Old StratID[" + oldStratId + "]");
                    stratsWithNoMappedGTP++;
                }
                gtpId = mappedGTPId;
            } else {
                stratsWithNoGTP++;
            }

            // There may not be any Litho information to add to the LithoStrat tree, 
            // but it did have GTP Information if we got here
            if (hasLithoFields) {
                // create a new Geography object from the old data
                LithoStrat[] newStrats = convertOldStratRecord(superGroup, lithoGroup, formation, member, bed,
                        remarks, text1, text2, number1, number2, yesNo1, yesNo2, earth, localSession);

                LithoStrat newStrat = getLastLithoStrat(newStrats);
                counter++;
                lithoCnt += newStrats.length;

                // Map Old LithoStrat ID to the new Tree Id
                //System.out.println(oldStratId + " " + newStrat.getLithoStratId());
                if (newStrat != null) {
                    lithoStratIdMapper.put(oldStratId, newStrat.getLithoStratId());

                    // Convert Old CEId (StratID) to new CEId, then map the new CEId -> new StratId
                    Integer newCEId = ceMapper.get(oldStratId);
                    if (newCEId != null) {
                        newCEIdToNewStratIdHash.put(newCEId, newStrat.getLithoStratId());
                    } else {
                        String msg = String.format(
                                "No CE mapping for Old StratId %d, when they are a one-to-one.", oldStratId);
                        tblWriter.logError(msg);
                        log.error(msg);
                        missingCEMapping++;
                    }

                    // Map the New StratId to the new GTP Id
                    if (gtpId != null && stratGTPIdHash.get(newStrat.getLithoStratId()) == null) {
                        stratGTPIdHash.put(newStrat.getLithoStratId(), gtpId); // new ID to new ID
                    }
                } else {
                    String msg = String.format("Strat Fields were all null for oldID", oldStratId);
                    tblWriter.logError(msg);
                    log.error(msg);
                    missingCEMapping++;
                }
            }
        }
        stmt.close();

        System.out.println("lithoCnt: " + lithoCnt);

        if (hasFrame) {
            setProcess(counter);

        } else {
            log.info("Converted " + counter + " Stratigraphy records");
        }

        TreeHelper.fixFullnameForNodeAndDescendants(earth);
        earth.setNodeNumber(1);
        fixNodeNumbersFromRoot(earth);

        HibernateUtil.commitTransaction();
        log.info("Converted " + counter + " Stratigraphy records");

        rs.close();

        Statement updateStatement = newDBConn.createStatement();

        //Hashtable<Integer, Integer> ceToPCHash = new Hashtable<Integer, Integer>();

        int ceCnt = BasicSQLUtils.getCountAsInt(oldDBConn,
                "SELECT Count(CollectingEventID) FROM collectingevent");
        int stratCnt = BasicSQLUtils.getCountAsInt(oldDBConn,
                String.format("SELECT Count(CollectingEventID) FROM collectingevent "
                        + "INNER JOIN %s ON CollectingEventID = StratigraphyID", srcTableName));

        String msg = String.format("There are %d CE->Strat and %d CEs. The diff is %d", stratCnt, ceCnt,
                (ceCnt - stratCnt));
        tblWriter.log(msg);
        log.debug(msg);

        // Create a PaleoContext for each ColObj
        stmt = newDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);

        int processCnt = BasicSQLUtils
                .getCountAsInt("SELECT COUNT(*) FROM collectionobject WHERE CollectingEventID IS NOT NULL");
        if (frame != null) {
            frame.setDesc("Converting PaleoContext...");
            frame.setProcess(0, processCnt);
        }

        TreeSet<Integer> missingStratIds = new TreeSet<Integer>();

        int missingStrat = 0;
        int missingGTP = 0;
        int coUpdateCnt = 0;
        int cnt = 0;
        sql = "SELECT CollectionObjectID, CollectingEventID FROM collectionobject WHERE CollectingEventID IS NOT NULL ORDER BY CollectionObjectID";
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            int coId = rs.getInt(1); // New CO Id
            Integer ceId = rs.getInt(2); // New CE Id

            // Use the new CE ID to get the new Strat Id
            Integer newLithoId = newCEIdToNewStratIdHash.get(ceId);
            Integer gtpId = null;

            if (newLithoId == null) {
                missingStrat++;
                missingStratIds.add(ceId);

                Integer oldStratID = ceMapper.reverseGet(ceId);
                if (oldStratID != null) {
                    sql = "SELECT GeologicTimePeriodID FROM stratigraphy WHERE StratigraphyID = " + oldStratID;
                    Integer oldGTPId = BasicSQLUtils.getCount(oldDBConn, sql);
                    if (oldGTPId != null) {
                        gtpId = gtpIdMapper.get(oldGTPId);
                    }
                }
                if (gtpId == null)
                    continue;
            }

            // Use the new StratID to get the new GTP Id (ChronosStratigraphy)
            if (gtpId == null) {
                gtpId = stratGTPIdHash.get(newLithoId);
                if (gtpId == null) {
                    missingGTP++;
                    if (newLithoId == null)
                        continue;
                }
            }

            try {
                String updateStr = "INSERT INTO paleocontext (TimestampCreated, TimestampModified, DisciplineID, Version, CreatedByAgentID, ModifiedByAgentID, LithoStratID, ChronosStratID) "
                        + "VALUES ('" + nowStr + "','" + nowStr + "'," + getDisciplineId() + ", 0, "
                        + getCreatorAgentId(null) + "," + getModifiedByAgentId(null) + ","
                        + (newLithoId != null ? newLithoId : "NULL") + "," + (gtpId != null ? gtpId : "NULL")
                        + ")";
                updateStatement.executeUpdate(updateStr, Statement.RETURN_GENERATED_KEYS);

                Integer paleoContextID = getInsertedId(updateStatement);
                if (paleoContextID == null) {
                    throw new RuntimeException("Couldn't get the Agent's inserted ID");
                }

                String sqlUpdate = "UPDATE collectionobject SET PaleoContextID=" + paleoContextID
                        + " WHERE CollectionObjectID = " + coId;
                updateStatement.executeUpdate(sqlUpdate);
                coUpdateCnt++;

            } catch (SQLException e) {
                e.printStackTrace();
                log.error(e);
                showError(e.getMessage());
                throw new RuntimeException(e);
            }
            processCnt++;
            if (frame != null && cnt % 100 == 0)
                frame.setProcess(cnt);

        }
        rs.close();
        stmt.close();

        if (frame != null)
            frame.setProcess(processCnt);

        msg = String.format("There are %d unmappable Strat Records and %d unmappable GTP records.",
                missingStrat, missingGTP);
        tblWriter.log(msg);
        log.debug(msg);

        msg = String.format("There are %d CO records updated.", coUpdateCnt);
        tblWriter.log(msg);
        log.debug(msg);
        updateStatement.close();

        msg = String.format("No CE mapping for Old StratId Count: %d", missingCEMapping);
        tblWriter.logError(msg);
        log.error(msg);

        msg = String.format("Strats with No GTP Count: %d", stratsWithNoGTP);
        tblWriter.logError(msg);
        log.error(msg);

        msg = String.format("Strats with missing Mapping to GTP Count: %d", stratsWithNoMappedGTP);
        tblWriter.logError(msg);
        log.error(msg);

        msg = String.format("Number of Old StratIds mapped to a new Strat ID Count: %d",
                lithoStratIdMapper.size());
        tblWriter.logError(msg);
        log.error(msg);

        StringBuilder sb = new StringBuilder();
        sb.append("Missing New Strat: ");
        if (missingStratIds.size() == 0)
            sb.append("None");

        for (Integer id : missingStratIds) {
            sb.append(String.format("%d, ", id));
        }
        tblWriter.logError(sb.toString());
        log.error(sb.toString());

    } catch (Exception ex) {
        ex.printStackTrace();
    }

    // Now in this Step we Add the PaleoContext to the Collecting Events

}
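
The missingStratIds TreeSet in this conversion exists purely for the error report at the end: repeated IDs collapse on add, and iteration is ascending, so the log line lists each ID once and in order. A tiny sketch of that reporting pattern with invented IDs:

import java.util.TreeSet;

public class MissingIdReport {
    public static void main(String[] args) {
        TreeSet<Integer> missingIds = new TreeSet<Integer>();
        int[] observed = { 42, 7, 42, 19 }; // the same ID can be hit more than once during a scan

        for (int id : observed) {
            missingIds.add(id); // duplicates collapse, so each ID is reported exactly once
        }

        StringBuilder sb = new StringBuilder("Missing New Strat: ");
        if (missingIds.isEmpty()) {
            sb.append("None");
        }
        for (Integer id : missingIds) {
            sb.append(String.format("%d, ", id));
        }
        System.out.println(sb); // Missing New Strat: 7, 19, 42,
    }
}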

From source file:edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * @param treeDef
 * @throws SQLException
 */
public void convertLithoStratGeneral(final LithoStratTreeDef treeDef, final LithoStrat earth,
        final TableWriter tblWriter, final String srcTableName) throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;
    String s = "";
    try {
        // get a Hibernate session for saving the new records
        Session localSession = HibernateUtil.getCurrentSession();
        HibernateUtil.beginTransaction();

        int count = BasicSQLUtils.getCountAsInt(oldDBConn, "SELECT COUNT(*) FROM " + srcTableName);
        if (count < 1)
            return;

        if (hasFrame) {
            setProcess(0, count);
        }

        // create an ID mapper for the old stratigraphy table so old StratigraphyIDs can be mapped to new LithoStrat IDs
        IdHashMapper lithoStratIdMapper = IdMapperMgr.getInstance().addHashMapper("stratigraphy_StratigraphyID",
                true);
        if (lithoStratIdMapper == null) {
            UIRegistry.showError("The lithoStratIdMapper was null.");
            return;
        }

        IdMapperIFace gtpIdMapper = IdMapperMgr.getInstance().get("geologictimeperiod", "GeologicTimePeriodID");

        IdMapperIFace ceMapper = IdMapperMgr.getInstance().get("collectingevent", "CollectingEventID");
        if (ceMapper == null) {
            ceMapper = IdMapperMgr.getInstance().addTableMapper("collectingevent", "CollectingEventID", null,
                    false);
        }
        String sql = String.format(
                "SELECT s.StratigraphyID, s.SuperGroup, s.Group, s.Formation, s.Member, s.Bed, Remarks, "
                        + "Text1, Text2, Number1, Number2, YesNo1, YesNo2, GeologicTimePeriodID FROM %s s "
                        + "ORDER BY s.StratigraphyID",
                srcTableName);

        stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);
        rs = stmt.executeQuery(sql);

        Map<Integer, Pair<Integer, Integer>> stratHash = new HashMap<Integer, Pair<Integer, Integer>>();

        int stratsWithNoGTP = 0;
        int stratsWithNoMappedGTP = 0;
        int missingCEMapping = 0;

        int lithoCnt = 0;

        int counter = 0;
        // for each old record, convert the record
        while (rs.next()) {
            if (counter % 500 == 0) {
                if (hasFrame) {
                    setProcess(counter);

                } else {
                    log.info("Converted " + counter + " Stratigraphy records");
                }
            }

            // grab the important data fields from the old record
            int oldStratId = rs.getInt(1); // This is a one-to-one with CollectingEvent
            String superGroup = rs.getString(2);
            String lithoGroup = rs.getString(3);
            String formation = rs.getString(4);
            String member = rs.getString(5);
            String bed = rs.getString(6);
            String remarks = escapeStringLiterals(rs.getString(7));
            String text1 = escapeStringLiterals(rs.getString(8));
            String text2 = escapeStringLiterals(rs.getString(9));
            Double number1 = rs.getObject(10) != null ? rs.getDouble(10) : null;
            Double number2 = rs.getObject(11) != null ? rs.getDouble(11) : null;
            Boolean yesNo1 = rs.getObject(12) != null ? rs.getBoolean(12) : null;
            Boolean yesNo2 = rs.getObject(13) != null ? rs.getBoolean(13) : null;
            Integer oldGTPId = rs.getObject(14) != null ? rs.getInt(14) : null;

            // Check to see if there is any Litho information OR a GTP Id
            // If both are missing then skip the record.
            boolean hasLithoFields = isNotEmpty(superGroup) || isNotEmpty(lithoGroup) || isNotEmpty(formation)
                    || isNotEmpty(member);
            if (!hasLithoFields && oldGTPId == null) {
                continue;
            }

            Integer gtpId = null;
            if (oldGTPId != null) {
                gtpId = gtpIdMapper.get(oldGTPId);
                if (gtpId == null) {
                    tblWriter.logError("Old GTPID[" + gtpId
                            + "] in the Strat record could not be mapped for Old StratID[" + oldStratId + "]");
                    stratsWithNoMappedGTP++;
                }
            } else {
                stratsWithNoGTP++;
            }

            // There may not be any Litho information to add to the LithoStrat tree, 
            // but it did have GTP Information if we got here
            Integer lithoStratID = null;
            if (hasLithoFields) {
                // create new LithoStrat records from the old data
                LithoStrat[] newStrats = convertOldStratRecord(superGroup, lithoGroup, formation, member, bed,
                        remarks, text1, text2, number1, number2, yesNo1, yesNo2, earth, localSession);

                LithoStrat newStrat = getLastLithoStrat(newStrats);
                counter++;
                lithoCnt += newStrats.length;

                // Map Old LithoStrat ID to the new Tree Id
                //System.out.println(oldStratId + " " + newStrat.getLithoStratId());
                if (newStrat != null) {
                    lithoStratID = newStrat.getLithoStratId();
                    lithoStratIdMapper.put(oldStratId, newStrat.getLithoStratId());
                } else {
                    String msg = String.format("Strat Fields were all null for oldID", oldStratId);
                    tblWriter.logError(msg);
                    log.error(msg);
                    missingCEMapping++;
                }
            }
            if (lithoStratID != null || gtpId != null) {
                Integer newCEId = ceMapper.get(oldStratId);
                if (newCEId == null) {
                    String msg = String.format("No CE mapping for Old StratId %d, when they are a one-to-one.",
                            oldStratId);
                    tblWriter.logError(msg);
                    log.error(msg);
                    missingCEMapping++;
                } else {
                    stratHash.put(newCEId, new Pair<Integer, Integer>(gtpId, lithoStratID));
                }
            }
        }
        stmt.close();

        System.out.println("lithoCnt: " + lithoCnt);

        if (hasFrame) {
            setProcess(counter);
        } else {
            log.info("Converted " + counter + " Stratigraphy records");
        }

        TreeHelper.fixFullnameForNodeAndDescendants(earth);
        earth.setNodeNumber(1);
        fixNodeNumbersFromRoot(earth);

        HibernateUtil.commitTransaction();
        log.info("Converted " + counter + " Stratigraphy records");

        rs.close();

        Statement updateStatement = newDBConn.createStatement();

        int ceCnt = BasicSQLUtils.getCountAsInt(oldDBConn,
                "SELECT Count(CollectingEventID) FROM collectingevent");
        int stratCnt = BasicSQLUtils.getCountAsInt(oldDBConn,
                String.format("SELECT Count(CollectingEventID) FROM collectingevent "
                        + "INNER JOIN %s ON CollectingEventID = StratigraphyID", srcTableName));

        String msg = String.format("There are %d CE->Strat and %d CEs. The diff is %d", stratCnt, ceCnt,
                (ceCnt - stratCnt));
        tblWriter.log(msg);
        log.debug(msg);

        // Create a PaleoContext for each ColObj
        stmt = newDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);

        int processCnt = BasicSQLUtils
                .getCountAsInt("SELECT COUNT(*) FROM collectionobject WHERE CollectingEventID IS NOT NULL");
        if (frame != null) {
            frame.setDesc("Converting PaleoContext...");
            frame.setProcess(0, processCnt);
        }

        TreeSet<Integer> missingStratIds = new TreeSet<Integer>();

        int missingStrat = 0;
        int missingGTP = 0;
        int coUpdateCnt = 0;
        int cnt = 0;
        sql = "SELECT CollectionObjectID, CollectingEventID FROM collectionobject WHERE CollectingEventID IS NOT NULL ORDER BY CollectionObjectID";
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            int coId = rs.getInt(1); // New CO Id
            Integer ceId = rs.getInt(2); // New CE Id

            Pair<Integer, Integer> strat = stratHash.get(ceId);
            Integer newLithoId = null;
            Integer gtpId = null;
            if (strat != null) {
                gtpId = strat.getFirst();
                newLithoId = strat.getSecond();
            }

            if (newLithoId == null) {
                missingStrat++;
                missingStratIds.add(ceId);
                if (gtpId == null)
                    continue;
            }

            try {
                String updateStr = "INSERT INTO paleocontext (TimestampCreated, TimestampModified, DisciplineID, Version, CreatedByAgentID, ModifiedByAgentID, LithoStratID, ChronosStratID) "
                        + "VALUES ('" + nowStr + "','" + nowStr + "'," + getDisciplineId() + ", 0, "
                        + getCreatorAgentId(null) + "," + getModifiedByAgentId(null) + ","
                        + (newLithoId != null ? newLithoId : "NULL") + "," + (gtpId != null ? gtpId : "NULL")
                        + ")";
                updateStatement.executeUpdate(updateStr, Statement.RETURN_GENERATED_KEYS);

                Integer paleoContextID = getInsertedId(updateStatement);
                if (paleoContextID == null) {
                    throw new RuntimeException("Couldn't get the Agent's inserted ID");
                }

                String sqlUpdate = "UPDATE collectionobject SET PaleoContextID=" + paleoContextID
                        + " WHERE CollectionObjectID = " + coId;
                updateStatement.executeUpdate(sqlUpdate);
                coUpdateCnt++;
            } catch (SQLException e) {
                e.printStackTrace();
                log.error(e);
                showError(e.getMessage());
                throw new RuntimeException(e);
            }
            cnt++; // track how many rows have been processed for the progress bar
            if (frame != null && cnt % 100 == 0)
                frame.setProcess(cnt);
        }
        rs.close();
        stmt.close();

        if (frame != null)
            frame.setProcess(processCnt);

        msg = String.format("There are %d unmappable Strat Records and %d unmappable GTP records.",
                missingStrat, missingGTP);
        tblWriter.log(msg);
        log.debug(msg);

        msg = String.format("There are %d CO records updated.", coUpdateCnt);
        tblWriter.log(msg);
        log.debug(msg);
        updateStatement.close();

        msg = String.format("No CE mapping for Old StratId Count: %d", missingCEMapping);
        tblWriter.logError(msg);
        log.error(msg);

        msg = String.format("Strats with No GTP Count: %d", stratsWithNoGTP);
        tblWriter.logError(msg);
        log.error(msg);

        msg = String.format("Strats with missing Mapping to GTP Count: %d", stratsWithNoMappedGTP);
        tblWriter.logError(msg);
        log.error(msg);

        msg = String.format("Number of Old StratIds mapped to a new Strat ID Count: %d",
                lithoStratIdMapper.size());
        tblWriter.logError(msg);
        log.error(msg);

        StringBuilder sb = new StringBuilder();
        sb.append("Missing New Strat: ");
        if (missingStratIds.size() == 0)
            sb.append("None");

        for (Integer id : missingStratIds) {
            sb.append(String.format("%d, ", id));
        }
        tblWriter.logError(sb.toString());
        log.error(sb.toString());

    } catch (Exception ex) {
        ex.printStackTrace();
    }

    // Now in this Step we Add the PaleoContext to the Collecting Events

}
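
In the example above the second pass does an in-memory join: stratHash maps each new CollectingEventID to a Pair of (geologic time period id, litho strat id), and every collection object looks up its CollectingEventID before a paleocontext row is inserted. A stripped-down sketch of that lookup, assuming a minimal Pair stand-in and made-up ids:

import java.util.HashMap;
import java.util.Map;

public class StratLookupSketch {
    // minimal stand-in for the Pair class used in the example above
    static class Pair<F, S> {
        final F first;
        final S second;
        Pair(F first, S second) { this.first = first; this.second = second; }
    }

    public static void main(String[] args) {
        Map<Integer, Pair<Integer, Integer>> stratHash = new HashMap<Integer, Pair<Integer, Integer>>();
        stratHash.put(1001, new Pair<Integer, Integer>(55, null)); // CE 1001 has only a GTP id
        stratHash.put(1002, new Pair<Integer, Integer>(null, 77)); // CE 1002 has only a litho strat id

        int[] ceIds = { 1001, 1002, 1003 };
        for (int ceId : ceIds) {
            Pair<Integer, Integer> strat = stratHash.get(ceId);
            Integer gtpId = strat != null ? strat.first : null;
            Integer lithoId = strat != null ? strat.second : null;
            if (gtpId == null && lithoId == null) {
                System.out.println("CE " + ceId + ": no strat info, no paleocontext created");
                continue;
            }
            System.out.println("CE " + ceId + ": insert paleocontext with GTP=" + gtpId
                    + " LithoStrat=" + lithoId);
        }
    }
}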

From source file:edu.ku.brc.specify.conversion.GenericDBConversion.java

/**
 * Converts all the CollectionObject and CollectionObjectCatalog Records into the new schema
 * CollectionObject table. All "logical" records are moved to the CollectionObject table and all
 * "physical" records are moved to the Preparation table.
 * @param useNumericCatNumbers whether catalog numbers are converted as zero-padded numeric values
 * @param usePrefix whether the catalog series prefix is prepended to non-numeric catalog numbers
 * @return true if no errors
 */
@SuppressWarnings("cast")
public boolean convertCollectionObjects(final boolean useNumericCatNumbers, final boolean usePrefix) {
    final String ZEROES = "000000000";

    UIFieldFormatterIFace formatter0 = UIFieldFormatterMgr.getInstance().getFormatter("CatalogNumber");
    log.debug(formatter0);

    UIFieldFormatterIFace formatter = UIFieldFormatterMgr.getInstance().getFormatter("CatalogNumberNumeric");
    log.debug(formatter);

    DisciplineType dt;
    Discipline discipline = (Discipline) AppContextMgr.getInstance().getClassObject(Discipline.class);
    if (discipline != null) {
        System.out.println("discipline.getType()[" + discipline.getType() + "]");
        dt = DisciplineType.getDiscipline(discipline.getType());
    } else {
        Vector<Object[]> list = query(newDBConn, "SELECT Type FROM discipline");
        String typeStr = (String) list.get(0)[0];
        System.out.println("typeStr[" + typeStr + "]");
        dt = DisciplineType.getDiscipline(typeStr);
    }

    Pair<Integer, Boolean> objTypePair = dispToObjTypeHash.get(dt.getDisciplineType());
    if (objTypePair == null) {
        System.out.println("objTypePair is null dt[" + dt.getName() + "][" + dt.getTitle() + "]");

        for (STD_DISCIPLINES key : dispToObjTypeHash.keySet()) {
            Pair<Integer, Boolean> p = dispToObjTypeHash.get(key);
            System.out.println("[" + key + "] [" + p.first + "][" + p.second + "]");
        }

    } else if (objTypePair.first == null) {
        System.out.println("objTypePair.first is null dt[" + dt + "]");

        for (STD_DISCIPLINES key : dispToObjTypeHash.keySet()) {
            Pair<Integer, Boolean> p = dispToObjTypeHash.get(key);
            System.out.println("[" + key + "] [" + p.first + "][" + p.second + "]");
        }

    }
    //int objTypeId  = objTypePair.first;
    //boolean isEmbedded = objTypePair.second;

    idMapperMgr.dumpKeys();
    IdHashMapper colObjTaxonMapper = (IdHashMapper) idMapperMgr.get("ColObjCatToTaxonType".toLowerCase());
    IdHashMapper colObjAttrMapper = (IdHashMapper) idMapperMgr
            .get("biologicalobjectattributes_BiologicalObjectAttributesID");
    IdHashMapper colObjMapper = (IdHashMapper) idMapperMgr
            .get("collectionobjectcatalog_CollectionObjectCatalogID");

    colObjTaxonMapper.setShowLogErrors(false); // NOTE: TURN THIS ON FOR DEBUGGING or running new Databases through it
    colObjAttrMapper.setShowLogErrors(false);

    //IdHashMapper stratMapper    = (IdHashMapper)idMapperMgr.get("stratigraphy_StratigraphyID");
    //IdHashMapper stratGTPMapper = (IdHashMapper)idMapperMgr.get("stratigraphy_GeologicTimePeriodID");

    String[] fieldsToSkip = { "ContainerID", "ContainerItemID", "AltCatalogNumber", "GUID", "ContainerOwnerID",
            "RepositoryAgreementID", "GroupPermittedToView", // this may change when converting Specify 5.x
            "CollectionObjectID", "VisibilitySetBy", "ContainerOwnerID", "InventoryDate", "ObjectCondition",
            "Notifications", "ProjectNumber", "Restrictions", "YesNo3", "YesNo4", "YesNo5", "YesNo6",
            "FieldNotebookPageID", "ColObjAttributesID", "DNASequenceID", "AppraisalID", "TotalValue",
            "Description", "SGRStatus", "OCR", "ReservedText", "Text3" };

    HashSet<String> fieldsToSkipHash = new HashSet<String>();
    for (String fName : fieldsToSkip) {
        fieldsToSkipHash.add(fName);
    }

    TableWriter tblWriter = convLogger.getWriter("convertCollectionObjects.html", "Collection Objects");

    String msg = "colObjTaxonMapper: " + colObjTaxonMapper.size();
    log.info(msg);
    tblWriter.log(msg);

    setIdentityInsertONCommandForSQLServer(newDBConn, "collectionobject",
            BasicSQLUtils.myDestinationServerType);

    deleteAllRecordsFromTable(newDBConn, "collectionobject", BasicSQLUtils.myDestinationServerType); // automatically closes the connection

    TreeSet<String> badSubNumberCatNumsSet = new TreeSet<String>();

    TimeLogger timeLogger = new TimeLogger();

    try {
        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(Integer.MIN_VALUE);
        StringBuilder str = new StringBuilder();

        List<String> oldFieldNames = new ArrayList<String>();

        StringBuilder sql = new StringBuilder("select ");
        List<String> names = getFieldNamesFromSchema(oldDBConn, "collectionobject");

        sql.append(buildSelectFieldList(names, "collectionobject"));
        sql.append(", ");
        oldFieldNames.addAll(names);

        names = getFieldNamesFromSchema(oldDBConn, "collectionobjectcatalog");
        sql.append(buildSelectFieldList(names, "collectionobjectcatalog"));
        oldFieldNames.addAll(names);

        String fromClause = " FROM collectionobject Inner Join collectionobjectcatalog ON "
                + "collectionobject.CollectionObjectID = collectionobjectcatalog.CollectionObjectCatalogID "
                + "WHERE (collectionobject.DerivedFromID IS NULL) AND collectionobjectcatalog.CollectionObjectCatalogID = ";
        sql.append(fromClause);

        log.info(sql);
        String sqlStr = sql.toString();

        List<FieldMetaData> newFieldMetaData = getFieldMetaDataFromSchema(newDBConn, "collectionobject");

        log.info("Number of Fields in New CollectionObject " + newFieldMetaData.size());

        Map<String, Integer> oldNameIndex = new Hashtable<String, Integer>();
        int inx = 1;
        log.info("---- Old Names ----");
        for (String name : oldFieldNames) {
            log.info("[" + name + "][" + inx + "]");
            oldNameIndex.put(name, inx++);
        }

        log.info("---- New Names ----");
        for (FieldMetaData fmd : newFieldMetaData) {
            log.info("[" + fmd.getName() + "]");
        }
        String tableName = "collectionobject";

        Statement newStmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        newStmt.setFetchSize(Integer.MIN_VALUE);
        ResultSet rsLooping = newStmt.executeQuery(
                "SELECT OldID, NewID FROM collectionobjectcatalog_CollectionObjectCatalogID ORDER BY OldID");

        if (hasFrame) {
            if (rsLooping.last()) {
                setProcess(0, rsLooping.getRow());
                rsLooping.first();

            } else {
                rsLooping.close();
                stmt.close();
                return true;
            }
        } else {
            if (!rsLooping.first()) {
                rsLooping.close();
                stmt.close();
                return true;
            }
        }

        int boaCnt = BasicSQLUtils.getCountAsInt(oldDBConn, "SELECT COUNT(*) FROM biologicalobjectattributes"); // ZZZ

        PartialDateConv partialDateConv = new PartialDateConv();

        Statement stmt2 = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        stmt2.setFetchSize(Integer.MIN_VALUE);

        int catNumInx = oldNameIndex.get("CatalogNumber");
        int catDateInx = oldNameIndex.get("CatalogedDate");
        int catSeriesIdInx = oldNameIndex.get("CatalogSeriesID");
        int lastEditedByInx = oldNameIndex.get("LastEditedBy");

        /*int     grpPrmtViewInx    = -1;
        Integer grpPrmtViewInxObj = oldNameIndex.get("GroupPermittedToView");
        if (grpPrmtViewInxObj != null)
        {
        grpPrmtViewInx = grpPrmtViewInxObj + 1;
        }*/

        Hashtable<Integer, CollectionInfo> oldCatSeriesIDToCollInfo = new Hashtable<Integer, CollectionInfo>();
        for (CollectionInfo ci : collectionInfoShortList) {
            if (ci.getCatSeriesId() != null) {
                oldCatSeriesIDToCollInfo.put(ci.getCatSeriesId(), ci);
            }
        }

        String insertStmtStr = null;

        /*String catIdTaxIdStrBase = "SELECT cc.CollectionObjectCatalogID, cc.CatalogSeriesID, ct.TaxonomyTypeID "
                                + "FROM collectionobjectcatalog AS cc "
                                + "Inner Join collectionobject AS co ON cc.CollectionObjectCatalogID = co.CollectionObjectID "
                                + "Inner Join collectiontaxonomytypes as ct ON co.CollectionObjectTypeID = ct.BiologicalObjectTypeID "
                                + "where cc.CollectionObjectCatalogID = ";*/

        int colObjAttrsNotMapped = 0;
        int count = 0;
        boolean skipRecord = false;
        do {
            String catSQL = sqlStr + rsLooping.getInt(1);
            //log.debug(catSQL);
            ResultSet rs = stmt.executeQuery(catSQL);
            if (!rs.next()) {
                log.error("Couldn't find CO with old  id[" + rsLooping.getInt(1) + "] " + catSQL);
                continue;
            }

            partialDateConv.nullAll();

            skipRecord = false;

            CollectionInfo collInfo = oldCatSeriesIDToCollInfo.get(rs.getInt(catSeriesIdInx));

            /*String catIdTaxIdStr = catIdTaxIdStrBase + rs.getInt(1);
            //log.info(catIdTaxIdStr);
                    
            ResultSet rs2 = stmt2.executeQuery(catIdTaxIdStr);
            if (!rs2.next())
            {
            log.info("QUERY failed to return results:\n"+catIdTaxIdStr+"\n");
            continue;
            }
            Integer catalogSeriesID = rs2.getInt(2);
            Integer taxonomyTypeID  = rs2.getInt(3);
            Integer newCatSeriesId  = collectionHash.get(catalogSeriesID + "_" + taxonomyTypeID);
            String  prefix          = prefixHash.get(catalogSeriesID + "_" + taxonomyTypeID);
            rs2.close();
                    
            if (newCatSeriesId == null)
            {
            msg = "Can't find " + catalogSeriesID + "_" + taxonomyTypeID;
            log.info(msg);
            tblWriter.logError(msg);
            continue;
            }*/

            /*if (false)
            {
            String stratGTPIdStr = "SELECT co.CollectionObjectID, ce.CollectingEventID, s.StratigraphyID, g.GeologicTimePeriodID FROM collectionobject co " +
                "LEFT JOIN collectingevent ce ON co.CollectingEventID = ce.CollectingEventID  " +
                "LEFT JOIN stratigraphy s ON ce.CollectingEventID = s.StratigraphyID  " +
                "LEFT JOIN geologictimeperiod g ON s.GeologicTimePeriodID = g.GeologicTimePeriodID  " +
                "WHERE co.CollectionObjectID  = " + rs.getInt(1);
            log.info(stratGTPIdStr);
            rs2 = stmt2.executeQuery(stratGTPIdStr);
                    
            Integer coId = null;
            Integer ceId = null;
            Integer stId = null;
            Integer gtpId = null;
            if (rs2.next())
            {
                coId = rs2.getInt(1);
                ceId = rs2.getInt(2);
                stId = rs2.getInt(3);
                gtpId = rs2.getInt(4);
            }
            rs2.close();
            }*/

            String catalogNumber = null;
            String colObjId = null;

            str.setLength(0);

            if (insertStmtStr == null) {
                StringBuffer fieldList = new StringBuffer();
                fieldList.append("( ");
                for (int i = 0; i < newFieldMetaData.size(); i++) {
                    if (i > 0) {
                        fieldList.append(", ");
                    }
                    String newFieldName = newFieldMetaData.get(i).getName();
                    fieldList.append(newFieldName + " ");
                }
                fieldList.append(")");
                insertStmtStr = "INSERT INTO collectionobject " + fieldList + "  VALUES (";
            }
            str.append(insertStmtStr);

            for (int i = 0; i < newFieldMetaData.size(); i++) {
                if (i > 0) {
                    str.append(", ");
                }

                String newFieldName = newFieldMetaData.get(i).getName();

                if (i == 0) {
                    Integer oldColObjId = rs.getInt(1);
                    Integer newColObjId = colObjMapper.get(oldColObjId);

                    if (newColObjId == null) {
                        msg = "Couldn't find new ColObj Id for old [" + oldColObjId + "]";
                        tblWriter.logError(msg);
                        showError(msg);
                        throw new RuntimeException(msg);
                    }

                    colObjId = getStrValue(newColObjId);
                    if (contains(colObjId, '.')) {
                        String msgStr = String.format("CatalogNumber '%d' contains a decimal point.", colObjId);
                        log.debug(msgStr);
                        tblWriter.logError(msgStr);
                        skipRecord = true;
                        break;
                    }
                    str.append(colObjId);

                    if (useNumericCatNumbers) {
                        catalogNumber = rs.getString(catNumInx);

                        if (catalogNumber != null) {
                            int catNumInt = (int) Math.abs(rs.getDouble(catNumInx));
                            catalogNumber = Integer.toString(catNumInt);

                            if (catalogNumber.length() > 0 && catalogNumber.length() < ZEROES.length()) {
                                catalogNumber = "\"" + ZEROES.substring(catalogNumber.length()) + catalogNumber
                                        + "\"";

                            } else if (catalogNumber.length() > ZEROES.length()) {
                                showError(
                                        "Catalog Number[" + catalogNumber + "] is too long for formatter of 9");
                            }

                        } else {
                            String mssg = "Empty catalog number.";
                            log.debug(mssg);
                            //showError(msg);
                            tblWriter.logError(mssg);
                        }

                    } else {
                        String prefix = collInfo.getCatSeriesPrefix();

                        float catNum = rs.getFloat(catNumInx);
                        catalogNumber = "\"" + (usePrefix && isNotEmpty(prefix) ? (prefix + "-") : "")
                                + String.format("%9.0f", catNum).trim() + "\"";
                    }

                    int subNumber = rs.getInt(oldNameIndex.get("SubNumber"));
                    if (subNumber < 0 || rs.wasNull()) {
                        badSubNumberCatNumsSet.add(catalogNumber);

                        skipRecord = true;
                        //msg = "Collection Object is being skipped because SubNumber is less than zero CatalogNumber["+ catalogNumber + "]";
                        //log.error(msg);
                        //tblWriter.logError(msg);
                        //showError(msg);
                        break;
                    }

                } else if (fieldsToSkipHash.contains(newFieldName)) {
                    str.append("NULL");

                } else if (newFieldName.equals("CollectionID")) // User/Security changes
                {
                    str.append(collInfo.getCollectionId());

                } else if (newFieldName.equals("Version")) // User/Security changes
                {
                    str.append("0");

                } else if (newFieldName.equals("CreatedByAgentID")) // User/Security changes
                {
                    str.append(getCreatorAgentId(null));

                } else if (newFieldName.equals("ModifiedByAgentID")) // User/Security changes
                {
                    String lastEditedByStr = rs.getString(lastEditedByInx);
                    str.append(getModifiedByAgentId(lastEditedByStr));

                } else if (newFieldName.equals("CollectionMemberID")) // User/Security changes
                {
                    str.append(collInfo.getCollectionId());

                } else if (newFieldName.equals("PaleoContextID")) {
                    str.append("NULL");// newCatSeriesId);

                } else if (newFieldName.equals("CollectionObjectAttributeID")) // User/Security changes
                {
                    Object idObj = rs.getObject(1);
                    if (idObj != null) {
                        Integer coId = rs.getInt(1);
                        Integer newId = colObjAttrMapper.get(coId);
                        if (newId != null) {
                            str.append(getStrValue(newId));
                        } else {
                            if (boaCnt > 0)
                                colObjAttrsNotMapped++;
                            str.append("NULL");
                        }
                    } else {
                        str.append("NULL");
                    }

                } else if (newFieldName.equals("CatalogedDate")) {
                    if (partialDateConv.getDateStr() == null) {
                        getPartialDate(rs.getObject(catDateInx), partialDateConv);
                    }
                    str.append(partialDateConv.getDateStr());

                } else if (newFieldName.equals("CatalogedDatePrecision")) {
                    if (partialDateConv.getDateStr() == null) {
                        getPartialDate(rs.getObject(catDateInx), partialDateConv);
                    }
                    str.append(partialDateConv.getPartial());

                } else if (newFieldName.equals("CatalogedDateVerbatim")) {
                    if (partialDateConv.getDateStr() == null) {
                        getPartialDate(rs.getObject(catDateInx), partialDateConv);
                    }
                    str.append(partialDateConv.getVerbatim());

                } else if (newFieldName.equals("Availability")) {
                    str.append("NULL");

                } else if (newFieldName.equals("CatalogNumber")) {
                    str.append(catalogNumber);

                } else if (newFieldName.equals("Visibility")) // User/Security changes
                {
                    //str.append(grpPrmtViewInx > -1 ? rs.getObject(grpPrmtViewInx) : "NULL");
                    str.append("0");

                } else if (newFieldName.equals("VisibilitySetByID")) // User/Security changes
                {
                    str.append("NULL");

                } else if (newFieldName.equals("CountAmt")) {
                    Integer index = oldNameIndex.get("Count1");
                    if (index == null) {
                        index = oldNameIndex.get("Count");
                    }
                    Object countObj = rs.getObject(index);
                    if (countObj != null) {
                        str.append(getStrValue(countObj, newFieldMetaData.get(i).getType()));
                    } else {
                        str.append("NULL");
                    }

                } else {
                    Integer index = oldNameIndex.get(newFieldName);
                    Object data;
                    if (index == null) {
                        msg = "convertCollectionObjects - Couldn't find new field name[" + newFieldName
                                + "] in old field name in index Map";
                        log.warn(msg);
                        //                            tblWriter.logError(msg);
                        //                            showError(msg);
                        data = null;
                        // for (String key : oldNameIndex.keySet())
                        // {
                        // log.info("["+key+"]["+oldNameIndex.get(key)+"]");
                        // }
                        //stmt.close();
                        //throw new RuntimeException(msg);
                    } else {

                        data = rs.getObject(index);
                    }
                    if (data != null) {
                        int idInx = newFieldName.lastIndexOf("ID");
                        if (idMapperMgr != null && idInx > -1) {
                            IdMapperIFace idMapper = idMapperMgr.get(tableName, newFieldName);
                            if (idMapper != null) {
                                Integer origValue = rs.getInt(index);
                                data = idMapper.get(origValue);
                                if (data == null) {
                                    msg = "No value [" + origValue + "] in map  [" + tableName + "]["
                                            + newFieldName + "]";
                                    log.error(msg);
                                    tblWriter.logError(msg);
                                    //showError(msg);
                                }
                            } else {
                                msg = "No Map for [" + tableName + "][" + newFieldName + "]";
                                log.error(msg);
                                tblWriter.logError(msg);
                                //showError(msg);
                            }
                        }
                    }
                    str.append(getStrValue(data, newFieldMetaData.get(i).getType()));
                }
            }

            if (!skipRecord) {
                str.append(")");
                // log.info("\n"+str.toString());
                if (hasFrame) {
                    if (count % 500 == 0) {
                        setProcess(count);
                    }
                    if (count % 5000 == 0) {
                        log.info("CollectionObject Records: " + count);
                    }

                } else {
                    if (count % 2000 == 0) {
                        log.info("CollectionObject Records: " + count);
                    }
                }

                try {
                    Statement updateStatement = newDBConn.createStatement();
                    if (BasicSQLUtils.myDestinationServerType != BasicSQLUtils.SERVERTYPE.MS_SQLServer) {
                        removeForeignKeyConstraints(newDBConn, BasicSQLUtils.myDestinationServerType);
                    }
                    // updateStatement.executeUpdate("SET FOREIGN_KEY_CHECKS = 0");
                    //if (count < 50) System.err.println(str.toString());

                    updateStatement.executeUpdate(str.toString());
                    updateStatement.clearBatch();
                    updateStatement.close();
                    updateStatement = null;

                } catch (SQLException e) {
                    log.error("Count: " + count);
                    log.error("Key: [" + colObjId + "][" + catalogNumber + "]");
                    log.error("SQL: " + str.toString());
                    e.printStackTrace();
                    log.error(e);
                    showError(e.getMessage());
                    rs.close();
                    stmt.close();
                    throw new RuntimeException(e);
                }

                count++;
            } else {
                tblWriter.logError("Skipping - CatNo:" + catalogNumber);
            }
            // if (count > 10) break;

            rs.close();

        } while (rsLooping.next());

        /*if (boaCnt > 0)
        {
        msg = "CollectionObjectAttributes not mapped: " + colObjAttrsNotMapped + " out of "+boaCnt;
        log.info(msg);
        tblWriter.logError(msg);
        }*/

        stmt2.close();

        if (hasFrame) {
            setProcess(count);
        } else {
            log.info("Processed CollectionObject " + count + " records.");
        }

        tblWriter.log(String.format("Collection Objects Processing Time: %s", timeLogger.end()));

        tblWriter.log("Processed CollectionObject " + count + " records.");
        rsLooping.close();
        newStmt.close();
        stmt.close();

        tblWriter.append(
                "<br><br><b>Catalog Numbers rejected because the SubNumber was NULL or less than Zero</b><br>");
        tblWriter.startTable();
        tblWriter.logHdr("Catalog Number");
        for (String catNum : badSubNumberCatNumsSet) {
            tblWriter.log(catNum);
        }
        tblWriter.endTable();

    } catch (SQLException e) {
        setIdentityInsertOFFCommandForSQLServer(newDBConn, "collectionobject",
                BasicSQLUtils.myDestinationServerType);
        e.printStackTrace();
        log.error(e);
        tblWriter.logError(e.getMessage());
        showError(e.getMessage());
        throw new RuntimeException(e);

    } finally {
        tblWriter.close();
    }
    setIdentityInsertOFFCommandForSQLServer(newDBConn, "collectionobject",
            BasicSQLUtils.myDestinationServerType);

    return true;
}
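
A note on the TreeSet usage in this example: badSubNumberCatNumsSet is a TreeSet&lt;String&gt;, so every rejected catalog number added during the loop is reported once, in sorted order, in the table written at the end. A minimal sketch of that pattern, with made-up catalog numbers:

import java.util.TreeSet;

public class BadCatNumReportSketch {
    public static void main(String[] args) {
        // TreeSet<String> keeps the rejected catalog numbers unique and sorted
        TreeSet<String> badSubNumberCatNumsSet = new TreeSet<String>();
        badSubNumberCatNumsSet.add("000001234");
        badSubNumberCatNumsSet.add("000000077");
        badSubNumberCatNumsSet.add("000001234"); // duplicate, ignored by add()

        System.out.println("Catalog Numbers rejected because the SubNumber was NULL or less than Zero");
        for (String catNum : badSubNumberCatNumsSet) {
            System.out.println(catNum); // 000000077, then 000001234
        }
    }
}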