Example usage for java.util HashSet iterator

Introduction

This page collects real-world usage examples for the java.util.HashSet iterator() method.

Prototype

public Iterator<E> iterator() 

Document

Returns an iterator over the elements in this set. The elements are returned in no particular order.
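
A minimal, self-contained sketch of the basic pattern the examples below share (class name and element values are illustrative):

import java.util.HashSet;
import java.util.Iterator;

public class HashSetIteratorDemo {
    public static void main(String[] args) {
        HashSet<String> names = new HashSet<String>();
        names.add("alpha");
        names.add("beta");
        names.add("gamma");

        // The iterator returns the elements in no particular order.
        Iterator<String> it = names.iterator();
        while (it.hasNext()) {
            String name = it.next();
            System.out.println(name);
            // Iterator.remove() is the only safe way to remove an element
            // from the set while iterating over it.
            if (name.startsWith("b")) {
                it.remove();
            }
        }
    }
}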

Usage

From source file:net.semanticmetadata.lire.imageanalysis.bovw.LocalFeatureHistogramBuilderKmeansPlusPlus.java

/**
 * Uses an existing index, where each and every document should have a set of local features. A number of
 * random images (numDocsForVocabulary) is selected and clustered to get a vocabulary of visual words
 * (the cluster means). For each image a histogram over the visual words is created and added to its
 * document. Pre-existing histograms are deleted, so this method can be used for re-indexing.
 *
 * @throws java.io.IOException
 */
public void index() throws IOException {
    df.setMaximumFractionDigits(3);
    // find the documents for building the vocabulary:
    HashSet<Integer> docIDs = selectVocabularyDocs();
    System.out.println("Using " + docIDs.size() + " documents to build the vocabulary.");
    KMeansPlusPlusClusterer kpp = new KMeansPlusPlusClusterer(numClusters, 15);
    // fill the KMeans object:
    LinkedList<DoublePoint> features = new LinkedList<DoublePoint>();
    // Needed for check whether the document is deleted.
    Bits liveDocs = MultiFields.getLiveDocs(reader);
    for (Iterator<Integer> iterator = docIDs.iterator(); iterator.hasNext();) {
        int nextDoc = iterator.next();
        if (reader.hasDeletions() && !liveDocs.get(nextDoc))
            continue; // if it is deleted, just ignore it.
        Document d = reader.document(nextDoc);
        //            features.clear();
        IndexableField[] fields = d.getFields(localFeatureFieldName);
        String file = d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        for (int j = 0; j < fields.length; j++) {
            LireFeature f = getFeatureInstance();
            f.setByteArrayRepresentation(fields[j].binaryValue().bytes, fields[j].binaryValue().offset,
                    fields[j].binaryValue().length);
            // copy the data over to a new array, so the feature's raw histogram isn't shared ...
            double[] feat = new double[f.getDoubleHistogram().length];
            System.arraycopy(f.getDoubleHistogram(), 0, feat, 0, feat.length);
            features.add(new DoublePoint(feat));
        }
    }
    if (features.size() < numClusters) {
        // this cannot work. You need more data points than clusters.
        throw new UnsupportedOperationException("Only " + features.size() + " features found to cluster into "
                + numClusters + " clusters. Try to use fewer clusters or more images.");
    }
    // do the clustering:
    System.out.println("Number of local features: " + df.format(features.size()));
    System.out.println("Starting clustering ...");
    List<CentroidCluster<DoublePoint>> clusterList = kpp.cluster(features);
    // TODO: Serializing clusters to a file on the disk ...
    System.out.println("Clustering finished, " + clusterList.size() + " clusters found");
    clusters = new LinkedList<double[]>();
    for (Iterator<CentroidCluster<DoublePoint>> iterator = clusterList.iterator(); iterator.hasNext();) {
        CentroidCluster<DoublePoint> centroidCluster = iterator.next();
        clusters.add(centroidCluster.getCenter().getPoint());
    }
    System.out.println("Creating histograms ...");
    int[] tmpHist = new int[numClusters];
    IndexWriter iw = LuceneUtils.createIndexWriter(((DirectoryReader) reader).directory(), true,
            LuceneUtils.AnalyzerType.WhitespaceAnalyzer, 256d);

    // careful: copy reader to RAM for faster access when reading ...
    //        reader = IndexReader.open(new RAMDirectory(reader.directory()), true);
    LireFeature f = getFeatureInstance();
    for (int i = 0; i < reader.maxDoc(); i++) {
        try {
            if (reader.hasDeletions() && !liveDocs.get(i))
                continue;
            for (int j = 0; j < tmpHist.length; j++) {
                tmpHist[j] = 0;
            }
            Document d = reader.document(i);
            IndexableField[] fields = d.getFields(localFeatureFieldName);
            // remove the fields if they are already there ...
            d.removeField(visualWordsFieldName);
            d.removeField(localFeatureHistFieldName);

            // find the appropriate cluster for each feature:
            for (int j = 0; j < fields.length; j++) {
                f.setByteArrayRepresentation(fields[j].binaryValue().bytes, fields[j].binaryValue().offset,
                        fields[j].binaryValue().length);
                tmpHist[clusterForFeature(f, clusters)]++;
            }
            //                System.out.println(Arrays.toString(tmpHist));
            d.add(new StoredField(localFeatureHistFieldName,
                    SerializationUtils.toByteArray(normalize(tmpHist))));
            quantize(tmpHist);
            d.add(new TextField(visualWordsFieldName, arrayToVisualWordString(tmpHist), Field.Store.YES));

            // remove local features to save some space if requested:
            if (DELETE_LOCAL_FEATURES) {
                d.removeFields(localFeatureFieldName);
            }
            // now write the new one. we use the identifier to update ;)
            iw.updateDocument(new Term(DocumentBuilder.FIELD_NAME_IDENTIFIER,
                    d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0]), d);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    iw.commit();
    // this one does the "old" commit(), it removes the deleted local features.
    iw.forceMerge(1);
    iw.close();
    System.out.println("Finished.");
}

From source file:subsets.GenerateGFKMatrix.java

public void part_tree(TreeMap<String, HashSet> treemap_hashset,
        TreeMap<String, JSONObject> treemap_previous_jsonobject,
        TreeMap<String, JSONObject> treemap_next_jsonobject, boolean atbottom) {
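    // Builds one JSON level of the tree: each head becomes a node whose "children"
    // are plain leaf nodes at the bottom level, or the subtrees assembled in the
    // previous pass otherwise.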
    try {

        for (String head : treemap_hashset.keySet()) {
            JSONObject json_head = new JSONObject();

            json_head.append("name", head.toString());

            HashSet hashset = treemap_hashset.get(head);
            Iterator it = hashset.iterator();

            while (it.hasNext()) {
                String leave = (String) it.next();
                JSONObject json_leave = new JSONObject();
                if (atbottom) {
                    json_leave.append("name", leave);
                    json_head.append("children", json_leave);
                } else {

                    JSONObject jsononject_leave = treemap_previous_jsonobject.get(leave);
                    if (jsononject_leave != null) {
                        json_head.append("children", jsononject_leave);
                    } else {
                        JSONObject dummy = new JSONObject();
                        dummy.append("name", leave);
                        json_head.append("children", dummy);
                    }
                }
            }
            treemap_next_jsonobject.put(head, json_head);
        }

    } catch (JSONException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

From source file:net.sourceforge.mipa.predicatedetection.lattice.sequence.SequenceWindowedLatticeChecker.java

private boolean computePredicateOri(SequenceLatticeIDNode node) {
    if (node == null) {
        return false;
    }
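    // flagInclusion: every reached state is an accept state (used for DEF predicates).
    // flagIntersection: at least one reached state is an accept state (used for POS predicates).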
    boolean flagIntersection = false;
    boolean flagInclusion = true;
    HashSet<State> reachedStates = node.getReachedStates();
    HashSet<State> acceptStates = automaton.getAcceptStates();
    Iterator<State> it = reachedStates.iterator();
    while (it.hasNext()) {
        boolean flag = false;
        State state = it.next();
        Iterator<State> iterator = acceptStates.iterator();
        while (iterator.hasNext()) {
            State acceptState = iterator.next();
            if (state.getName().equals(acceptState.getName())) {
                flag = true;
                break;
            }
        }
        if (flag == false) {
            flagInclusion = false;
        } else {
            flagIntersection = true;
        }
        if (flagInclusion == false && flagIntersection == true) {
            break;
        }
    }
    if (reachedStates.isEmpty()) {
        node.setFlagInclusion(false);
        node.setFlagIntersection(false);
    } else {
        node.setFlagInclusion(flagInclusion);
        node.setFlagIntersection(flagIntersection);
    }
    long time_t = (new Date()).getTime();
    if (type == NodeType.DEF) {
        boolean result = flagInclusion;
        if (result == true) {
            if (count == 0) {
                // to do
            }
            count++;
            return true;
        }
    } else if (type == NodeType.POS) {
        boolean result = flagIntersection;
        if (result == true) {
            if (count == 0) {
                // to do
            }
            count++;
            return true;
        }
    }
    wastedOriTime += (new Date()).getTime() - time_t;
    return false;
}

From source file:net.sourceforge.mipa.predicatedetection.lattice.sequence.SequenceWindowedLatticeChecker.java

private boolean computePredicate(SequenceLatticeIDNode node) {
    if (node == null) {
        return false;
    }
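    // Same accept-state check as in computePredicateOri above, without the timing instrumentation.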
    boolean flagIntersection = false;
    boolean flagInclusion = true;
    HashSet<State> reachedStates = node.getReachedStates();
    HashSet<State> acceptStates = automaton.getAcceptStates();
    Iterator<State> it = reachedStates.iterator();
    while (it.hasNext()) {
        boolean flag = false;
        State state = it.next();
        Iterator<State> iterator = acceptStates.iterator();
        while (iterator.hasNext()) {
            State acceptState = iterator.next();
            if (state.getName().equals(acceptState.getName())) {
                flag = true;
                break;
            }
        }
        if (flag == false) {
            flagInclusion = false;
        } else {
            flagIntersection = true;
        }
        if (flagInclusion == false && flagIntersection == true) {
            break;
        }
    }
    if (reachedStates.isEmpty()) {
        node.setFlagInclusion(false);
        node.setFlagIntersection(false);
    } else {
        node.setFlagInclusion(flagInclusion);
        node.setFlagIntersection(flagIntersection);
    }
    // long time_t = (new Date()).getTime();
    if (type == NodeType.DEF) {
        boolean result = flagInclusion;
        if (result == true) {
            /*
             * if (count == 0) { try { application.callback(predicateID,
             * String.valueOf(true)); } catch (Exception e) {
             * e.printStackTrace(); } } count++;
             */
            return true;
        }
    } else if (type == NodeType.POS) {
        boolean result = flagIntersection;
        if (result == true) {
            /*
             * if (count == 0) { try { application.callback(predicateID,
             * String.valueOf(true)); } catch (Exception e) {
             * e.printStackTrace(); } } count++;
             */
            return true;
        }
    }
    // wastedTime += (new Date()).getTime() - time_t;
    return false;
}

From source file:com.trigger_context.Main_Service.java

public void senderSync(DataInputStream in, DataOutputStream out, String folder) {
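    // Sync protocol: hash every local file (set A), read the peer's MD5 set (B),
    // send the files only this side has (A - B), then receive the files only the
    // peer has (B - A).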
    String tfolder = folder + (folder.charAt(folder.length() - 1) == '/' ? "" : "/");
    File f = new File(folder);
    File file[] = f.listFiles();
    // noti(file.toString(),"");
    String md5 = null;
    HashMap<String, File> hm = new HashMap<String, File>();

    HashSet<String> A = new HashSet<String>();
    for (File element : file) {
        hm.put(md5 = calculateMD5(element), element);
        A.add(md5);
    }
    // noti(hm.toString(),"");
    int numB = 0;
    try {
        numB = in.readInt();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        noti("error reading 1st int in sendersync", "");
        e.printStackTrace();
    }
    HashSet<String> B = new HashSet<String>();
    for (int i = 0; i < numB; i++) {
        try {
            B.add(in.readUTF());
        } catch (IOException e1) {
            noti("error in reading md5", "");
            e1.printStackTrace();
        }
    }
    HashSet<String> aMb = new HashSet<String>(A);
    aMb.removeAll(B);
    int l1 = aMb.size();
    try {
        out.writeInt(l1);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        noti("error in writing 1st int", "");
        e.printStackTrace();
    }
    Iterator<String> itr = aMb.iterator();
    while (itr.hasNext()) {
        f = hm.get(itr.next());
        sendFile(out, f.getPath());
    }
    HashSet<String> bMa = new HashSet<String>(B);
    bMa.removeAll(A);
    int l2 = bMa.size();
    try {
        out.writeInt(l2);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        noti("error in writing 2nd int", "");
        e.printStackTrace();
    }
    itr = bMa.iterator();
    while (itr.hasNext()) {
        md5 = itr.next();
        try {
            out.writeUTF(md5);
        } catch (IOException e) {
            // TODO Auto-generated catch block
            noti("error in sending md5", "");
            e.printStackTrace();
        }
        recvFile(in, folder);
    }
}

From source file:net.sourceforge.mipa.predicatedetection.lattice.sequence.SequenceWindowedLatticeChecker.java

private RegularExpression modifyRegularExpression(RegularExpression regularExpression) {
    if (DEBUG) {
        System.out.println("========================================");
        System.out.println("Modify regular expression:");
    }
    HashSet<String> identifiers = regularExpression.getIdentifiers();
    String expression = regularExpression.getRegularExpression();
    String result = "";
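    // Wrap the expression so that any identifier (or the placeholder "z") may occur
    // before and after a match; e.g. for identifiers {a, b} and expression "ab" this
    // yields (((a)|(b)|(z))*)ab(((a)|(b)|(z))*).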
    Iterator<String> it = identifiers.iterator();
    boolean firstPosition = true;
    while (it.hasNext()) {
        String identifier = it.next();
        if (firstPosition == true) {
            result += "(" + identifier + ")";
            firstPosition = false;
        } else {
            result += "|(" + identifier + ")";
        }
    }
    // add {}
    result += "|(" + "z" + ")";

    result = "((" + result + ")*)";
    result = result + expression + result;
    RegularExpression predicate = new RegularExpression(result);
    if (DEBUG) {
        System.out.println(result);
        System.out.println("Modify over");
        System.out.println("----------------------------------------");
        System.out.println();
    }
    return predicate;
}

From source file:ch.ethz.dcg.jukefox.cli.CliJukefoxApplication.java

public void listLibraryPaths() {
    HashSet<String> paths = modelSettingsManager.getLibraryPaths();
    if (paths.isEmpty()) {
        System.out.println("No paths stored in settings.");
        return;
    }
    Iterator<String> iterator = paths.iterator();
    while (iterator.hasNext()) {
        System.out.println(iterator.next());
    }
}

From source file:org.pentaho.platform.scheduler.QuartzSubscriptionScheduler.java

/**
 * Returns a list of exception messages.
 */
public List syncSchedule(final List newSchedules) throws Exception {
    List exceptionList = new ArrayList();
    if (newSchedules == null) {
        return (exceptionList);
    }

    Scheduler scheduler = QuartzSystemListener.getSchedulerInstance();
    HashSet jobSet = new HashSet(Arrays.asList(scheduler.getJobNames(QuartzSubscriptionScheduler.GROUP_NAME)));

    // Add/modify the good schedules
    for (int i = 0; i < newSchedules.size(); ++i) {
        ISchedule sched = (ISchedule) newSchedules.get(i);
        try {
            syncSchedule(sched.getScheduleReference(), sched);
        } catch (Throwable t) {
            exceptionList.add(Messages.getString("QuartzSubscriptionScheduler.ERROR_SCHEDULING", //$NON-NLS-1$
                    sched.getScheduleReference(), t.getLocalizedMessage()));
        }
        jobSet.remove(sched.getScheduleReference());
    }

    // Now delete the leftovers
    for (Iterator it = jobSet.iterator(); it.hasNext();) {
        scheduler.deleteJob((String) it.next(), QuartzSubscriptionScheduler.GROUP_NAME);
    }

    return (exceptionList);
}

From source file:com.redhat.rhn.manager.kickstart.KickstartFormatter.java

/**
 * Helper method to render SSL keys for the kickstart file
 * @param setIn set of SSL keys for this kickstart
 * @return rendered SSL key string for the kickstart
 */
private String renderSslKeys(HashSet setIn) {
    StringBuilder retval = new StringBuilder();
    int peg = 1;
    for (Iterator itr = setIn.iterator(); itr.hasNext();) {
        retval.append("cat > /tmp/ssl-key-" + peg + " <<'EOF'" + NEWLINE);
        CryptoKey myKey = (CryptoKey) itr.next();
        retval.append(myKey.getKeyString() + NEWLINE);
        retval.append(NEWLINE);
        retval.append("EOF\n# ssl-key" + peg + NEWLINE);
        peg++;
    }

    retval.append("cat /tmp/ssl-key-* > /usr/share/rhn/RHN-ORG-TRUSTED-SSL-CERT" + NEWLINE);
    retval.append("perl -pe 's/RHNS-CA-CERT/RHN-ORG-TRUSTED-SSL-CERT/g' " + "-i /etc/sysconfig/rhn/up2date"
            + NEWLINE);

    return retval.toString();
}

From source file:edu.ku.brc.specify.conversion.CollectionInfo.java

/**
 * @param oldDBConn the JDBC connection to the source database being converted
 * @param doSkipCheck whether to skip the USYS determination taxon-type consistency check
 * @return the list of CollectionInfo objects, one per collection found
 */
public static Vector<CollectionInfo> getCollectionInfoList(final Connection oldDBConn,
        final boolean doSkipCheck) {
    //collectionInfoList.clear();
    if (collectionInfoList.isEmpty()) {

        String hostTaxonID = "SELECT Count(tn.TaxonomicUnitTypeID) FROM habitat h "
                + "INNER JOIN taxonname tn ON h.HostTaxonID = tn.TaxonNameID WHERE tn.TaxonomyTypeId = ";

        /*String sql = "SELECT cot.CollectionObjectTypeID, cot.CollectionObjectTypeName, csd.CatalogSeriesDefinitionID, csd.CatalogSeriesID FROM collectionobjecttype cot " +
                 "INNER JOIN catalogseriesdefinition csd on " + 
                 "csd.ObjectTypeId = cot.CollectionObjectTypeId WHERE cot.Category = 'Biological' ORDER BY cot.CollectionObjectTypeID, csd.CatalogSeriesID";
        */

        String catSeriesSQL = "SELECT SeriesName, CatalogSeriesPrefix, Remarks, LastEditedBy FROM catalogseries WHERE CatalogSeriesID = ";

        String cntTaxonName = "SELECT COUNT(TaxonNameID) FROM taxonname WHERE TaxonName IS NOT NULL AND TaxonomyTypeId = ";

        String colObjCountPerCatSeriesSQL = "SELECT COUNT(cc.CatalogSeriesID) " + //, cc.CatalogSeriesID, cs.SeriesName " +
                "FROM collectionobjectcatalog cc INNER JOIN catalogseries cs ON cc.CatalogSeriesID = cs.CatalogSeriesID "
                + "WHERE cs.CatalogSeriesID = %d GROUP BY cs.CatalogSeriesID";

        String colObjDetCountPerCatSeriesSQL = "SELECT COUNT(cc.CatalogSeriesID) "
                + "FROM determination d INNER JOIN collectionobject co ON d.BiologicalObjectID = co.CollectionObjectID "
                + "INNER JOIN collectionobjectcatalog cc ON co.CollectionObjectID = cc.CollectionObjectCatalogID "
                + "WHERE cc.CatalogSeriesID = %d AND d.TaxonNameID IS NOT NULL GROUP BY cc.CatalogSeriesID";

        String colObjCatSeriesSQL = "SELECT cot.CollectionObjectTypeID, cot.CollectionObjectTypeName, csd.CatalogSeriesDefinitionID, csd.CatalogSeriesID FROM collectionobjecttype cot "
                + "INNER JOIN catalogseriesdefinition csd on csd.ObjectTypeId = cot.CollectionObjectTypeId "
                + "WHERE cot.Category = 'Biological' ORDER BY cot.CollectionObjectTypeID, csd.CatalogSeriesID";

        Statement stmt = null;

        try {
            pw = new PrintWriter(String.format("sql_%d.log", pwPassCnt++));

            log.debug(colObjCatSeriesSQL);
            logSQL("ColObj Cat Series", colObjCatSeriesSQL);

            HashSet<Integer> taxonTypeIdHash = new HashSet<Integer>();

            stmt = oldDBConn.createStatement();
            ResultSet rs = stmt.executeQuery(colObjCatSeriesSQL);
            while (rs.next()) {
                CollectionInfo info = new CollectionInfo(oldDBConn);

                Integer colObjTypeID = rs.getInt(1);
                pw.println(String.format(
                        "%d ================================================================================",
                        colObjTypeID));

                System.err.println("ColObjType: " + colObjTypeID);

                info.setColObjTypeId(colObjTypeID);
                info.setColObjTypeName(rs.getString(2));
                info.setCatSeriesDefId(rs.getInt(3));
                info.setCatSeriesId(rs.getInt(4));

                String sql = String.format(colObjCountPerCatSeriesSQL, info.getCatSeriesId());
                log.debug(sql);
                logSQL("ColObj Count Per Cat Series", sql);

                int colObjCnt = BasicSQLUtils.getCountAsInt(oldDBConn, sql);
                info.setColObjCnt(colObjCnt);

                sql = String.format(colObjDetCountPerCatSeriesSQL, info.getCatSeriesId());
                log.debug(sql);
                logSQL("ColObj Count Determinations Per Cat Series", sql);

                info.setColObjDetTaxCnt(BasicSQLUtils.getCountAsInt(oldDBConn, sql));

                sql = catSeriesSQL + info.getCatSeriesId();
                log.debug(sql);
                logSQL("Cat Series", sql);

                Vector<Object[]> rows = BasicSQLUtils.query(oldDBConn, sql);
                if (rows != null && rows.size() == 1) {
                    Object[] row = rows.get(0);

                    info.setCatSeriesName((String) row[0]);
                    info.setCatSeriesPrefix((String) row[1]);
                    info.setCatSeriesRemarks((String) row[2]);
                    info.setCatSeriesLastEditedBy((String) row[3]);

                } else {
                    log.error("Error getting CollectionInfo for CollectionObjectTypeID: " + rs.getInt(1)
                            + " number of CatalogSeries: " + (rows == null ? 0 : rows.size()));
                }

                if (!doSkipCheck) {
                    String detSQLStr = "SELECT ct.TaxonomyTypeID, (select distinct relatedsubtypevalues FROM usysmetacontrol c "
                            + "LEFT JOIN usysmetafieldsetsubtype fst ON fst.fieldsetsubtypeid = c.fieldsetsubtypeid "
                            + "WHERE objectid = 10290 AND ct.taxonomytypeid = c.relatedsubtypevalues) AS DeterminationTaxonType "
                            + "FROM collectiontaxonomytypes ct WHERE ct.biologicalobjecttypeid = "
                            + info.getColObjTypeId();
                    log.debug(detSQLStr);
                    logSQL("Checking USYS data", detSQLStr);

                    Vector<Object[]> detRows = BasicSQLUtils.query(oldDBConn, detSQLStr);
                    for (Object[] row : detRows) {
                        Integer txnTypeId = (Integer) row[0];
                        String detTxnTypes = (String) row[1];
                        if (detTxnTypes == null) {
                            detTxnTypes = Integer.toString(txnTypeId);
                        }

                        if (StringUtils.isNotEmpty(detTxnTypes)) {
                            String txNameSQL = "SELECT TaxonomyTypeName FROM taxonomytype WHERE TaxonomyTypeID = ";
                            logSQL("Getting Taxon Type Name", txNameSQL + txnTypeId);

                            if (StringUtils.contains(detTxnTypes, ',')) {
                                StringBuilder sb = new StringBuilder();
                                String[] toks = StringUtils.split(detTxnTypes, ',');

                                String dtName = BasicSQLUtils.querySingleObj(oldDBConn, txNameSQL + txnTypeId);
                                sb.append(String.format(
                                        "Warning - There are %d DeterminationTaxonTypes for TaxonObjectType %d (%s) they are:\n",
                                        toks.length, txnTypeId, dtName));
                                for (String id : toks) {
                                    logSQL("Getting Taxon Type Name", txNameSQL + id);
                                    String name = BasicSQLUtils.querySingleObj(oldDBConn, txNameSQL + id);
                                    sb.append(id);
                                    sb.append(" - ");
                                    sb.append(name);
                                    sb.append("\n");
                                }
                                sb.append(
                                        "\nThis database will need to be fixed by hand before it can be converted.");
                                UIRegistry.showError(sb.toString());
                                System.exit(0);
                                askForFix = true;
                                return null;

                            } else if (StringUtils.isNumeric(detTxnTypes.trim())) {
                                Integer txnType = Integer.parseInt(detTxnTypes);
                                if (!txnType.equals(txnTypeId)) {
                                    String tName = BasicSQLUtils.querySingleObj(oldDBConn, txNameSQL + txnType);
                                    String dtName = BasicSQLUtils.querySingleObj(oldDBConn,
                                            txNameSQL + txnTypeId);
                                    StringBuilder sb = new StringBuilder();
                                    sb.append(String.format(
                                            "Warning - The TaxonObjectType %d (%s) in the DeterminationTaxonTypes field\ndoesn't match the actual TaxonObjectType %d (%s)",
                                            txnType, tName, txnTypeId, dtName));
                                    UIRegistry.showError(sb.toString());
                                    askForFix = true;
                                    return null;
                                }
                            }
                        }
                    }
                }

                /*info.setDeterminationTaxonType(detTxnTypeStr);
                for (Integer id : info.getDetTaxonTypeIdList())
                {
                log.debug("ID: "+id);
                }*/

                // This represents a mapping from what would be the Discipline (Biological Object Type) to the Taxonomic Root
                sql = String.format(
                        "SELECT tt.TaxonomyTypeID, tt.TaxonomyTypeName, tt.KingdomID, tn.TaxonNameID, tn.TaxonName, tu.TaxonomicUnitTypeID FROM taxonomytype AS tt "
                                + "INNER JOIN taxonomicunittype AS tu ON tt.TaxonomyTypeID = tu.TaxonomyTypeID "
                                + "INNER JOIN taxonname AS tn ON tu.TaxonomyTypeID = tn.TaxonomyTypeID "
                                + "INNER JOIN collectiontaxonomytypes AS ct ON tn.TaxonomyTypeID = ct.TaxonomyTypeID "
                                + "WHERE tu.RankID =  0 AND tn.RankID =  0 AND ct.BiologicalObjectTypeID = %d "
                                + "ORDER BY ct.BiologicalObjectTypeID ASC",
                        info.getColObjTypeId());
                log.debug(sql);
                logSQL("Taxon -> Coll Taxon Types", sql);

                rows = BasicSQLUtils.query(oldDBConn, sql);
                if (rows != null && !rows.isEmpty()) {
                    Object[] row = rows.get(0);

                    int taxonomyTypeID = (Integer) row[0];

                    info.setTaxonomyTypeId(taxonomyTypeID);
                    info.setTaxonomyTypeName((String) row[1]);
                    info.setKingdomId((Integer) row[2]);
                    info.setTaxonNameId((Integer) row[3]);
                    info.setTaxonName((String) row[4]);
                    info.setTaxonomicUnitTypeID((Integer) row[5]);

                    info.setTaxonNameCnt(BasicSQLUtils.getCountAsInt(oldDBConn, cntTaxonName + taxonomyTypeID));

                    log.debug("TaxonomyTypeName: " + info.getTaxonomyTypeName() + "  TaxonName: "
                            + info.getTaxonName() + "  TaxonomyTypeId: " + info.getTaxonomyTypeId());

                    sql = hostTaxonID + taxonomyTypeID;
                    log.debug(sql);
                    Vector<Object> ttNames = BasicSQLUtils.querySingleCol(oldDBConn, sql);
                    if (ttNames != null && ttNames.size() > 0 && ((Long) ttNames.get(0)) > 0) {
                        info.setSrcHostTaxonCnt((Long) ttNames.get(0));
                    } else {
                        info.setSrcHostTaxonCnt(0);
                    }

                    taxonTypeIdHash.add(taxonomyTypeID);

                } else {
                    log.error("Error getting CollectionInfo for CollectionObjectTypeID: " + rs.getInt(1));
                }

                collectionInfoList.add(info);
                //System.out.println(info.toString());
            }
            rs.close();

            // Here we figure out whether a Taxon Tree that is used by HostTaxonID is associated with a Collection.
            String sql = "SELECT DISTINCT tt.TaxonomyTypeID, tt.TaxonomyTypeName FROM habitat AS h "
                    + "INNER JOIN taxonname AS tn ON h.HostTaxonID = tn.TaxonNameID "
                    + "INNER JOIN taxonomytype AS tt ON tn.TaxonomyTypeID = tt.TaxonomyTypeID";
            logSQL("Check for HostID", sql);

            Vector<Integer> txTypeIds = BasicSQLUtils.queryForInts(oldDBConn, sql);

            HashSet<Integer> txnTypeIdHashSet = new HashSet<Integer>();
            for (Integer txTypId : txTypeIds) {
                Boolean hasColInfo = false;
                for (CollectionInfo colInfo : collectionInfoList) {
                    if (colInfo.getTaxonomyTypeId().equals(txTypId)) {
                        hasColInfo = true;
                    }
                }

                if (!hasColInfo) {
                    txnTypeIdHashSet.add(txTypId);
                }
            }

            // These TaxonTypeIds do not have CollectionInfo
            for (Iterator<Integer> iter = txnTypeIdHashSet.iterator(); iter.hasNext();) {
                Integer taxonomyTypeID = iter.next();
                System.out.println(taxonomyTypeID);

                sql = "SELECT tt.TaxonomyTypeName, tn.TaxonName, tt.KingdomID, tn.TaxonNameID, tn.TaxonomicUnitTypeID FROM taxonomytype AS tt "
                        + "INNER JOIN taxonomicunittype AS tut ON tt.TaxonomyTypeID = tut.TaxonomyTypeID "
                        + "INNER JOIN taxonname AS tn ON tt.TaxonomyTypeID = tn.TaxonomyTypeID AND tut.TaxonomicUnitTypeID = tn.TaxonomicUnitTypeID "
                        + "WHERE tt.TaxonomyTypeID =  " + taxonomyTypeID + " AND tn.RankID =  0";
                log.debug(sql);
                logSQL("Get TaxonTypeName etc from TaxonomyTypeID and RankID = 0", sql);

                Vector<Object[]> rows = BasicSQLUtils.query(oldDBConn, sql);
                if (rows.size() != 1) {
                    String msg = "There should only be '1' TaxonTypeName for  TaxonomyTypeID:" + taxonomyTypeID;
                    log.error(msg);
                    UIRegistry.showError(msg);
                    continue;
                }

                CollectionInfo colInfo = new CollectionInfo(oldDBConn);

                String taxonTypeName = (String) rows.get(0)[0];
                String taxonRootName = (String) rows.get(0)[1];
                if (StringUtils.isEmpty(taxonRootName)) {
                    taxonRootName = taxonTypeName;
                }

                //colInfo.setColObjTypeId();
                colInfo.setColObjTypeName(taxonRootName);
                //colInfo.setCatSeriesDefId(rs.getInt(3));
                //colInfo.setCatSeriesId(rs.getInt(4));

                colInfo.setCatSeriesName(taxonRootName);
                colInfo.setCatSeriesPrefix("");
                colInfo.setCatSeriesRemarks("");
                colInfo.setCatSeriesLastEditedBy("");

                colInfo.setColObjCnt(1);
                colInfo.setColObjDetTaxCnt(1);

                colInfo.setTaxonomyTypeId(taxonomyTypeID);
                colInfo.setTaxonomyTypeName(taxonTypeName);
                colInfo.setKingdomId((Integer) rows.get(0)[2]);
                colInfo.setTaxonNameId((Integer) rows.get(0)[3]);
                colInfo.setTaxonName(taxonRootName);
                colInfo.setTaxonomicUnitTypeID((Integer) rows.get(0)[4]);

                colInfo.setTaxonNameCnt(BasicSQLUtils.getCountAsInt(oldDBConn, cntTaxonName + taxonomyTypeID));

                colInfo.setSrcHostTaxonCnt(0);

                collectionInfoList.add(colInfo);
            }

            // Do All
            /*String sqlAllTx = "SELECT cot.CollectionObjectTypeID, cot.CollectionObjectTypeName, tt.TaxonomyTypeID, tt.TaxonomyTypeName, tt.KingdomID, tn.TaxonNameID, tn.TaxonName, tn.TaxonomicUnitTypeID " + 
                          "FROM collectionobjecttype AS cot " +
                          "INNER JOIN collectiontaxonomytypes as ctt ON cot.CollectionObjectTypeID = ctt.BiologicalObjectTypeID " + 
                          "INNER JOIN taxonomytype as tt ON ctt.TaxonomyTypeID = tt.TaxonomyTypeID " + 
                          "INNER JOIN taxonname as tn ON tt.TaxonomyTypeID = tn.TaxonomyTypeID " + 
                          "WHERE  cot.Category = 'Biological' AND tn.ParentTaxonNameID IS NULL";
                    
            log.debug(sqlAllTx);
            Vector<Object[]> rows = BasicSQLUtils.query(oldDBConn, sqlAllTx);
            for (Object[] row : rows)
            {
            int taxonomyTypeID = (Integer)row[2];
            if (taxonTypeIdHash.get(taxonomyTypeID) == null)
            {
                CollectionInfo info = new CollectionInfo(oldDBConn);
                        
                info.setColObjTypeId((Integer)row[0]);
                info.setColObjTypeName((String)row[1]);
                info.setCatSeriesDefId(null);
                info.setCatSeriesId(null);
                info.setCatSeriesName("");
                info.setCatSeriesPrefix("");
                info.setCatSeriesRemarks("");
                info.setCatSeriesLastEditedBy("");
                        
                info.setTaxonomyTypeId(taxonomyTypeID);
                info.setTaxonomyTypeName((String)row[3]);
                info.setKingdomId((Integer)row[4]);
                info.setTaxonNameId((Integer)row[5]);
                info.setTaxonName((String)row[6]);
                        
                info.setTaxonomicUnitTypeID((Integer)row[7]);
                        
                info.setTaxonNameCnt(BasicSQLUtils.getCountAsInt(oldDBConn, cntTaxonName + taxonomyTypeID));
                        
                Vector<Object> ttNames = BasicSQLUtils.querySingleCol(oldDBConn, hostTaxonID + taxonomyTypeID);
                if (ttNames != null && ttNames.size() > 0 && ((Long)ttNames.get(0)) > 0)
                {
                    info.setSrcHostTaxonCnt((Long)ttNames.get(0));
                } else
                {
                    info.setSrcHostTaxonCnt(0);
                }
                        
                taxonTypeIdHash.put(taxonomyTypeID, true);
                        
                collectionInfoList.add(info);
            }
            }*/

            dump();

        } catch (Exception ex) {
            ex.printStackTrace();
            try {
                if (stmt != null) {
                    stmt.close();
                }
            } catch (Exception e) {
            }

        } finally {
            if (pw != null)
                pw.close();
        }
    }

    Collections.sort(collectionInfoList);

    return collectionInfoList;
}