Example usage for org.hibernate Query scroll

List of usage examples for org.hibernate Query scroll

Introduction

On this page you can find example usage for org.hibernate Query scroll.

Prototype

ScrollableResults scroll(ScrollMode scrollMode);

Document

Return the query results as ScrollableResults.
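
A minimal caller-side sketch of this prototype, assuming an open SessionFactory and a hypothetical entity ExampleEntity, might look like the following:

// Assumptions: sessionFactory is an existing org.hibernate.SessionFactory
// and ExampleEntity is a hypothetical mapped entity.
Session session = sessionFactory.openSession();
Query query = session.createQuery("from ExampleEntity e");
query.setReadOnly(true);
ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
try {
    while (results.next()) {
        ExampleEntity entity = (ExampleEntity) results.get(0);
        // process one row at a time instead of materializing the full list
    }
} finally {
    results.close();
    session.close();
}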

Usage

From source file:at.treedb.db.Iterator.java

License:Open Source License

/**
 * <p>
 * Returns the next single object of an entity. This method is used to
 * enumerate large (binary) objects of an entity set. Single object fetching
 * should avoid running into OutOfMemory exceptions.
 * </p>
 * <p>
 * <b>Implementation details:</b>
 * <ol>
 * <li>Hibernate: Stateless session<br>
 * </li>
 * <li>JPA/EclipseLink: <a href=
 * "http://wiki.eclipse.org/Using_Advanced_Query_API_%28ELUG%29#Example_107-12">
 * ReadAllQuery/CursoredStream</a> (streaming data) wasn't really working -
 * every time the whole entity data set was loaded on the first access!
 * Instead, a native SQL statement is used to pre-load all object IDs, and
 * this list is then used to retrieve the objects one by one.</li>
 * <li>JPA/ObjectDB: Slow query with setting first position/max data set
 * size.</li>
 * </ol>
 * 
 * @return entity object
 * @throws Exception
 */
@SuppressWarnings("unchecked")
public List<Object> nextObject() throws Exception {
    if (!hasNext) {
        return null;
    }
    int size = 1;
    List<Object> list = null;
    // Hibernate environment
    if (dao.isHibernate() || dao.getJPAimpl() == DAO.JPA_IMPL.HIBERNATEJPA) {
        if (sresult == null) {
            Query query = ((DAOhibernate) dao).createQuery(queryString, map);
            query.setReadOnly(true);
            // Integer.MIN_VALUE hints the JDBC driver to stream results - but
            // this trick does not work for every database!
            if (dao.getDB() != DAO.DB.H2) {
                query.setFetchSize(Integer.MIN_VALUE);
            }
            sresult = query.scroll(ScrollMode.FORWARD_ONLY);
        }
        if (sresult.next()) {
            list = new ArrayList<Object>();
            list.add(sresult.get(0));
        }
    } else {
        if (dao.getJPAimpl() != DAO.JPA_IMPL.OBJECTDB) {
            if (idList == null) {
                idList = (List<Integer>) dao.nativeQuery(nativeQueryString);
                if (idList.size() == 0) {
                    return null;
                }
            }
            if (listIndex < idList.size()) {
                list = new ArrayList<Object>();
                Object o = Base.load(dao, (Class<? extends Base>) clazz, idList.get(listIndex));
                if (o == null) {
                    throw new Exception("Iterator.nextObject(): loading JPA object for ID "
                            + idList.get(listIndex) + " failed");
                }
                list.add(o);
                ++listIndex;
            }
        } else {
            // TODO: fallback for ObjectDB - working, but slow, very slow
            list = (List<Object>) dao.query(queryString, index, size, map);
        }
    }
    index += size;
    toRead -= size;
    if (toRead == 0) {
        hasNext = false;
    }
    return list;
}
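
The Javadoc above mentions streaming via a Hibernate stateless session; a minimal sketch of that pattern, assuming a sessionFactory and a hypothetical entity LargeObjectEntity, could look like this:

// Assumptions: sessionFactory and LargeObjectEntity are placeholders.
StatelessSession session = sessionFactory.openStatelessSession();
Query query = session.createQuery("from LargeObjectEntity e");
query.setReadOnly(true);
// Hint to the JDBC driver to stream results (does not work for every DB).
query.setFetchSize(Integer.MIN_VALUE);
ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
try {
    while (results.next()) {
        LargeObjectEntity entity = (LargeObjectEntity) results.get(0);
        // handle one large object at a time to avoid OutOfMemoryError
    }
} finally {
    results.close();
    session.close();
}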

From source file:ca.ualberta.physics.cssdp.catalogue.dao.UrlDataProductDao.java

License:Apache License

public void process(UrlDataProductUpdateMap urlDataProductUpdateMap) {

    if (urlDataProductUpdateMap.getUrls().size() == 0) {
        return;
    }

    /*
     * The size of scannedUrlDataProducts should be <= jdbc batch size
     * configured.
     */

    // We have to resort to Hibernate directly because JPA does not offer
    // scrolling capability.
    Session session = emp.get().unwrap(Session.class).getSessionFactory().openSession();

    Transaction tx = session.beginTransaction();

    // "in" clause limit is 2^16 on Postgresql, it might be different on
    // other dbs
    String hqlString = "from UrlDataProduct urldp where urldp.url in (:urls)";

    // the fastest way to scroll through the existing data
    Query q = session.createQuery(hqlString);
    q.setParameterList("urls", urlDataProductUpdateMap.getUrls());
    q.setCacheMode(CacheMode.IGNORE);
    ScrollableResults existingData = q.scroll(ScrollMode.FORWARD_ONLY);

    while (existingData.next()) {

        UrlDataProduct existing = (UrlDataProduct) existingData.get(0);
        UrlDataProduct updated = urlDataProductUpdateMap.get(existing.getUrl());

        if (updated != null) {

            /*
             * Only bother to update the record if it's actually changed.
             * Note that the scan timestamp is ignored in the check because
             * that isn't something the provider changed. A change can also
             * mean the url was deleted, and now it's back.
             */
            if (existing.hasChanged(updated)) {
                // existing.setDataProduct(updated.getDataProduct());
                existing.setUrl(updated.getUrl());
                existing.setStartTimestamp(updated.getStartTimestamp());
                existing.setEndTimestamp(updated.getEndTimestamp());
                existing.setScanTimestamp(updated.getScanTimestamp());
                existing.setDeleted(false);
                urlDataProductUpdateMap.remove(updated.getUrl());
                session.update(existing);
            } else {
                // remove it so it's not duplicated
                urlDataProductUpdateMap.remove(existing.getUrl());
            }

        } else {

            // If we get here, the existing url has been removed from the
            // server, so soft-"delete" it from the catalogue.
            existing.setDeleted(true);
            existing.setScanTimestamp(new LocalDateTime());

        }

    }

    // persist the new url mappings
    for (String newUrl : urlDataProductUpdateMap.getUrls()) {
        UrlDataProduct newUrlDataProduct = urlDataProductUpdateMap.get(newUrl);
        session.save(newUrlDataProduct);
        logger.debug("saved a mapping: " + newUrlDataProduct.getUrl());
    }

    session.flush();
    session.clear();

    tx.commit();
    session.close();

}
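
As a hedged variation of the loop above (not part of the original method), flushing and clearing the session periodically keeps the first-level cache bounded when the scrolled result set is very large:

// Sketch only: the batch size of 50 is an arbitrary assumption.
int count = 0;
while (existingData.next()) {
    UrlDataProduct existing = (UrlDataProduct) existingData.get(0);
    // ... compare and update as in the method above ...
    session.update(existing);
    if (++count % 50 == 0) {
        session.flush();
        session.clear();
    }
}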

From source file:com.knowbout.epg.entities.Program.java

License:Apache License

public static ScrollableResults selectAllShowsMoviesSports() {
    Session session = HibernateUtil.currentSession();
    Query query = session.getNamedQuery("Program.selectAllShowsMoviesSports");
    ScrollableResults scroll = query.scroll(ScrollMode.FORWARD_ONLY);
    return scroll;
}

From source file:com.knowbout.epg.entities.Program.java

License:Apache License

public static ScrollableResults selectAllTeams() {
    Session session = HibernateUtil.currentSession();
    Query query = session.getNamedQuery("Program.selectAllTeams");
    ScrollableResults scroll = query.scroll(ScrollMode.FORWARD_ONLY);
    return scroll;
}
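
A hedged caller-side sketch for the two helpers above, assuming the named queries return Program entities: the caller receives an open cursor and is responsible for closing it.

ScrollableResults scroll = Program.selectAllTeams();
try {
    while (scroll.next()) {
        Program program = (Program) scroll.get(0);
        // process each program
    }
} finally {
    // the caller owns the cursor and must close it
    scroll.close();
}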

From source file:com.mysema.query.jpa.hibernate.AbstractHibernateQuery.java

License:Apache License

/**
 * Return the query results as <tt>ScrollableResults</tt>. The
 * scrollability of the returned results depends upon JDBC driver
 * support for scrollable <tt>ResultSet</tt>s.<br>
 *
 * @param mode
 * @param expr
 * @return
 */
public ScrollableResults scroll(ScrollMode mode, Expression<?> expr) {
    Query query = createQuery(expr);
    reset();
    return query.scroll(mode);
}

From source file:com.mysema.query.jpa.hibernate.AbstractHibernateQuery.java

License:Apache License

/**
 * Return the query results as <tt>ScrollableResults</tt>. The
 * scrollability of the returned results depends upon JDBC driver
 * support for scrollable <tt>ResultSet</tt>s.<br>
 *
 * @param mode
 * @param expr1
 * @param expr2
 * @param rest
 * @return
 */
public ScrollableResults scroll(ScrollMode mode, Expression<?> expr1, Expression<?> expr2,
        Expression<?>... rest) {
    Query query = createQuery(expr1, expr2, rest);
    reset();
    return query.scroll(mode);
}

From source file:com.mysema.query.jpa.hibernate.AbstractHibernateQuery.java

License:Apache License

/**
 * Return the query results as <tt>ScrollableResults</tt>. The
 * scrollability of the returned results depends upon JDBC driver
 * support for scrollable <tt>ResultSet</tt>s.<br>
 *
 * @param mode
 * @param args
 * @return
 */
public ScrollableResults scroll(ScrollMode mode, Expression<?>[] args) {
    Query query = createQuery(args);
    reset();
    return query.scroll(mode);
}
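
A hedged sketch of calling the scroll overloads above through Querydsl's HibernateQuery, where QCustomer is an assumed generated metamodel class:

// Assumptions: session is an open org.hibernate.Session and QCustomer
// is a hypothetical Querydsl query type.
QCustomer customer = QCustomer.customer;
HibernateQuery query = new HibernateQuery(session).from(customer);
ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY, customer.name);
try {
    while (results.next()) {
        String name = (String) results.get(0);
        // stream names without materializing the whole result list
    }
} finally {
    results.close();
}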

From source file:com.querydsl.jpa.hibernate.AbstractHibernateQuery.java

License:Apache License

/**
 * Return the query results as an <tt>Iterator</tt>. If the query
 * contains multiple results per row, the results are returned in
 * an instance of <tt>Object[]</tt>.<br>
 * <br>
 * Entities returned as results are initialized on demand. The first
 * SQL query returns identifiers only.<br>
 */
@Override
public CloseableIterator<T> iterate() {
    try {
        Query query = createQuery();
        ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
        return new ScrollableResultsIterator<T>(results);
    } finally {
        reset();
    }
}
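
A hedged usage sketch for iterate(), assuming a HibernateQuery<Customer> named query: the returned CloseableIterator wraps the forward-only cursor and should be closed when iteration ends.

// Assumptions: query and Customer are placeholders for a real Querydsl
// query instance and its result type.
CloseableIterator<Customer> iterator = query.iterate();
try {
    while (iterator.hasNext()) {
        Customer customer = iterator.next();
        // entities are initialized on demand as the cursor advances
    }
} finally {
    iterator.close();
}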

From source file:com.querydsl.jpa.hibernate.sql.AbstractHibernateSQLQuery.java

License:Apache License

@Override
public CloseableIterator<T> iterate() {
    try {
        Query query = createQuery();
        ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
        return new ScrollableResultsIterator<T>(results);
    } finally {
        reset();
    }
}

From source file:com.wci.umls.server.jpa.algo.RrfLoaderAlgorithm.java

License:Open Source License

/**
 * Load MRCONSO.RRF. This is responsible for loading {@link Atom}s and
 * {@link AtomClass}es.
 *
 * @throws Exception the exception
 */
private void loadMrconso() throws Exception {
    logInfo("  Load MRCONSO");
    logInfo("  Insert atoms and concepts ");

    // Set up maps
    String line = null;

    int objectCt = 0;
    final PushBackReader reader = readers.getReader(RrfReaders.Keys.MRCONSO);
    final String fields[] = new String[18];
    String prevCui = null;
    Concept cui = null;
    while ((line = reader.readLine()) != null) {

        line = line.replace("\r", "");
        FieldedStringTokenizer.split(line, "|", 18, fields);

        // Skip non-matching in single mode
        if (singleMode && !fields[11].equals(getTerminology())) {
            continue;
        }

        // Field Description
        // 0 CUI
        // 1 LAT
        // 2 TS
        // 3 LUI
        // 4 STT
        // 5 SUI
        // 6 ISPREF
        // 7 AUI
        // 8 SAUI
        // 9 SCUI
        // 10 SDUI
        // 11 SAB
        // 12 TTY
        // 13 CODE
        // 14 STR
        // 15 SRL
        // 16 SUPPRESS
        // 17 CVF
        //
        // e.g.
        // C0000005|ENG|P|L0000005|PF|S0007492|Y|A7755565||M0019694|D012711|MSH|PEN|D012711|(131)I-Macroaggregated
        // Albumin|0|N|256|

        // set the root terminology language
        loadedRootTerminologies.get(fields[11]).setLanguage(fields[1]);

        final Atom atom = new AtomJpa();
        atom.setLanguage(fields[1]);
        atom.setTimestamp(releaseVersionDate);
        atom.setLastModified(releaseVersionDate);
        atom.setLastModifiedBy(loader);
        atom.setObsolete(fields[16].equals("O"));
        atom.setSuppressible(!fields[16].equals("N"));
        atom.setPublished(true);
        atom.setPublishable(true);
        atom.setName(fields[14]);
        atom.setTerminology(fields[11]);
        if (loadedTerminologies.get(fields[11]) == null) {
            throw new Exception("Atom references terminology that does not exist: " + fields[11]);
        }
        atom.setVersion(loadedTerminologies.get(fields[11]).getVersion());
        // skip in single mode
        if (!singleMode) {
            atom.putAlternateTerminologyId(getTerminology(), fields[7]);
        }
        atom.setTerminologyId(fields[8]);
        atom.setTermType(fields[12]);
        atom.setWorkflowStatus(published);

        atom.setCodeId(fields[13]);
        atom.setDescriptorId(fields[10]);
        atom.setConceptId(fields[9]);

        atom.setStringClassId(fields[5]);
        atom.setLexicalClassId(fields[3]);
        atom.setCodeId(fields[13]);

        // Handle root terminology short name, hierarchical name, and sy names
        if (fields[11].equals("SRC") && fields[12].equals("SSN")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                t.getRootTerminology().setShortName(fields[14]);
            }
        }
        if (fields[11].equals("SRC") && fields[12].equals("RHT")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                t.getRootTerminology().setHierarchicalName(fields[14]);
            }
        }

        if (fields[11].equals("SRC") && fields[12].equals("RPT")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                t.getRootTerminology().setPreferredName(fields[14]);
            }
        }
        if (fields[11].equals("SRC") && fields[12].equals("RSY") && !fields[14].equals("")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                List<String> syNames = t.getRootTerminology().getSynonymousNames();
                syNames.add(fields[14]);
            }
        }

        // Handle terminology sy names
        if (fields[11].equals("SRC") && fields[12].equals("VSY") && !fields[14].equals("")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                List<String> syNames = t.getSynonymousNames();
                syNames.add(fields[14]);
            }
        }

        // Determine organizing class type for terminology
        if (!atom.getDescriptorId().equals("")) {
            termIdTypeMap.put(atom.getTerminology(), IdType.DESCRIPTOR);
        } else if (!atom.getConceptId().equals("")) {
            termIdTypeMap.put(atom.getTerminology(), IdType.CONCEPT);
        } // OTHERWISE it remains "CODE"

        // skip in single mode
        if (!singleMode) {
            atom.putConceptTerminologyId(getTerminology(), fields[0]);
        }

        // Add atoms and commit periodically
        addAtom(atom);
        logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
        atomIdMap.put(fields[7], atom.getId());
        atomTerminologyMap.put(fields[7], atom.getTerminology().intern());
        atomConceptIdMap.put(fields[7], atom.getConceptId().length() == 0 ? "".intern() : atom.getConceptId());
        atomCodeIdMap.put(fields[7], atom.getCodeId().length() == 0 ? "".intern() : atom.getCodeId());
        atomDescriptorIdMap.put(fields[7],
                atom.getDescriptorId().length() == 0 ? "".intern() : atom.getDescriptorId());

        // CUI - skip in single mode
        if (!singleMode) {
            // Add concept
            if (prevCui == null || !fields[0].equals(prevCui)) {
                if (prevCui != null) {
                    cui.setName(getComputedPreferredName(cui, list));
                    addConcept(cui);
                    conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
                    logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
                }
                cui = new ConceptJpa();
                cui.setTimestamp(releaseVersionDate);
                cui.setLastModified(releaseVersionDate);
                cui.setLastModifiedBy(loader);
                cui.setPublished(true);
                cui.setPublishable(true);
                cui.setTerminology(getTerminology());
                cui.setTerminologyId(fields[0]);
                cui.setVersion(getVersion());
                cui.setWorkflowStatus(published);
            }
            cui.getAtoms().add(atom);
            prevCui = fields[0];
        }

        // Handle Subset
        // C3539934|ENG|S|L11195730|PF|S13913746|N|A23460885||900000000000538005||SNOMEDCT_US|SB|900000000000538005|Description
        // format|9|N|256|
        if (fields[12].equals("SB")) {

            // Have to handle the type later, when we get to attributes
            final AtomSubset atomSubset = new AtomSubsetJpa();
            setSubsetFields(atomSubset, fields);
            cuiAuiAtomSubsetMap.put(fields[0] + fields[7], atomSubset);
            idTerminologyAtomSubsetMap.put(atomSubset.getTerminologyId() + atomSubset.getTerminology(),
                    atomSubset);
            final ConceptSubset conceptSubset = new ConceptSubsetJpa();
            setSubsetFields(conceptSubset, fields);
            cuiAuiConceptSubsetMap.put(fields[0] + fields[7], conceptSubset);
            idTerminologyConceptSubsetMap.put(conceptSubset.getTerminologyId() + conceptSubset.getTerminology(),
                    conceptSubset);
        }

    }
    // Add last concept
    if (prevCui != null) {
        cui.setName(getComputedPreferredName(cui, list));
        addConcept(cui);
        conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
        logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    }

    // Set the terminology organizing class types
    for (final Terminology terminology : loadedTerminologies.values()) {
        final IdType idType = termIdTypeMap.get(terminology.getTerminology());
        if (idType != null && idType != IdType.CODE) {
            terminology.setOrganizingClassType(idType);
            updateTerminology(terminology);
        }
    }

    logInfo("  Add concepts");
    objectCt = 0;
    // NOTE: Hibernate-specific to support iterating
    // Restrict to timestamp used for THESE atoms, in case multiple RRF
    // files are loaded
    final Session session = manager.unwrap(Session.class);
    org.hibernate.Query hQuery = session.createQuery("select a from AtomJpa a " + "where conceptId is not null "
            + "and conceptId != '' and timestamp = :timestamp " + "order by terminology, conceptId");
    hQuery.setParameter("timestamp", releaseVersionDate);
    hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
    ScrollableResults results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
    prevCui = null;
    cui = null;
    while (results.next()) {
        final Atom atom = (Atom) results.get()[0];
        if (atom.getConceptId() == null || atom.getConceptId().isEmpty()) {
            continue;
        }
        if (prevCui == null || !prevCui.equals(atom.getConceptId())) {
            if (cui != null) {
                // compute preferred name
                cui.setName(getComputedPreferredName(cui, list));
                addConcept(cui);
                conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
                logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
            }
            cui = new ConceptJpa();
            cui.setTimestamp(releaseVersionDate);
            cui.setLastModified(releaseVersionDate);
            cui.setLastModifiedBy(loader);
            cui.setPublished(true);
            cui.setPublishable(true);
            cui.setTerminology(atom.getTerminology());
            cui.setTerminologyId(atom.getConceptId());
            cui.setVersion(atom.getVersion());
            cui.setWorkflowStatus(published);
        }
        cui.getAtoms().add(atom);
        prevCui = atom.getConceptId();
    }
    if (cui != null) {
        cui.setName(getComputedPreferredName(cui, list));
        addConcept(cui);
        conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
        commitClearBegin();
    }
    results.close();
    logInfo("  Add descriptors");
    objectCt = 0;

    // NOTE: Hibernate-specific to support iterating
    hQuery = session.createQuery("select a from AtomJpa a " + "where descriptorId is not null "
            + "and descriptorId != '' and timestamp = :timestamp " + "order by terminology, descriptorId");
    hQuery.setParameter("timestamp", releaseVersionDate);
    hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
    results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
    String prevDui = null;
    Descriptor dui = null;
    while (results.next()) {
        final Atom atom = (Atom) results.get()[0];
        if (atom.getDescriptorId() == null || atom.getDescriptorId().isEmpty()) {
            continue;
        }
        if (prevDui == null || !prevDui.equals(atom.getDescriptorId())) {
            if (dui != null) {
                // compute preferred name
                dui.setName(getComputedPreferredName(dui, list));
                addDescriptor(dui);
                descriptorIdMap.put(dui.getTerminology() + dui.getTerminologyId(), dui.getId());
                logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
            }
            dui = new DescriptorJpa();
            dui.setTimestamp(releaseVersionDate);
            dui.setLastModified(releaseVersionDate);
            dui.setLastModifiedBy(loader);
            dui.setPublished(true);
            dui.setPublishable(true);
            dui.setTerminology(atom.getTerminology());
            dui.setTerminologyId(atom.getDescriptorId());
            dui.setVersion(atom.getVersion());
            dui.setWorkflowStatus(published);
        }
        dui.getAtoms().add(atom);
        prevDui = atom.getDescriptorId();
    }
    if (dui != null) {
        dui.setName(getComputedPreferredName(dui, list));
        addDescriptor(dui);
        descriptorIdMap.put(dui.getTerminology() + dui.getTerminologyId(), dui.getId());
        commitClearBegin();
    }
    results.close();

    // Use flag to decide whether to handle codes
    if (codesFlag) {
        logInfo("  Add codes");
        objectCt = 0;
        // NOTE: Hibernate-specific to support iterating
        // Skip NOCODE
        // TODO: there is an LNC exception here - for now
        hQuery = session.createQuery("select a from AtomJpa a where codeId is not null "
                + "and codeId != '' and timestamp = :timestamp "
                + "and (terminology = 'LNC' OR (codeId != conceptId and codeId != descriptorId)) "
                + "and timestamp = :timestamp " + "order by terminology, codeId");
        hQuery.setParameter("timestamp", releaseVersionDate);
        hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
        results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
        String prevCode = null;
        Code code = null;
        while (results.next()) {
            final Atom atom = (Atom) results.get()[0];
            if (atom.getCodeId() == null || atom.getCodeId().isEmpty() || atom.getCodeId().equals("NOCODE")) {
                continue;
            }
            if (prevCode == null || !prevCode.equals(atom.getCodeId())) {
                if (code != null) {
                    // compute preferred name
                    code.setName(getComputedPreferredName(code, list));
                    addCode(code);
                    codeIdMap.put(code.getTerminology() + code.getTerminologyId(), code.getId());
                    logAndCommit(++objectCt, RootService.logCt, 1000);
                }
                code = new CodeJpa();
                code.setTimestamp(releaseVersionDate);
                code.setLastModified(releaseVersionDate);
                code.setLastModifiedBy(loader);
                code.setPublished(true);
                code.setPublishable(true);
                code.setTerminology(atom.getTerminology());
                code.setTerminologyId(atom.getCodeId());
                code.setVersion(atom.getVersion());
                code.setWorkflowStatus(published);
            }
            code.getAtoms().add(atom);
            prevCode = atom.getCodeId();
        }
        if (code != null) {
            code.setName(getComputedPreferredName(code, list));
            addCode(code);
            codeIdMap.put(code.getTerminology() + code.getTerminologyId(), code.getId());
            commitClearBegin();
        }
        results.close();
    }

    // NOTE: for efficiency and lack of use cases, we've temporarily
    // suspended the loading of LexicalClass and StringClass objects

    // // NOTE: atoms are not connected to lexical classes as there are
    // // currently no known uses for this.
    // logInfo(" Add lexical classes");
    // objectCt = 0;
    // query = NEED TO FIX THIS
    // manager
    // .createQuery("select a.id from AtomJpa a order by lexicalClassId");
    // String prevLui = null;
    // LexicalClass lui = null;
    // LexicalClass atoms = null;
    // for (final Long id : (List<Long>) query.getResultList()) {
    // final Atom atom = getAtom(id);
    // if (atom.getLexicalClassId() == null
    // || atom.getLexicalClassId().isEmpty()) {
    // continue;
    // }
    // if (prevLui == null || !prevLui.equals(atom.getLexicalClassId())) {
    // if (lui != null) {
    // // compute preferred name
    // lui.setName(getComputedPreferredName(atoms));
    // addLexicalClass(lui);
    // logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    // }
    // // just used to hold atoms, never saved.
    // atoms = new LexicalClassJpa();
    // lui = new LexicalClassJpa();
    // lui.setTimestamp(releaseVersionDate);
    // lui.setLastModified(releaseVersionDate);
    // lui.setLastModifiedBy(loader);
    // lui.setPublished(true);
    // lui.setPublishable(true);
    // lui.setTerminology(terminology);
    // lui.setTerminologyId(atom.getLexicalClassId());
    // lui.setVersion(version);
    // lui.setWorkflowStatus(published);
    // lui.setNormalizedString(getNormalizedString(atom.getName()));
    // }
    // atoms.addAtom(atom);
    // prevLui = atom.getLexicalClassId();
    // }
    // if (lui != null) {
    // lui.setName(getComputedPreferredName(atoms));
    // commitClearBegin();
    // logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    // }
    //
    // // NOTE: currently atoms are not loaded for string classes
    // // We simply load the objects themselves ( for SUI maintenance)
    // // There are no known use cases for having the atoms here.
    // logInfo(" Add string classes");
    // objectCt = 0;
    // query = NEED TO FIX THIS
    // manager
    // .createQuery("select distinct stringClassId, name from AtomJpa a");
    // for (final Object[] suiFields : (List<Object[]>) query.getResultList()) {
    // final StringClass sui = new StringClassJpa();
    // sui.setTimestamp(releaseVersionDate);
    // sui.setLastModified(releaseVersionDate);
    // sui.setLastModifiedBy(loader);
    // sui.setPublished(true);
    // sui.setPublishable(true);
    // sui.setTerminology(terminology);
    // sui.setTerminologyId(suiFields[0].toString());
    // sui.setVersion(version);
    // sui.setWorkflowStatus(published);
    // sui.setName(suiFields[1].toString());
    // addStringClass(sui);
    // logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    // }

    // commit
    commitClearBegin();

    logInfo("  Update terminologies for languages and names.");

    // Update all root terminologies now that we know languages and names
    for (final RootTerminology root : loadedRootTerminologies.values()) {
        updateRootTerminology(root);
    }

    // Update all terminologies now that we know languages and names
    for (final Terminology terminology : loadedTerminologies.values()) {
        updateTerminology(terminology);
    }
    commitClearBegin();

}
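
The loader above repeats one Hibernate-specific scrolling pattern for concepts, descriptors, and codes; a condensed sketch of that pattern, with the HQL and the batching reduced to placeholders, looks like this:

// Condensed sketch of the scroll-and-commit pattern used above; the HQL
// where-clause and the periodic batching are placeholders, not real values.
org.hibernate.Query hQuery = session.createQuery("select a from AtomJpa a where ...");
hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
ScrollableResults results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
while (results.next()) {
    final Atom atom = (Atom) results.get()[0];
    // group atoms by id and add/commit periodically to bound memory use
}
results.close();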