Example usage for org.hibernate Session evict

List of usage examples for org.hibernate Session evict

Introduction

On this page you can find example usage for org.hibernate Session evict.

Prototype

void evict(Object object);

Document

Remove this instance from the session cache.
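
Before the usage listings below, here is a minimal sketch of the typical pattern. It assumes a hypothetical mapped entity Item and an application-specific process() helper; evicting each instance after it has been handled keeps the session's first-level cache from growing during batch work:

public void processAll(Session session) {
    List items = session.createQuery("from Item").list();
    for (Object obj : items) {
        Item item = (Item) obj;
        process(item);        // application-specific work (hypothetical helper)
        session.evict(item);  // detach the instance from the session cache
    }
}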

Usage

From source file: de.ingrid.portal.scheduler.jobs.RSSFetcherJob.java

License: EUPL

/**
 * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
 */
public void execute(JobExecutionContext context) throws JobExecutionException {

    if (log.isDebugEnabled()) {
        log.debug("RSSFetcherJob is started ...");
    }

    Session session = HibernateUtil.currentSession();
    Transaction tx = null;
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();

    int status = STATUS_OK;
    String statusCode = STATUS_CODE_NO_ERROR;
    try {

        SyndFeed feed = null;
        URL feedUrl = null;
        SyndFeedInput input = null;
        Date publishedDate = null;
        SyndEntry entry = null;
        int cnt = 0;
        int feedEntriesCount = 0;
        // String errorMsg = "";

        Calendar cal;

        // get rss sources from database
        tx = session.beginTransaction();
        List rssSources = session.createCriteria(IngridRSSSource.class).list();
        tx.commit();
        Iterator it = rssSources.iterator();

        // start timer
        startTimer();
        URLConnection urlCon = null;
        while (it.hasNext()) {
            IngridRSSSource rssSource = (IngridRSSSource) it.next();
            if (log.isDebugEnabled()) {
                log.debug("Working on: " + rssSource.getUrl());
            }
            try {
                feedUrl = new URL(rssSource.getUrl());
                urlCon = feedUrl.openConnection();
                urlCon.setConnectTimeout(15000);
                urlCon.setReadTimeout(15000);
                new Thread(new InterruptThread(urlCon, 30000)).start();
                input = new SyndFeedInput();
                feed = input.build(new XmlReader(urlCon));

                if (log.isDebugEnabled()) {
                    log.debug("Resource fetched.");
                }

                if (feed.getLanguage() == null) {
                    feed.setLanguage(rssSource.getLanguage());
                }
                if (rssSource.getDescription() != null && rssSource.getDescription().trim().length() > 0) {
                    feed.setAuthor(rssSource.getDescription().trim());
                }

                Iterator it2 = feed.getEntries().iterator();
                // work on all rss items of the feed
                while (it2.hasNext()) {
                    entry = (SyndEntry) it2.next();
                    if (log.isDebugEnabled()) {
                        log.debug("Working on item: " + entry.getTitle());
                    }
                    boolean includeEntry = true;
                    String categoryFilter = rssSource.getCategories();
                    if (categoryFilter != null && !categoryFilter.equalsIgnoreCase("all")) {
                        includeEntry = false;
                        List categories = entry.getCategories();
                        if (categories != null && categories.size() > 0) {
                            for (int i = 0; i < categories.size(); i++) {
                                SyndCategoryImpl category = (SyndCategoryImpl) categories.get(i);
                                String categoryStr = category.getName().toLowerCase();
                                if (categoryStr != null && categoryStr.length() > 0) {
                                    categoryStr = UtilsString.regExEscape(category.getName().toLowerCase());
                                    if (categoryFilter.toLowerCase().matches("^" + categoryStr + ".*|.*,"
                                            + categoryStr + ",.*|.*," + categoryStr + "$")) {
                                        includeEntry = true;
                                        break;
                                    }
                                }
                            }
                        }
                    }

                    // filter entries with no title
                    if (includeEntry && (entry.getTitle() == null || entry.getTitle().trim().length() == 0)) {
                        includeEntry = false;
                        if (log.isDebugEnabled()) {
                            log.debug("Ignore item, because item has no title: " + entry);
                        }
                    }

                    publishedDate = entry.getPublishedDate();
                    // check for published date in the entry
                    if (publishedDate == null) {
                        includeEntry = false;
                        if (log.isDebugEnabled()) {
                            log.debug(
                                    "Ignore item, because a publishing date could not be retrieved: " + entry);
                        }
                    }

                    cal = Calendar.getInstance();

                    // filter entries with dates in future
                    if (includeEntry && publishedDate != null && publishedDate.after(cal.getTime())) {
                        includeEntry = false;
                        if (log.isDebugEnabled()) {
                            log.debug("Ignore item, because the publishing date is in the future: "
                                    + publishedDate);
                        }
                    }
                    // filter dates before RSS entry window
                    cal.add(Calendar.DATE,
                            -1 * PortalConfig.getInstance().getInt(PortalConfig.RSS_HISTORY_DAYS));
                    if (includeEntry && publishedDate != null && publishedDate.before(cal.getTime())) {
                        includeEntry = false;
                        if (log.isDebugEnabled()) {
                            log.debug("Ignore item, because the publishing date is too far in the past: "
                                    + publishedDate);
                        }
                    }

                    if (includeEntry) {
                        // process title here to have same value for checks !
                        // NOTICE: not empty, already checked above !
                        String title = processStringForStore(entry.getTitle(), 255);

                        // check if this entry already exists
                        tx = session.beginTransaction();
                        List rssEntries = session.createCriteria(IngridRSSStore.class)
                                .add(Restrictions.eq("link", entry.getLink()))
                                .add(Restrictions.eq("language", feed.getLanguage())).list();
                        tx.commit();

                        // NOTICE: link might be different although news IS THE SAME !!!
                        // (e.g. Bing always adds different tid parameter ! for ads ?).
                        // So we also check via title and date and language
                        if (rssEntries.isEmpty()) {
                            tx = session.beginTransaction();
                            rssEntries = session.createCriteria(IngridRSSStore.class)
                                    .add(Restrictions.eq("title", title))
                                    .add(Restrictions.eq("publishedDate", publishedDate))
                                    .add(Restrictions.eq("language", feed.getLanguage())).list();
                            tx.commit();
                        }

                        if (rssEntries.isEmpty()) {
                            List authors = new ArrayList();
                            SyndPerson author = new SyndPersonImpl();
                            authors.add(author);
                            if (feed.getAuthor() == null || feed.getAuthor().length() == 0) {
                                if (entry.getAuthor() == null || entry.getAuthor().length() == 0) {
                                    if (feed.getTitle() != null && feed.getTitle().length() > 0) {
                                        author.setName(feed.getTitle());
                                    } else {
                                        author.setName("nicht angegeben / not specified");
                                    }
                                } else {
                                    author.setName(entry.getAuthor());
                                }
                            } else {
                                author.setName(feed.getAuthor());
                            }
                            entry.setAuthors(authors);

                            IngridRSSStore rssEntry = new IngridRSSStore();
                            rssEntry.setTitle(title);
                            String description = processStringForStore(entry.getDescription().getValue(), null);
                            rssEntry.setDescription(description);
                            rssEntry.setLink(entry.getLink());
                            rssEntry.setLanguage(feed.getLanguage());
                            rssEntry.setPublishedDate(publishedDate);
                            rssEntry.setAuthor(entry.getAuthor());

                            tx = session.beginTransaction();
                            session.save(rssEntry);
                            tx.commit();
                            if (log.isDebugEnabled()) {
                                log.debug("Item saved to database.");
                            }

                            cnt++;
                            feedEntriesCount++;
                        } else {
                            for (int i = 0; i < rssEntries.size(); i++) {
                                session.evict(rssEntries.get(i));
                            }
                        }
                        rssEntries = null;
                    }
                }

                feed = null;
            } catch (SocketTimeoutException e) {
                log.error("Error building RSS feed (" + rssSource.getUrl() + ").", e);
                status = STATUS_ERROR;
                statusCode = STATUS_CODE_ERROR_TIMEOUT;
            } catch (SocketException e) {
                log.error("Error building RSS feed (" + rssSource.getUrl()
                        + "). Probable timeouted by watch dog thread.", e);
                status = STATUS_ERROR;
                statusCode = STATUS_CODE_ERROR_TIMEOUT;
            } catch (Throwable t) {
                log.error("Error building RSS feed (" + rssSource.getUrl() + ").", t);
                status = STATUS_ERROR;
                statusCode = STATUS_CODE_ERROR_UNSPECIFIC;
            } finally {
                try {
                    if (urlCon != null && urlCon instanceof HttpURLConnection) {
                        if (log.isDebugEnabled()) {
                            log.debug("Close '" + urlCon.getURL() + "' regulary.");
                        }
                        ((HttpURLConnection) urlCon).disconnect();
                    }
                } catch (Exception e) {
                    // ignore exception
                }

                // add information about the fetching of this feed into the
                // RSSSource database
                tx = session.beginTransaction();

                if (feedEntriesCount > 0) {
                    rssSource.setLastUpdate(new Date());
                    rssSource.setNumLastCount(feedEntriesCount);
                }

                // rssSource.setLastMessageUpdate(new Date());

                // rssSource.setError(errorMsg);

                session.save(rssSource);
                tx.commit();

                session.evict(rssSource);
                feedEntriesCount = 0;
                // errorMsg = "";
            }
        }

        if (log.isDebugEnabled()) {
            log.debug("Number of RSS entries added: " + cnt);
        }

        // remove old entries
        cal = Calendar.getInstance();
        cal.add(Calendar.DATE, -1 * PortalConfig.getInstance().getInt(PortalConfig.RSS_HISTORY_DAYS));

        tx = session.beginTransaction();
        List deleteEntries = session.createCriteria(IngridRSSStore.class)
                .add(Restrictions.lt("publishedDate", cal.getTime())).list();
        tx.commit();
        it = deleteEntries.iterator();
        tx = session.beginTransaction();
        while (it.hasNext()) {
            Object obj = it.next();
            session.evict(obj);
            session.delete((IngridRSSStore) obj);
        }
        tx.commit();
        deleteEntries.clear();
    } catch (Exception t) {
        if (tx != null) {
            tx.rollback();
        }
        if (log.isErrorEnabled()) {
            log.error("Error executing quartz job RSSFetcherJob.", t);
        }
        status = STATUS_ERROR;
        statusCode = STATUS_CODE_ERROR_UNSPECIFIC;
        throw new JobExecutionException("Error executing quartz job RSSFetcherJob.", t, false);
    } finally {
        computeTime(dataMap, stopTimer());
        if (log.isDebugEnabled()) {
            log.debug("Update quartz job data.");
        }
        updateJobData(context, status, statusCode);
        updateJob(context);
        HibernateUtil.closeSession();
        if (log.isDebugEnabled()) {
            log.debug("Hibernate session is closed.");
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("RSSFetcherJob finished.");
    }
}
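
The job above uses evict in two places: entries that already exist in the database are evicted after the duplicate lookup, and each RSS source is evicted once its bookkeeping has been committed, so the long-lived session does not accumulate every entity the loop touches. Condensed to its core, and assuming a hypothetical FeedSource entity, the per-source pattern is:

for (Iterator it = sources.iterator(); it.hasNext();) {
    FeedSource source = (FeedSource) it.next();
    Transaction tx = session.beginTransaction();
    session.save(source);   // persist per-source bookkeeping
    tx.commit();
    session.evict(source);  // release the instance from the session cache
}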

From source file: de.innovationgate.webgate.api.jdbc.ACLImpl.java

License: Open Source License

public WGACLEntry createEntry(String name, int type, int accessLevel) throws WGAPIException {

    try {
        ACLEntry entry = new ACLEntry(name, type, accessLevel, "");
        Session session = _parent.getSession();
        _parent.createLogEntry(session, WGUpdateLog.TYPE_UPDATE, "$aclentry/" + entry.getName(), entry.getId());
        session.save(entry);
        _parent.commitHibernateTransaction();
        _parent.getDb().getUserCache().clear();
        if (_parent._saveIsolationActive) {
            session.evict(entry);
        }
        return entry;
    } catch (Exception e) {
        _parent.rollbackHibernateTransaction(true);
        throw new WGBackendException("Error creating ACL entry", e);
    }

}

From source file: de.innovationgate.webgate.api.jdbc.ACLImpl.java

License: Open Source License

public List getAllEntries() throws WGAPIException {

    try {
        Session session = _parent.getSession();
        List<ACLEntry> entries = session
                .createQuery("from ACLEntry as aclentry order by aclentry.type asc, aclentry.name asc").list();
        if (_parent._saveIsolationActive) {
            for (ACLEntry entry : entries) {
                session.evict(entry);
            }
        }
        return entries;
    } catch (HibernateException e) {
        throw new WGBackendException("Error retrieving all ACL entries", e);
    }

}

From source file: de.innovationgate.webgate.api.jdbc.ACLImpl.java

License: Open Source License

public WGACLEntry getEntry(String name) throws WGAPIException {
    try {
        Session session = _parent.getSession();
        Query query = session.createQuery("from ACLEntry where name=:name");
        query.setParameter("name", name);
        List results = query.list();
        if (results.size() >= 1) {
            ACLEntry entry = (ACLEntry) results.get(0);
            if (_parent._saveIsolationActive) {
                session.evict(entry);
            }
            return entry;
        } else {
            return null;
        }
    } catch (HibernateException e) {
        throw new WGBackendException("Error retrieving ACL entry with name '" + name + "'.", e);
    }

}
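
In these ACLImpl methods, eviction implements the _saveIsolationActive read isolation: every entity handed out by the API is detached first, so callers receive snapshots whose later modifications cannot be flushed back to the database behind their backs. The guard, in outline (saveIsolationActive is a hypothetical stand-in for the flag):

ACLEntry entry = (ACLEntry) query.uniqueResult();
if (saveIsolationActive && entry != null) {
    session.evict(entry);  // hand out a detached snapshot
}
return entry;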

From source file: de.innovationgate.webgate.api.jdbc.filehandling.CS41FileAttachmentHandler.java

License: Open Source License

public void saveFileData(AttachFileOperation<CS41FileAttachmentEntity> op) throws WGAPIException {

    try {
        if (!op.isUpdateData()) {
            return;
        }

        // create digest for checksum computation
        MessageDigest digest = null;
        try {
            digest = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            // MD5 unavailable; proceed without a checksum
        }

        Session session = _handling.getParent().getSession();

        CS41FileAttachmentEntity fileMeta = op.getEntity();
        InputStream in = new BufferedInputStream(new FileInputStream(fileMeta.getSourceFile()));
        // store file data
        // split the file into 64K parts and store each part
        int partnr = 0;
        byte[] buffer = new byte[CS41FileAttachmentHandler.ATTACHMENT_FILEPART_SIZE];
        int len = in.read(buffer);

        while (len > 0) {
            // create new file part
            AttachmentFilePart part = _entityDescriptor.createFilePart(fileMeta);
            part.setPartnr(partnr);

            Blob data = Hibernate.getLobCreator(session).createBlob(new ByteArrayInputStream(buffer, 0, len),
                    len);
            part.setData(data);
            // store file part
            session.save(part);
            session.flush();
            session.evict(part);
            // update md5 digest
            if (digest != null) {
                digest.update(buffer, 0, len);
            }
            // read next part from inputstream
            partnr++;
            len = in.read(buffer);
        }

        // store md5 sum as meta
        if (digest != null) {
            fileMeta.setChecksum(new String(Hex.encodeHex(digest.digest())));
            if (_handling.getParent().isSaveIsolationActive()) {
                session.update(fileMeta);
            }
        }
    } catch (Exception e) {
        throw new WGBackendException("Exception storing file data", e);
    }

}
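
The save/flush/evict triple is what lets this loop stream: flush() forces the INSERT for the just-saved part to execute against the database immediately, and evict() then removes the part from the session, keeping memory use bounded regardless of file size. Reduced to a sketch, with a hypothetical makePart() factory wrapping one 64K chunk:

int len;
while ((len = in.read(buffer)) > 0) {
    AttachmentFilePart part = makePart(buffer, len);  // hypothetical factory
    session.save(part);   // schedule the INSERT
    session.flush();      // execute it now, streaming the blob out
    session.evict(part);  // release the part from the first-level cache
}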

From source file: de.innovationgate.webgate.api.jdbc.filehandling.CS5P4FileHandling.java

License: Open Source License

public ContentFileContent storeFileContents(CS41FileEntityDescriptor descriptor, InputStream in,
        Session session, MessageDigest digestMd5, MessageDigest digestSha512)
        throws IOException, InstantiationException, IllegalAccessException, WGAPIException {
    int partnr = 0;
    byte[] buffer = new byte[CS41FileAttachmentHandler.ATTACHMENT_FILEPART_SIZE];
    int len = in.read(buffer);
    int totalLen = 0;

    // Create file contents entity
    ContentFileContent fileContents = new ContentFileContent();
    fileContents.setOrdinalnr(getParent().incrementSystemSequence(ORDINAL_NUMBER_SEQUENCE_NAME));
    session.save(fileContents);

    while (len > 0) {
        totalLen += len;

        // create new file part
        AttachmentFilePart part = descriptor.createFilePart(fileContents);
        part.setPartnr(partnr);

        Blob data = Hibernate.getLobCreator(session).createBlob(new ByteArrayInputStream(buffer, 0, len), len);
        part.setData(data);

        // store file part
        session.save(part);
        session.flush();
        session.evict(part);

        // update digests
        digestMd5.update(buffer, 0, len);
        digestSha512.update(buffer, 0, len);

        // read next part from inputstream
        partnr++;
        len = in.read(buffer);

    }

    String checksumSha512 = new String(Hex.encodeHex(digestSha512.digest()));

    // Store file data on contents entity
    fileContents.setChecksumSha512(checksumSha512);
    fileContents.setSize(totalLen);
    if (getParent().isSaveIsolationActive()) {
        session.update(fileContents);
    }
    return fileContents;
}

From source file: de.innovationgate.webgate.api.jdbc.WGDocumentImpl.java

License: Open Source License

/**
 * @throws WGBackendException
 * @see de.innovationgate.webgate.api.WGDocumentCore#remove()
 */
public WGDatabaseRevision remove() throws WGAPIException {

    undoEditable();

    try {

        if (_fileHandler != null) {
            _fileHandler.beforeRemove();
        }

        Session session = _parent.getSession();

        LogEntry logEntry = null;
        if (getType() != WGDocument.TYPE_USERPROFILE) {
            logEntry = _parent.createLogEntry(session, WGUpdateLog.TYPE_DELETE, _document.getDocumentKey(),
                    _entity.getId());
        }

        // Dereferencing of special object types
        // Manual initializations are a workaround for hibernate bug B00005D36
        if (_entity instanceof Content) {
            Content content = (Content) _entity;
            if (_parent.getSession().contains(content.getStructentry())) {
                Hibernate.initialize(content.getStructentry().getContent());
                content.getStructentry().getContent().remove(content);
            }
        } else if (_entity instanceof StructEntry) {
            StructEntry entry = (StructEntry) _entity;
            if (entry.getParententry() != null) {
                if (_parent.getSession().contains(entry.getParententry())) {
                    if (_parent._ddlVersion < WGDatabase.CSVERSION_WGA5) {
                        Hibernate.initialize(entry.getParententry().getChildentries());
                        entry.getParententry().getChildentries().remove(entry.getKey());
                    }
                }
            } else if (_parent.getSession().contains(entry.getArea())) {
                if (_parent._ddlVersion < WGDatabase.CSVERSION_WGA5) {
                    Hibernate.initialize(entry.getArea().getRootentries());
                    entry.getArea().getRootentries().remove(entry.getKey());
                }
            }
        }

        session.delete(_entity);

        if (_fileHandler != null) {
            _fileHandler.afterRemove();
        }

        _parent.commitHibernateTransaction();
        _deleted = true;
        session.evict(_entity);

        if (logEntry != null) {
            return _parent.getLogRevision(logEntry);
        } else {
            return null;
        }

    } catch (ConstraintViolationException e) {
        throw new WGBackendException("Deletion of document failed because of database constraint violation", e);
    } catch (HibernateException e) {
        _parent.rollbackHibernateTransaction(true);
        throw new WGBackendException("Error deleting entity", e);
    }

}
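
After the delete has been committed, the now-stale instance is evicted so it no longer occupies the session cache; in outline:

session.delete(entity);   // schedule the row for deletion
commitTransaction();      // hypothetical stand-in for _parent.commitHibernateTransaction()
session.evict(entity);    // drop the stale instance from the session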

From source file: de.innovationgate.webgate.api.jdbc.WGDocumentImpl.java

License: Open Source License

/**
 * Performs an operation involving entity updates, catching any {@link NonUniqueObjectException} that appears.
 * Such exceptions are resolved by evicting the entity that caused them and retrying the operation.
 * @param task The update operation
 * @param session The hibernate session to use to load the entity
 * @param retries The number of operation retries before a {@link WGBackendException} exception is thrown to cancel the operation.
 * @throws Exception
 */
private void performUpdateOperation(Callable<Boolean> task, Session session, int retries, Object entity)
        throws Exception {

    int repeatCount = 0;
    while (true) {

        repeatCount++;
        if (repeatCount > 1000) {
            throw new WGBackendException(
                    "Update of document failed because of persistent duplicates more than 1000 times. Cancelling update.");
        }

        try {
            task.call();
            break;
        }

        // Another persistent version was loaded somehow. Evict it and try to update again.
        catch (NonUniqueObjectException e) {
            Object otherObject = session.load(e.getEntityName(), e.getIdentifier());
            session.evict(otherObject);
            if (entity != null) {
                session.evict(entity);
            }
        }
    }

}
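
A hypothetical call site for this helper, from within the same class, wrapping a save in a Callable so that duplicate persistent instances are evicted and the operation retried. Note that the excerpt above caps retries internally at 1000 and does not consult the retries parameter it declares:

private void saveWithRetry(final Session session, final Object entity) throws Exception {
    performUpdateOperation(new Callable<Boolean>() {
        public Boolean call() throws Exception {
            session.saveOrUpdate(entity);  // may throw NonUniqueObjectException
            return Boolean.TRUE;
        }
    }, session, 5, entity);
}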

From source file: de.sub.goobi.forms.BenutzerverwaltungForm.java

License: Open Source License

/**
 * Save user if there is no other user with the same login.
 *
 * @return page or empty String
 */
public String save() {
    Session session = Helper.getHibernateSession();
    session.evict(this.myClass);
    String login = this.myClass.getLogin();

    if (!isLoginValid(login)) {
        return null;
    }

    String id = this.myClass.getId().toString();

    try {
        if (this.serviceManager.getUserService().getAmountOfUsersWithExactlyTheSameLogin(id, login) == 0) {
            this.serviceManager.getUserService().save(this.myClass);
            return "/pages/BenutzerAlle";
        } else {
            Helper.setFehlerMeldung("", Helper.getTranslation("loginBereitsVergeben"));
            return null;
        }
    } catch (DataException e) {
        Helper.setFehlerMeldung("Error, could not save", e.getMessage());
        logger.error(e);
        return null;
    }
}
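
Here evict serves validation rather than memory management: the edited user is detached before the duplicate-login check, presumably so that the lookup reflects database state instead of the modified in-session instance, and so the unsaved changes cannot be flushed prematurely. In outline, with hypothetical countUsersWithLogin() and userService helpers:

session.evict(user);  // detach the edited instance before validating
if (countUsersWithLogin(user.getId(), user.getLogin()) == 0) {  // hypothetical lookup
    userService.save(user);  // persist only after the uniqueness check passes
}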

From source file: de.tudarmstadt.ukp.lmf.transform.DBToXMLTransformer.java

License: Apache License

protected void doTransform(boolean includeAxes, final Lexicon... includeLexicons) throws SAXException {
    final int bufferSize = 100;
    commitCounter = 1;

    writeStartElement(lexicalResource);

    // Iterate over all lexicons
    if (includeLexicons == null || includeLexicons.length > 0) {
        for (Lexicon lexicon : lexicalResource.getLexicons()) {
            String lexiconName = lexicon.getName();

            // Check if we want to include this lexicon.
            if (includeLexicons != null) {
                boolean found = false;
                for (Lexicon l : includeLexicons) {
                    if (lexiconName.equals(l.getName())) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    continue;
                }
            }

            logger.info("Processing lexicon: " + lexiconName);
            writeStartElement(lexicon);

            // Iterate over all possible sub-elements of this Lexicon and
            // write them to the XML
            Class<?>[] lexiconClassesToSave = { LexicalEntry.class, SubcategorizationFrame.class,
                    SubcategorizationFrameSet.class, SemanticPredicate.class, Synset.class,
                    SynSemCorrespondence.class,
                    //ConstraintSet.class
            };

            //  "Unfortunately, MySQL does not treat large offset values efficiently by default and will still read all the rows prior to an offset value. It is common to see a query with an offset above 100,000 take over 20 times longer than an offset of zero!"
            // http://www.numerati.com/2012/06/26/reading-large-result-sets-with-hibernate-and-mysql/
            for (Class<?> clazz : lexiconClassesToSave) {
                /*DetachedCriteria criteria = DetachedCriteria.forClass(clazz)
                      .add(Restrictions.sqlRestriction("lexiconId = '" + lexicon.getId() + "'"));
                CriteriaIterator<Object> iter = new CriteriaIterator<Object>(criteria, sessionFactory, bufferSize);
                while (iter.hasNext()) {
                   Object obj = iter.next();
                   writeElement(obj);
                   session.evict(obj);
                   commitCounter++;
                   if (commitCounter % 1000 == 0)
                      logger.info("progress: " + commitCounter  + " class instances written to file");
                }*/
                Session lookupSession = sessionFactory.openSession();
                Query query = lookupSession.createQuery("FROM " + clazz.getSimpleName() + " WHERE lexiconId = '"
                        + lexicon.getId() + "' ORDER BY id");
                query.setReadOnly(true);
                if (DBConfig.MYSQL.equals(dbConfig.getDBType())) {
                    query.setFetchSize(Integer.MIN_VALUE); // MIN_VALUE gives hint to JDBC driver to stream results
                } else {
                    query.setFetchSize(1000);
                }
                ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
                while (results.next()) {
                    // For streamed query results, no further queries are allowed (incl. lazy proxy queries!)
                    // Detach the object from the lookup session and reload it using the "official" session.
                    Object[] rows = results.get();
                    Object row = rows[0];
                    lookupSession.evict(row);
                    lookupSession.evict(rows);
                    rows = null;
                    row = session.get(row.getClass(), ((IHasID) row).getId());
                    writeElement(row);
                    session.evict(row);
                    row = null;
                    commitCounter++;
                    if (commitCounter % 1000 == 0) {
                        logger.info("progress: " + commitCounter + " class instances written to file");
                    }
                    if (commitCounter % 10000 == 0) {
                        closeSession();
                        openSession();
                    }
                }
                results.close();
                lookupSession.close();
            }
            writeEndElement(lexicon);
        }
    }

    // Iterate over SenseAxes and write them to XML when not only
    // lexicons should be converted
    if (includeAxes) {
        logger.info("Processing sense axes");
        DetachedCriteria criteria = DetachedCriteria.forClass(SenseAxis.class)
                .add(Restrictions.sqlRestriction("lexicalResourceId = '" + lexicalResource.getName() + "'"));
        CriteriaIterator<Object> iter = new CriteriaIterator<Object>(criteria, sessionFactory, bufferSize);
        while (iter.hasNext()) {
            Object obj = iter.next();
            writeElement(obj);
            session.evict(obj);
            commitCounter++;
            if (commitCounter % 1000 == 0) {
                logger.info("progress: " + commitCounter + " class instances written to file");
            }
        }

        logger.info("Processing predicateargument axes");
        DetachedCriteria criteria2 = DetachedCriteria.forClass(PredicateArgumentAxis.class)
                .add(Restrictions.sqlRestriction("lexicalResourceId = '" + lexicalResource.getName() + "'"));
        CriteriaIterator<Object> iter2 = new CriteriaIterator<Object>(criteria2, sessionFactory, bufferSize);
        while (iter2.hasNext()) {
            Object obj = iter2.next();
            writeElement(obj);
            session.evict(obj);
            commitCounter++;
            if (commitCounter % 1000 == 0) {
                logger.info("progress: " + commitCounter + " class instances written to file");
            }
        }

    }
    writeEndElement(lexicalResource);

    writeEndDocument();
}
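
The scroll-and-evict combination above is the canonical way to stream a large result set through a bounded session. Stripped of the transformer specifics, and assuming a hypothetical mapped Entry entity and write() output helper:

ScrollableResults results = session.createQuery("from Entry order by id")
        .setReadOnly(true)
        .setFetchSize(1000)
        .scroll(ScrollMode.FORWARD_ONLY);
while (results.next()) {
    Entry entry = (Entry) results.get(0);
    write(entry);          // application-specific output (hypothetical helper)
    session.evict(entry);  // keep the first-level cache from filling up
}
results.close();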