Example usage for org.hibernate ScrollableResults get

List of usage examples for org.hibernate ScrollableResults get

Introduction

On this page you can find example usage for org.hibernate ScrollableResults get.

Prototype

Object get(int i);

Source Link

Document

Get the ith object in the current row of results, without initializing any other results in the row.
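
Before the project examples below, here is a minimal sketch of the typical call pattern: scroll a query in FORWARD_ONLY mode and read the first (and only) projected object of each row with get(0). The Session parameter, the Person entity and the HQL string are illustrative assumptions, not taken from the projects quoted in the Usage section.

import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;

public long processAll(final Session session) {
    long count = 0;
    // Hypothetical HQL query returning one entity per row; any single-column result works the same way.
    final ScrollableResults results = session.createQuery("from Person").scroll(ScrollMode.FORWARD_ONLY);
    try {
        while (results.next()) {
            // get(0) returns the first object of the current row without initializing the other columns.
            final Person person = (Person) results.get(0); // Person is an assumed, illustrative entity
            // ... process the entity here ...
            count++;
        }
    } finally {
        results.close(); // release the underlying database cursor
    }
    return count;
}

Unlike a plain list(), scrolling keeps a database cursor open for the whole iteration, so closing the ScrollableResults in a finally block is part of the pattern.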

Usage

From source file:org.projectforge.database.DatabaseDao.java

License:Open Source License

private long reindexObjects(final Class<?> clazz, final ReindexSettings settings) {
    final Session session = getSession();
    Criteria criteria = createCriteria(session, clazz, settings, true);
    final Long number = (Long) criteria.uniqueResult(); // Get number of objects to re-index (select count(*) from).
    final boolean scrollMode = number > MIN_REINDEX_ENTRIES_4_USE_SCROLL_MODE;
    log.info("Starting re-indexing of " + number + " entries (total number) of type " + clazz.getName()
            + " with scrollMode=" + scrollMode + "...");
    final int batchSize = 1000;// NumberUtils.createInteger(System.getProperty("hibernate.search.worker.batch_size")
    final FullTextSession fullTextSession = Search.getFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    long index = 0;
    if (scrollMode) {
        // Scroll-able results will avoid loading too many objects in memory
        criteria = createCriteria(fullTextSession, clazz, settings, false);
        final ScrollableResults results = criteria.scroll(ScrollMode.FORWARD_ONLY);
        while (results.next()) {
            final Object obj = results.get(0);
            if (obj instanceof ExtendedBaseDO<?>) {
                ((ExtendedBaseDO<?>) obj).recalculate();
            }
            fullTextSession.index(obj); // index each element
            if (index++ % batchSize == 0)
                session.flush(); // clear every batchSize since the queue is processed
        }
    } else {
        criteria = createCriteria(session, clazz, settings, false);
        final List<?> list = criteria.list();
        for (final Object obj : list) {
            if (obj instanceof ExtendedBaseDO<?>) {
                ((ExtendedBaseDO<?>) obj).recalculate();
            }
            fullTextSession.index(obj);
            if (index++ % batchSize == 0)
                session.flush(); // clear every batchSize since the queue is processed
        }
    }
    final SearchFactory searchFactory = fullTextSession.getSearchFactory();
    searchFactory.optimize(clazz);
    log.info("Re-indexing of " + index + " objects of type " + clazz.getName() + " done.");
    return index;
}

From source file:org.projectforge.framework.persistence.database.DatabaseDao.java

License:Open Source License

private long reindexObjects(final Class<?> clazz, final ReindexSettings settings) {
    final Session session = sessionFactory.getCurrentSession();
    Criteria criteria = createCriteria(session, clazz, settings, true);
    final Long number = (Long) criteria.uniqueResult(); // Get number of objects to re-index (select count(*) from).
    final boolean scrollMode = number > MIN_REINDEX_ENTRIES_4_USE_SCROLL_MODE;
    log.info("Starting re-indexing of " + number + " entries (total number) of type " + clazz.getName()
            + " with scrollMode=" + scrollMode + "...");
    final int batchSize = 1000;// NumberUtils.createInteger(System.getProperty("hibernate.search.worker.batch_size")
    final FullTextSession fullTextSession = Search.getFullTextSession(session);
    HibernateCompatUtils.setFlushMode(fullTextSession, FlushMode.MANUAL);
    HibernateCompatUtils.setCacheMode(fullTextSession, CacheMode.IGNORE);
    long index = 0;
    if (scrollMode) {
        // Scroll-able results will avoid loading too many objects in memory
        criteria = createCriteria(fullTextSession, clazz, settings, false);
        final ScrollableResults results = criteria.scroll(ScrollMode.FORWARD_ONLY);
        while (results.next()) {
            final Object obj = results.get(0);
            if (obj instanceof ExtendedBaseDO<?>) {
                ((ExtendedBaseDO<?>) obj).recalculate();
            }
            HibernateCompatUtils.index(fullTextSession, obj);
            if (index++ % batchSize == 0) {
                session.flush(); // clear every batchSize since the queue is processed
            }
        }
    } else {
        criteria = createCriteria(session, clazz, settings, false);
        final List<?> list = criteria.list();
        for (final Object obj : list) {
            if (obj instanceof ExtendedBaseDO<?>) {
                ((ExtendedBaseDO<?>) obj).recalculate();
            }
            HibernateCompatUtils.index(fullTextSession, obj);
            if (index++ % batchSize == 0) {
                session.flush(); // clear every batchSize since the queue is processed
            }
        }
    }
    final SearchFactory searchFactory = fullTextSession.getSearchFactory();
    searchFactory.optimize(clazz);
    log.info("Re-indexing of " + index + " objects of type " + clazz.getName() + " done.");
    return index;
}

From source file:org.sakaiproject.tool.assessment.facade.util.PagingUtilQueries.java

License:Educational Community License

public List getAll(final int pageSize, final int pageNumber, final String queryString, final Integer value) {

    HibernateCallback callback = new HibernateCallback() {
        public Object doInHibernate(Session session) throws HibernateException {
            ArrayList page = new ArrayList();
            Query q = session.createQuery(queryString);
            if (value != null) {
                q.setInteger(0, value.intValue());
            }
            ScrollableResults assessmentList = q.scroll();
            if (assessmentList.first()) { // check that result set is not empty
                int first = pageSize * (pageNumber - 1);
                int i = 0;
                assessmentList.setRowNumber(first);
                assessmentList.beforeFirst();
                while ((pageSize > i++) && assessmentList.next()) {
                    log.debug("**** add " + i);
                    page.add(assessmentList.get(0));
                }
            }
            return page;
        }
    };
    List pageList = (List) getHibernateTemplate().execute(callback);
    return pageList;
}

From source file:org.squashtest.tm.service.internal.advancedsearch.IndexationServiceImpl.java

License:Open Source License

private void doReindex(FullTextSession ftSession, ScrollableResults scroll) {
    // update index going through the search results
    int batch = 0;
    while (scroll.next()) {
        ftSession.index(scroll.get(0)); // indexing of a single entity

        if (++batch % BATCH_SIZE == 0) { // commit batch
            ftSession.flushToIndexes();
            ftSession.clear();
        }
    }
    // commit remaining item
    ftSession.flushToIndexes();
    ftSession.clear();
}

From source file:org.squashtest.tm.service.internal.repository.hibernate.HibernateMilestoneDao.java

License:Open Source License

@Override
public void bindMilestoneToProjectTestCases(long projectId, long milestoneId) {

    Query query = currentSession().getNamedQuery("BoundEntityDao.findAllTestCasesForProject");
    query.setParameter(PROJECT_ID, projectId);
    ScrollableResults tcs = scrollableResults(query);

    Milestone milestone = findById(milestoneId);
    int count = 0;
    while (tcs.next()) {
        TestCase tc = (TestCase) tcs.get(0);
        milestone.bindTestCase(tc);
        if (++count % BATCH_UPDATE_SIZE == 0) {
            // flush a batch of updates and release memory:
            currentSession().flush();
            currentSession().clear();
            milestone = findById(milestoneId);
        }
    }
}

From source file:org.squashtest.tm.service.internal.repository.hibernate.HibernateMilestoneDao.java

License:Open Source License

@Override
public void bindMilestoneToProjectRequirementVersions(long projectId, long milestoneId) {
    Query query = currentSession().getNamedQuery("milestone.findLastNonObsoleteReqVersionsForProject");
    query.setParameter(PROJECT_ID, projectId);
    ScrollableResults reqVersions = scrollableResults(query);

    Milestone milestone = findById(milestoneId);
    int count = 0;
    while (reqVersions.next()) {
        RequirementVersion reqV = (RequirementVersion) reqVersions.get(0);
        milestone.bindRequirementVersion(reqV);
        if (++count % BATCH_UPDATE_SIZE == 0) {
            // flush a batch of updates and release memory:
            currentSession().flush();
            currentSession().clear();
            milestone = findById(milestoneId);
        }
    }

}

From source file:org.squashtest.tm.service.internal.repository.hibernate.HibernateMilestoneDao.java

License:Open Source License

@Override
public void synchronizeRequirementVersions(long source, long target, List<Long> projectIds) {
    Query query = currentSession().getNamedQuery("milestone.findAllRequirementVersionsForProjectAndMilestone");
    query.setParameterList(PROJECT_IDS, projectIds);
    query.setParameter(MILESTONE_ID, source);
    ScrollableResults reqVersions = scrollableResults(query);

    Milestone milestone = findById(target);
    int count = 0;
    while (reqVersions.next()) {
        RequirementVersion reqV = (RequirementVersion) reqVersions.get(0);
        milestone.bindRequirementVersion(reqV);
        if (++count % BATCH_UPDATE_SIZE == 0) {
            // flush a batch of updates and release memory:
            currentSession().flush();
            currentSession().clear();
            milestone = findById(target);
        }
    }

}

From source file:org.squashtest.tm.service.internal.repository.hibernate.HibernateMilestoneDao.java

License:Open Source License

@Override
public void synchronizeTestCases(long source, long target, List<Long> projectIds) {
    Query query = currentSession().getNamedQuery("milestone.findAllTestCasesForProjectAndMilestone");
    query.setParameterList(PROJECT_IDS, projectIds);
    query.setParameter(MILESTONE_ID, source);
    ScrollableResults tcs = scrollableResults(query);

    Milestone milestone = findById(target);
    int count = 0;
    while (tcs.next()) {
        TestCase tc = (TestCase) tcs.get(0);
        milestone.bindTestCase(tc);
        if (++count % BATCH_UPDATE_SIZE == 0) {
            // flush a batch of updates and release memory:
            currentSession().flush();
            currentSession().clear();
            milestone = findById(target);
        }
    }
}

From source file:org.squashtest.tm.service.internal.repository.hibernate.HibernateMilestoneDao.java

License:Open Source License

/**
 * @see org.squashtest.tm.service.internal.repository.MilestoneDao#performBatchUpdate(org.squashtest.tm.service.internal.repository.MilestoneDao.HolderConsumer)
 */
@Override
public void performBatchUpdate(HolderConsumer consumer) {
    LOGGER.info("About to perform a Milestone Holder batch update");
    final String[] entities = { "TestCase", "RequirementVersion", "Campaign" };

    Session session = currentSession();

    for (String entity : entities) {
        LOGGER.info("About to fetch entities {}", entity);

        String namedQuery = entity + ".findAllWithMilestones";
        LOGGER.debug("Fetching bound entities with query named {}", namedQuery);

        ScrollableResults holders = scrollableResults(session.getNamedQuery(namedQuery));

        int count = 0;

        while (holders.next()) {
            MilestoneHolder holder = (MilestoneHolder) holders.get(0);
            consumer.consume(holder);
            if (++count % BATCH_UPDATE_SIZE == 0) {
                // flush a batch of updates and release memory:
                session.flush();
                session.clear();
            }
        }
    }

    LOGGER.info("Done with Milestone Holder batch update");
}

From source file:org.squashtest.tm.service.internal.repository.hibernate.HibernateMilestoneDao.java

License:Open Source License

@Override
public void unbindAllObjectsForProjects(Long milestoneId, List<Long> projectIds) {
    final String[] entities = { "TestCases", "RequirementVersions", "Campaigns" };

    Session session = currentSession();

    for (String entity : entities) {
        LOGGER.info("About to fetch entities {}", entity);

        String namedQuery = "milestone.findAll" + entity + "ForProjectAndMilestone";
        LOGGER.debug("Fetching bound entities with query named {}", namedQuery);
        Query query = session.getNamedQuery(namedQuery);
        query.setParameter(MILESTONE_ID, milestoneId);
        query.setParameterList(PROJECT_IDS, projectIds);

        ScrollableResults holders = scrollableResults(query);

        int count = 0;

        while (holders.next()) {
            MilestoneHolder holder = (MilestoneHolder) holders.get(0);
            holder.unbindMilestone(milestoneId);
            if (++count % BATCH_UPDATE_SIZE == 0) {
                // flush a batch of updates and release memory:
                session.flush();
                session.clear();
            }
        }
    }
}