Example usage for org.hibernate ScrollMode FORWARD_ONLY

List of usage examples for org.hibernate ScrollMode FORWARD_ONLY

Introduction

In this page you can find the example usage for org.hibernate ScrollMode FORWARD_ONLY.

Prototype

ScrollMode FORWARD_ONLY

To view the source code for org.hibernate ScrollMode FORWARD_ONLY, click the Source Link below.

Click Source Link

Document

Requests a scrollable result that is only scrollable forwards.

Usage

From source file:org.dcm4chee.archive.query.dao.AbstractQuery.java

License:LGPL

public ScrollableResults execute() {
    // Run the prepared query as a forward-only server-side cursor over the
    // requested select columns; the caller owns (and must close) the results.
    final ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY, select);
    return results;
}

From source file:org.easybatch.extensions.hibernate.HibernateRecordReader.java

License:Open Source License

@Override
public void open() {
    currentRecordNumber = 0;//from   w w w. j  a v a2  s  .  c o  m
    Query hibernateQuery = session.createQuery(query);
    hibernateQuery.setReadOnly(true);
    if (maxResults >= 1) {
        hibernateQuery.setMaxResults(maxResults);
    }
    if (fetchSize >= 1) {
        hibernateQuery.setFetchSize(fetchSize);
    }
    scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
}

From source file:org.eclipse.emf.cdo.server.internal.hibernate.HibernateQueryHandler.java

License:Open Source License

/**
 * Executes hql queries. Gets the session from the {@link HibernateStoreAccessor} creates a hibernate query and sets
 * the parameters taken from the {@link CDOQueryInfo#getParameters()}. Takes into account the
 * {@link CDOQueryInfo#getMaxResults()} and the {@link IHibernateStore#FIRST_RESULT} values for paging.
 *
 * @param info
 *          the object containing the query and parameters
 * @param context
 *          the query results are placed in the context
 * @see IQueryHandler#executeQuery(CDOQueryInfo, IQueryContext)
 */
public void executeQuery(CDOQueryInfo info, IQueryContext context) {
    // get a transaction, the hibernateStoreAccessor is placed in a threadlocal
    // so all db access uses the same session.
    final Session session = hibernateStoreAccessor.getHibernateSession();
    try {
        // create the query; read-only since query results are never written back
        final Query query = session.createQuery(info.getQueryString());
        query.setReadOnly(true);

        // Separate reserved parameter names (cacheResults, firstResult) from real
        // HQL parameters, converting CDOIDs to their persisted entities.
        int firstResult = -1;
        boolean cacheResults = true;
        for (String key : info.getParameters().keySet()) {
            if (key.compareToIgnoreCase(IHibernateStore.CACHE_RESULTS) == 0) {
                try {
                    cacheResults = (Boolean) info.getParameters().get(key);
                } catch (ClassCastException e) {
                    throw new IllegalArgumentException("Parameter " + IHibernateStore.CACHE_RESULTS //$NON-NLS-1$
                            + " must be a boolean. errorMessage " + e.getMessage());
                }
            } else if (key.compareToIgnoreCase(IHibernateStore.FIRST_RESULT) == 0) {
                final Object o = info.getParameters().get(key);
                if (o != null) {
                    try {
                        firstResult = (Integer) o;
                    } catch (ClassCastException e) {
                        throw new IllegalArgumentException(
                                "Parameter firstResult must be an integer but it is a " + o //$NON-NLS-1$
                                        + " class " + o.getClass().getName()); //$NON-NLS-1$
                    }
                }
            } else {
                // in case the parameter is a CDOID get the object from the db
                final Object param = info.getParameters().get(key);
                if (param instanceof CDOID && HibernateUtil.getInstance().isStoreCreatedID((CDOID) param)) {
                    final CDOID id = (CDOID) param;
                    final String entityName = HibernateUtil.getInstance().getEntityName(id);
                    final Serializable idValue = HibernateUtil.getInstance().getIdValue(id);
                    final CDORevision revision = (CDORevision) session.get(entityName, idValue);
                    query.setEntity(key, revision);
                    if (cacheResults) {
                        addToRevisionCache(revision);
                    }
                } else {
                    query.setParameter(key, param);
                }
            }
        }

        // set the first result (paging offset), only when explicitly requested
        if (firstResult > -1) {
            query.setFirstResult(firstResult);
        }

        // the max result (paging limit)
        if (info.getMaxResults() != CDOQueryInfo.UNLIMITED_RESULTS) {
            query.setMaxResults(info.getMaxResults());
        }

        final ScrollableResults scroller = query.scroll(ScrollMode.FORWARD_ONLY);

        // and go for the query
        // future extension: support iterate, scroll through a parameter
        int i = 0;
        try {
            while (scroller.next()) {
                // a single-column row is unwrapped; multi-column rows are passed as the array
                Object[] os = scroller.get();
                Object o;
                if (os.length == 1) {
                    o = handleAuditEntries(os[0]);
                } else {
                    o = handleAuditEntries(os);
                }

                // addResult returns false once the context has received enough results
                final boolean addOneMore = context.addResult(o);
                if (cacheResults && o instanceof CDORevision) {
                    addToRevisionCache((CDORevision) o);
                }
                if (o instanceof InternalCDORevision) {
                    ((InternalCDORevision) o).freeze();
                }

                // clear the session every 1000 results or so to bound memory use
                // NOTE(review): post-increment means the clear also fires on the very
                // first result (i == 0); presumably harmless but likely unintended.
                if (i++ % 1000 == 0) {
                    session.clear();
                }

                if (!addOneMore) {
                    return;
                }
            }
        } finally {
            scroller.close();
        }
    } finally {
        session.close();
    }
}

From source file:org.eclipse.emf.cdo.server.internal.hibernate.HibernateStoreAccessor.java

License:Open Source License

/**
 * Finds cross-references to each target object in the context by issuing one HQL
 * query per mapped (source EClass, EReference) candidate pair. Results are streamed
 * with a forward-only cursor and reported through {@link QueryXRefsContext#addXRef};
 * the method returns early as soon as the context signals it has enough results.
 *
 * @param context
 *          supplies the target objects and source candidates, and collects the xrefs
 */
public void queryXRefs(QueryXRefsContext context) {
    final Session session = getHibernateSession();
    for (CDOID targetCdoId : context.getTargetObjects().keySet()) {
        final CDORevision revision = HibernateUtil.getInstance().getCDORevision(targetCdoId);
        final EClass targetEClass = context.getTargetObjects().get(targetCdoId);

        if (!getStore().isMapped(targetEClass)) {
            continue;
        }

        final String targetEntityName = getStore().getEntityName(targetEClass);
        final Map<EClass, List<EReference>> sourceCandidates = context.getSourceCandidates();
        for (EClass sourceEClass : sourceCandidates.keySet()) {

            if (!getStore().isMapped(sourceEClass)) {
                continue;
            }

            final String sourceEntityName = getStore().getEntityName(sourceEClass);
            for (EReference eref : sourceCandidates.get(sourceEClass)) {
                // handle transient ereferences
                if (!isEReferenceMapped(session, sourceEntityName, eref)) {
                    continue;
                }

                // many-valued references need a membership test; single-valued ones a direct compare
                final String hql;
                if (eref.isMany()) {
                    hql = "select ref from " + sourceEntityName + " as ref, " + targetEntityName
                            + " as refTo where refTo = :to and refTo in elements(ref." + eref.getName() + ")";
                } else {
                    hql = "select ref from " + sourceEntityName + " as ref where :to = ref." + eref.getName();
                }

                final Query qry = session.createQuery(hql);
                qry.setEntity("to", revision);
                final ScrollableResults result = qry.scroll(ScrollMode.FORWARD_ONLY);
                // FIX: the cursor was never closed (leaked JDBC resources, worst on the
                // early return below); close it in a finally block.
                try {
                    while (result.next()) {
                        final InternalCDORevision sourceRevision = (InternalCDORevision) result.get()[0];

                        sourceRevision.freeze();

                        int sourceIndex = 0;
                        if (eref.isMany()) {
                            // note this takes performance for sure as the list is read,
                            // consider not supporting sourceIndex, or doing it differently
                            final WrappedHibernateList cdoList = (WrappedHibernateList) sourceRevision
                                    .getList(eref);
                            sourceIndex = cdoList.getDelegate().indexOf(revision);
                        }

                        boolean more = context.addXRef(targetCdoId, sourceRevision.getID(), eref, sourceIndex);
                        if (!more) {
                            return;
                        }
                    }
                } finally {
                    result.close();
                }
            }
        }
    }
}

From source file:org.emau.icmvc.ganimed.epix.core.persistence.dao.PreprocessedPersonHibernateStreamImpl.java

License:Open Source License

/**
 * Opens a database stream over the given named query: a stateless Hibernate
 * session with a read-only, forward-only cursor. The caller is responsible
 * for closing the stream (cursor, transaction, session) when done.
 */
public PreprocessedPersonHibernateStreamImpl(EntityManager em, String namedQuery) {
    if (logger.isDebugEnabled()) {
        logger.debug("opening db stream");
    }
    // Stateless session: no first-level cache or dirty checking, which keeps
    // memory flat while streaming large result sets.
    session = ((Session) em.getDelegate()).getSessionFactory().openStatelessSession();
    tx = session.beginTransaction();
    resultSet = session.getNamedQuery(namedQuery).setReadOnly(true).scroll(ScrollMode.FORWARD_ONLY);
    if (logger.isDebugEnabled()) {
        logger.debug("db stream instanciated");
    }
}

From source file:org.encuestame.core.cron.IndexRebuilder.java

License:Apache License

/**
 * Reindex domain object./*from  ww  w . j a v a 2s . c  o  m*/
 * @param fullTextSession {@link FullTextSession}.
 * @param clazz domain class.
 */
public static void reindex(final FullTextSession fullTextSession, final Class<?> clazz) {
    log.debug(clazz.getName() + " purge index ...");
    //purge all index content.
    fullTextSession.purgeAll(clazz);
    fullTextSession.flushToIndexes();
    fullTextSession.getSearchFactory().optimize(clazz);
    log.debug(clazz.getName() + " starting index ...");
    final long startTime = System.currentTimeMillis();
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    final Transaction transaction = fullTextSession.beginTransaction();
    //Scrollable results will avoid loading too many objects in memory
    final ScrollableResults results = fullTextSession.createCriteria(clazz).setFetchSize(100)
            .setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY).scroll(ScrollMode.FORWARD_ONLY);
    int index = 0;
    while (results.next()) {
        index++;
        fullTextSession.index(results.get(0));
        if (index % 100 == 0) {
            fullTextSession.flushToIndexes();
            fullTextSession.flush();
            fullTextSession.clear();
        }
    }
    fullTextSession.flushToIndexes();
    fullTextSession.getSearchFactory().optimize(clazz);
    transaction.commit();
    log.debug(clazz.getName() + " Reindex end in " + ((System.currentTimeMillis() - startTime) / 1000.0)
            + " seconds.");
}

From source file:org.eurekastreams.commons.search.bootstrap.SearchIndexManager.java

License:Apache License

/**
 * Purge & index all entities with the input class and name.
 *
 * @param entityClass
 *            the type of entities to reindex into search index.
 *
 * @param entityName
 *            the name of the entity to reindex
 *
 * @param search
 *            the FullTextSession to use
 */
@SuppressWarnings("unchecked")
public void reindexEntities(final Class entityClass, final String entityName, final FullTextSession search) {
    log.info("reindexEntities(" + entityClass.toString() + ", " + entityName + ")");

    // purge first
    purgeSearchIndex(entityClass, search);

    log.info("Creating query to find batches of " + entityName);
    Query q = search.createQuery("FROM " + entityName)
            // minimize cache
            .setCacheMode(CacheMode.IGNORE)
            // limit fetch size
            .setFetchSize(fetchBatchSize);
    log.info("setting scroll mode to FORWARD_ONLY for " + entityName);
    ScrollableResults scroll = q.scroll(ScrollMode.FORWARD_ONLY);

    int batch = 0;
    // FIX: close the scrollable cursor when done (it was previously leaked).
    try {
        while (scroll.next()) {
            batch++;
            search.index(scroll.get(0));
            if (batch % flushBatchSize == 0) {
                if (log.isInfoEnabled()) {
                    log.info("Flushing " + entityName + " - " + batch);
                }

                // no need to call s.flush()
                // we don't change anything
                search.flushToIndexes();
                search.clear();
            }
        }
    } finally {
        scroll.close();
    }

    log.info("Flushing " + entityName + " - " + batch + " (final)");
    search.flushToIndexes();
    search.clear();

    log.info("Optimizing index for " + entityName);
    search.getSearchFactory().optimize(entityClass);
}

From source file:org.eurekastreams.commons.search.bootstrap.SearchIndexManagerTest.java

License:Apache License

/**
 * Test reindexing models using the class overload. The batch size is set to 10, with 11 records. Make sure the
 * flushToIndexes is called//w ww.j a va2 s  .  co m
 * 
 * Look, this is ridiculous, I know. This test is nothing more than a useless whitebox test to get past clover
 * tests.
 */
@Test
public void testReindexModelsFromClass() {
    final int fetchSize = 938;
    final int flushSize = 2;
    context.checking(new Expectations() {
        {
            // purge, flush, optimize, flush first
            one(fullTextSessionMock).purgeAll(SearchIndexManagerTest.class);
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).getSearchFactory();
            will(returnValue(searchFactoryMock));
            one(searchFactoryMock).optimize(SearchIndexManagerTest.class);
            one(fullTextSessionMock).flushToIndexes();

            one(fullTextSessionMock).createQuery("FROM SearchIndexManagerTest");
            will(returnValue(queryMock));

            //                one(queryMock).setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
            will(returnValue(queryMock));

            one(queryMock).setCacheMode(CacheMode.IGNORE);
            will(returnValue(queryMock));

            one(queryMock).setFetchSize(fetchSize);
            will(returnValue(queryMock));

            one(queryMock).scroll(ScrollMode.FORWARD_ONLY);
            will(returnValue(scrollMock));

            // 3 results, batch size of 2
            Object entity1 = new Object();
            Object entity2 = new Object();
            Object entity3 = new Object();

            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity1));
            one(fullTextSessionMock).index(entity1);

            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity2));
            one(fullTextSessionMock).index(entity2);

            // end of batch - flush
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).clear();

            // last one
            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity3));
            one(fullTextSessionMock).index(entity3);

            // no more
            one(scrollMock).next();
            will(returnValue(false));

            // flush, optimize, flush, clear remaining
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).clear();
            one(fullTextSessionMock).getSearchFactory();
            will(returnValue(searchFactoryMock));
            one(searchFactoryMock).optimize(SearchIndexManagerTest.class);
        }
    });

    // call the system under test
    SearchIndexManager indexer = new SearchIndexManager(fetchSize, flushSize);
    indexer.reindexEntities(SearchIndexManagerTest.class, fullTextSessionMock);

    // all expectations met?
    context.assertIsSatisfied();
}

From source file:org.eurekastreams.commons.search.bootstrap.SearchIndexManagerTest.java

License:Apache License

/**
 * Test reindexing models using the class, entityName overload. Scripts the exact
 * mock interaction sequence for 3 results with a flush batch size of 2, driving
 * the query by entity name ("HeyNow") rather than by class.
 *
 * Look, this is ridiculous, I know. This test is nothing more than a useless whitebox test to get past clover
 * tests.
 */
@Test
public void testReindexModelsFromClassAndEntityName() {
    final int fetchSize = 8;
    final int flushSize = 2;
    context.checking(new Expectations() {
        {
            // purge, flush, optimize, flush first
            one(fullTextSessionMock).purgeAll(SearchIndexManagerTest.class);
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).getSearchFactory();
            will(returnValue(searchFactoryMock));
            one(searchFactoryMock).optimize(SearchIndexManagerTest.class);
            one(fullTextSessionMock).flushToIndexes();

            one(fullTextSessionMock).createQuery("FROM HeyNow");
            will(returnValue(queryMock));

            // NOTE(review): this will() is left over from the commented-out
            // setResultTransformer expectation and re-attaches to the previous
            // createQuery expectation — presumably harmless here; verify.
            will(returnValue(queryMock));

            one(queryMock).setCacheMode(CacheMode.IGNORE);
            will(returnValue(queryMock));

            one(queryMock).setFetchSize(fetchSize);
            will(returnValue(queryMock));

            one(queryMock).scroll(ScrollMode.FORWARD_ONLY);
            will(returnValue(scrollMock));

            // 3 results, batch size of 2
            Object entity1 = new Object();
            Object entity2 = new Object();
            Object entity3 = new Object();

            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity1));
            one(fullTextSessionMock).index(entity1);

            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity2));
            one(fullTextSessionMock).index(entity2);

            // end of batch - flush
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).clear();

            // last one
            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity3));
            one(fullTextSessionMock).index(entity3);

            // no more
            one(scrollMock).next();
            will(returnValue(false));

            // flush, optimize, flush, clear batch
            one(fullTextSessionMock).clear();
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).getSearchFactory();
            will(returnValue(searchFactoryMock));
            one(searchFactoryMock).optimize(SearchIndexManagerTest.class);
        }
    });

    // call the system under test
    SearchIndexManager indexer = new SearchIndexManager(fetchSize, flushSize);
    indexer.reindexEntities(SearchIndexManagerTest.class, "HeyNow", fullTextSessionMock);

    // all expectations met?
    context.assertIsSatisfied();
}

From source file:org.eurekastreams.server.persistence.mappers.cache.DomainGroupCacheLoader.java

License:Apache License

/**
 * Query all domain groups, loading them in the cache. Streams the full result
 * set with a forward-only cursor, clearing the Hibernate session every
 * FETCH_SIZE records to keep memory flat, and caches each group under both its
 * id and its short name.
 */
private void queryAllDomainGroups() {
    long start = System.currentTimeMillis();
    log.info("Loading up all domain groups with a single query");

    Criteria criteria = domainGroupQueryStrategy.getCriteria(getHibernateSession());

    // page the data
    criteria.setFetchSize(FETCH_SIZE);
    criteria.setCacheMode(CacheMode.IGNORE);
    ScrollableResults scroll = criteria.scroll(ScrollMode.FORWARD_ONLY);

    // loop through the results and store in cache
    // FIX: close the scrollable cursor when done (it was previously leaked).
    long recordCounter = 0;
    try {
        while (scroll.next()) {
            if (++recordCounter % FETCH_SIZE == 0) {
                log.info("Loading " + recordCounter + "th domainGroup record, clearing session.");
                getHibernateSession().clear();
            }

            DomainGroupModelView result = (DomainGroupModelView) scroll.get(0);
            getCache().set(CacheKeys.GROUP_BY_ID + result.getEntityId(), result);
            getCache().set(CacheKeys.GROUP_BY_SHORT_NAME + result.getShortName(), result.getEntityId());
        }
    } finally {
        scroll.close();
    }

    log.info("Completed loading all domain groups in " + (System.currentTimeMillis() - start)
            + " milliseconds.");
}