Example usage for org.hibernate CacheMode IGNORE

List of usage examples for org.hibernate CacheMode IGNORE

Introduction

On this page you can find example usage of org.hibernate CacheMode IGNORE.

Prototype

CacheMode IGNORE

Click Source Link to view the source code for org.hibernate CacheMode IGNORE.

Document

The session will never interact with the cache, except to invalidate cache items when updates occur.
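
Before the examples, here is a minimal sketch of the pattern they share: a session switched to CacheMode.IGNORE while scrolling through a large result set in batches. MyEntity, the HQL string, and the SessionFactory parameter are hypothetical placeholders, not part of the examples below.

// Minimal sketch; MyEntity and sessionFactory are placeholders.
public int processAllIgnoringCache(SessionFactory sessionFactory) {
    Session session = sessionFactory.openSession();
    session.setCacheMode(CacheMode.IGNORE); // never read from or populate the second-level cache
    Transaction tx = session.beginTransaction();
    ScrollableResults results = session.createQuery("from MyEntity")
            .setCacheMode(CacheMode.IGNORE)
            .scroll(ScrollMode.FORWARD_ONLY);
    int count = 0;
    while (results.next()) {
        MyEntity entity = (MyEntity) results.get(0);
        // ... process entity ...
        if (++count % 50 == 0) {
            session.flush(); // push pending changes to the database
            session.clear(); // detach processed entities to keep memory bounded
        }
    }
    tx.commit();
    session.close();
    return count;
}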

Usage

From source file:info.jtrac.repository.HibernateJtracDao.java

License:Apache License

@Override
@Transactional(propagation = Propagation.REQUIRED)
public long loadNextSequenceNum(final long spaceSequenceId) {
    entityManager.flush();
    Session session = getSession();
    session.setCacheMode(CacheMode.IGNORE);
    SpaceSequence ss = (SpaceSequence) session.get(SpaceSequence.class, spaceSequenceId);
    long next = ss.getAndIncrement();
    session.update(ss);
    session.flush();
    return next;
}

From source file:it.jugpadova.blo.EventBo.java

License:Apache License

public void regenerateLuceneIndexes() {
    Session session = this.eventDao.getHibernateTemplate().getSessionFactory().getCurrentSession();
    FullTextSession fullTextSession = Search.createFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    ScrollableResults results = fullTextSession.createCriteria(Event.class).scroll(ScrollMode.FORWARD_ONLY);

    int index = 0;
    while (results.next()) {
        index++;
        fullTextSession.index(results.get(0)); //index each element

        if (index % 50 == 0) {
            fullTextSession.clear(); //clear every batchSize since the queue is processed

        }
    }
}

From source file:kr.debop4j.data.ogm.dao.HibernateOgmDao.java

License:Apache License

@Override
public void indexAll(Class<?> clazz, int batchSize) {
    if (isDebugEnabled)
        log.debug("[{}]?  ?  ??? ...", clazz);

    clearIndex(clazz);

    if (batchSize < DEFAUALT_BATCH_SIZE)
        batchSize = DEFAUALT_BATCH_SIZE;

    FullTextSession fts = getFullTextSession();

    FlushMode currentFlushMode = fts.getFlushMode();
    CacheMode currentCacheMode = fts.getCacheMode();
    fts.setFlushMode(FlushMode.MANUAL);
    fts.setCacheMode(CacheMode.IGNORE);

    try {
        Transaction tx = fts.beginTransaction();
        ScrollableResults results = fts.createCriteria(clazz).scroll(ScrollMode.FORWARD_ONLY);
        int index = 0;
        while (results.next()) {
            fts.index(results.get(0));
            if (++index % batchSize == 0) {
                fts.flushToIndexes();
                fts.clear();
                if (isTraceEnabled)
                    log.trace("?? . index=[{}]", index);
            }
        }
        fts.flushToIndexes();
        tx.commit();

        log.info("[{}]?   [{}]   ??? !!!", clazz,
                index);
    } finally {
        fts.setFlushMode(currentFlushMode);
        fts.setCacheMode(currentCacheMode);
    }
}

From source file:kr.debop4j.search.dao.HibernateSearchDao.java

License:Apache License

@Override
public void indexAll(Class<?> clazz, int batchSize) {
    if (log.isDebugEnabled())
        log.debug("[{}]?  ?  ??? ...", clazz);

    clearIndex(clazz);

    if (batchSize < BATCH_SIZE)
        batchSize = BATCH_SIZE;

    final FullTextSession fts = getFullTextSession();

    FlushMode currentFlushMode = fts.getFlushMode();
    CacheMode currentCacheMode = fts.getCacheMode();
    fts.setFlushMode(FlushMode.MANUAL);
    fts.setCacheMode(CacheMode.IGNORE);

    try {
        Transaction tx = fts.beginTransaction();
        ScrollableResults results = fts.createCriteria(clazz).scroll(ScrollMode.FORWARD_ONLY);
        int index = 0;
        while (results.next()) {
            index++;
            fts.index(results.get(0));
            if (index % batchSize == 0) {
                fts.flushToIndexes();
                fts.clear();
            }
        }
        fts.flushToIndexes();
        tx.commit();

        if (log.isDebugEnabled())
            log.debug("[{}]?   [{}]   ??? !!!",
                    clazz, index);
    } finally {
        fts.setFlushMode(currentFlushMode);
        fts.setCacheMode(currentCacheMode);
    }
}

From source file:magoffin.matt.dao.hbm.GenericHibernateDao.java

License:Open Source License

/**
 * Execute a batch callback using a named query.
 *
 * @param queryName the named query name
 * @param parameters the named parameters to pass to the query
 * @param callback the callback
 * @return the number of items processed
 */
protected Integer executeNamedQueryBatchCallback(final String queryName, final Map<String, Object> parameters,
        final BatchCallback<T> callback) {
    return getHibernateTemplate().execute(new HibernateCallback<Integer>() {
        @SuppressWarnings("unchecked")
        @Override
        public Integer doInHibernate(Session session) throws HibernateException, SQLException {
            Query q = session.getNamedQuery(queryName);
            if (parameters != null) {
                for (String paramName : parameters.keySet()) {
                    q.setParameter(paramName, parameters.get(paramName));
                }
            }
            q.setCacheMode(CacheMode.IGNORE);
            ScrollableResults items = q.scroll(ScrollMode.FORWARD_ONLY);
            int count = 0;

            OUTER: while (items.next()) {
                T item = (T) items.get(0);
                BatchCallbackResult action = callback.handle(item);
                switch (action) {
                case DELETE:
                    session.delete(item);
                    break;

                case UPDATE:
                case UPDATE_STOP:
                    store(item);
                    if (action == BatchCallbackResult.UPDATE_STOP) {
                        break OUTER;
                    }
                    break;

                case STOP:
                    break OUTER;

                case CONTINUE:
                    // nothing to do
                    break;
                }
                if (++count % batchFlushCount == 0) {
                    session.flush();
                    session.clear();
                }
            }

            return count;
        }
    });
}
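
A hedged usage sketch for the helper above, assuming BatchCallback<T> declares only the handle(T) method invoked in the loop; the named query "expiredItems", the cutoff parameter, cutoffDate, and shouldDelete(...) are hypothetical placeholders:

// Hypothetical call site inside a subclass of GenericHibernateDao.
Map<String, Object> params = new HashMap<String, Object>();
params.put("cutoff", cutoffDate);                      // cutoffDate is a placeholder value
Integer processed = executeNamedQueryBatchCallback("expiredItems", params,
        new BatchCallback<T>() {
            @Override
            public BatchCallbackResult handle(T item) {
                // delete matching items, leave the rest untouched
                return shouldDelete(item) ? BatchCallbackResult.DELETE
                                          : BatchCallbackResult.CONTINUE;
            }
        });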

From source file:magoffin.matt.ma2.dao.hbm.HibernateAlbumDao.java

License:Open Source License

public int reassignAlbumsUsingTheme(final Theme oldTheme, final Theme newTheme) {
    return getHibernateTemplate().execute(new HibernateCallback<Integer>() {

        public Integer doInHibernate(Session session) throws HibernateException, SQLException {
            ScrollableResults albums = session.getNamedQuery(QUERY_ALBUMS_FOR_THEME_ID)
                    .setLong("themeId", oldTheme.getThemeId()).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
            int count = 0;
            while (albums.next()) {
                Album album = (Album) albums.get(0);
                album.setTheme(newTheme);
                if (++count % 20 == 0) {
                    session.flush();
                    session.clear();
                }
            }
            return count;
        }
    });
}

From source file:net.jforum.actions.LuceneAdminActions.java

License:Open Source License

public void rebuildIndex() {

    Runnable indexingJob = new Runnable() {
        public void run() {
            Session session = null;

            try {
                session = sessionFactory.openSession();

                FullTextSession fullTextSession = Search.createFullTextSession(session);
                fullTextSession.setFlushMode(FlushMode.MANUAL);
                fullTextSession.setCacheMode(CacheMode.IGNORE);

                session.beginTransaction();

                int index = 0;
                int batchSize = config.getInt(ConfigKeys.LUCENE_BATCH_SIZE);

                ScrollableResults results = fullTextSession.createCriteria(Post.class).createAlias("topic", "t")
                        .scroll(ScrollMode.FORWARD_ONLY);

                while (results.next() && "1".equals(config.getValue(ConfigKeys.LUCENE_CURRENTLY_INDEXING))) {
                    index++;

                    fullTextSession.index(results.get(0));

                    if (index % batchSize == 0) {
                        session.clear();
                    }
                }

                session.getTransaction().commit();
            } catch (Exception e) {
                if (session.getTransaction().isActive()) {
                    session.getTransaction().rollback();
                }
            } finally {
                if (session.isOpen() && session.isConnected()) {
                    session.close();
                }
            }
        }
    };

    this.config.addProperty(ConfigKeys.LUCENE_CURRENTLY_INDEXING, "1");

    Thread thread = new Thread(indexingJob);
    thread.start();

    this.viewService.redirectToAction(Actions.LIST);
}

From source file:net.jforum.controllers.LuceneAdminController.java

License:Open Source License

public void rebuildIndex() {

    Runnable indexingJob = new Runnable() {
        public void run() {
            Session session = null;

            try {
                session = sessionFactory.openSession();

                FullTextSession fullTextSession = Search.createFullTextSession(session);
                fullTextSession.setFlushMode(FlushMode.MANUAL);
                fullTextSession.setCacheMode(CacheMode.IGNORE);

                session.beginTransaction();

                int index = 0;
                int batchSize = config.getInt(ConfigKeys.LUCENE_BATCH_SIZE);

                ScrollableResults results = fullTextSession.createCriteria(Post.class).createAlias("topic", "t")
                        .scroll(ScrollMode.FORWARD_ONLY);

                while (results.next() && "1".equals(config.getValue(ConfigKeys.LUCENE_CURRENTLY_INDEXING))) {
                    index++;

                    fullTextSession.index(results.get(0));

                    if (index % batchSize == 0) {
                        session.clear();
                    }
                }

                session.getTransaction().commit();
            } catch (Exception e) {
                if (session.getTransaction().isActive()) {
                    session.getTransaction().rollback();
                }
            } finally {
                if (session.isOpen() && session.isConnected()) {
                    session.close();
                }
            }
        }
    };

    this.config.addProperty(ConfigKeys.LUCENE_CURRENTLY_INDEXING, "1");

    Thread thread = new Thread(indexingJob);
    thread.start();

    this.result.redirectTo(this).list();
}

From source file:ome.services.db.SearchTest.java

License:Open Source License

@Test
public void testReindexingExperimenter() throws Exception {
    HibernateTest ht = new HibernateTest();
    ht.setupSession();

    FullTextSession fullTextSession = Search.getFullTextSession(ht.s);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    Transaction transaction = fullTextSession.beginTransaction();
    // Scrollable results will avoid loading too many objects in memory
    ScrollableResults results = fullTextSession.createCriteria(Experimenter.class)
            .scroll(ScrollMode.FORWARD_ONLY);
    int index = 0;
    int batchSize = 10;
    while (results.next()) {
        index++;
        fullTextSession.index(results.get(0)); // index each element
        if (index % batchSize == 0) {
            ht.s.clear(); // clear every batchSize since the queue is processed
        }
    }
    ht.closeSession();

    ht.setupSession();
    List<Experimenter> list = query(ht, "root", Experimenter.class, "omeName");
    assertTrue(list.toString(), list.size() == 1);
    ht.closeSession();
}

From source file:ome.services.fulltext.FullTextIndexer.java

License:Open Source License

/**
 * Runs {@link #doIndexing(FullTextSession)} within a Lucene transaction.
 * {@link #doIndexing(FullTextSession)} will also be called
 */
@Transactional(readOnly = false, isolation = Isolation.SERIALIZABLE)
public Object doWork(Session session, ServiceFactory sf) {
    int count = 1;
    int perbatch = 0;
    long start = System.currentTimeMillis();
    do {

        // ticket:1254 -
        // The following is non-portable and can later be refactored
        // for a more general solution.
        getSqlAction().deferConstraints();

        // s.execute("set statement_timeout=10000");
        // The Postgresql Driver does not currently support the
        // "timeout" value on @Transactional and so if a query timeout
        // is required, then this must be set.

        FullTextSession fullTextSession = Search.getFullTextSession(session);
        fullTextSession.setFlushMode(FlushMode.MANUAL);
        fullTextSession.setCacheMode(CacheMode.IGNORE);
        perbatch = doIndexingWithWorldRead(sf, fullTextSession);
        count++;
    } while (doMore(count));
    if (perbatch > 0) {
        log.info(String.format("INDEXED %s objects in %s batch(es) [%s ms.]", perbatch, (count - 1),
                (System.currentTimeMillis() - start)));
    } else {
        log.debug("No objects indexed");
    }
    return null;
}