List of usage examples for org.hibernate.CacheMode.IGNORE
CacheMode.IGNORE
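All of the examples below use CacheMode.IGNORE for the same purpose: a bulk read (search-index rebuilding, cache warming, batch export) that should neither consult nor populate Hibernate's second-level cache. A minimal sketch of that pattern, assuming a plain org.hibernate.Session; the entity name MyEntity and the batch size of 100 are illustrative placeholders, not taken from the examples:

import org.hibernate.CacheMode;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;

public class CacheModeIgnoreSketch {

    // Bulk-read sketch: scroll through all instances of a (placeholder) entity
    // without touching the second-level cache.
    public static void bulkRead(Session session) {
        // IGNORE: the session never interacts with the second-level cache,
        // apart from invalidating entries when updates occur.
        session.setCacheMode(CacheMode.IGNORE);

        ScrollableResults results = session.createQuery("FROM MyEntity")
                .setFetchSize(100)
                .scroll(ScrollMode.FORWARD_ONLY);
        int count = 0;
        while (results.next()) {
            Object entity = results.get(0);
            // ... process the entity (index it, copy it, export it) ...
            if (++count % 100 == 0) {
                session.clear(); // keep the first-level cache from growing unbounded
            }
        }
        results.close();
    }
}

With IGNORE set, the session skips the second-level cache except to invalidate entries when updates occur, which is exactly what the indexers and cache loaders below rely on.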
From source file:org.compass.gps.device.jpa.indexer.HibernateJpaIndexEntitiesIndexer.java
License:Apache License
public void performIndex(CompassSession session, IndexEntity[] entities) {
    for (IndexEntity indexEntity : entities) {
        EntityInformation entityInformation = (EntityInformation) indexEntity;
        if (jpaGpsDevice.isFilteredForIndex(entityInformation.getName())) {
            continue;
        }
        int fetchCount = jpaGpsDevice.getFetchCount();
        if (!jpaGpsDevice.isRunning()) {
            return;
        }
        EntityManagerWrapper wrapper = jpaGpsDevice.getEntityManagerWrapper().newInstance();
        ScrollableResults cursor = null;
        try {
            wrapper.open();
            HibernateEntityManager entityManager = (HibernateEntityManager) wrapper.getEntityManager();
            entityManager.getSession().setCacheMode(CacheMode.IGNORE);
            if (log.isDebugEnabled()) {
                log.debug(jpaGpsDevice.buildMessage("Indexing entities [" + entityInformation.getName()
                        + "] using query [" + entityInformation.getQueryProvider() + "]"));
            }
            if (entityInformation.getQueryProvider() instanceof HibernateJpaQueryProvider) {
                Criteria criteria = ((HibernateJpaQueryProvider) entityInformation.getQueryProvider())
                        .createCriteria(entityManager, entityInformation);
                if (criteria != null) {
                    if (performOrderById) {
                        Boolean performOrder = performOrderByPerEntity.get(entityInformation.getName());
                        if (performOrder == null || performOrder) {
                            ClassMetadata metadata = entityManager.getSession().getSessionFactory()
                                    .getClassMetadata(entityInformation.getName());
                            String idPropName = metadata.getIdentifierPropertyName();
                            if (idPropName != null) {
                                criteria.addOrder(Order.asc(idPropName));
                            }
                        }
                    }
                    criteria.setFetchSize(fetchCount);
                    cursor = criteria.scroll(ScrollMode.FORWARD_ONLY);
                }
            }
            if (cursor == null) {
                HibernateQuery query = (HibernateQuery) entityInformation.getQueryProvider()
                        .createQuery(entityManager, entityInformation);
                cursor = query.getHibernateQuery().scroll(ScrollMode.FORWARD_ONLY);
            }
            // store things in row buffer to allow using batch fetching in Hibernate
            RowBuffer buffer = new RowBuffer(session, entityManager.getSession(), fetchCount);
            Object prev = null;
            while (true) {
                try {
                    if (!cursor.next()) {
                        break;
                    }
                } catch (ObjectNotFoundException e) {
                    continue;
                }
                Object item = cursor.get(0);
                if (item != prev && prev != null) {
                    buffer.put(prev);
                }
                prev = item;
                if (buffer.shouldFlush()) {
                    // put also the item/prev since we are clearing the session
                    // in the flush process
                    buffer.put(prev);
                    buffer.flush();
                    prev = null;
                }
            }
            if (prev != null) {
                buffer.put(prev);
            }
            buffer.close();
            cursor.close();
            entityManager.clear();
            wrapper.close();
        } catch (Exception e) {
            log.error(jpaGpsDevice.buildMessage("Failed to index the database"), e);
            if (cursor != null) {
                try {
                    cursor.close();
                } catch (Exception e1) {
                    log.warn(jpaGpsDevice.buildMessage("Failed to close cursor on error, ignoring"), e1);
                }
            }
            wrapper.closeOnError();
            if (!(e instanceof JpaGpsDeviceException)) {
                throw new JpaGpsDeviceException(jpaGpsDevice.buildMessage("Failed to index the database"), e);
            }
            throw (JpaGpsDeviceException) e;
        }
    }
}
From source file:org.dspace.app.cris.dao.ApplicationDao.java
public void ignoreCacheMode() {
    getSession().setCacheMode(CacheMode.IGNORE);
}
From source file:org.egov.tl.service.AbstractLicenseService.java
License:Open Source License
public List<T> getAllLicensesByNatureOfBusiness(String natureOfBusiness) {
    return this.entityQueryService.getSession().createCriteria(License.class)
            .createAlias("natureOfBusiness", "nb", JoinType.LEFT_OUTER_JOIN)
            .add(Restrictions.eq("nb.name", natureOfBusiness))
            .setCacheMode(CacheMode.IGNORE).list();
}
From source file:org.encuestame.core.cron.IndexRebuilder.java
License:Apache License
/**
 * Reindex domain object.
 * @param fullTextSession {@link FullTextSession}.
 * @param clazz domain class.
 */
public static void reindex(final FullTextSession fullTextSession, final Class<?> clazz) {
    log.debug(clazz.getName() + " purge index ...");
    // purge all index content.
    fullTextSession.purgeAll(clazz);
    fullTextSession.flushToIndexes();
    fullTextSession.getSearchFactory().optimize(clazz);
    log.debug(clazz.getName() + " starting index ...");
    final long startTime = System.currentTimeMillis();
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    final Transaction transaction = fullTextSession.beginTransaction();
    // Scrollable results will avoid loading too many objects in memory
    final ScrollableResults results = fullTextSession.createCriteria(clazz).setFetchSize(100)
            .setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY)
            .scroll(ScrollMode.FORWARD_ONLY);
    int index = 0;
    while (results.next()) {
        index++;
        fullTextSession.index(results.get(0));
        if (index % 100 == 0) {
            fullTextSession.flushToIndexes();
            fullTextSession.flush();
            fullTextSession.clear();
        }
    }
    fullTextSession.flushToIndexes();
    fullTextSession.getSearchFactory().optimize(clazz);
    transaction.commit();
    log.debug(clazz.getName() + " Reindex end in "
            + ((System.currentTimeMillis() - startTime) / 1000.0) + " seconds.");
}
From source file:org.eurekastreams.commons.search.bootstrap.SearchIndexManager.java
License:Apache License
/**
 * Purge & index all entities with the input class and name.
 *
 * @param entityClass
 *            the type of entities to reindex into search index.
 *
 * @param entityName
 *            the name of the entity to reindex
 *
 * @param search
 *            the FullTextSession to use
 */
@SuppressWarnings("unchecked")
public void reindexEntities(final Class entityClass, final String entityName, final FullTextSession search) {
    log.info("reindexEntities(" + entityClass.toString() + ", " + entityName + ")");

    // purge first
    purgeSearchIndex(entityClass, search);

    log.info("Creating query to find batches of " + entityName);
    Query q = search.createQuery("FROM " + entityName)
            // set the result transformer
            // .setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY) <TODO COMMENTED OUT>
            // minimize cache
            .setCacheMode(CacheMode.IGNORE)
            // limit fetch size
            .setFetchSize(fetchBatchSize);

    log.info("setting scroll mode to FORWARD_ONLY for " + entityName);
    ScrollableResults scroll = q.scroll(ScrollMode.FORWARD_ONLY);

    int batch = 0;
    while (scroll.next()) {
        batch++;
        search.index(scroll.get(0));
        if (batch % flushBatchSize == 0) {
            if (log.isInfoEnabled()) {
                log.info("Flushing " + entityName + " - " + batch);
            }
            // no need to call s.flush()
            // we don't change anything
            search.flushToIndexes();
            search.clear();
        }
    }
    log.info("Flushing " + entityName + " - " + batch + " (final)");
    search.flushToIndexes();
    search.clear();

    log.info("Optimizing index for " + entityName);
    search.getSearchFactory().optimize(entityClass);
}
From source file:org.eurekastreams.commons.search.bootstrap.SearchIndexManagerTest.java
License:Apache License
/**
 * Test reindexing models using the class overload. The batch size is set to 10, with 11 records. Make sure the
 * flushToIndexes is called.
 *
 * Look, this is ridiculous, I know. This test is nothing more than a useless whitebox test to get past clover
 * tests.
 */
@Test
public void testReindexModelsFromClass() {
    final int fetchSize = 938;
    final int flushSize = 2;
    context.checking(new Expectations() {
        {
            // purge, flush, optimize, flush first
            one(fullTextSessionMock).purgeAll(SearchIndexManagerTest.class);
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).getSearchFactory();
            will(returnValue(searchFactoryMock));
            one(searchFactoryMock).optimize(SearchIndexManagerTest.class);
            one(fullTextSessionMock).flushToIndexes();

            one(fullTextSessionMock).createQuery("FROM SearchIndexManagerTest");
            will(returnValue(queryMock));

            // one(queryMock).setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY); will(returnValue(queryMock));
            one(queryMock).setCacheMode(CacheMode.IGNORE);
            will(returnValue(queryMock));
            one(queryMock).setFetchSize(fetchSize);
            will(returnValue(queryMock));
            one(queryMock).scroll(ScrollMode.FORWARD_ONLY);
            will(returnValue(scrollMock));

            // 3 results, batch size of 2
            Object entity1 = new Object();
            Object entity2 = new Object();
            Object entity3 = new Object();

            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity1));
            one(fullTextSessionMock).index(entity1);

            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity2));
            one(fullTextSessionMock).index(entity2);

            // end of batch - flush
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).clear();

            // last one
            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity3));
            one(fullTextSessionMock).index(entity3);

            // no more
            one(scrollMock).next();
            will(returnValue(false));

            // flush, optimize, flush, clear remaining
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).clear();
            one(fullTextSessionMock).getSearchFactory();
            will(returnValue(searchFactoryMock));
            one(searchFactoryMock).optimize(SearchIndexManagerTest.class);
        }
    });

    // call the system under test
    SearchIndexManager indexer = new SearchIndexManager(fetchSize, flushSize);
    indexer.reindexEntities(SearchIndexManagerTest.class, fullTextSessionMock);

    // all expectations met?
    context.assertIsSatisfied();
}
From source file:org.eurekastreams.commons.search.bootstrap.SearchIndexManagerTest.java
License:Apache License
/**
 * Test reindexing models using the class, entityName overload.
 *
 * Look, this is ridiculous, I know. This test is nothing more than a useless whitebox test to get past clover
 * tests.
 */
@Test
public void testReindexModelsFromClassAndEntityName() {
    final int fetchSize = 8;
    final int flushSize = 2;
    context.checking(new Expectations() {
        {
            // purge, flush, optimize, flush first
            one(fullTextSessionMock).purgeAll(SearchIndexManagerTest.class);
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).getSearchFactory();
            will(returnValue(searchFactoryMock));
            one(searchFactoryMock).optimize(SearchIndexManagerTest.class);
            one(fullTextSessionMock).flushToIndexes();

            one(fullTextSessionMock).createQuery("FROM HeyNow");
            will(returnValue(queryMock));

            // one(queryMock).setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY); will(returnValue(queryMock));
            one(queryMock).setCacheMode(CacheMode.IGNORE);
            will(returnValue(queryMock));
            one(queryMock).setFetchSize(fetchSize);
            will(returnValue(queryMock));
            one(queryMock).scroll(ScrollMode.FORWARD_ONLY);
            will(returnValue(scrollMock));

            // 3 results, batch size of 2
            Object entity1 = new Object();
            Object entity2 = new Object();
            Object entity3 = new Object();

            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity1));
            one(fullTextSessionMock).index(entity1);

            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity2));
            one(fullTextSessionMock).index(entity2);

            // end of batch - flush
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).clear();

            // last one
            one(scrollMock).next();
            will(returnValue(true));
            one(scrollMock).get(0);
            will(returnValue(entity3));
            one(fullTextSessionMock).index(entity3);

            // no more
            one(scrollMock).next();
            will(returnValue(false));

            // flush, optimize, flush, clear batch
            one(fullTextSessionMock).clear();
            one(fullTextSessionMock).flushToIndexes();
            one(fullTextSessionMock).getSearchFactory();
            will(returnValue(searchFactoryMock));
            one(searchFactoryMock).optimize(SearchIndexManagerTest.class);
        }
    });

    // call the system under test
    SearchIndexManager indexer = new SearchIndexManager(fetchSize, flushSize);
    indexer.reindexEntities(SearchIndexManagerTest.class, "HeyNow", fullTextSessionMock);

    // all expectations met?
    context.assertIsSatisfied();
}
From source file:org.eurekastreams.server.persistence.mappers.cache.DomainGroupCacheLoader.java
License:Apache License
/**
 * Query all domain groups, loading them in the cache.
 */
private void queryAllDomainGroups() {
    long start = System.currentTimeMillis();
    log.info("Loading up all domain groups with a single query");
    Criteria criteria = domainGroupQueryStrategy.getCriteria(getHibernateSession());

    // page the data
    criteria.setFetchSize(FETCH_SIZE);
    criteria.setCacheMode(CacheMode.IGNORE);
    ScrollableResults scroll = criteria.scroll(ScrollMode.FORWARD_ONLY);

    // loop through the results and store in cache
    long recordCounter = 0;
    while (scroll.next()) {
        if (++recordCounter % FETCH_SIZE == 0) {
            log.info("Loading " + recordCounter + "th domainGroup record, clearing session.");
            getHibernateSession().clear();
        }
        DomainGroupModelView result = (DomainGroupModelView) scroll.get(0);
        getCache().set(CacheKeys.GROUP_BY_ID + result.getEntityId(), result);
        getCache().set(CacheKeys.GROUP_BY_SHORT_NAME + result.getShortName(), result.getEntityId());
    }
    log.info("Completed loading all domain groups in " + (System.currentTimeMillis() - start)
            + " milliseconds.");
}
From source file:org.eurekastreams.server.persistence.mappers.cache.PersonCacheLoader.java
License:Apache License
/**
 * Query the database for all people, only requesting the fields that we're caching, paging for efficiency.
 */
private void queryAllPeople() {
    Criteria criteria = personQueryStrategy.getCriteria(getHibernateSession());

    // page the data
    criteria.setFetchSize(FETCH_SIZE);
    criteria.setCacheMode(CacheMode.IGNORE);
    ScrollableResults scroll = criteria.scroll(ScrollMode.FORWARD_ONLY);

    // loop through the results and store in cache
    long recordCounter = 0;
    while (scroll.next()) {
        if (++recordCounter % FETCH_SIZE == 0) {
            log.info("Loading " + recordCounter + "th person record, clearing session.");
            getHibernateSession().clear();
        }
        PersonModelView result = (PersonModelView) scroll.get(0);
        getCache().set(CacheKeys.PERSON_BY_ID + result.getEntityId(), result);
        getCache().set(CacheKeys.PERSON_BY_ACCOUNT_ID + result.getAccountId(), result.getEntityId());
        getCache().set(CacheKeys.PERSON_BY_OPEN_SOCIAL_ID + result.getOpenSocialId(), result.getEntityId());
    }
}
From source file:org.faster.orm.service.hibernate.with.option.HibernateGetWithOptionService.java
License:Open Source License
@SuppressWarnings("unchecked") @Override/*from www . j a va 2 s .co m*/ public PO get(ID id, QueryOption queryOption) { StopWatch sw = null; if (log.isDebugEnabled()) { log.debug("Getting {}#{} with cache {}...", new Object[] { persistClassName, id, getCacheDisplay(queryOption.isCacheEnabled()) }); sw = new StopWatch(); sw.start(); } PO ret = null; if (id != null) { CacheMode origCacheMode = getSession().getCacheMode(); getSession().setCacheMode(queryOption.isCacheEnabled() ? CacheMode.NORMAL : CacheMode.IGNORE); ret = (PO) getSession().get(persistClass, id); getSession().setCacheMode(origCacheMode); if (ret != null) { postLoad(ret); } } if (log.isDebugEnabled()) { log.debug("{}#{}{} found. ({} ms)", new Object[] { persistClassName, id, ret == null ? " not" : "", sw.getTime() }); } return ret; }