List of usage examples for org.hibernate Session getSessionFactory
SessionFactory getSessionFactory();
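For orientation, a minimal sketch of the call itself: getSessionFactory() returns the SessionFactory that created the Session, which gives access to factory-level services such as Statistics. The surrounding class and the someFactory parameter below are illustrative assumptions, not taken from any of the projects listed on this page.

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.stat.Statistics;

public class GetSessionFactoryExample {

    // someFactory is assumed to be a SessionFactory configured elsewhere (hypothetical name).
    public static void printSessionCounts(SessionFactory someFactory) {
        Session session = someFactory.openSession();
        try {
            // Navigate back from the Session to its owning SessionFactory.
            Statistics stats = session.getSessionFactory().getStatistics();
            System.out.println("Sessions opened: " + stats.getSessionOpenCount());
            System.out.println("Sessions closed: " + stats.getSessionCloseCount());
        } finally {
            session.close();
        }
    }
}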
From source file:org.eclipse.emf.teneo.hibernate.resource.HibernateResource.java
License:Open Source License
protected String getIdentifierName(EObject eobject, Session hs) {
    String entityName = hs.getEntityName(eobject);
    if (entityName == null) {
        return null;
    }
    ClassMetadata entityMetaData = hs.getSessionFactory().getClassMetadata(entityName);
    if (entityMetaData == null) {
        return null;
    }
    String identifierName = entityMetaData.getIdentifierPropertyName();
    return identifierName;
}
From source file:org.grails.orm.hibernate.validation.UniqueConstraint.java
License:Apache License
@Override
protected void processValidate(final Object target, final Object propertyValue, Errors errors) {
    if (!unique) {
        return;
    }

    final Object id;
    try {
        id = InvokerHelper.invokeMethod(target, "ident", null);
    } catch (Exception e) {
        throw new GrailsRuntimeException("Target of [unique] constraints [" + target
                + "] is not a domain instance. Unique constraint can only be applied to "
                + "domain classes and not custom user types or embedded instances");
    }

    IHibernateTemplate hibernateTemplate = getHibernateTemplate(target);
    List<?> results = hibernateTemplate.execute(new Closure<List<?>>(this) {
        public List<?> call(Object... args) {
            Session session = (Session) args[0];
            boolean shouldValidate = true;
            Class<?> constraintClass = constraintOwningClass;
            if (propertyValue != null && DomainClassArtefactHandler.isDomainClass(propertyValue.getClass())) {
                shouldValidate = session.contains(propertyValue);
            }
            if (shouldValidate) {
                GrailsApplication application = (GrailsApplication) applicationContext
                        .getBean(GrailsApplication.APPLICATION_ID);
                GrailsDomainClass domainClass = (GrailsDomainClass) application
                        .getArtefact(DomainClassArtefactHandler.TYPE, constraintClass.getName());
                if (domainClass != null && !domainClass.isRoot()) {
                    GrailsDomainClassProperty property = domainClass.getPropertyByName(constraintPropertyName);
                    while (property.isInherited() && domainClass != null) {
                        domainClass = (GrailsDomainClass) application.getArtefact(
                                DomainClassArtefactHandler.TYPE,
                                domainClass.getClazz().getSuperclass().getName());
                        if (domainClass != null) {
                            property = domainClass.getPropertyByName(constraintPropertyName);
                        }
                    }
                    constraintClass = domainClass != null ? domainClass.getClazz() : constraintClass;
                }
                Criteria criteria = null;
                if (domainClass.getPersistentProperty(constraintPropertyName).isOneToOne()) {
                    criteria = session.createCriteria(constraintClass, TARGET_DOMAIN_CLASS_ALIAS);
                    String constraintPropertyAlias = constraintPropertyName + "_";
                    criteria.createAlias(TARGET_DOMAIN_CLASS_ALIAS + "." + constraintPropertyName,
                            constraintPropertyAlias);
                    GrailsDomainClassProperty property = domainClass.getPropertyByName(constraintPropertyName);
                    ClassMetadata classMetadata = session.getSessionFactory()
                            .getClassMetadata(property.getReferencedPropertyType());
                    String identifierPropertyName = classMetadata.getIdentifierPropertyName();
                    BeanWrapper bean = new BeanWrapperImpl(propertyValue);
                    Object identifierPropertyValue = bean.getPropertyValue(identifierPropertyName);
                    criteria.add(Restrictions.eq(constraintPropertyAlias + "." + identifierPropertyName,
                            identifierPropertyValue));
                } else {
                    criteria = session.createCriteria(constraintClass)
                            .add(Restrictions.eq(constraintPropertyName, propertyValue));
                }
                if (uniquenessGroup != null) {
                    for (Object anUniquenessGroup : uniquenessGroup) {
                        String uniquenessGroupPropertyName = (String) anUniquenessGroup;
                        Object uniquenessGroupPropertyValue = GrailsClassUtils
                                .getPropertyOrStaticPropertyOrFieldValue(target, uniquenessGroupPropertyName);
                        if (uniquenessGroupPropertyValue != null && DomainClassArtefactHandler
                                .isDomainClass(uniquenessGroupPropertyValue.getClass())) {
                            // We are merely verifying that the object is not transient here
                            shouldValidate = session.contains(uniquenessGroupPropertyValue);
                        }
                        if (shouldValidate) {
                            criteria.add(Restrictions.eq(uniquenessGroupPropertyName,
                                    uniquenessGroupPropertyValue));
                        } else {
                            break; // we aren't validating, so no point continuing
                        }
                    }
                }
                if (shouldValidate) {
                    return criteria.list();
                }
                return Collections.EMPTY_LIST;
            }
            return Collections.EMPTY_LIST;
        }
    });

    if (results.isEmpty()) {
        return;
    }

    boolean reject = false;
    if (id != null) {
        Object existing = results.get(0);
        Object existingId = null;
        try {
            existingId = InvokerHelper.invokeMethod(existing, "ident", null);
        } catch (Exception e) {
            // result is not a domain class
        }
        if (!id.equals(existingId)) {
            reject = true;
        }
    } else {
        reject = true;
    }
    if (reject) {
        Object[] args = { constraintPropertyName, constraintOwningClass, propertyValue };
        rejectValue(target, errors, UNIQUE_CONSTRAINT, args,
                getDefaultMessage(DEFAULT_NOT_UNIQUE_MESSAGE_CODE));
    }
}
From source file:org.granite.test.tide.hibernate.spring.TestHibernate3TideLazyLoadingJPA.java
License:Open Source License
protected void checkSessionsClosed() {
    Session session = (Session) entityManager.getDelegate();
    Assert.assertEquals("Sessions closed",
            session.getSessionFactory().getStatistics().getSessionOpenCount(),
            session.getSessionFactory().getStatistics().getSessionCloseCount());
}
From source file:org.horizontaldb.shard.hibernate.AbstractDaoEnricher.java
License:Apache License
private void logSlcStats(Session session, String tenantId) {
    if (LOG.isTraceEnabled() && session != null) {
        Statistics statistics = session.getSessionFactory().getStatistics();
        if (statistics != null && statistics.isStatisticsEnabled()) {
            String[] regions = statistics.getSecondLevelCacheRegionNames();
            for (String region : regions) {
                SecondLevelCacheStatistics stat = statistics.getSecondLevelCacheStatistics(region);
                LOG.trace(String.format(
                        "secondLevelCacheStatistics.%s.%s=hits[%s], misses[%s], puts[%s], memCount[%s], memSize[%s], diskCount[%s]",
                        tenantId, region, stat.getHitCount(), stat.getMissCount(), stat.getPutCount(),
                        stat.getElementCountInMemory(), stat.getSizeInMemory(), stat.getElementCountOnDisk()));
            }
        }
    }
}
From source file:org.horizontaldb.shard.hibernate.AbstractDaoEnricherTest.java
License:Apache License
@Test
public void shouldSetAndUnsetSessionForDao() {
    AbstractDaoEnricher enricher = new AbstractDaoEnricher(txManager);
    ShardContext shardContext = new ShardContext("testClient");
    SessionFactory mockSessionFactory = EasyMock.createMock(SessionFactory.class);
    Session mockSession = EasyMock.createMock("mockSession", Session.class);
    AbstractDao mockDao = EasyMock.createMock("mockDao", AbstractDao.class);

    // called during statistics logging
    expect(mockSession.getSessionFactory()).andReturn(mockSessionFactory);
    expect(mockSessionFactory.getStatistics()).andReturn(null);
    // called during statistics logging
    expect(txManager.getSessionFactory()).andReturn(mockSessionFactory);
    expect(mockSessionFactory.getCurrentSession()).andReturn(mockSession);
    // setup
    mockDao.setSession(mockSession);
    // tearDown
    mockDao.setSession(null);

    replay(txManager, mockSessionFactory, mockSession, mockDao);

    enricher.setup(mockDao, shardContext);
    enricher.tearDown(mockDao, shardContext);

    verify(txManager, mockSessionFactory, mockSession, mockDao);
}
From source file:org.horizontaldb.shard.hibernate.AbstractDaoEnricherTest.java
License:Apache License
@Test
public void shouldSetAndUnsetDistinctSessionsForDaoUsingDistinctContexts() {
    AbstractDaoEnricher enricher = new AbstractDaoEnricher(txManager);
    ShardContext shardContext = new ShardContext("testClient");
    ShardContext shardContext1 = new ShardContext("testClient1");
    ShardContext shardContext2 = new ShardContext("testClient2");
    SessionFactory mockSessionFactory = EasyMock.createMock(SessionFactory.class);
    Session mockSession = EasyMock.createMock("mockSession", Session.class);
    Session mockSession1 = EasyMock.createMock("mockSession1", Session.class);
    Session mockSession2 = EasyMock.createMock("mockSession2", Session.class);
    AbstractDao mockDao = EasyMock.createMock("mockDao", AbstractDao.class);

    // called during statistics logging
    expect(mockSession2.getSessionFactory()).andReturn(mockSessionFactory);
    expect(mockSession1.getSessionFactory()).andReturn(mockSessionFactory);
    expect(mockSession.getSessionFactory()).andReturn(mockSessionFactory);
    expect(mockSessionFactory.getStatistics()).andReturn(null).times(3);
    // called during statistics logging
    expect(txManager.getSessionFactory()).andReturn(mockSessionFactory).times(3);
    expect(mockSessionFactory.getCurrentSession()).andReturn(mockSession);
    expect(mockSessionFactory.getCurrentSession()).andReturn(mockSession1);
    expect(mockSessionFactory.getCurrentSession()).andReturn(mockSession2);
    // setups
    mockDao.setSession(mockSession);
    mockDao.setSession(mockSession1);
    mockDao.setSession(mockSession2);
    // tearDowns
    mockDao.setSession(mockSession1);
    mockDao.setSession(mockSession);
    mockDao.setSession(null);

    replay(txManager, mockSessionFactory, mockSession, mockSession1, mockSession2, mockDao);

    enricher.setup(mockDao, shardContext);
    enricher.setup(mockDao, shardContext1);
    enricher.setup(mockDao, shardContext2);
    enricher.tearDown(mockDao, shardContext2);
    enricher.tearDown(mockDao, shardContext1);
    enricher.tearDown(mockDao, shardContext);

    verify(txManager, mockSessionFactory, mockSession, mockSession1, mockSession2, mockDao);
}
From source file:org.horizontaldb.shard.hibernate.AbstractDaoEnricherTest.java
License:Apache License
@Test
public void shouldSetAndUnsetProperSessionsForDistinctDaosUsingDistinctContexts() {
    AbstractDaoEnricher enricher = new AbstractDaoEnricher(txManager);
    ShardContext shardContext = new ShardContext("testClient");
    ShardContext shardContext1 = new ShardContext("testClient1");
    ShardContext shardContext2 = new ShardContext("testClient2");
    SessionFactory mockSessionFactory = EasyMock.createMock(SessionFactory.class);
    Session mockSession = EasyMock.createMock("mockSession", Session.class);
    Session mockSession1 = EasyMock.createMock("mockSession1", Session.class);
    Session mockSession2 = EasyMock.createMock("mockSession2", Session.class);
    AbstractDao mockDao = EasyMock.createMock("mockDao", AbstractDao.class);
    DepartmentDaoImpl mockDao1 = EasyMock.createMock("mockDao1", DepartmentDaoImpl.class);
    PersonDaoImpl mockDao2 = EasyMock.createMock("mockDao2", PersonDaoImpl.class);

    // called during statistics logging
    expect(mockSession2.getSessionFactory()).andReturn(mockSessionFactory);
    expect(mockSession1.getSessionFactory()).andReturn(mockSessionFactory);
    expect(mockSession.getSessionFactory()).andReturn(mockSessionFactory);
    expect(mockSessionFactory.getStatistics()).andReturn(null).times(3);
    // called during statistics logging
    expect(txManager.getSessionFactory()).andReturn(mockSessionFactory).times(3);
    expect(mockSessionFactory.getCurrentSession()).andReturn(mockSession);
    expect(mockSessionFactory.getCurrentSession()).andReturn(mockSession1);
    expect(mockSessionFactory.getCurrentSession()).andReturn(mockSession2);
    // setups
    mockDao.setSession(mockSession);
    mockDao1.setSession(mockSession);
    mockDao2.setSession(mockSession1);
    mockDao1.setSession(mockSession2);
    // tearDowns
    mockDao1.setSession(mockSession);
    mockDao2.setSession(null);
    mockDao1.setSession(null);
    mockDao.setSession(null);

    replay(txManager, mockSessionFactory, mockSession, mockSession1, mockSession2, mockDao, mockDao1, mockDao2);

    enricher.setup(mockDao, shardContext);
    enricher.setup(mockDao1, shardContext);
    enricher.setup(mockDao2, shardContext1);
    enricher.setup(mockDao1, shardContext2);
    enricher.tearDown(mockDao1, shardContext2);
    enricher.tearDown(mockDao2, shardContext1);
    enricher.tearDown(mockDao1, shardContext);
    enricher.tearDown(mockDao, shardContext);

    verify(txManager, mockSessionFactory, mockSession, mockSession1, mockSession2, mockDao, mockDao1, mockDao2);
}
From source file:org.infinispan.test.hibernate.cache.commons.tm.JBossStandaloneJtaExampleTest.java
License:LGPL
@Test
public void testPersistAndLoadUnderJta() throws Exception {
    Item item;
    SessionFactory sessionFactory = buildSessionFactory();
    try {
        UserTransaction ut = (UserTransaction) ctx.lookup("UserTransaction");
        ut.begin();
        try {
            Session session = sessionFactory.openSession();
            assertEquals(TransactionStatus.ACTIVE, session.getTransaction().getStatus());
            item = new Item("anItem", "An item owned by someone");
            session.persist(item);
            // IMO the flush should not be necessary, but session.close() does not flush
            // and the item is not persisted.
            session.flush();
            session.close();
        } catch (Exception e) {
            ut.setRollbackOnly();
            throw e;
        } finally {
            if (ut.getStatus() == Status.STATUS_ACTIVE)
                ut.commit();
            else
                ut.rollback();
        }

        ut = (UserTransaction) ctx.lookup("UserTransaction");
        ut.begin();
        try {
            Session session = sessionFactory.openSession();
            assertEquals(TransactionStatus.ACTIVE, session.getTransaction().getStatus());
            Item found = (Item) session.load(Item.class, item.getId());
            Statistics stats = session.getSessionFactory().getStatistics();
            log.info(stats.toString());
            assertEquals(item.getDescription(), found.getDescription());
            assertEquals(0, stats.getSecondLevelCacheMissCount());
            assertEquals(1, stats.getSecondLevelCacheHitCount());
            session.delete(found);
            // IMO the flush should not be necessary, but session.close() does not flush
            // and the item is not deleted.
            session.flush();
            session.close();
        } catch (Exception e) {
            ut.setRollbackOnly();
            throw e;
        } finally {
            if (ut.getStatus() == Status.STATUS_ACTIVE)
                ut.commit();
            else
                ut.rollback();
        }

        ut = (UserTransaction) ctx.lookup("UserTransaction");
        ut.begin();
        try {
            Session session = sessionFactory.openSession();
            assertEquals(TransactionStatus.ACTIVE, session.getTransaction().getStatus());
            assertNull(session.get(Item.class, item.getId()));
            session.close();
        } catch (Exception e) {
            ut.setRollbackOnly();
            throw e;
        } finally {
            if (ut.getStatus() == Status.STATUS_ACTIVE)
                ut.commit();
            else
                ut.rollback();
        }
    } finally {
        if (sessionFactory != null)
            sessionFactory.close();
    }
}
From source file:org.infoglue.calendar.actions.ViewApplicationStateAction.java
License:Open Source License
/**
 * This action allows clearing of the given cache manually.
 */
public String doClearAllCaches() throws Exception {
    CacheController.clearCaches();

    Session session = this.getSession();
    session.getSessionFactory().evictQueries();
    session.getSessionFactory().evict(org.infoglue.calendar.entities.Calendar.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.Category.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.Event.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.EventType.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.EventTypeCategoryAttribute.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.EventCategory.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.Location.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.Participant.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.Resource.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.Entry.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.Role.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.Group.class);
    session.getSessionFactory().evict(org.infoglue.calendar.entities.Subscriber.class);

    return "cleared";
}
From source file:org.jasig.portal.events.aggr.PortalRawEventsAggregatorImpl.java
License:Apache License
private EventProcessingResult doAggregateRawEventsInternal() {
    if (!this.clusterLockService.isLockOwner(AGGREGATION_LOCK_NAME)) {
        throw new IllegalStateException("The cluster lock " + AGGREGATION_LOCK_NAME
                + " must be owned by the current thread and server");
    }

    if (!this.portalEventDimensionPopulator.isCheckedDimensions()) {
        //First time aggregation has happened, run populateDimensions to ensure enough dimension data exists
        final boolean populatedDimensions = this.portalEventAggregationManager.populateDimensions();
        if (!populatedDimensions) {
            this.logger.warn(
                    "Aborting raw event aggregation, populateDimensions returned false so the state of date/time dimensions is unknown");
            return null;
        }
    }

    //Flush any dimension creation before aggregation
    final EntityManager entityManager = this.getEntityManager();
    entityManager.flush();
    entityManager.setFlushMode(FlushModeType.COMMIT);

    final IEventAggregatorStatus eventAggregatorStatus = eventAggregationManagementDao
            .getEventAggregatorStatus(ProcessingType.AGGREGATION, true);

    //Update status with current server name
    final String serverName = this.portalInfoProvider.getUniqueServerName();
    final String previousServerName = eventAggregatorStatus.getServerName();
    if (previousServerName != null && !serverName.equals(previousServerName)) {
        this.logger.debug("Last aggregation run on {} clearing all aggregation caches", previousServerName);
        final Session session = getEntityManager().unwrap(Session.class);
        final Cache cache = session.getSessionFactory().getCache();
        cache.evictEntityRegions();
    }
    eventAggregatorStatus.setServerName(serverName);

    //Calculate date range for aggregation
    DateTime lastAggregated = eventAggregatorStatus.getLastEventDate();
    if (lastAggregated == null) {
        lastAggregated = portalEventDao.getOldestPortalEventTimestamp();

        //No portal events to aggregate, skip aggregation
        if (lastAggregated == null) {
            return new EventProcessingResult(0, null, null, true);
        }

        //First time aggregation has run, initialize the CLEAN_UNCLOSED status to save catch-up time
        final IEventAggregatorStatus cleanUnclosedStatus = eventAggregationManagementDao
                .getEventAggregatorStatus(ProcessingType.CLEAN_UNCLOSED, true);
        AggregationIntervalInfo oldestMinuteInterval = this.intervalHelper
                .getIntervalInfo(AggregationInterval.MINUTE, lastAggregated);
        cleanUnclosedStatus.setLastEventDate(oldestMinuteInterval.getStart().minusMinutes(1));
        eventAggregationManagementDao.updateEventAggregatorStatus(cleanUnclosedStatus);
    }

    final DateTime newestEventTime = DateTime.now().minus(this.aggregationDelay).secondOfMinute()
            .roundFloorCopy();

    final Thread currentThread = Thread.currentThread();
    final String currentName = currentThread.getName();
    final MutableInt events = new MutableInt();
    final MutableObject lastEventDate = new MutableObject(newestEventTime);

    boolean complete;
    try {
        currentThread.setName(currentName + "-" + lastAggregated + "_" + newestEventTime);

        logger.debug("Starting aggregation of events between {} (inc) and {} (exc)", lastAggregated,
                newestEventTime);

        //Do aggregation, capturing the start and end dates
        eventAggregatorStatus.setLastStart(DateTime.now());
        complete = portalEventDao.aggregatePortalEvents(lastAggregated, newestEventTime,
                this.eventAggregationBatchSize,
                new AggregateEventsHandler(events, lastEventDate, eventAggregatorStatus));
        eventAggregatorStatus.setLastEventDate((DateTime) lastEventDate.getValue());
        eventAggregatorStatus.setLastEnd(DateTime.now());
    } finally {
        currentThread.setName(currentName);
    }

    //Store the results of the aggregation
    eventAggregationManagementDao.updateEventAggregatorStatus(eventAggregatorStatus);

    complete = complete
            && (this.eventAggregationBatchSize <= 0 || events.intValue() < this.eventAggregationBatchSize);

    return new EventProcessingResult(events.intValue(), lastAggregated,
            eventAggregatorStatus.getLastEventDate(), complete);
}