List of usage examples for org.hibernate Session getFlushMode
@Override FlushModeType getFlushMode();

In Hibernate 5.2 and later, Session extends the JPA EntityManager, so getFlushMode() overrides EntityManager.getFlushMode() and returns FlushModeType; the Hibernate-native mode is exposed separately as getHibernateFlushMode(). In earlier versions, which most of the examples below target, getFlushMode() returns org.hibernate.FlushMode directly.
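Most of the examples below follow the same pattern: capture the current flush mode with getFlushMode(), switch to FlushMode.MANUAL (or ALWAYS) for a bounded piece of work, and restore the saved mode in a finally block. A minimal sketch of that pattern against the pre-5.2 API (the class and method names here are illustrative, not taken from the sources below):

import java.util.concurrent.Callable;

import org.hibernate.FlushMode;
import org.hibernate.Session;

public final class FlushModeScope {

    private FlushModeScope() {
    }

    // Runs the given work with the session in MANUAL flush mode and restores
    // whatever flush mode the session had before, even if the work fails.
    public static <T> T withManualFlush(Session session, Callable<T> work) throws Exception {
        FlushMode previous = session.getFlushMode(); // remember the current mode
        session.setFlushMode(FlushMode.MANUAL);      // suppress automatic flushes
        try {
            return work.call();
        } finally {
            session.setFlushMode(previous);          // always restore the original mode
        }
    }
}

Typical usage would be FlushModeScope.withManualFlush(session, () -> dao.loadSomething()); any writes made inside the callback are only sent to the database when the caller flushes explicitly.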
From source file:au.com.nicta.ct.db.CtManualFlush.java
License:Open Source License
public CtManualFlush(Session s) {
    this.s = s;
    fm = s.getFlushMode();
    s.setFlushMode(FlushMode.MANUAL);
}
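The constructor above records the session's previous flush mode in fm, but the quoted snippet does not show it being restored. A plausible companion method and usage, hypothetical and not part of the quoted source, might look like this:

// Hypothetical companion to the constructor above (not in the quoted source):
// puts the session back into the flush mode captured in 'fm'.
public void restore() {
    s.setFlushMode(fm);
}

// Illustrative usage:
CtManualFlush manual = new CtManualFlush(session);
try {
    // ... batch work without automatic flushing ...
    session.flush(); // flush explicitly when ready
} finally {
    manual.restore();
}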
From source file:com.amalto.core.storage.hibernate.HibernateStorage.java
License:Open Source License
@SuppressWarnings("rawtypes") @Override//from w w w.ja va2s. c om public void delete(Expression userQuery) { Session session = this.getCurrentSession(); try { storageClassLoader.bind(Thread.currentThread()); // Session session = factory.getCurrentSession(); userQuery = userQuery.normalize(); // First do a normalize for correct optimization detection. // Check if optimized delete for one type (and no filter) is applicable if (userQuery instanceof Select) { Select select = (Select) userQuery; List<ComplexTypeMetadata> types = select.getTypes(); if (types.size() == 1 && select.getCondition() == null) { FlushMode previousFlushMode = session.getFlushMode(); try { session.setFlushMode(FlushMode.ALWAYS); // Force Hibernate to actually send SQL query to // database during delete. ComplexTypeMetadata mainType = types.get(0); TypeMapping mapping = mappingRepository.getMappingFromUser(mainType); // Compute (and eventually sort) types to delete List<ComplexTypeMetadata> typesToDelete; MetadataRepository internalRepository = typeMappingRepository.getInternalRepository(); if (mapping instanceof ScatteredTypeMapping) { MetadataVisitor<List<ComplexTypeMetadata>> transitiveClosure = new TypeTransitiveClosure(); List<ComplexTypeMetadata> typeClosure = mapping.getDatabase().accept(transitiveClosure); typesToDelete = MetadataUtils.sortTypes(internalRepository, typeClosure); } else { Collection<ComplexTypeMetadata> subTypes = mapping.getDatabase().getSubTypes(); if (subTypes.isEmpty()) { typesToDelete = Collections.singletonList(mapping.getDatabase()); } else { typesToDelete = new ArrayList<ComplexTypeMetadata>(subTypes.size() + 1); typesToDelete.add(mapping.getDatabase()); typesToDelete.addAll(subTypes); } } Map<ComplexTypeMetadata, Map<String, List>> recordsToDeleteMap = new HashMap<ComplexTypeMetadata, Map<String, List>>(); for (ComplexTypeMetadata typeToDelete : typesToDelete) { InboundReferences inboundReferences = new InboundReferences(typeToDelete); Set<ReferenceFieldMetadata> references = internalRepository.accept(inboundReferences); // Empty values from intermediate tables to this non instantiable type and unset inbound // references if (typeToDelete.equals(mainType)) { for (ReferenceFieldMetadata reference : references) { if (reference.isMany()) { // No need to check for mandatory collections of references since constraint // cannot be expressed in db schema String formattedTableName = tableResolver.getCollectionTable(reference); session.createSQLQuery("delete from " + formattedTableName).executeUpdate(); //$NON-NLS-1$ } else { String referenceTableName = tableResolver .get(reference.getContainingType()); if (referenceTableName.startsWith("X_ANONYMOUS")) { //$NON-NLS-1$ session.createSQLQuery("delete from " + referenceTableName) //$NON-NLS-1$ .executeUpdate(); } } } } else { for (ReferenceFieldMetadata reference : references) { if (reference.getContainingType().equals(mainType)) { HashMap<String, List> fieldsCondition = new HashMap<>(); if (reference.isMany()) { // No need to check for mandatory collections of references since constraint // cannot // be expressed in db schema String formattedTableName = tableResolver.getCollectionTable(reference); session.createSQLQuery("delete from " + formattedTableName) //$NON-NLS-1$ .executeUpdate(); } else { String referenceTableName = tableResolver .get(reference.getContainingType()); if (reference.getReferencedField() instanceof CompoundFieldMetadata) { FieldMetadata[] fields = ((CompoundFieldMetadata) reference .getReferencedField()).getFields(); for 
(FieldMetadata field : fields) { List list = session.createSQLQuery("select " //$NON-NLS-1$ + tableResolver.get(field, reference.getName()) + " from " //$NON-NLS-1$ + referenceTableName).list(); if (list == null || list.isEmpty()) { continue; } else { fieldsCondition.put( tableResolver.get(reference.getReferencedField()), list); } } } else { List list = session.createSQLQuery("select " //$NON-NLS-1$ + tableResolver.get(reference.getReferencedField(), reference.getName()) + " from " + referenceTableName).list(); //$NON-NLS-1$ if (list == null || list.isEmpty()) { continue; } else { fieldsCondition.put( tableResolver.get(reference.getReferencedField()), list); } } recordsToDeleteMap.put(typeToDelete, fieldsCondition); } } } } } deleteData(mapping.getDatabase(), new HashMap<String, List>(), mapping); for (Map.Entry<ComplexTypeMetadata, Map<String, List>> entry : recordsToDeleteMap .entrySet()) { // Empty values in type isMany=true reference deleteData(entry.getKey(), entry.getValue(), mapping); } } finally { session.setFlushMode(previousFlushMode); } return; } } // Generic fall back for deletions (filter) if (userQuery instanceof Select) { ((Select) userQuery).setForUpdate(true); } Iterable<DataRecord> records = internalFetch(session, userQuery, Collections.<ResultsCallback>emptySet()); for (DataRecord currentDataRecord : records) { ComplexTypeMetadata currentType = currentDataRecord.getType(); List<ComplexTypeMetadata> types = new ArrayList<>(); if (userQuery instanceof Select) { types.addAll(((Select) userQuery).getTypes()); } if (types.isEmpty() || types.contains(currentType)) { TypeMapping mapping = mappingRepository.getMappingFromUser(currentType); if (mapping == null) { throw new IllegalArgumentException( "Type '" + currentType.getName() + "' does not have a database mapping."); //$NON-NLS-1$ //$NON-NLS-2$ } Class<?> clazz = storageClassLoader.getClassFromType(mapping.getDatabase()); Serializable idValue; Collection<FieldMetadata> keyFields = currentType.getKeyFields(); if (keyFields.size() == 1) { idValue = (Serializable) currentDataRecord.get(keyFields.iterator().next()); } else { List<Object> compositeIdValues = new LinkedList<Object>(); for (FieldMetadata keyField : keyFields) { compositeIdValues.add(currentDataRecord.get(keyField)); } idValue = ObjectDataRecordConverter.createCompositeId(storageClassLoader, clazz, compositeIdValues); } Wrapper object = (Wrapper) session.get(clazz, idValue, LockOptions.READ); if (object != null) { session.delete(object); } else { LOGGER.warn("Instance of type '" + currentType.getName() + "' and ID '" + idValue.toString() //$NON-NLS-1$ //$NON-NLS-2$ + "' has already been deleted within same transaction."); //$NON-NLS-1$ } } } } catch (ConstraintViolationException e) { throw new com.amalto.core.storage.exception.ConstraintViolationException(e); } catch (HibernateException e) { throw new RuntimeException(e); } finally { this.releaseSession(); storageClassLoader.unbind(Thread.currentThread()); } }
From source file:com.booleanworks.kryptopterus.application.MainHibernateUtil.java
License:Apache License
public Object saveOrUpdate(Object object, Session session) {
    System.out.println("com.booleanworks.kryptopterus.application.MainHibernateUtil.saveOrUpdate()");
    System.out.println("session => " + session.hashCode());
    Object result = null;
    if (session == null || !session.isConnected() || !session.isOpen()) {
        session = this.getResidentSession();
    }
    if (session.isJoinedToTransaction()) {
        session.saveOrUpdate(object);
        if (!(session.getTransaction().getStatus() == TransactionStatus.MARKED_ROLLBACK)) {
            session.flush();
        }
        result = session.get(object.getClass(), session.getIdentifier(object));
    } else {
        Transaction transaction = this.beginTransaction(session, false);
        session.saveOrUpdate(object);
        if (!transaction.getRollbackOnly() && session.getFlushMode() != FlushModeType.AUTO) {
            session.flush();
        }
        result = session.get(object.getClass(), session.getIdentifier(object));
        this.commitTransaction(session, transaction);
    }
    return result;
}
From source file:com.byteslounge.spring.tx.MyOwnTxManager.java
License:Apache License
@Override
protected void doBegin(Object transaction, TransactionDefinition definition) {
    HibernateTransactionObject txObject = (HibernateTransactionObject) transaction;
    if (txObject.hasConnectionHolder() && !txObject.getConnectionHolder().isSynchronizedWithTransaction()) {
        throw new IllegalTransactionStateException(
                "Pre-bound JDBC Connection found! HibernateTransactionManager does not support "
                        + "running within DataSourceTransactionManager if told to manage the DataSource itself. "
                        + "It is recommended to use a single HibernateTransactionManager for all transactions "
                        + "on a single DataSource, no matter whether Hibernate or JDBC access.");
    }
    Session session = null;
    try {
        if (txObject.getSessionHolder() == null || txObject.getSessionHolder().isSynchronizedWithTransaction()) {
            Session newSession = getSessionFactory().openSession();
            if (logger.isDebugEnabled()) {
                logger.debug("Opened new Session [" + newSession + "] for Hibernate transaction");
            }
            txObject.setSession(newSession);
        }
        session = txObject.getSessionHolder().getSession();
        if (this.prepareConnection && isSameConnectionForEntireSession(session)) {
            // We're allowed to change the transaction settings of the JDBC Connection.
            if (logger.isDebugEnabled()) {
                logger.debug("Preparing JDBC Connection of Hibernate Session [" + session + "]");
            }
            Connection con = ((SessionImplementor) session).connection();
            if (con.isClosed()) {
                System.out.println("Connection closed by exception");
            }
            Integer previousIsolationLevel = DataSourceUtils.prepareConnectionForTransaction(con, definition);
            txObject.setPreviousIsolationLevel(previousIsolationLevel);
        } else {
            // Not allowed to change the transaction settings of the JDBC Connection.
            if (definition.getIsolationLevel() != TransactionDefinition.ISOLATION_DEFAULT) {
                // We should set a specific isolation level but are not allowed to...
                throw new InvalidIsolationLevelException(
                        "HibernateTransactionManager is not allowed to support custom isolation levels: "
                                + "make sure that its 'prepareConnection' flag is on (the default) and that the "
                                + "Hibernate connection release mode is set to 'on_close' (SpringTransactionFactory's default).");
            }
            if (logger.isDebugEnabled()) {
                logger.debug("Not preparing JDBC Connection of Hibernate Session [" + session + "]");
            }
        }
        if (definition.isReadOnly() && txObject.isNewSession()) {
            // Just set to NEVER in case of a new Session for this transaction.
            session.setFlushMode(FlushMode.MANUAL);
        }
        if (!definition.isReadOnly() && !txObject.isNewSession()) {
            // We need AUTO or COMMIT for a non-read-only transaction.
            FlushMode flushMode = session.getFlushMode();
            if (FlushMode.isManualFlushMode(session.getFlushMode())) {
                session.setFlushMode(FlushMode.AUTO);
                txObject.getSessionHolder().setPreviousFlushMode(flushMode);
            }
        }
        Transaction hibTx;
        // Register transaction timeout.
        int timeout = determineTimeout(definition);
        if (timeout != TransactionDefinition.TIMEOUT_DEFAULT) {
            // Use Hibernate's own transaction timeout mechanism on Hibernate 3.1+
            // Applies to all statements, also to inserts, updates and deletes!
            hibTx = session.getTransaction();
            hibTx.setTimeout(timeout);
            hibTx.begin();
        } else {
            // Open a plain Hibernate transaction without specified timeout.
            hibTx = session.beginTransaction();
        }
        // Add the Hibernate transaction to the session holder.
        txObject.getSessionHolder().setTransaction(hibTx);
        // Register the Hibernate Session's JDBC Connection for the DataSource, if set.
        if (getDataSource() != null) {
            Connection con = ((SessionImplementor) session).connection();
            ConnectionHolder conHolder = new ConnectionHolder(con);
            if (timeout != TransactionDefinition.TIMEOUT_DEFAULT) {
                conHolder.setTimeoutInSeconds(timeout);
            }
            if (logger.isDebugEnabled()) {
                logger.debug("Exposing Hibernate transaction as JDBC transaction [" + con + "]");
            }
            TransactionSynchronizationManager.bindResource(getDataSource(), conHolder);
            txObject.setConnectionHolder(conHolder);
        }
        // Bind the session holder to the thread.
        if (txObject.isNewSessionHolder()) {
            TransactionSynchronizationManager.bindResource(getSessionFactory(), txObject.getSessionHolder());
        }
        txObject.getSessionHolder().setSynchronizedWithTransaction(true);
    } catch (Exception ex) {
        if (txObject.isNewSession()) {
            try {
                if (session.getTransaction().isActive()) {
                    session.getTransaction().rollback();
                }
            } catch (Throwable ex2) {
                logger.debug("Could not rollback Session after failed transaction begin", ex);
            } finally {
                SessionFactoryUtils.closeSession(session);
            }
        }
        throw new CannotCreateTransactionException("Could not open Hibernate Session for transaction", ex);
    }
}
From source file:com.googlecode.hibernate.audit.synchronization.AuditSynchronization.java
License:Open Source License
private void executeInSession(Session session) {
    if (log.isDebugEnabled()) {
        log.debug("executeInSession begin");
    }
    try {
        AuditWorkUnit workUnit;
        SortedSet<AuditLogicalGroup> auditLogicalGroups = new TreeSet<AuditLogicalGroup>(
                new Comparator<AuditLogicalGroup>() {
                    // sort audit logical groups in order to minimize
                    // database dead lock conditions.
                    public int compare(AuditLogicalGroup o1, AuditLogicalGroup o2) {
                        // note that both entities should already be
                        // persistent so they must have ids
                        return o1.getId().compareTo(o2.getId());
                    };
                });
        AuditTransaction auditTransaction = new AuditTransaction();
        auditTransaction.setTimestamp(new Date());
        Principal principal = auditConfiguration.getExtensionManager().getSecurityInformationProvider()
                .getPrincipal();
        auditTransaction.setUsername(principal == null ? null : principal.getName());
        if (log.isDebugEnabled()) {
            log.debug("start workUnits perform");
        }
        while ((workUnit = workUnits.poll()) != null) {
            workUnit.perform(session, auditConfiguration, auditTransaction);
            auditLogicalGroups.addAll(workUnit.getAuditLogicalGroups());
        }
        if (log.isDebugEnabled()) {
            log.debug("end workUnits perform");
        }
        List<AuditTransactionAttribute> attributes = auditConfiguration.getExtensionManager()
                .getAuditTransactionAttributeProvider().getAuditTransactionAttributes(session);
        if (attributes != null && !attributes.isEmpty()) {
            for (AuditTransactionAttribute attribute : attributes) {
                attribute.setAuditTransaction(auditTransaction);
            }
            auditTransaction.getAuditTransactionAttributes().addAll(attributes);
        }
        concurrencyModificationCheck(session, auditLogicalGroups, auditTransaction);
        session.save(auditTransaction);
        for (AuditLogicalGroup storedAuditLogicalGroup : auditLogicalGroups) {
            storedAuditLogicalGroup.setLastUpdatedAuditTransactionId(auditTransaction.getId());
        }
        if (!FlushMode.isManualFlushMode(session.getFlushMode())) {
            session.flush();
        }
    } finally {
        if (log.isDebugEnabled()) {
            log.debug("executeInSession end");
        }
    }
}
From source file:com.vmware.bdd.dal.DAL.java
License:Open Source License
/**
 * Helper routine for wrapping a piece of code in a Hibernate transaction.
 *
 * @param obj -- the body of the transaction.
 * @param readOnly -- true if the writes are to be disallowed
 * @param retriesLeft -- the max number of times to retry on lock-acquisition exceptions.
 *                       0 if retries are to be disallowed.
 */
@SuppressWarnings("deprecation")
private static <T> T inTransactionDoWork(Saveable<T> obj, boolean readOnly, int retriesLeft) {
    T retval;
    while (true) {
        Session sn = getSession();
        Transaction tx = null;
        FlushMode flushMode = null;
        boolean doRndRollback = ConfigInfo.isDebugEnabled() && stressTxnRollback && (rnd.nextInt() % 5) == 0;
        AuAssert.check(!isInTransaction()); // Disallow nesting for now.
        try {
            tx = sn.beginTransaction();
            if (readOnly && tx != null) {
                flushMode = sn.getFlushMode();
                sn.setFlushMode(FlushMode.MANUAL);
            }
            sn.connection().setReadOnly(readOnly);
            retval = obj.body();
            if (doRndRollback) {
                logger.warn("randomly rollback the transaction");
                throw new LockAcquisitionException("Random Rollback", new SQLException("Random Rollback"));
            }
            if (flushMode != null) {
                sn.setFlushMode(flushMode);
            }
            tx.commit();
            break; // must come right after commit
        } catch (Throwable ex) {
            if (tx != null) {
                if (flushMode != null) {
                    sn.setFlushMode(flushMode);
                }
                tx.rollback();
                flushTransactionCallbacks(false);
            }
            // Strip off the BddException wrapper if a callee added it.
            Throwable realEx = (ex instanceof BddException) ? ex.getCause() : ex;
            if (isRetryable(realEx)) {
                if (retriesLeft > 0) {
                    if (!doRndRollback) {
                        retriesLeft--;
                        reportRetry(retriesLeft, realEx);
                    }
                } else {
                    throw TxRetryException.wrap(realEx, doRndRollback);
                }
            } else if (isUniqViolation(realEx)) {
                throw UniqueConstraintViolationException.wrap((ConstraintViolationException) realEx);
            } else {
                throw BddException.wrapIfNeeded(ex, "Exception in a DAL transaction");
            }
        }
    }
    flushTransactionCallbacks(true);
    return retval;
}
From source file:com.wideplay.warp.persist.hibernate.ReadOnlyTransactionsTest.java
License:Apache License
@Test
public void testReadOnlyTxRestoresSessionFlushMode() {
    final ReadOnlyTransactionalObject txnal = injector.getInstance(ReadOnlyTransactionalObject.class);
    Session session = txnal.runReadOnlyTxnAndReturnSession();
    // because the session gets closed in UnitOfWork.TRANSACTION,
    // we do NOT reset the flushmode in the interceptor
    Assert.assertTrue(session.getFlushMode() == FlushMode.MANUAL,
            "FlushMode has been reset with UnitOfWork.TRANSACTION and read-only transactions, "
                    + "this means the session was not closed!");
}
From source file:gov.nih.nci.caarray.validation.UniqueConstraintValidator.java
License:BSD License
/**
 * {@inheritDoc}
 */
@SuppressWarnings({ "PMD.CyclomaticComplexity", "PMD.ExcessiveMethodLength" })
public boolean isValid(final Object o) {
    UnfilteredCallback unfilteredCallback = new UnfilteredCallback() {
        public Object doUnfiltered(Session s) {
            FlushMode fm = s.getFlushMode();
            try {
                s.setFlushMode(FlushMode.MANUAL);
                Class<?> classWithConstraint = findClassDeclaringConstraint(
                        hibernateHelper.unwrapProxy(o).getClass());
                Criteria crit = s.createCriteria(classWithConstraint);
                ClassMetadata metadata = hibernateHelper.getSessionFactory()
                        .getClassMetadata(classWithConstraint);
                for (UniqueConstraintField field : uniqueConstraint.fields()) {
                    Object fieldVal = metadata.getPropertyValue(o, field.name(), EntityMode.POJO);
                    if (fieldVal == null) {
                        if (field.nullsEqual()) {
                            // nulls are equal, so add it to the criteria
                            crit.add(Restrictions.isNull(field.name()));
                        } else {
                            // nulls are never equal, so uniqueness is automatically satisfied
                            return true;
                        }
                    } else {
                        // special casing for entity-type properties - only include them in the criteria
                        // if they are already persistent; otherwise, short-circuit the process and return
                        // true immediately since if the entity-type property is not persistent then it will
                        // be a new value and thus different from any currently in the db, thus satisfying
                        // uniqueness
                        ClassMetadata fieldMetadata = hibernateHelper.getSessionFactory()
                                .getClassMetadata(hibernateHelper.unwrapProxy(fieldVal).getClass());
                        if (fieldMetadata == null || fieldMetadata.getIdentifier(fieldVal, EntityMode.POJO) != null) {
                            crit.add(Restrictions.eq(field.name(),
                                    ReflectHelper.getGetter(o.getClass(), field.name()).get(o)));
                        } else {
                            return true;
                        }
                    }
                }
                // if object is already persistent, then add condition to exclude it matching itself
                Object id = metadata.getIdentifier(o, EntityMode.POJO);
                if (id != null) {
                    crit.add(Restrictions.ne(metadata.getIdentifierPropertyName(), id));
                }
                int numMatches = crit.list().size();
                return numMatches == 0;
            } finally {
                s.setFlushMode(fm);
            }
        }
    };
    return (Boolean) hibernateHelper.doUnfiltered(unfilteredCallback);
}
From source file:org.alfresco.repo.workflow.jbpm.JBPMEngine.java
License:Open Source License
@SuppressWarnings("unchecked") public List<WorkflowInstance> cancelWorkflows(final List<String> workflowIds) { return (List<WorkflowInstance>) jbpmTemplate.execute(new JbpmCallback() { public Object doInJbpm(JbpmContext context) { // Bypass the cache making sure not to flush it Session session = context.getSession(); CacheMode cacheMode = session.getCacheMode(); FlushMode flushMode = session.getFlushMode(); session.setCacheMode(CacheMode.GET); session.setFlushMode(FlushMode.MANUAL); try { List<WorkflowInstance> workflowInstances = new ArrayList<WorkflowInstance>(workflowIds.size()); Map<String, ProcessInstance> processInstances = new HashMap<String, ProcessInstance>( workflowIds.size() * 2); GraphSession graphSession = context.getGraphSession(); // retrieve and cancel process instances for (String workflowId : workflowIds) { try { ProcessInstance processInstance = getProcessInstance(graphSession, workflowId); processInstance.getContextInstance().setVariable("cancelled", true); processInstance.end(); processInstances.put(workflowId, processInstance); } catch (JbpmException e) { String msg = messageService.getMessage(ERR_CANCEL_WORKFLOW, workflowId); throw new WorkflowException(msg, JbpmAccessor.convertJbpmException(e)); }//from w w w .j av a 2 s . c o m } // Flush at the end of the batch session.flush(); for (String workflowId : workflowIds) { try { // retrieve process instance ProcessInstance processInstance = processInstances.get(workflowId); // TODO: Determine if this is the most appropriate way to cancel workflow... // It might be useful to record point at which it was cancelled etc try { workflowInstances.add(createWorkflowInstance(processInstance)); } catch (Exception ex) { logger.warn("Unable to load workflow instance: '" + processInstance + "' due to exception.", ex); } // delete the process instance graphSession.deleteProcessInstance(processInstance, true, true); } catch (JbpmException e) { String msg = messageService.getMessage(ERR_CANCEL_WORKFLOW, workflowId); throw new WorkflowException(msg, JbpmAccessor.convertJbpmException(e)); } } // Flush at the end of the batch session.flush(); return workflowInstances; } finally { session.setCacheMode(cacheMode); session.setFlushMode(flushMode); } } }); }
From source file:org.beangle.commons.orm.hibernate.HibernateTransactionManager.java
License:Open Source License
@Override
protected void doBegin(Object transaction, TransactionDefinition definition) {
    HibernateTransactionObject txObject = (HibernateTransactionObject) transaction;
    if (txObject.hasConnectionHolder() && !txObject.getConnectionHolder().isSynchronizedWithTransaction()) {
        throw new IllegalTransactionStateException(
                "Pre-bound JDBC Connection found! HibernateTransactionManager does not support "
                        + "running within DataSourceTransactionManager if told to manage the DataSource itself. "
                        + "It is recommended to use a single HibernateTransactionManager for all transactions "
                        + "on a single DataSource, no matter whether Hibernate or JDBC access.");
    }
    Session session = null;
    try {
        if (txObject.getSessionHolder() == null || txObject.getSessionHolder().isSynchronizedWithTransaction()) {
            Interceptor entityInterceptor = getEntityInterceptor();
            Session newSession = (entityInterceptor != null ? getSessionFactory().openSession(entityInterceptor)
                    : getSessionFactory().openSession());
            if (logger.isDebugEnabled()) {
                logger.debug("Opened new Session [" + SessionUtils.toString(newSession)
                        + "] for Hibernate transaction");
            }
            txObject.setSession(newSession);
        }
        session = txObject.getSessionHolder().getSession();
        if (this.prepareConnection && isSameConnectionForEntireSession(session)) {
            // We're allowed to change the transaction settings of the JDBC Connection.
            if (logger.isDebugEnabled()) {
                logger.debug("Preparing JDBC Connection of Hibernate Session ["
                        + SessionUtils.toString(session) + "]");
            }
            @SuppressWarnings("deprecation")
            Connection con = session.connection();
            Integer previousIsolationLevel = DataSourceUtils.prepareConnectionForTransaction(con, definition);
            txObject.setPreviousIsolationLevel(previousIsolationLevel);
        } else {
            // Not allowed to change the transaction settings of the JDBC Connection.
            if (definition.getIsolationLevel() != TransactionDefinition.ISOLATION_DEFAULT) {
                // We should set a specific isolation level but are not allowed to...
                throw new InvalidIsolationLevelException(
                        "HibernateTransactionManager is not allowed to support custom isolation levels: "
                                + "make sure that its 'prepareConnection' flag is on (the default) and that the "
                                + "Hibernate connection release mode is set to 'on_close' (BeangleTransactionFactory's default). "
                                + "Make sure that your SessionFactoryBean actually uses BeangleTransactionFactory: Your "
                                + "Hibernate properties should *not* include a 'hibernate.transaction.factory_class' property!");
            }
            if (logger.isDebugEnabled()) {
                logger.debug("Not preparing JDBC Connection of Hibernate Session ["
                        + SessionUtils.toString(session) + "]");
            }
        }
        if (definition.isReadOnly() && txObject.isNewSession()) {
            // Just set to NEVER in case of a new Session for this transaction.
            session.setFlushMode(FlushMode.MANUAL);
        }
        if (!definition.isReadOnly() && !txObject.isNewSession()) {
            // We need AUTO or COMMIT for a non-read-only transaction.
            FlushMode flushMode = session.getFlushMode();
            if (flushMode.lessThan(FlushMode.COMMIT)) {
                session.setFlushMode(FlushMode.AUTO);
                txObject.getSessionHolder().setPreviousFlushMode(flushMode);
            }
        }
        Transaction hibTx;
        // Register transaction timeout.
        int timeout = determineTimeout(definition);
        if (timeout != TransactionDefinition.TIMEOUT_DEFAULT) {
            // Use Hibernate's own transaction timeout mechanism on Hibernate 3.1+
            // Applies to all statements, also to inserts, updates and deletes!
            hibTx = session.getTransaction();
            hibTx.setTimeout(timeout);
            hibTx.begin();
        } else {
            // Open a plain Hibernate transaction without specified timeout.
            hibTx = session.beginTransaction();
        }
        // Add the Hibernate transaction to the session holder.
        txObject.getSessionHolder().setTransaction(hibTx);
        // Register the Hibernate Session's JDBC Connection for the DataSource, if set.
        if (getDataSource() != null) {
            @SuppressWarnings("deprecation")
            Connection con = session.connection();
            ConnectionHolder conHolder = new ConnectionHolder(con);
            if (timeout != TransactionDefinition.TIMEOUT_DEFAULT) {
                conHolder.setTimeoutInSeconds(timeout);
            }
            if (logger.isDebugEnabled()) {
                logger.debug("Exposing Hibernate transaction as JDBC transaction [" + con + "]");
            }
            TransactionSynchronizationManager.bindResource(getDataSource(), conHolder);
            txObject.setConnectionHolder(conHolder);
        }
        // Bind the session holder to the thread.
        if (txObject.isNewSessionHolder()) {
            TransactionSynchronizationManager.bindResource(getSessionFactory(), txObject.getSessionHolder());
        }
        txObject.getSessionHolder().setSynchronizedWithTransaction(true);
    } catch (Exception ex) {
        if (txObject.isNewSession()) {
            try {
                if (session.getTransaction().isActive()) {
                    session.getTransaction().rollback();
                }
            } catch (Throwable ex2) {
                logger.debug("Could not rollback Session after failed transaction begin", ex);
            } finally {
                SessionUtils.closeSession(session);
            }
        }
        throw new CannotCreateTransactionException("Could not open Hibernate Session for transaction", ex);
    }
}