List of usage examples for org.hibernate Session setDefaultReadOnly
void setDefaultReadOnly(boolean readOnly);
From source file:nl.strohalm.cyclos.struts.CyclosRequestProcessor.java
License:Open Source License
/**
 * Switches the thread-bound Hibernate session into a read-only, manually-flushed
 * mode for an include, reusing the JDBC connection already bound to the thread.
 * Does nothing when the request is marked as running without a transaction.
 */
private void openReadOnlyConnection(final HttpServletRequest request) {
    if (noTransaction(request)) {
        return;
    }
    logDebug(request, "Opening read-only transaction for include");
    // Both the JDBC connection and the session holder were bound to the
    // current thread earlier; fetch them from the synchronization manager.
    final Connection jdbcConnection =
            (Connection) TransactionSynchronizationManager.getResource(connectionProvider);
    final SessionHolder sessionHolder =
            (SessionHolder) TransactionSynchronizationManager.getResource(sessionFactory);
    final Session hibernateSession = sessionHolder.getSession();
    // MANUAL flush mode + default read-only: no dirty checking, no writes.
    hibernateSession.setFlushMode(FlushMode.MANUAL);
    hibernateSession.setDefaultReadOnly(true);
    hibernateSession.reconnect(jdbcConnection);
    TransactionSynchronizationManager.setCurrentTransactionReadOnly(true);
}
From source file:org.cobbzilla.wizard.dao.AbstractDAO.java
/**
 * Opens a brand-new Hibernate session whose entities are loaded in
 * read-only mode by default. The caller is responsible for closing it.
 *
 * @return a newly opened read-only {@link Session}
 */
public Session readOnlySession() {
    final Session readOnly = getHibernateTemplate().getSessionFactory().openSession();
    readOnly.setDefaultReadOnly(true);
    return readOnly;
}
From source file:org.eclipse.emf.cdo.server.internal.hibernate.HibernateStoreAccessor.java
License:Open Source License
/**
 * Performs the main write and update actions. Persists new EPackages, updates changed objects,
 * creates new ones and removes deleted objects. Updates both container as well as resource
 * associations.
 *
 * @param context the context contains the changed, new and to-be-removed objects
 * @param monitor used for coarse-grained progress reporting (begin/fork/done)
 */
@Override
public void doWrite(InternalCommitContext context, OMMonitor monitor) {
    // NOTE: the same flow is also present in the super class (StoreAccessor)
    // changes in flow can mean that the flow here also has to change
    monitor.begin(3);
    HibernateThreadContext.setCommitContext(context);
    if (context.getNewPackageUnits().length > 0) {
        writePackageUnits(context.getNewPackageUnits(), monitor.fork());
    }

    // Note: instead of an Async here, we could do much more fine-grained monitoring below. But this
    // simplistic solution is sufficient to prevent timeout errors.
    final Async async = monitor.forkAsync();
    HibernateThreadContext.getCommitContext().setInDoWrite(true);
    try {
        // start with fresh hibernate session to prevent side effects
        final Session session = context instanceof HibernateRawCommitContext ? getHibernateSession()
                : getNewHibernateSession(false);
        session.setDefaultReadOnly(false);

        // decrement version, hibernate will increment it
        decrementVersions(context);

        // order is 1) insert, 2) update and then delete
        // this order is the most stable! Do not change it without testing
        // System.err.println(getStore().getMappingXml());

        final List<InternalCDORevision> repairContainerIDs = new ArrayList<InternalCDORevision>();
        final List<InternalCDORevision> repairResourceIDs = new ArrayList<InternalCDORevision>();
        for (InternalCDORevision revision : context.getNewObjects()) {
            revision.setListPreserving();
            // keep track for which cdoRevisions the container id needs to be repaired afterwards
            final CDOID containerID = (CDOID) revision.getContainerID();
            if (containerID instanceof CDOIDTemp && !containerID.isNull()) {
                repairContainerIDs.add(revision);
            }
            final CDOID resourceID = revision.getResourceID();
            if (resourceID instanceof CDOIDTemp && !resourceID.isNull()) {
                repairResourceIDs.add(revision);
            }
            final String entityName = getStore().getEntityName(revision.getEClass());
            session.saveOrUpdate(entityName, revision);
        }

        // now apply all the changes: re-read each dirty revision and apply its delta
        if (context.getDirtyObjectDeltas() != null) {
            for (InternalCDORevisionDelta delta : context.getDirtyObjectDeltas()) {
                final String entityName = HibernateUtil.getInstance().getEntityName(delta.getID());
                final Serializable idValue = HibernateUtil.getInstance().getIdValue(delta.getID());
                final InternalCDORevision cdoRevision = (InternalCDORevision) session.get(entityName, idValue);
                cdoRevision.setListPreserving();
                delta.applyTo(cdoRevision);
            }
        }

        // preserve old behavior for the hibernate raw commit
        if (context instanceof HibernateRawCommitContext) {
            // now check the versions and store the hibernate revision to repair
            // versions later on. The versions can be updated when inserting new objects
            // this will result in a version difference when the object gets merged
            // this repair is done just before the merge
            final Map<CDOID, InternalCDORevision> existingRevisions = CDOIDUtil.createMap();
            for (InternalCDORevision revision : context.getDirtyObjects()) {
                final String entityName = HibernateUtil.getInstance().getEntityName(revision.getID());
                final Serializable idValue = HibernateUtil.getInstance().getIdValue(revision.getID());
                final InternalCDORevision cdoRevision = (InternalCDORevision) session.get(entityName, idValue);
                if (cdoRevision != null) {
                    // optimistic concurrency check: fail if someone else already updated
                    if (cdoRevision.getVersion() != revision.getVersion()) {
                        throw new IllegalStateException(
                                "Revision " + cdoRevision + " was already updated by another transaction");
                    }
                    existingRevisions.put(revision.getID(), cdoRevision);
                }
            }

            for (InternalCDORevision revision : context.getDirtyObjects()) {
                final String entityName = HibernateUtil.getInstance().getEntityName(revision.getID());
                // repair the version just before the merge (see comment above)
                final InternalCDORevision existingRevision = existingRevisions.get(revision.getID());
                if (existingRevision != null) {
                    revision.setVersion(existingRevision.getVersion());
                }
                final InternalCDORevision cdoRevision = (InternalCDORevision) session.merge(entityName, revision);
                if (getStore().isAuditing() && cdoRevision.getVersion() == revision.getVersion()) {
                    // do a direct update of the version in the db to get it in sync with
                    // hibernate, a special case, hibernate does not send the change back, do it ourselves
                    // only needs to be done in case of auditing
                    cdoRevision.setVersion(cdoRevision.getVersion() + 1);
                }
                if (TRACER.isEnabled()) {
                    TRACER.trace(
                            "Updated Object " + revision.getEClass().getName() + " id: " + revision.getID()); //$NON-NLS-1$ //$NON-NLS-2$
                }
            }
        }

        // and increment the versions stored in the context
        // note that this is needed because above the cdorevision read from the db
        // is updated and its version gets incremented, and not the revision currently
        // in the cache
        incrementVersions(context);

        session.flush();

        // delete all objects
        for (CDOID id : context.getDetachedObjects()) {
            try {
                final CDORevision revision = HibernateUtil.getInstance().getCDORevision(id);
                // maybe deleted in parallell?
                if (revision != null) {
                    session.delete(revision);
                }
            } catch (org.hibernate.ObjectNotFoundException ex) {
                // ignore these, an object can be removed through cascade deletes
            }
        }
        session.flush();

        // now do an update of the container without incrementing the version
        repairContainerIDs(repairContainerIDs, session);
        repairResourceIDs(repairResourceIDs, session);

        session.flush();

        // write the blobs; the stream encodes count, then per entry: id, size
        // (positive size => blob, negative size => clob of -size chars)
        ExtendedDataInputStream in = context.getLobs();
        if (in != null) {
            try {
                int count = in.readInt();
                for (int i = 0; i < count; i++) {
                    byte[] id = in.readByteArray();
                    long size = in.readLong();
                    if (size > 0) {
                        writeBlob(id, size, new LimitedInputStream(in, size));
                    } else {
                        writeClob(id, -size, new InputStreamReader(new LimitedInputStream(in, -size)));
                    }
                }
            } catch (IOException ex) {
                throw WrappedException.wrap(ex);
            }
        }

        session.flush();
    } catch (Exception e) {
        OM.LOG.error(e);
        throw WrappedException.wrap(e);
    } finally {
        HibernateThreadContext.getCommitContext().setInDoWrite(false);
        async.stop();
    }

    context.applyIDMappings(monitor.fork());
    monitor.done();
}
From source file:org.eclipse.emf.cdo.server.internal.hibernate.HibernateStoreAccessor.java
License:Open Source License
private HibernateStoreLob getCreateHibernateStoreLob(byte[] idBytes) { final String id = HexUtil.bytesToHex(idBytes); final Session session = getHibernateSession(); session.setDefaultReadOnly(false); HibernateStoreLob lob = (HibernateStoreLob) session.get(HibernateStoreLob.class, id); if (lob == null) { lob = new HibernateStoreLob(); lob.setId(id);//w w w. j a v a 2s .c om } return lob; }
From source file:org.grails.orm.hibernate.GrailsHibernateTemplate.java
License:Apache License
/** * Execute the action specified by the given action object within a Session. * * @param action callback object that specifies the Hibernate action * @param enforceNativeSession whether to enforce exposure of the native Hibernate Session to callback code * @return a result object returned by the action, or <code>null</code> * @throws org.springframework.dao.DataAccessException in case of Hibernate errors *///from w w w .j a v a2 s . c o m protected <T> T doExecute(HibernateCallback<T> action, boolean enforceNativeSession) throws DataAccessException { Assert.notNull(action, "Callback object must not be null"); Session session = getSession(); boolean existingTransaction = isSessionTransactional(session); if (existingTransaction) { LOG.debug("Found thread-bound Session for HibernateTemplate"); } FlushMode previousFlushMode = null; try { previousFlushMode = applyFlushMode(session, existingTransaction); if (shouldPassReadOnlyToHibernate()) { session.setDefaultReadOnly(true); } Session sessionToExpose = (enforceNativeSession || exposeNativeSession ? session : createSessionProxy(session)); T result = action.doInHibernate(sessionToExpose); flushIfNecessary(session, existingTransaction); return result; } catch (HibernateException ex) { throw convertHibernateAccessException(ex); } catch (SQLException ex) { throw jdbcExceptionTranslator.translate("Hibernate-related JDBC operation", null, ex); } catch (RuntimeException ex) { // Callback code threw application exception... throw ex; } finally { if (existingTransaction) { LOG.debug("Not closing pre-bound Hibernate Session after HibernateTemplate"); if (previousFlushMode != null) { session.setFlushMode(previousFlushMode); } } else { SessionFactoryUtils.closeSession(session); } } }
From source file:org.ow2.proactive.db.TransactionHelper.java
License:Open Source License
private <T> T tryExecuteTransaction(SessionWork<T> sessionWork, boolean readWriteTransaction, boolean readOnlyEntities) { Session session = sessionFactory.openSession(); session.setDefaultReadOnly(readOnlyEntities); try {//from ww w . j a v a 2s .c o m session.beginTransaction(); T result = sessionWork.doInTransaction(session); session.getTransaction().commit(); return result; } catch (Throwable e) { logger.warn("Database operation failed", e); if (readWriteTransaction) { try { session.getTransaction().rollback(); } catch (Throwable rollbackError) { logger.warn("Failed to rollback transaction", rollbackError); } } throw e; } finally { try { session.close(); } catch (HibernateException e) { logger.warn("Failed to close session", e); } } }
From source file:org.ow2.proactive.resourcemanager.db.RMDBManager.java
License:Open Source License
/**
 * Should be used for insert/update/delete queries.
 * Runs the work in its own session and transaction: commits on success,
 * rolls back and returns {@code null} on any failure (the error is logged,
 * never propagated). The session is always closed.
 */
private <T> T runWithTransaction(SessionWork<T> sessionWork, boolean readonly) {
    Session session = sessionFactory.openSession();
    Transaction transaction = null;
    try {
        session.setDefaultReadOnly(readonly);
        transaction = session.beginTransaction();
        T outcome = sessionWork.executeWork(session);
        transaction.commit();
        return outcome;
    } catch (Throwable failure) {
        if (transaction != null) {
            try {
                transaction.rollback();
            } catch (Throwable rollbackFailure) {
                logger.warn("Failed to rollback transaction", rollbackFailure);
            }
        }
        logger.warn("DB operation failed", failure);
        return null;
    } finally {
        try {
            session.close();
        } catch (Throwable closeFailure) {
            logger.warn("Failed to close session", closeFailure);
        }
    }
}
From source file:org.ow2.proactive.resourcemanager.db.RMDBManager.java
License:Open Source License
/** * Should be used for select queries/*w w w . ja v a2s.c o m*/ */ private <T> T runWithoutTransaction(SessionWork<T> sessionWork) { Session session = sessionFactory.openSession(); try { session.setDefaultReadOnly(true); T result = sessionWork.executeWork(session); return result; } catch (Throwable e) { logger.warn("DB operation failed", e); return null; } finally { try { session.close(); } catch (Throwable e) { logger.warn("Failed to close session", e); } } }
From source file:org.projectforge.database.XmlDump.java
License:Open Source License
/**
 * Verify the imported dump by re-reading every imported entity from the database
 * (in a read-only session) and comparing it with the object from the XML dump.
 * History entries are checked for existence of their referenced entity.
 *
 * @return Number of checked objects. This number is negative if any error occurs (at least one object wasn't imported successfully).
 */
public int verifyDump(final XStreamSavingConverter xstreamSavingConverter) {
    final SessionFactory sessionFactory = hibernate.getSessionFactory();
    Session session = null;
    boolean hasError = false;
    try {
        // read-only session: verification must never modify the database
        session = sessionFactory.openSession(EmptyInterceptor.INSTANCE);
        session.setDefaultReadOnly(true);
        int counter = 0;
        for (final Map.Entry<Class<?>, List<Object>> entry : xstreamSavingConverter.getAllObjects()
                .entrySet()) {
            final List<Object> objects = entry.getValue();
            final Class<?> entityClass = entry.getKey();
            if (objects == null) {
                continue;
            }
            for (final Object obj : objects) {
                if (HibernateUtils.isEntity(obj.getClass()) == false) {
                    continue;
                }
                final Serializable id = HibernateUtils.getIdentifier(obj);
                if (id == null) {
                    // Can't compare this object without identifier.
                    continue;
                }
                // log.info("Testing object: " + obj);
                final Object databaseObject = session.get(entityClass, id, LockOptions.READ);
                Hibernate.initialize(databaseObject);
                final boolean equals = equals(obj, databaseObject, true);
                if (equals == false) {
                    log.error("Object not sucessfully imported! xml object=[" + obj + "], data base=["
                            + databaseObject + "]");
                    hasError = true;
                }
                ++counter;
            }
        }
        // every history entry must reference an existing entity
        for (final HistoryEntry historyEntry : xstreamSavingConverter.getHistoryEntries()) {
            final Class<?> type = xstreamSavingConverter.getClassFromHistoryName(historyEntry.getClassName());
            final Object o = session.get(type, historyEntry.getEntityId());
            if (o == null) {
                log.error("A corrupted history entry found (entity of class '" + historyEntry.getClassName()
                        + "' with id + " + historyEntry.getEntityId() + " not found: " + historyEntry);
                hasError = true;
            }
            ++counter;
        }
        if (hasError == true) {
            log.fatal(
                    "*********** A inconsistency in the import was found! This may result in a data loss or corrupted data! Please retry the import. "
                            + counter + " entries checked.");
            // negative count signals "checked, but with errors" to the caller
            return -counter;
        }
        log.info("Data-base import successfully verified: " + counter + " entries checked.");
        return counter;
    } finally {
        if (session != null) {
            session.close();
        }
    }
}
From source file:org.projectforge.framework.persistence.database.XmlDump.java
License:Open Source License
/**
 * Verify the imported dump by re-reading every imported entity from the database
 * (in a read-only session) and comparing it with the object from the XML dump.
 * History entries are checked for existence of their referenced entity.
 *
 * @return Number of checked objects. This number is negative if any error occurs (at least one object wasn't imported
 *         successfully).
 */
public int verifyDump(final XStreamSavingConverter xstreamSavingConverter) {
    final SessionFactory sessionFactory = hibernate.getSessionFactory();
    Session session = null;
    boolean hasError = false;
    try {
        // read-only session: verification must never modify the database
        session = HibernateCompatUtils.openSession(sessionFactory, EmptyInterceptor.INSTANCE);
        session.setDefaultReadOnly(true);
        int counter = 0;
        for (final Map.Entry<Class<?>, List<Object>> entry : xstreamSavingConverter.getAllObjects()
                .entrySet()) {
            final List<Object> objects = entry.getValue();
            final Class<?> entityClass = entry.getKey();
            if (objects == null) {
                continue;
            }
            for (final Object obj : objects) {
                if (HibernateUtils.isEntity(obj.getClass()) == false) {
                    continue;
                }
                final Serializable id = HibernateUtils.getIdentifier(obj);
                if (id == null) {
                    // Can't compare this object without identifier.
                    continue;
                }
                // log.info("Testing object: " + obj);
                final Object databaseObject = session.get(entityClass, id, LockOptions.READ);
                Hibernate.initialize(databaseObject);
                final boolean equals = equals(obj, databaseObject, true);
                if (equals == false) {
                    log.error("Object not sucessfully imported! xml object=[" + obj + "], data base=["
                            + databaseObject + "]");
                    hasError = true;
                }
                ++counter;
            }
        }
        // every history entry should reference an existing entity; missing ones
        // are logged as warnings (orphaned entries) but still flagged as errors
        for (final HistoryEntry historyEntry : xstreamSavingConverter.getHistoryEntries()) {
            final Class<?> type = xstreamSavingConverter.getClassFromHistoryName(historyEntry.getEntityName());
            final Object o = type != null ? session.get(type, historyEntry.getEntityId()) : null;
            if (o == null) {
                log.warn("A corrupted history entry found (entity of class '" + historyEntry.getEntityName()
                        + "' with id " + historyEntry.getEntityId() + " not found: " + historyEntry
                        + ". This doesn't affect the functioning of ProjectForge, this may result in orphaned history entries.");
                hasError = true;
            }
            ++counter;
        }
        if (hasError == true) {
            log.fatal(
                    "*********** A inconsistency in the import was found! This may result in a data loss or corrupted data! Please retry the import. "
                            + counter + " entries checked.");
            // negative count signals "checked, but with errors" to the caller
            return -counter;
        }
        log.info("Data-base import successfully verified: " + counter + " entries checked.");
        return counter;
    } finally {
        if (session != null) {
            session.close();
        }
    }
}