List of usage examples for org.hibernate Session setReadOnly
void setReadOnly(Object entityOrProxy, boolean readOnly);
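Before the project examples, a minimal sketch of the call itself: setReadOnly(entityOrProxy, readOnly) toggles dirty checking for a single entity already associated with the session, while setDefaultReadOnly (and Query.setReadOnly) controls the default for entities loaded afterwards. The Item entity, the itemId value, and the session wiring below are hypothetical placeholders, not taken from any of the projects listed here.

// Minimal usage sketch; Item, itemId and sessionFactory are hypothetical placeholders.
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();
try {
    // Entities loaded from here on default to read-only in this session.
    session.setDefaultReadOnly(true);

    Item item = (Item) session.get(Item.class, itemId);
    // Explicit per-entity toggle: a read-only entity is skipped by dirty checking,
    // so in-memory changes to it are never flushed as UPDATE statements.
    session.setReadOnly(item, true);

    // Switch the same entity back to normal read-write behaviour if needed.
    session.setReadOnly(item, false);

    tx.commit();
} finally {
    session.close();
}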
From source file:com.mercatis.lighthouse3.persistence.commons.hibernate.UnitOfWorkImplementation.java
License:Apache License
public void setReadOnly(Object o, boolean readOnly) {
    Session session = threadLocalSessions.get();
    if (session == null) {
        return;
    }
    if (session.contains(o) && session.isReadOnly(o) != readOnly) {
        if (log.isDebugEnabled()) {
            log.debug("UnitOfWork: SetReadOnly: " + readOnly + ", Session: "
                    + System.identityHashCode(session));
        }
        session.setReadOnly(o, readOnly);
    }
}
From source file:de.innovationgate.webgate.api.jdbc.filehandling.CS5P4FileHandling.java
License:Open Source License
@Override
public long dailyFileMaintenance(Logger log) throws WGAPIException {
    Session session = getParent().getSession();
    try {
        session.setDefaultReadOnly(false);
        long freedMemory = 0;
        int deletedDuplicates = 0;

        // Remove duplicate file contents
        String hql = "select cfc.checksumSha512 as checksum from ContentFileContent as cfc group by cfc.checksumSha512 having count(*) > 1";
        @SuppressWarnings("unchecked")
        Iterator<String> duplicateChecksums = session.createQuery(hql).iterate();
        while (duplicateChecksums.hasNext()) {
            String duplicaceChecksum = duplicateChecksums.next();
            hql = "select cfc.id as id from ContentFileContent as cfc where cfc.checksumSha512 = :checksum order by cfc.ordinalnr asc";
            Query q = session.createQuery(hql);
            q.setParameter("checksum", duplicaceChecksum);
            @SuppressWarnings("unchecked")
            Iterator<String> duplicateIds = q.iterate();
            if (!duplicateIds.hasNext()) { // Just in case
                continue;
            }

            // Skip the first one. That is the one that will be kept and constantly retrieved
            duplicateIds.next();

            // Delete the other duplicates
            while (duplicateIds.hasNext()) {
                String id = duplicateIds.next();
                ContentFileContent cfc = (ContentFileContent) session.get(ContentFileContent.class, id);
                if (cfc != null) {
                    deletedDuplicates++;
                    freedMemory += cfc.getSize();
                    session.setReadOnly(cfc, false);

                    // Delete data entities via HQL to prevent loading them
                    hql = "delete ContentFileContentPart cfp where cfp.fileContents=:cfc";
                    Query deleteQ = session.createQuery(hql);
                    deleteQ.setParameter("cfc", cfc);
                    deleteQ.executeUpdate();

                    session.delete(cfc);
                    getParent().commitHibernateTransaction();
                }
            }
            Hibernate.close(duplicateIds);
        }
        Hibernate.close(duplicateChecksums);

        // Remove unused file contents
        long deletedUnusedFiles = 0;
        hql = "select cfc.id as id from ContentFileContent as cfc where cfc.checksumSha512 not in (select distinct cfm.checksumSha512 from ContentFileMeta as cfm where cfm.checksumSha512 is not null) and cfc.checksumSha512 not in (select distinct cfd.derivateSha512 from ContentFileDerivate as cfd)";
        @SuppressWarnings("unchecked")
        Iterator<String> obsoleteIds = session.createQuery(hql).iterate();
        while (obsoleteIds.hasNext()) {
            String id = obsoleteIds.next();
            ContentFileContent cfc = (ContentFileContent) session.get(ContentFileContent.class, id);
            if (cfc != null) {
                deletedUnusedFiles++;
                freedMemory += cfc.getSize();
                session.setReadOnly(cfc, false);

                // Delete data entities via HQL to prevent loading them
                hql = "delete ContentFileContentPart cfp where cfp.fileContents=:cfc";
                Query deleteQ = session.createQuery(hql);
                deleteQ.setParameter("cfc", cfc);
                deleteQ.executeUpdate();

                session.delete(cfc);
                //log.info("Deleted file contents " + cfc.getChecksumSha512() + " ordinal nr " + cfc.getOrdinalnr());
                getParent().commitHibernateTransaction();
            }
        }
        Hibernate.close(obsoleteIds);

        // Remove unused derivates of old CS5P4 style. Corresponding derivate data is deleted in the next run.
        hql = "select cfd.id as id from ContentFileDerivate as cfd where cfd.parentSha512 not in (select distinct cfc.checksumSha512 from ContentFileContent as cfc)";
        @SuppressWarnings("unchecked")
        Iterator<String> obsoleteDerivateIds = session.createQuery(hql).iterate();
        while (obsoleteDerivateIds.hasNext()) {
            String id = obsoleteDerivateIds.next();
            ContentFileDerivate cfd = (ContentFileDerivate) session.get(ContentFileDerivate.class, id);
            if (cfd != null) {
                session.setReadOnly(cfd, false);
                session.delete(cfd);
                getParent().commitHibernateTransaction();
            }
        }
        Hibernate.close(obsoleteDerivateIds);

        String freedMemoryText;
        if (deletedDuplicates > 0 || deletedUnusedFiles > 0) {
            if (freedMemory > 1024 * 1024) {
                freedMemoryText = WGUtils.DECIMALFORMAT_STANDARD.format(freedMemory / 1024 / 1024) + " MB of file storage";
            } else {
                freedMemoryText = WGUtils.DECIMALFORMAT_STANDARD.format(freedMemory) + " Bytes of file storage";
            }
            log.info("Maintenance on content store of app/plugin '" + getParent().getDb().getDbReference()
                    + "': Deleted " + WGUtils.DECIMALFORMAT_STANDARD.format(deletedDuplicates) + " duplicates and "
                    + WGUtils.DECIMALFORMAT_STANDARD.format(deletedUnusedFiles) + " unused file contents freeing "
                    + freedMemoryText);
        }

        return freedMemory;
    } catch (Throwable e) {
        try {
            session.getTransaction().rollback();
        } catch (Exception e2) {
        }
        throw new WGBackendException("Exception running daily maintenance", e);
    } finally {
        session.setDefaultReadOnly(true);
    }
}
From source file:de.innovationgate.webgate.api.jdbc.filehandling.CS5P5FileHandling.java
License:Open Source License
@Override
public long dailyFileMaintenance(Logger log) throws WGAPIException {
    Session session = getParent().getSession();
    try {
        session.setDefaultReadOnly(false);
        long freedMemory = 0;
        int deletedDuplicates = 0;

        // Remove duplicate file contents
        String hql = "select cfc.checksumSha512 as checksum from ContentFileContent as cfc group by cfc.checksumSha512 having count(*) > 1";
        @SuppressWarnings("unchecked")
        Iterator<String> duplicateChecksums = session.createQuery(hql).iterate();
        while (duplicateChecksums.hasNext()) {
            String duplicaceChecksum = duplicateChecksums.next();
            hql = "select cfc.id as id from ContentFileContent as cfc where cfc.checksumSha512 = :checksum order by cfc.ordinalnr asc";
            Query q = session.createQuery(hql);
            q.setParameter("checksum", duplicaceChecksum);
            @SuppressWarnings("unchecked")
            Iterator<String> duplicateIds = q.iterate();
            if (!duplicateIds.hasNext()) { // Just in case
                continue;
            }

            // Skip the first one. That is the one that will be kept and constantly retrieved
            duplicateIds.next();

            // Delete the other duplicates
            while (duplicateIds.hasNext()) {
                String id = duplicateIds.next();
                ContentFileContent cfc = (ContentFileContent) session.get(ContentFileContent.class, id);
                if (cfc != null) {
                    deletedDuplicates++;
                    freedMemory += cfc.getSize();
                    session.setReadOnly(cfc, false);

                    // Delete data entities via HQL to prevent loading them
                    hql = "delete ContentFileContentPart cfp where cfp.fileContents=:cfc";
                    Query deleteQ = session.createQuery(hql);
                    deleteQ.setParameter("cfc", cfc);
                    deleteQ.executeUpdate();

                    session.delete(cfc);
                    getParent().commitHibernateTransaction();
                }
            }
            Hibernate.close(duplicateIds);
        }
        Hibernate.close(duplicateChecksums);

        // Remove unused file contents, not used on attachments, derivates or binary extension data
        long deletedUnusedFiles = 0;
        hql = "select cfc.id as id from ContentFileContent as cfc where cfc.checksumSha512 not in (select distinct cfm.checksumSha512 from ContentFileMeta as cfm where cfm.checksumSha512 is not null) and cfc.checksumSha512 not in (select distinct cfd.derivateSha512 from ContentFileDerivate as cfd) and cfc.checksumSha512 not in (select distinct ext.binarySha512 from ExtensionData as ext where ext.type=7)";
        @SuppressWarnings("unchecked")
        Iterator<String> obsoleteIds = session.createQuery(hql).iterate();
        while (obsoleteIds.hasNext()) {
            String id = obsoleteIds.next();
            ContentFileContent cfc = (ContentFileContent) session.get(ContentFileContent.class, id);
            if (cfc != null) {
                deletedUnusedFiles++;
                freedMemory += cfc.getSize();
                session.setReadOnly(cfc, false);

                // Delete data entities via HQL to prevent loading them
                hql = "delete ContentFileContentPart cfp where cfp.fileContents=:cfc";
                Query deleteQ = session.createQuery(hql);
                deleteQ.setParameter("cfc", cfc);
                deleteQ.executeUpdate();

                session.delete(cfc);
                //log.info("Deleted file contents " + cfc.getChecksumSha512() + " ordinal nr " + cfc.getOrdinalnr());
                getParent().commitHibernateTransaction();
            }
        }
        Hibernate.close(obsoleteIds);

        // Remove unused derivates of old CS5P4 style. Corresponding derivate data is deleted in the next run.
        hql = "select cfd.id as id from ContentFileDerivate as cfd where cfd.parentMeta is null and cfd.parentSha512 not in (select distinct cfc.checksumSha512 from ContentFileContent as cfc)";
        @SuppressWarnings("unchecked")
        Iterator<String> obsoleteDerivateIds = session.createQuery(hql).iterate();
        while (obsoleteDerivateIds.hasNext()) {
            String id = obsoleteDerivateIds.next();
            ContentFileDerivate cfd = (ContentFileDerivate) session.get(ContentFileDerivate.class, id);
            if (cfd != null) {
                session.setReadOnly(cfd, false);
                session.delete(cfd);
                getParent().commitHibernateTransaction();
            }
        }
        Hibernate.close(obsoleteDerivateIds);

        String freedMemoryText;
        if (deletedDuplicates > 0 || deletedUnusedFiles > 0) {
            if (freedMemory > 1024 * 1024) {
                freedMemoryText = WGUtils.DECIMALFORMAT_STANDARD.format(freedMemory / 1024 / 1024) + " MB of file storage";
            } else {
                freedMemoryText = WGUtils.DECIMALFORMAT_STANDARD.format(freedMemory) + " Bytes of file storage";
            }
            log.info("Maintenance on content store of app/plugin '" + getParent().getDb().getDbReference()
                    + "': Deleted " + WGUtils.DECIMALFORMAT_STANDARD.format(deletedDuplicates) + " duplicates and "
                    + WGUtils.DECIMALFORMAT_STANDARD.format(deletedUnusedFiles) + " unused file contents freeing "
                    + freedMemoryText);
        }

        return freedMemory;
    } catch (Throwable e) {
        try {
            session.getTransaction().rollback();
        } catch (Exception e2) {
        }
        throw new WGBackendException("Exception running daily maintenance", e);
    } finally {
        session.setDefaultReadOnly(true);
    }
}
From source file:de.unisb.cs.st.javalanche.mutation.results.persistence.QueryManager.java
License:Open Source License
public static void updateMutations(List<Mutation> results) {
    logger.info("Storing results for " + results.size() + " mutations");
    Session session = openSession();
    Transaction tx = session.beginTransaction();
    int saved = 1;
    for (Mutation mutation : results) {
        Mutation mutationFromDB = (Mutation) session.get(Mutation.class, mutation.getId());
        if (mutationFromDB.getMutationResult() != null) {
            logger.warn("Mutation already has a test result - not storing the given result");
            logger.warn("Mutation:" + mutationFromDB);
            logger.warn("Result (that is not stored): " + mutation);
            session.setReadOnly(mutationFromDB, true);
            session.close();
            break;
        } else {
            session.save(mutation.getMutationResult());
            logger.debug("Setting result for mutation " + mutationFromDB.getId());
            mutationFromDB.setMutationResult(mutation.getMutationResult());
            saved++;
        }
        if (saved % 20 == 0) { // 20, same as the JDBC batch size
            // flush a batch of inserts and release memory:
            // see http://www.hibernate.org/hib_docs/reference/en/html/batch.html
            session.flush();
            session.clear();
        }
    }
    if (session.isOpen()) {
        tx.commit();
        session.close();
        logger.info("Succesfully stored results for " + results.size() + " mutations");
    }
}
From source file:org.codehaus.groovy.grails.orm.hibernate.cfg.GrailsHibernateUtil.java
License:Apache License
/**
 * Sets the target object to read-only using the given SessionFactory instance. This
 * avoids Hibernate performing any dirty checking on the object.
 *
 * @see #setObjectToReadWrite(Object, org.hibernate.SessionFactory)
 *
 * @param target The target object
 * @param sessionFactory The SessionFactory instance
 */
public static void setObjectToReadyOnly(Object target, SessionFactory sessionFactory) {
    Session session = sessionFactory.getCurrentSession();
    if (canModifyReadWriteState(session, target)) {
        if (target instanceof HibernateProxy) {
            target = ((HibernateProxy) target).getHibernateLazyInitializer().getImplementation();
        }
        session.setReadOnly(target, true);
        session.setFlushMode(FlushMode.MANUAL);
    }
}
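The effect described in the javadoc above can be shown with plain Session calls. This is only an illustration; the Product entity, its id, and the session handling are hypothetical and not part of GrailsHibernateUtil:

// Hypothetical Product entity; a read-only entity is excluded from dirty checking.
Session session = sessionFactory.getCurrentSession();
Product product = (Product) session.get(Product.class, productId);
session.setReadOnly(product, true);

product.setName("changed in memory only");
session.flush(); // no UPDATE is issued for the read-only entity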
From source file:org.codehaus.groovy.grails.orm.hibernate.cfg.GrailsHibernateUtil.java
License:Apache License
/**
 * Sets the target object to read-write, allowing Hibernate to dirty check it and auto-flush changes.
 *
 * @see #setObjectToReadyOnly(Object, org.hibernate.SessionFactory)
 *
 * @param target The target object
 * @param sessionFactory The SessionFactory instance
 */
public static void setObjectToReadWrite(final Object target, SessionFactory sessionFactory) {
    HibernateTemplate template = new HibernateTemplate(sessionFactory);
    template.setExposeNativeSession(true);
    template.execute(new HibernateCallback<Void>() {
        public Void doInHibernate(Session session) throws HibernateException, SQLException {
            if (canModifyReadWriteState(session, target)) {
                SessionImplementor sessionImpl = (SessionImplementor) session;
                EntityEntry ee = sessionImpl.getPersistenceContext().getEntry(target);
                if (ee != null && ee.getStatus() == Status.READ_ONLY) {
                    Object actualTarget = target;
                    if (target instanceof HibernateProxy) {
                        actualTarget = ((HibernateProxy) target).getHibernateLazyInitializer().getImplementation();
                    }
                    session.setReadOnly(actualTarget, false);
                    session.setFlushMode(FlushMode.AUTO);
                    incrementVersion(target);
                }
            }
            return null;
        }
    });
}
From source file:org.grails.orm.hibernate.cfg.GrailsHibernateUtil.java
License:Apache License
/**
 * Sets the target object to read-write, allowing Hibernate to dirty check it and auto-flush changes.
 *
 * @see #setObjectToReadyOnly(Object, org.hibernate.SessionFactory)
 *
 * @param target The target object
 * @param sessionFactory The SessionFactory instance
 */
public static void setObjectToReadWrite(final Object target, SessionFactory sessionFactory) {
    Session session = sessionFactory.getCurrentSession();
    if (!canModifyReadWriteState(session, target)) {
        return;
    }

    SessionImplementor sessionImpl = (SessionImplementor) session;
    EntityEntry ee = sessionImpl.getPersistenceContext().getEntry(target);
    if (ee == null || ee.getStatus() != Status.READ_ONLY) {
        return;
    }

    Object actualTarget = target;
    if (target instanceof HibernateProxy) {
        actualTarget = ((HibernateProxy) target).getHibernateLazyInitializer().getImplementation();
    }

    session.setReadOnly(actualTarget, false);
    session.setFlushMode(FlushMode.AUTO);
    incrementVersion(target);
}
From source file:org.kuali.rice.krad.dao.impl.BusinessObjectDaoJpa.java
License:Educational Community License
/**
 * @see org.kuali.rice.krad.dao.BusinessObjectDao#manageReadOnly(org.kuali.rice.krad.bo.PersistableBusinessObject)
 */
public PersistableBusinessObject manageReadOnly(PersistableBusinessObject bo) {
    Session session = ((HibernateEntityManager) entityManager).getSession();
    FlushMode currentFlushMode = session.getFlushMode();
    session.setFlushMode(FlushMode.MANUAL); // make sure the merge doesn't flush what we're trying to make read only
    PersistableBusinessObject managedBO = entityManager.merge(bo);
    session.setReadOnly(managedBO, true);
    session.setFlushMode(currentFlushMode);
    return managedBO;
}
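Note the ordering in this example: the flush mode is switched to MANUAL before the merge so that pending changes in the persistence context are not flushed while the incoming object is being attached; the managed copy returned by merge is then marked read-only, and only afterwards is the original flush mode restored.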
From source file:ubic.gemma.persistence.service.common.auditAndSecurity.UserDaoImpl.java
License:Apache License
@Override
public User findByUserName(final String userName) {
    Session session = this.getSessionFactory().getCurrentSession();

    //noinspection unchecked
    List<User> users = session.createCriteria(User.class).setFlushMode(FlushMode.MANUAL)
            .add(Restrictions.eq("userName", userName)).list();

    if (users.isEmpty()) {
        return null;
    } else if (users.size() > 1) {
        throw new IllegalStateException("Multiple users with name=" + userName);
    }

    User u = users.get(0);
    session.setReadOnly(u, true); // TESTING
    return u;
}