Example usage for org.hibernate FlushMode AUTO

List of usage examples for org.hibernate FlushMode AUTO

Introduction

On this page you can find example usages of org.hibernate FlushMode AUTO.

Prototype

FlushMode AUTO

To view the source code for org.hibernate FlushMode AUTO, click the Source Link.

Document

The Session is sometimes flushed before query execution in order to ensure that queries never return stale state.
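
To illustrate the behavior described above, here is a minimal, hypothetical sketch (the Person entity, the sessionFactory and the mapping setup are assumed for illustration only): with FlushMode.AUTO, pending in-memory changes are flushed before a query runs, so the query does not return stale state.

// Minimal sketch; uses org.hibernate.Session, org.hibernate.Transaction and org.hibernate.FlushMode.
// "Person" is a hypothetical mapped entity, "sessionFactory" an already configured SessionFactory.
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();
try {
    session.setFlushMode(FlushMode.AUTO); // AUTO is the default flush mode

    Person p = (Person) session.get(Person.class, 1L);
    p.setName("Updated name"); // change exists only in memory so far

    // AUTO triggers a flush before this query executes, so the count
    // already reflects the rename instead of returning stale state.
    Long count = (Long) session.createQuery(
            "select count(p) from Person p where p.name = :name")
            .setParameter("name", "Updated name").uniqueResult();

    tx.commit();
} finally {
    session.close();
}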

Usage

From source file:org.olat.upgrade.OLATUpgrade_6_3_0.java

License:Apache License

private void migrateNotifications(final UpgradeManager upgradeManager, final UpgradeHistoryData uhd) {
    if (!uhd.getBooleanDataValue(TASK_MIGRATE_NOTIFICATIONS)) {
        log.audit("+-----------------------------------------------------------------------------+");
        log.audit("+... Calculate the businesspath for the publishers (notifications)         ...+");
        log.audit("+-----------------------------------------------------------------------------+");

        int counter = 0;
        final NotificationsManager notificationMgr = NotificationsManager.getInstance();
        final List<Publisher> allPublishers = notificationMgr.getAllPublisher();
        if (log.isDebug()) {
            log.info("Found " + allPublishers.size() + " publishers to migrate.");
        }

        getNotificationUpgrades();

        for (final Publisher publisher : allPublishers) {
            final Publisher publisherToSave = upgrade(publisher);
            if (publisherToSave != null) {
                try {
                    DBFactory.getInstance().updateObject(publisherToSave);
                } catch (final ObjectDeletedException e) {
                    log.warn("Publisher was already deleted, no update possible! Publisher key: "
                            + publisherToSave.getKey());
                } catch (final Exception e) {
                    log.warn("Could not update publisher! Publisher key: "
                            + publisherToSave.getKey());
                }
                counter++;
            }
            if (counter > 0 && counter % 100 == 0) {
                log.audit("Another 100 publishers done");
                DBFactory.getInstance().intermediateCommit();
            }
        }

        DBFactory.getInstance().intermediateCommit();
        log.audit("**** Migrated " + counter + " publishers. ****");

        log.audit("+-----------------------------------------------------------------------------+");
        log.audit("+... Update the latest emailed date for all subscribers                       +");
        log.audit("+-----------------------------------------------------------------------------+");
        final DBQuery query = DBFactory.getInstance().createQuery("update " + Subscriber.class.getName()
                + " subscriber set subscriber.latestEmailed=:latestDate");
        final Calendar cal = Calendar.getInstance();
        //
        // use the day of installing the release,
        // and set the time back to midnight instead of
        // going back one day, e.g. cal.add(Calendar.DAY_OF_MONTH, -1);
        //
        // 1) before release day, sending notifications the old way at 02:00:00 a.m.
        // 2) at release day, sending notifications the old way at 02:00:00 a.m.
        // .. Install the Release -> Upgrader sets latestEmail sent on subscribers to release day at 00:00:00
        // 3) day after release, sending notifications the new way at 02:00:00 a.m.
        //
        // with this procedure only the news are sent twice which were created between 00:00:00 and 02:00:00 of release day.
        //
        cal.set(Calendar.HOUR_OF_DAY, 0);
        cal.set(Calendar.MINUTE, 0);
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
        query.setTimestamp("latestDate", cal.getTime());
        final int subCounter = query.executeUpdate(FlushMode.AUTO);

        DBFactory.getInstance().intermediateCommit();
        log.audit("**** Migrated " + subCounter + " subscribers. ****");

        uhd.setBooleanDataValue(TASK_MIGRATE_NOTIFICATIONS, true);
        upgradeManager.setUpgradesHistory(uhd, VERSION);
    }
}

From source file:org.openmrs.module.distrotools.metadata.handler.impl.RoleDeployHandlerTest.java

License:Open Source License

/**
 * We previously encountered a problem where the session couldn't be flushed at certain stages during installation
 * and re-installation of various roles and privileges. It seems like these objects can be cached via the UUID, and
 * once we stopped needlessly overwriting UUIDs the problem was fixed.
 */
@Test
public void integration_shouldWorkWithoutFlushes() {
    sessionFactory.getCurrentSession().setFlushMode(FlushMode.MANUAL);

    distroToolsService.installObject(privilege("Privilege1", "Testing"));
    distroToolsService.installObject(role("Role1", "Testing", null, idSet("Privilege1")));
    distroToolsService.installObject(role("Role2", "Testing", idSet("Role1"), null));

    distroToolsService.installObject(privilege("Privilege1", "Testing"));

    distroToolsService.installObject(role("Role1", "Testing", null, idSet("Privilege1")));

    Context.flushSession();
    sessionFactory.getCurrentSession().setFlushMode(FlushMode.AUTO);
}

From source file:org.openmrs.module.metadatadeploy.handler.impl.RoleDeployHandlerTest.java

License:Open Source License

/**
 * We previously encountered a problem where the session couldn't be flushed at certain stages during installation
 * and re-installation of various roles and privileges. It seems like these objects can be cached via the UUID, and
 * once we stopped needlessly overwriting UUIDs the problem was fixed.
 */
@Test
public void integration_shouldWorkWithoutFlushes() {
    getCurrentSession().setFlushMode(FlushMode.MANUAL);

    deployService.installObject(privilege("Privilege1", "Testing"));
    deployService.installObject(role("Role1", "Testing", null, idSet("Privilege1")));
    deployService.installObject(role("Role2", "Testing", idSet("Role1"), null));

    deployService.installObject(privilege("Privilege1", "Testing"));

    deployService.installObject(role("Role1", "Testing", null, idSet("Privilege1")));

    Context.flushSession();
    getCurrentSession().setFlushMode(FlushMode.AUTO);
}

From source file:org.openmrs.module.metadatasharing.api.db.hibernate.CustomSessionFlushTask.java

License:Open Source License

/**
 * Executes the task method with flushes.
 * @return the result or <code>null</code>
 */
public final T executeInAutoFlushMode() {
    Session currentSession = HibernateSessionFactory.getSessionFactory().getCurrentSession();

    FlushMode previousFlushMode = currentSession.getFlushMode();
    currentSession.setFlushMode(FlushMode.AUTO);

    try {
        return task();
    } finally {
        currentSession.setFlushMode(previousFlushMode);
    }
}

From source file:org.openmrs.module.sync.api.db.hibernate.HibernateSyncDAO.java

License:Open Source License

/**
 * Sets hibernate flush mode to org.hibernate.FlushMode.AUTO.
 * @see org.hibernate.FlushMode
 * @see org.openmrs.module.sync.api.db.SyncDAO#setFlushModeAutomatic()
 */
public void setFlushModeAutomatic() throws DAOException {
    sessionFactory.getCurrentSession().setFlushMode(org.hibernate.FlushMode.AUTO);
}

From source file:org.projectforge.core.HibernateSearchDependentObjectsReindexer.java

License:Open Source License

private void reindexDependents(final HibernateTemplate hibernateTemplate, final Session session,
        final BaseDO<?> obj, final Set<String> alreadyReindexed) {
    if (alreadyReindexed.contains(getReindexId(obj)) == true) {
        if (log.isDebugEnabled() == true) {
            log.debug("Object already re-indexed (skipping): " + getReindexId(obj));
        }
        return;
    }
    session.flush(); // Needed to flush the object changes!
    final FullTextSession fullTextSession = Search.getFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.AUTO);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    try {
        BaseDO<?> dbObj = (BaseDO<?>) session.get(obj.getClass(), obj.getId());
        if (dbObj == null) {
            dbObj = (BaseDO<?>) session.load(obj.getClass(), obj.getId());
        }
        fullTextSession.index(dbObj);
        alreadyReindexed.add(getReindexId(dbObj));
        if (log.isDebugEnabled() == true) {
            log.debug("Object added to index: " + getReindexId(dbObj));
        }
    } catch (final Exception ex) {
        // Don't fail if any exception while re-indexing occurs.
        log.info("Fail to re-index " + obj.getClass() + ": " + ex.getMessage());
    }
    // session.flush(); // clear every batchSize since the queue is processed
    final List<Entry> entryList = map.get(obj.getClass());
    reindexDependents(hibernateTemplate, session, obj, entryList, alreadyReindexed);
}

From source file:org.projectforge.database.XmlDump.java

License:Open Source License

/**
 * @param reader
 * @return Only for test cases.
 */
public XStreamSavingConverter restoreDatabase(final Reader reader) {
    final XStreamSavingConverter xstreamSavingConverter = new XStreamSavingConverter() {

        @Override
        protected Serializable getOriginalIdentifierValue(final Object obj) {
            return HibernateUtils.getIdentifier(obj);
        }

        @Override
        public Serializable onBeforeSave(final Session session, final Object obj) {
            log.info("Object " + obj);
            if (obj instanceof PFUserDO) {
                final PFUserDO user = (PFUserDO) obj;
                return save(user, user.getRights());
            } else if (obj instanceof AbstractRechnungDO<?>) {
                final AbstractRechnungDO<? extends AbstractRechnungsPositionDO> rechnung = (AbstractRechnungDO<?>) obj;
                final List<? extends AbstractRechnungsPositionDO> positions = rechnung.getPositionen();
                rechnung.setPositionen(null); // Need to set positions to null first (otherwise the insert fails).
                final Serializable id = save(rechnung);
                if (positions != null) {
                    for (final AbstractRechnungsPositionDO pos : positions) {
                        if (pos.getKostZuweisungen() != null) {
                            final List<KostZuweisungDO> zuweisungen = pos.getKostZuweisungen();
                            pos.setKostZuweisungen(null); // Need to set to null first (otherwise the insert fails).
                            save(pos);
                            if (pos instanceof RechnungsPositionDO) {
                                ((RechnungDO) rechnung).addPosition((RechnungsPositionDO) pos);
                            } else {
                                ((EingangsrechnungDO) rechnung).addPosition((EingangsrechnungsPositionDO) pos);
                            }
                            if (zuweisungen != null) {
                                for (final KostZuweisungDO zuweisung : zuweisungen) {
                                    pos.addKostZuweisung(zuweisung);
                                    save(zuweisung);
                                }
                            }
                        }
                    }
                }
                return id;
            } else if (obj instanceof AuftragDO) {
                final AuftragDO auftrag = (AuftragDO) obj;
                return save(auftrag, auftrag.getPositionen());
            }
            return super.onBeforeSave(session, obj);
        }
    };
    // UserRightDO is inserted on cascade while inserting PFUserDO.
    xstreamSavingConverter.appendIgnoredObjects(embeddedClasses);
    xstreamSavingConverter.appendOrderedType(PFUserDO.class, GroupDO.class, TaskDO.class, KundeDO.class,
            ProjektDO.class, Kost1DO.class, Kost2ArtDO.class, Kost2DO.class, AuftragDO.class, //
            RechnungDO.class, EingangsrechnungDO.class, EmployeeSalaryDO.class, KostZuweisungDO.class, //
            UserPrefEntryDO.class, UserPrefDO.class, //
            AccessEntryDO.class, GroupTaskAccessDO.class, ConfigurationDO.class);
    Session session = null;
    try {
        final SessionFactory sessionFactory = hibernate.getSessionFactory();
        session = sessionFactory.openSession(EmptyInterceptor.INSTANCE);
        session.setFlushMode(FlushMode.AUTO);
        final XStream xstream = new XStream(new DomDriver());
        xstream.setMode(XStream.ID_REFERENCES);
        xstreamSavingConverter.setSession(session);
        xstream.registerConverter(xstreamSavingConverter, 10);
        xstream.registerConverter(new UserRightIdSingleValueConverter(), 20);
        xstream.registerConverter(new UserPrefAreaSingleValueConverter(), 19);
        // load and save all objects
        xstream.fromXML(reader);

        xstreamSavingConverter.saveObjects();
    } catch (final Exception ex) {
        log.error(ex.getMessage(), ex);
        throw new RuntimeException(ex);
    } finally {
        IOUtils.closeQuietly(reader);
        if (session != null) {
            session.close();
        }
    }
    return xstreamSavingConverter;
}

From source file:org.projectforge.framework.persistence.database.XmlDump.java

License:Open Source License

/**
 * @param reader
 * @return Only for test cases.
 */
public XStreamSavingConverter restoreDatabase(final Reader reader) {
    final List<AbstractPlugin> plugins = pluginAdminService.getActivePlugin();
    final XStreamSavingConverter xstreamSavingConverter = new XStreamSavingConverter() {

        @Override
        protected Serializable getOriginalIdentifierValue(final Object obj) {
            return HibernateUtils.getIdentifier(obj);
        }

        @Override
        public Serializable onBeforeSave(final Session session, final Object obj) {
            log.info("Object " + obj);
            if (obj instanceof PFUserDO) {
                final PFUserDO user = (PFUserDO) obj;
                return save(user, user.getRights());
            } else if (obj instanceof AbstractRechnungDO<?>) {

                final AbstractRechnungDO<? extends AbstractRechnungsPositionDO> rechnung = (AbstractRechnungDO<?>) obj;
                final List<? extends AbstractRechnungsPositionDO> positions = rechnung.getPositionen();
                final KontoDO konto = rechnung.getKonto();
                if (konto != null) {
                    save(konto);
                    rechnung.setKonto(null);
                }
                rechnung.setPositionen(null); // Need to set positions to null first (otherwise the insert fails).
                final Serializable id = save(rechnung);
                if (konto != null) {
                    rechnung.setKonto(konto);
                }
                if (positions != null) {
                    for (final AbstractRechnungsPositionDO pos : positions) {
                        if (pos.getKostZuweisungen() != null) {
                            final List<KostZuweisungDO> zuweisungen = pos.getKostZuweisungen();
                            pos.setKostZuweisungen(null); // Need to set to null first (otherwise the insert fails).
                            save(pos);
                            if (pos instanceof RechnungsPositionDO) {
                                ((RechnungDO) rechnung).addPosition((RechnungsPositionDO) pos);
                            } else {
                                ((EingangsrechnungDO) rechnung).addPosition((EingangsrechnungsPositionDO) pos);
                            }
                            if (zuweisungen != null) {
                                for (final KostZuweisungDO zuweisung : zuweisungen) {
                                    pos.addKostZuweisung(zuweisung);
                                    save(zuweisung);
                                }
                            }
                        }
                    }
                }
                return id;
            } else if (obj instanceof AuftragDO) {
                final AuftragDO auftrag = (AuftragDO) obj;
                return save(auftrag, auftrag.getPositionen());
            }
            if (plugins != null) {
                for (final AbstractPlugin plugin : plugins) {
                    try {
                        plugin.onBeforeRestore(this, obj);
                    } catch (final Exception ex) {
                        log.error("Error in Plugin while restoring object: " + ex.getMessage(), ex);
                    }
                }
            }
            for (final XmlDumpHook xmlDumpHook : xmlDumpHooks) {
                try {
                    xmlDumpHook.onBeforeRestore(userXmlPreferencesDao, this, obj);
                } catch (final Exception ex) {
                    log.error("Error in XmlDumpHook while restoring object: " + ex.getMessage(), ex);
                }
            }
            return super.onBeforeSave(session, obj);
        }

        /**
         * @see org.projectforge.framework.persistence.xstream.XStreamSavingConverter#onAfterSave(java.lang.Object,
         *      java.io.Serializable)
         */
        @Override
        public void onAfterSave(final Object obj, final Serializable id) {
            if (plugins != null) {
                for (final AbstractPlugin plugin : plugins) {
                    plugin.onAfterRestore(this, obj, id);
                }
            }
        }
    };
    // UserRightDO is inserted on cascade while inserting PFUserDO.
    xstreamSavingConverter.appendIgnoredObjects(embeddedClasses);
    // automatically detect insert order.
    List<EntityMetadata> ents = emf.getMetadataRepository().getTableEntities();
    List<Class<?>> classList = ents.stream().map((e) -> e.getJavaType()).collect(Collectors.toList());
    // entities with no dependencies first
    Collections.reverse(classList);

    xstreamSavingConverter.appendOrderedType(PFUserDO.class, GroupDO.class, TaskDO.class, KundeDO.class,
            ProjektDO.class, Kost1DO.class, Kost2ArtDO.class, Kost2DO.class, AuftragDO.class, //
            RechnungDO.class, EingangsrechnungDO.class, EmployeeSalaryDO.class, KostZuweisungDO.class, //
            UserPrefEntryDO.class, UserPrefDO.class, //
            AccessEntryDO.class, GroupTaskAccessDO.class, ConfigurationDO.class);
    xstreamSavingConverter.appendOrderedType(classList.toArray(new Class<?>[] {}));

    //    if (plugins != null) {
    //      for (final AbstractPlugin plugin : plugins) {
    //        xstreamSavingConverter.appendOrderedType(plugin.getPersistentEntities());
    //      }
    //    }
    Session session = null;
    try {
        final SessionFactory sessionFactory = hibernate.getSessionFactory();
        session = HibernateCompatUtils.openSession(sessionFactory, EmptyInterceptor.INSTANCE);
        session.setFlushMode(FlushMode.AUTO);
        final XStream xstream = XStreamHelper.createXStream();
        xstream.setMode(XStream.ID_REFERENCES);
        xstreamSavingConverter.setSession(session);
        xstream.registerConverter(xstreamSavingConverter, 10);
        xstream.registerConverter(new UserRightIdSingleValueConverter(userRights), 20);
        xstream.registerConverter(new UserPrefAreaSingleValueConverter(), 19);
        // load and save all objects
        xstream.fromXML(reader);

        xstreamSavingConverter.saveObjects();
    } catch (final Exception ex) {
        log.error(ex.getMessage(), ex);
        throw new RuntimeException(ex);
    } finally {
        IOUtils.closeQuietly(reader);
        if (session != null) {
            session.close();
        }
    }
    return xstreamSavingConverter;
}

From source file:org.projectforge.framework.persistence.history.HibernateSearchDependentObjectsReindexer.java

License:Open Source License

private void reindexDependents(final Session session, final BaseDO<?> obj, final Set<String> alreadyReindexed) {
    if (alreadyReindexed.contains(getReindexId(obj)) == true) {
        if (log.isDebugEnabled() == true) {
            log.debug("Object already re-indexed (skipping): " + getReindexId(obj));
        }
        return;
    }
    session.flush(); // Needed to flush the object changes!
    final FullTextSession fullTextSession = Search.getFullTextSession(session);

    HibernateCompatUtils.setFlushMode(fullTextSession, FlushMode.AUTO);
    HibernateCompatUtils.setCacheMode(fullTextSession, CacheMode.IGNORE);
    try {
        BaseDO<?> dbObj = session.get(obj.getClass(), obj.getId());
        if (dbObj == null) {
            dbObj = session.load(obj.getClass(), obj.getId());
        }
        HibernateCompatUtils.index(fullTextSession, dbObj);
        alreadyReindexed.add(getReindexId(dbObj));
        if (log.isDebugEnabled() == true) {
            log.debug("Object added to index: " + getReindexId(dbObj));
        }
    } catch (final Exception ex) {
        // Don't fail if any exception while re-indexing occurs.
        log.info("Fail to re-index " + obj.getClass() + ": " + ex.getMessage());
    }
    // session.flush(); // clear every batchSize since the queue is processed
    final List<Entry> entryList = map.get(obj.getClass());
    reindexDependents(session, obj, entryList, alreadyReindexed);
}

From source file:org.sakaiproject.scorm.ui.player.HibernateFilter.java

License:Educational Community License

@Override
protected Session getSession(SessionFactory sessionFactory) throws DataAccessResourceFailureException {
    Session session = SessionFactoryUtils.getSession(sessionFactory, true);
    // set the FlushMode to auto in order to save objects.
    session.setFlushMode(FlushMode.AUTO);
    return session;
}