Example usage for org.hibernate FlushMode COMMIT

List of usage examples for org.hibernate FlushMode COMMIT

Introduction

This page lists example usages of org.hibernate FlushMode COMMIT.

Prototype

FlushMode COMMIT

Document

The Session is flushed when Transaction#commit is called.
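
For orientation, here is a minimal, self-contained sketch of the pattern the usage examples below share: open a Session, switch it to FlushMode.COMMIT so that pending changes are written only when the transaction commits, do the work inside a transaction, and commit. The SessionFactory constructor argument and the generic entity parameter are illustrative placeholders, not taken from any of the examples on this page.

import org.hibernate.FlushMode;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;

public class FlushModeCommitExample {

    // Hypothetical factory; in the examples below it is provided by the surrounding framework.
    private final SessionFactory sessionFactory;

    public FlushModeCommitExample(SessionFactory sessionFactory) {
        this.sessionFactory = sessionFactory;
    }

    public void saveInCommitMode(Object entity) {
        Session session = sessionFactory.openSession();
        // With FlushMode.COMMIT the session is not flushed before every query;
        // changes are pushed to the database only when the transaction commits.
        session.setFlushMode(FlushMode.COMMIT);
        Transaction tx = session.beginTransaction();
        try {
            session.save(entity);
            tx.commit(); // the flush happens here
        } catch (RuntimeException e) {
            tx.rollback();
            throw e;
        } finally {
            session.close();
        }
    }
}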

Usage

From source file: com.xpn.xwiki.store.XWikiHibernateStore.java

License: Open Source License

public void saveXWikiDoc(XWikiDocument doc, XWikiContext context, boolean bTransaction) throws XWikiException {
    MonitorPlugin monitor = Util.getMonitorPlugin(context);
    try {
        // Start monitoring timer
        if (monitor != null) {
            monitor.startTimer("hibernate");
        }
        doc.setStore(this);
        // Make sure the database name is stored
        doc.getDocumentReference().setWikiReference(new WikiReference(context.getDatabase()));

        if (bTransaction) {
            checkHibernate(context);
            SessionFactory sfactory = injectCustomMappingsInSessionFactory(doc, context);
            bTransaction = beginTransaction(sfactory, context);
        }
        Session session = getSession(context);
        session.setFlushMode(FlushMode.COMMIT);

        // This information lets the loading code skip looking for attachments and objects
        doc.setElement(XWikiDocument.HAS_ATTACHMENTS, (doc.getAttachmentList().size() != 0));
        doc.setElement(XWikiDocument.HAS_OBJECTS, (doc.getXObjects().size() != 0));

        // Let's update the class XML since this is the new way to store it
        // TODO If all the properties are removed, the old xml stays?
        BaseClass bclass = doc.getXClass();
        if (bclass != null) {
            if (bclass.getFieldList().size() > 0) {
                doc.setXClassXML(bclass.toXMLString());
            } else {
                doc.setXClassXML("");
            }
        }

        if (doc.hasElement(XWikiDocument.HAS_ATTACHMENTS)) {
            saveAttachmentList(doc, context, false);
        }

        // Handle the latest text file
        if (doc.isContentDirty() || doc.isMetaDataDirty()) {
            Date ndate = new Date();
            doc.setDate(ndate);
            if (doc.isContentDirty()) {
                doc.setContentUpdateDate(ndate);
                doc.setContentAuthorReference(doc.getAuthorReference());
            }
            doc.incrementVersion();
            if (context.getWiki().hasVersioning(context)) {
                context.getWiki().getVersioningStore().updateXWikiDocArchive(doc, false, context);
            }

            doc.setContentDirty(false);
            doc.setMetaDataDirty(false);
        } else {
            if (doc.getDocumentArchive() != null) {
                // Let's make sure we save the archive if we have one
                // This is especially needed if we load a document from XML
                if (context.getWiki().hasVersioning(context)) {
                    context.getWiki().getVersioningStore().saveXWikiDocArchive(doc.getDocumentArchive(), false,
                            context);
                }
            } else {
                // Make sure the getArchive call has been made once
                // with a valid context
                try {
                    if (context.getWiki().hasVersioning(context)) {
                        doc.getDocumentArchive(context);
                    }
                } catch (XWikiException e) {
                    // this is a non critical error
                }
            }
        }

        // Verify if the document already exists
        Query query = session
                .createQuery("select xwikidoc.id from XWikiDocument as xwikidoc where xwikidoc.id = :id");
        query.setLong("id", doc.getId());
        if (query.uniqueResult() == null) {
            session.save(doc);
        } else {
            session.update(doc);
            // TODO: this is slower!! How can it be improved?
            // session.saveOrUpdate(doc);
        }

        // Remove objects planned for removal
        if (doc.getXObjectsToRemove().size() > 0) {
            for (BaseObject removedObject : doc.getXObjectsToRemove()) {
                deleteXWikiObject(removedObject, context, false);
            }
            doc.setXObjectsToRemove(new ArrayList<BaseObject>());
        }

        if (bclass != null) {
            bclass.setDocumentReference(doc.getDocumentReference());
            // Store this XWikiClass in the context so that we can use it in case of recursive usage of classes
            context.addBaseClass(bclass);
            // Update instances of the class, in case some properties changed their storage type

            // In case the current document has both a class and instances of that class, we have to take care
            // not to insert duplicate entities in the session
            Map<Integer, BaseObject> localClassObjects = new HashMap<Integer, BaseObject>();
            if (doc.hasElement(XWikiDocument.HAS_OBJECTS)
                    && doc.getXObjects(doc.getDocumentReference()) != null) {
                for (BaseObject obj : doc.getXObjects(doc.getDocumentReference())) {
                    if (obj != null) {
                        localClassObjects.put(obj.getId(), obj);
                    }
                }
            }
            for (PropertyClass prop : (Collection<PropertyClass>) bclass.getFieldList()) {
                // migrate values of list properties
                if (prop instanceof StaticListClass || prop instanceof DBListClass) {
                    ListClass lc = (ListClass) prop;
                    String[] classes = { DBStringListProperty.class.getName(),
                            StringListProperty.class.getName(), StringProperty.class.getName() }; // @see ListClass#newProperty()
                    for (int i = 0; i < classes.length; i++) {
                        String oldclass = classes[i];
                        if (!oldclass.equals(lc.newProperty().getClass().getName())) {
                            Query q = session
                                    .createQuery("select p from " + oldclass + " as p, BaseObject as o"
                                            + " where o.className=? and p.id=o.id and p.name=?")
                                    .setString(0, bclass.getName()).setString(1, lc.getName());
                            for (Iterator it = q.list().iterator(); it.hasNext();) {
                                BaseProperty lp = (BaseProperty) it.next();
                                BaseProperty lp1 = lc.newProperty();
                                lp1.setId(lp.getId());
                                lp1.setName(lp.getName());
                                if (lc.isMultiSelect()) {
                                    List tmp;
                                    if (lp.getValue() instanceof List) {
                                        tmp = (List) lp.getValue();
                                    } else {
                                        tmp = new ArrayList<String>(1);
                                        tmp.add(lp.getValue());
                                    }
                                    lp1.setValue(tmp);
                                } else {
                                    Object tmp = lp.getValue();
                                    if (tmp instanceof List && ((List) tmp).size() > 0) {
                                        tmp = ((List) tmp).get(0);
                                    }
                                    lp1.setValue(tmp);
                                }
                                session.delete(lp);
                                session.save(lp1);
                            }
                        }
                    }
                }
                // migrate values of number properties
                else if (prop instanceof NumberClass) {
                    NumberClass nc = (NumberClass) prop;
                    // @see NumberClass#newProperty()
                    String[] classes = { IntegerProperty.class.getName(), LongProperty.class.getName(),
                            FloatProperty.class.getName(), DoubleProperty.class.getName() };
                    for (int i = 0; i < classes.length; i++) {
                        String oldclass = classes[i];
                        if (!oldclass.equals(nc.newProperty().getClass().getName())) {
                            Query q = session
                                    .createQuery("select p from " + oldclass + " as p, BaseObject as o"
                                            + " where o.className=?" + "  and p.id=o.id and p.name=?")
                                    .setString(0, bclass.getName()).setString(1, nc.getName());
                            for (BaseProperty np : (List<BaseProperty>) q.list()) {
                                BaseProperty np1 = nc.newProperty();
                                np1.setId(np.getId());
                                np1.setName(np.getName());
                                if (nc.getNumberType().equals("integer")) {
                                    np1.setValue(Integer.valueOf(((Number) np.getValue()).intValue()));
                                } else if (nc.getNumberType().equals("float")) {
                                    np1.setValue(Float.valueOf(((Number) np.getValue()).floatValue()));
                                } else if (nc.getNumberType().equals("double")) {
                                    np1.setValue(Double.valueOf(((Number) np.getValue()).doubleValue()));
                                } else if (nc.getNumberType().equals("long")) {
                                    np1.setValue(Long.valueOf(((Number) np.getValue()).longValue()));
                                }
                                session.delete(np);
                                session.save(np1);
                            }
                        }
                    }
                } else {
                    // General migration of properties
                    Query q = session.createQuery("select p from BaseProperty as p, BaseObject as o"
                            + " where o.className=? and p.id=o.id and p.name=? and p.classType <> ?");
                    q.setString(0, bclass.getName());
                    q.setString(1, prop.getName());
                    q.setString(2, prop.newProperty().getClassType());
                    @SuppressWarnings("unchecked")
                    List<BaseProperty> brokenProperties = q.list();
                    for (BaseProperty brokenProperty : brokenProperties) {
                        BaseProperty newProperty = prop.fromString(brokenProperty.toText());
                        BaseObject localObject = localClassObjects.get(brokenProperty.getId());
                        if (localObject != null) {
                            BaseProperty currentProperty = (BaseProperty) localObject.get(prop.getName());
                            if (currentProperty != null) {
                                newProperty = prop.fromString(currentProperty.toText());
                                if (newProperty != null) {
                                    localObject.put(prop.getName(), newProperty);
                                } else {
                                    localObject.put(prop.getName(), brokenProperty);
                                }
                            }
                        }
                        if (newProperty == null) {
                            log.warn("Incompatible data migration when changing field {} of class {}",
                                    prop.getName(), prop.getClassName());
                            continue;
                        }
                        newProperty.setId(brokenProperty.getId());
                        session.delete(brokenProperty);
                        session.save(newProperty);
                    }
                }
            }
        }

        if (doc.hasElement(XWikiDocument.HAS_OBJECTS)) {
            // TODO: Delete all objects for which we don't have a name in the Map
            for (List<BaseObject> objects : doc.getXObjects().values()) {
                for (BaseObject obj : objects) {
                    if (obj != null) {
                        obj.setDocumentReference(doc.getDocumentReference());
                        /* If the object doesn't have a GUID, create it before saving */
                        if (StringUtils.isEmpty(obj.getGuid())) {
                            obj.setGuid(UUID.randomUUID().toString());
                        }
                        saveXWikiCollection(obj, context, false);
                    }
                }
            }
        }

        if (context.getWiki().hasBacklinks(context)) {
            saveLinks(doc, context, true);
        }

        if (bTransaction) {
            endTransaction(context, true);
        }

        doc.setNew(false);

        // We need to ensure that the saved document becomes the original document
        doc.setOriginalDocument(doc.clone());

    } catch (Exception e) {
        Object[] args = { this.defaultEntityReferenceSerializer.serialize(doc.getDocumentReference()) };
        throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                XWikiException.ERROR_XWIKI_STORE_HIBERNATE_SAVING_DOC, "Exception while saving document {0}", e,
                args);
    } finally {
        try {
            if (bTransaction) {
                endTransaction(context, false);
            }
        } catch (Exception e) {
        }

        // End monitoring timer
        if (monitor != null) {
            monitor.endTimer("hibernate");
        }
    }
}

From source file: com.xpn.xwiki.store.XWikiHibernateStore.java

License: Open Source License

public void deleteXWikiDoc(XWikiDocument doc, XWikiContext context) throws XWikiException {
    boolean bTransaction = true;
    MonitorPlugin monitor = Util.getMonitorPlugin(context);
    try {
        // Start monitoring timer
        if (monitor != null) {
            monitor.startTimer("hibernate");
        }
        checkHibernate(context);
        SessionFactory sfactory = injectCustomMappingsInSessionFactory(doc, context);
        bTransaction = bTransaction && beginTransaction(sfactory, context);
        Session session = getSession(context);
        session.setFlushMode(FlushMode.COMMIT);

        if (doc.getStore() == null) {
            Object[] args = { doc.getDocumentReference() };
            throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                    XWikiException.ERROR_XWIKI_STORE_HIBERNATE_CANNOT_DELETE_UNLOADED_DOC,
                    "Impossible to delete document {0} if it is not loaded", null, args);
        }

        // Let's delete any attachment this document might have
        for (XWikiAttachment attachment : doc.getAttachmentList()) {
            context.getWiki().getAttachmentStore().deleteXWikiAttachment(attachment, false, context, false);
        }

        // deleting XWikiLinks
        if (context.getWiki().hasBacklinks(context)) {
            deleteLinks(doc.getId(), context, true);
        }

        // Find the list of classes for which we have an object
        // Remove properties planned for removal
        if (doc.getXObjectsToRemove().size() > 0) {
            for (BaseObject bobj : doc.getXObjectsToRemove()) {
                if (bobj != null) {
                    deleteXWikiObject(bobj, context, false);
                }
            }
            doc.setXObjectsToRemove(new ArrayList<BaseObject>());
        }
        for (List<BaseObject> objects : doc.getXObjects().values()) {
            for (BaseObject obj : objects) {
                if (obj != null) {
                    deleteXWikiObject(obj, context, false);
                }
            }
        }
        context.getWiki().getVersioningStore().deleteArchive(doc, false, context);

        session.delete(doc);

        // We need to ensure that the deleted document becomes the original document
        doc.setOriginalDocument(doc.clone());

        if (bTransaction) {
            endTransaction(context, true);
        }
    } catch (Exception e) {
        Object[] args = { doc.getDocumentReference() };
        throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                XWikiException.ERROR_XWIKI_STORE_HIBERNATE_DELETING_DOC,
                "Exception while deleting document {0}", e, args);
    } finally {
        try {
            if (bTransaction) {
                endTransaction(context, false);
            }
        } catch (Exception e) {
        }

        // End monitoring timer
        if (monitor != null) {
            monitor.endTimer("hibernate");
        }
    }
}

From source file: com.xpn.xwiki.store.XWikiHibernateStoreLocksTest.java

License: Open Source License

@Before
public void configure() throws Exception {
    // Needed because XHS has initializers which depend on Utils.
    Utils.setComponentManager(this.getComponentManager());

    final ObservationManager om = this.getComponentManager().getInstance(ObservationManager.class);
    this.observationManager = om;
    this.getMockery().checking(new Expectations() {
        {
            oneOf(om).addListener(with(new BaseMatcher<EventListener>() {
                @Override
                public void describeTo(final Description d) {
                    d.appendText("See if the listener is a deleteLocksOnLogoutListener.");
                }

                @Override
                public boolean matches(final Object o) {
                    return ((EventListener) o).getName().equals("deleteLocksOnLogoutListener");
                }
            }));
            will(new CustomAction("grab the EventListener so it can be called") {
                @Override
                public Object invoke(org.jmock.api.Invocation invocation) throws Exception {
                    listener[0] = (EventListener) invocation.getParameter(0);
                    return null;
                }
            });
        }
    });

    final HibernateSessionFactory xhsf = this.getComponentManager().getInstance(HibernateSessionFactory.class);
    final SessionFactory hsf = this.getMockery().mock(SessionFactory.class, "hsf");
    final Session session = this.getMockery().mock(org.hibernate.classic.Session.class);
    this.getMockery().checking(new Expectations() {
        {
            oneOf(xhsf).getSessionFactory();
            will(returnValue(hsf));
            oneOf(hsf).openSession();
            will(returnValue(session));
        }
    });

    final Query mockQuery = this.getMockery().mock(Query.class);
    final Transaction mockTransaction = this.getMockery().mock(Transaction.class);
    this.getMockery().checking(new Expectations() {
        {
            exactly(2).of(session).setFlushMode(FlushMode.COMMIT);
            oneOf(session).createQuery("delete from XWikiLock as lock where lock.userName=:userName");
            will(returnValue(mockQuery));
            oneOf(mockQuery).setString("userName", "XWiki.LoggerOutter");
            oneOf(mockQuery).executeUpdate();
            oneOf(session).beginTransaction();
            will(returnValue(mockTransaction));
            oneOf(mockTransaction).commit();
            oneOf(session).close();
        }
    });

    // setDatabase() is called for each transaction and that calls checkDatabase().
    final DataMigrationManager dmm = this.getComponentManager().getInstance(DataMigrationManager.class,
            "hibernate");
    this.getMockery().checking(new Expectations() {
        {
            oneOf(dmm).checkDatabase();
        }
    });

    // initialize() gets the xcontext from the execution then uses that
    // to get the path to the hibernate.cfg.xml
    this.getMockery().setImposteriser(ClassImposteriser.INSTANCE);
    final Execution exec = this.getComponentManager().getInstance(Execution.class);
    final ExecutionContext execCtx = this.getMockery().mock(ExecutionContext.class);
    final XWikiContext xc = new XWikiContext();
    xc.setWiki(this.getMockery().mock(XWiki.class));
    this.getMockery().checking(new Expectations() {
        {
            oneOf(exec).getContext();
            will(returnValue(execCtx));
            oneOf(execCtx).getProperty("xwikicontext");
            will(returnValue(xc));
            oneOf(xc.getWiki()).Param(with("xwiki.store.hibernate.path"), with(any(String.class)));
            will(returnValue("unimportant"));
        }
    });

    this.xhs = getComponentManager().getInstance(XWikiStoreInterface.class, "hibernate");
}

From source file: com.yahoo.elide.datastores.hibernate3.HibernateTransaction.java

License: Apache License

@Override
public void flush(RequestScope requestScope) {
    try {
        deferredTasks.forEach(Runnable::run);
        deferredTasks.clear();
        if (session.getFlushMode() != FlushMode.COMMIT && session.getFlushMode() != FlushMode.NEVER) {
            session.flush();
        }
    } catch (HibernateException e) {
        throw new TransactionException(e);
    }
}

From source file: com.yahoo.elide.datastores.hibernate5.HibernateTransaction.java

License: Apache License

@Override
public void flush(RequestScope requestScope) {
    try {
        deferredTasks.forEach(Runnable::run);
        deferredTasks.clear();
        FlushMode flushMode = session.getFlushMode();
        if (flushMode != FlushMode.COMMIT && flushMode != FlushMode.MANUAL) {
            session.flush();
        }
    } catch (HibernateException e) {
        throw new TransactionException(e);
    }
}

From source file: de.escidoc.core.common.persistence.hibernate.CustomHibernateSessionFilter.java

License: Open Source License

@Override
protected Session getSession(final SessionFactory sessionFactory) {
    final Session session = super.getSession(sessionFactory);
    session.setFlushMode(FlushMode.COMMIT);
    return session;
}

From source file: de.innovationgate.webgate.api.jdbc.WGDatabaseImpl.java

License: Open Source License

/**
 * @throws WGUnavailableException
 * @throws WGAPIException 
 * @see de.innovationgate.webgate.api.WGDatabaseCore#openSession(String,
 *      String)
 */
public WGUserAccess openSession(AuthenticationSession authSession, Object pwd, boolean master)
        throws WGAPIException {

    try {

        // Hibernate login
        Session session = _sessionBuilder.openSession();
        // Connection conn = session.connection();
        // conn.setAutoCommit(true); //Problematic with DBCP?
        session.setFlushMode(FlushMode.COMMIT);
        if (_saveIsolationActive) {
            session.setDefaultReadOnly(true);
        }
        getSessionStatus().setSession(session);

        if (!session.isOpen()) {
            throw new WGUnavailableException(_db, "Unable to connect to hibernate session");
        }

        // special handling if loadbalancing is enabled
        if (hasFeature(WGDatabase.FEATURE_LOADBALANCE)) {

            // set all connections to readonly except master sessions and if
            // update is in progress
            final boolean readOnly = (master ? false : !isUpdateInProgress(authSession.getDistinguishedName()));

            try {
                session.doWork(new Work() {

                    public void execute(Connection connection) throws SQLException {
                        connection.setReadOnly(readOnly);
                    }
                });
            } catch (HibernateException e) {
                throw new WGBackendException("Unable to set readonly flag on connection.", e);
            }
        }

        if (getTransactionMode() != WGSessionContext.TRANSACTION_MODE_MANUAL) {
            session.beginTransaction();
        }

        if (master) {
            // Master login always has manager access
            return new WGUserAccess(WGDatabase.MASTER_USERNAME, WGDatabase.ACCESSLEVEL_MANAGER);
        }

        // Determine access
        WGUserDetails userDetails;
        try {
            userDetails = _db.defaultBuildUserDetails(authSession);
        } catch (WGBackendException e) {
            try {
                closeSession();
            } catch (WGBackendException e1) {
                WGFactory.getLogger().error(e1);
            }
            throw e;
        }
        if (userDetails.getAccessLevel() <= WGDatabase.ACCESSLEVEL_NOACCESS) {
            try {
                closeSession();
            } catch (WGBackendException e) {
                WGFactory.getLogger().error(e);
            }
        }

        return userDetails;

    } catch (HibernateException e) {
        try {
            closeSession();
        } catch (WGBackendException e1) {
            WGFactory.getLogger().error(e1);
        }
        throw new WGUnavailableException(_db, "Error opening hibernate session", e);
    }

}

From source file: de.iteratec.iteraplan.persistence.elasticeam.model.diff.EMFModelWriter.java

License: Open Source License

/**
 * In case of persisting changes to the iteraplan db, a new transaction should be created before applying any changes
 */
private void beginTransation() {
    if (session == null && transaction == null) {
        //TODO remove context.getBean(...)
        SessionFactory sessionFactory = (SessionFactory) DefaultSpringApplicationContext
                .getSpringApplicationContext().getBean("sessionFactory");
        this.session = sessionFactory.getCurrentSession();
        session.setFlushMode(FlushMode.COMMIT);
        if (!session.isOpen()) {
            session = sessionFactory.openSession();
        }
        this.transaction = this.session.beginTransaction();
    } else {
        throw new IteraplanTechnicalException(IteraplanErrorMessages.GENERAL_TECHNICAL_ERROR,
                "Cannot begin new transaction while there is another active one alive");
    }

}

From source file: edu.duke.cabig.c3pr.service.impl.ScheduledNotificationServiceImpl.java

License: BSD License

public synchronized Integer saveScheduledNotification(PlannedNotification plannedNotification,
        String composedMessage, List<StudyOrganization> ssList, String eventId) {
    log.debug(this.getClass().getName() + ": Entering saveScheduledNotification()");
    ScheduledNotification scheduledNotification = null;

    //Creating a new session to save the scheduled notifications to avoid conflicts with the
    //CurrentSession (whose flush initiated this interceptor call in the first place).
    SessionFactory sessionFactory = (SessionFactory) applicationContext.getBean("notificationSessionFactory");
    Session session = sessionFactory.openSession();
    session.setFlushMode(FlushMode.COMMIT);
    try {
        session.update(plannedNotification);

        // for updating master subject notification event, planned notification is not associated to a healthcare site
        if (plannedNotification.getHealthcareSite() != null) {
            session.update(plannedNotification.getHealthcareSite());
        }
        //generating and saving the ScheduledNotification
        scheduledNotification = addScheduledNotification(plannedNotification, composedMessage, ssList, eventId);
        session.saveOrUpdate(plannedNotification);
        session.flush();
    } catch (Exception e) {
        log.error(e.getMessage());
    } finally {
        session.close();
    }
    log.debug(this.getClass().getName() + ": Exiting saveScheduledNotification()");
    if (scheduledNotification != null) {
        return scheduledNotification.getId();
    } else {
        log.error(this.getClass().getName()
                + "saveScheduledNotification(): ScheduledNotification was not saved successfully");
        return 0;
    }
}

From source file: edu.nps.moves.mmowgli.hibernate.HSess.java

License: Open Source License

public static void init() {
    if (get() != null) {
        dumpPreviousCallerTrace();
        repair(); // closes after dumping stack in sys out
    }

    Session s = VHib.openSession();
    s.setFlushMode(FlushMode.COMMIT);
    s.beginTransaction();
    s.getTransaction().setTimeout(HIBERNATE_TRANSACTION_TIMEOUT_IN_SECONDS);

    MSysOut.println(HIBERNATE_LOGS, "HSess.open() of sess " + s.hashCode());
    set(s);
}