Example usage for org.hibernate Session setFlushMode

List of usage examples for org.hibernate Session setFlushMode

Introduction

On this page you can find example usage for org.hibernate Session setFlushMode.

Prototype

@Deprecated
void setFlushMode(FlushMode flushMode);

Document

Set the flush mode for this session.
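
This overload has been deprecated since Hibernate ORM 5.2, where Session also exposes the JPA setFlushMode(FlushModeType) inherited from EntityManager; setHibernateFlushMode(FlushMode) is the non-deprecated replacement. Below is a minimal sketch of how the call is typically used, assuming Hibernate 5.2+; the class name FlushModeSketch, the runBatch method, and the sessionFactory parameter are illustrative only and do not come from the examples on this page.

import org.hibernate.FlushMode;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

public class FlushModeSketch {

    public static void runBatch(SessionFactory sessionFactory) {
        Session session = sessionFactory.openSession();
        try {
            // Deprecated form, still used throughout the examples below
            session.setFlushMode(FlushMode.COMMIT);

            // Non-deprecated equivalent on Hibernate 5.2+
            // session.setHibernateFlushMode(FlushMode.COMMIT);

            // Flush modes seen in the examples on this page:
            //   AUTO   - flush before queries when needed (the default)
            //   COMMIT - flush only when the transaction commits
            //   MANUAL - flush only when flush() is called explicitly
            //   ALWAYS - flush before every query
            session.beginTransaction();
            // ... load and modify entities; with COMMIT nothing is written until commit()
            session.getTransaction().commit();
        } finally {
            session.close();
        }
    }
}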

Usage

From source file: griffon.plugins.hibernate3.internal.HibernateSchemaHelper.java

License: Apache License

public void updateDatabaseSchema() {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Updating database schema for Hibernate SessionFactory");
    }
    execute(new HibernateCallback() {
        public void doInSession(Session session) throws SQLException {
            session.setFlushMode(FlushMode.AUTO);
            Connection con = session.connection();
            Dialect dialect = Dialect.getDialect(getConfiguration().getProperties());
            DatabaseMetadata metadata = new DatabaseMetadata(con, dialect);
            String[] sql = getConfiguration().generateSchemaUpdateScript(dialect, metadata);
            executeSchemaScript(con, sql);
        }
    });
}

From source file: lucee.runtime.orm.hibernate.HibernateORMSession.java

License: Open Source License

void createSession(SessionFactory factory, DatasourceConnection dc) {
    Session session;
    _sessions.put(KeyImpl.init(dc.getDatasource().getName()),
            session = factory.openSession(dc.getConnection()));
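    // MANUAL: never flush this session automatically; the caller flushes explicitly when needed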
    session.setFlushMode(FlushMode.MANUAL);
}

From source file: nl.strohalm.cyclos.struts.CyclosRequestProcessor.java

License: Open Source License

private void openReadOnlyConnection(final HttpServletRequest request) {
    if (noTransaction(request)) {
        return;
    }
    logDebug(request, "Opening read-only transaction for include");

    final Connection connection = (Connection) TransactionSynchronizationManager
            .getResource(connectionProvider);

    final SessionHolder holder = (SessionHolder) TransactionSynchronizationManager.getResource(sessionFactory);
    final Session session = holder.getSession();
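    // Read-only include: no automatic flushing, and entities are loaded read-only (no dirty checking)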
    session.setFlushMode(FlushMode.MANUAL);
    session.setDefaultReadOnly(true);
    session.reconnect(connection);

    TransactionSynchronizationManager.setCurrentTransactionReadOnly(true);
}

From source file: org.agnitas.web.filter.OpenSessionInViewFilter.java

License: Open Source License

protected org.hibernate.Session getSession(org.hibernate.SessionFactory sessionFactory)
        throws org.springframework.dao.DataAccessResourceFailureException {
    Session aSession = org.springframework.orm.hibernate3.SessionFactoryUtils.getSession(sessionFactory, true);
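    // ALWAYS: flush the session before every query executed during the request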
    aSession.setFlushMode(FlushMode.ALWAYS);

    return aSession;
}

From source file: org.alfresco.repo.workflow.jbpm.JBPMEngine.java

License: Open Source License

@SuppressWarnings("unchecked")
public List<WorkflowInstance> cancelWorkflows(final List<String> workflowIds) {
    return (List<WorkflowInstance>) jbpmTemplate.execute(new JbpmCallback() {
        public Object doInJbpm(JbpmContext context) {
            // Bypass the cache making sure not to flush it
            Session session = context.getSession();
            CacheMode cacheMode = session.getCacheMode();
            FlushMode flushMode = session.getFlushMode();
            session.setCacheMode(CacheMode.GET);
            session.setFlushMode(FlushMode.MANUAL);
            try {
                List<WorkflowInstance> workflowInstances = new ArrayList<WorkflowInstance>(workflowIds.size());
                Map<String, ProcessInstance> processInstances = new HashMap<String, ProcessInstance>(
                        workflowIds.size() * 2);
                GraphSession graphSession = context.getGraphSession();

                // retrieve and cancel process instances
                for (String workflowId : workflowIds) {
                    try {
                        ProcessInstance processInstance = getProcessInstance(graphSession, workflowId);
                        processInstance.getContextInstance().setVariable("cancelled", true);
                        processInstance.end();
                        processInstances.put(workflowId, processInstance);
                    } catch (JbpmException e) {
                        String msg = messageService.getMessage(ERR_CANCEL_WORKFLOW, workflowId);
                        throw new WorkflowException(msg, JbpmAccessor.convertJbpmException(e));
                    }
                }

                // Flush at the end of the batch
                session.flush();

                for (String workflowId : workflowIds) {
                    try {
                        // retrieve process instance
                        ProcessInstance processInstance = processInstances.get(workflowId);
                        // TODO: Determine if this is the most appropriate way to cancel workflow...
                        //       It might be useful to record point at which it was cancelled etc
                        try {
                            workflowInstances.add(createWorkflowInstance(processInstance));
                        } catch (Exception ex) {
                            logger.warn("Unable to load workflow instance: '" + processInstance
                                    + "' due to exception.", ex);
                        }

                        // delete the process instance
                        graphSession.deleteProcessInstance(processInstance, true, true);
                    } catch (JbpmException e) {
                        String msg = messageService.getMessage(ERR_CANCEL_WORKFLOW, workflowId);
                        throw new WorkflowException(msg, JbpmAccessor.convertJbpmException(e));
                    }
                }

                // Flush at the end of the batch
                session.flush();
                return workflowInstances;
            } finally {
                session.setCacheMode(cacheMode);
                session.setFlushMode(flushMode);
            }
        }
    });
}

From source file: org.apereo.portal.events.handlers.db.JpaPortalEventStore.java

License: Apache License

@Override
@RawEventsTransactional
public boolean aggregatePortalEvents(DateTime startTime, DateTime endTime, int maxEvents,
        Function<PortalEvent, Boolean> handler) {
    final Session session = this.getEntityManager().unwrap(Session.class);
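    // COMMIT: defer flushing to transaction commit (or the explicit flush() calls below)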
    session.setFlushMode(FlushMode.COMMIT);
    final org.hibernate.Query query = session.createQuery(this.selectUnaggregatedQuery);
    query.setParameter(this.startTimeParameter.getName(), startTime);
    query.setParameter(this.endTimeParameter.getName(), endTime);
    if (maxEvents > 0) {
        query.setMaxResults(maxEvents);
    }

    int resultCount = 0;
    for (final ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY); results.next();) {
        final PersistentPortalEvent persistentPortalEvent = (PersistentPortalEvent) results.get(0);
        final PortalEvent portalEvent;
        try {
            portalEvent = this.toPortalEvent(persistentPortalEvent.getEventData(),
                    persistentPortalEvent.getEventType());
        } catch (RuntimeException e) {
            this.logger.warn("Failed to convert PersistentPortalEvent to PortalEvent: " + persistentPortalEvent,
                    e);

            //Mark the event as error and store the mark to prevent trying to reprocess the broken event data
            persistentPortalEvent.setErrorAggregating(true);
            session.persist(persistentPortalEvent);

            continue;
        }

        try {

            final Boolean eventHandled = handler.apply(portalEvent);
            if (!eventHandled) {
                this.logger.debug("Aggregation stop requested before processing event {}", portalEvent);
                return false;
            }

            //Mark the event as aggregated and store the mark
            persistentPortalEvent.setAggregated(true);
            session.persist(persistentPortalEvent);

            //periodic flush and clear of session to manage memory demands
            if (++resultCount % this.flushPeriod == 0) {
                this.logger.debug("Aggregated {} events, flush and clear {} EntityManager.", resultCount,
                        PERSISTENCE_UNIT_NAME);
                session.flush();
                session.clear();
            }

        } catch (Exception e) {
            this.logger.warn("Failed to aggregate portal event: " + persistentPortalEvent, e);
            //mark the event as erred and move on. This will not be picked up by processing again
            persistentPortalEvent.setErrorAggregating(true);
            session.persist(persistentPortalEvent);
        }
    }

    return true;
}

From source file: org.archiviststoolkit.mydomain.DomainAccessObjectImpl.java

License: Open Source License

private Collection findAllCommon(Session session, LockMode lockmode, String... sortFields)
        throws LookupException {
    List completeList;
    Transaction tx = null;
    try {
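        // MANUAL: this lookup only reads, so suppress automatic flushing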
        session.setFlushMode(FlushMode.MANUAL);

        tx = session.beginTransaction();
        Criteria criteria = session.createCriteria(getPersistentClass());
        for (String field : sortFields) {
            criteria.addOrder(Order.asc(field));
        }
        //         System.out.println("Find all: " + persistentClass.getName());
        //         if (lockmode != null) {
        //            criteria.setLockMode(lockmode);
        //            System.out.println("Setting lock mode: " + lockmode);
        //         }
        completeList = criteria.list();
        //         if (lockmode != null && lockmode == LockMode.READ) {
        //            System.out.println("Rollback because read only");
        //            session.connection().rollback();
        //         } else {
        //            session.flush();
        //            session.connection().commit();
        //         }
        //         session.flush();
        //         session.connection().commit();
        tx.commit();

    } catch (RuntimeException ex) {
        try {
            tx.rollback();
        } catch (HibernateException e) {
            //todo log error
        }
        throw new LookupException("failed to find all", ex);
    } finally {
        if (session != longSession) {
            session.close();
        }
    }

    return (completeList);
}

From source file: org.archiviststoolkit.mydomain.DomainAccessObjectImpl.java

License: Open Source License

/**
 * Return a collection which conforms to the named query.
 *
 * @param queryName the name of the query
 * @return the collection provided by the query
 * @throws LookupException fails if we cannot execute the query
 */

public final Collection findByNamedQuery(final String queryName) throws LookupException {
    List filteredList;

    Session session = SessionFactory.getInstance().openSession(getPersistentClass());

    try {
        //session.connection().setReadOnly(true);
        session.setFlushMode(FlushMode.MANUAL);

        Query query = session.getNamedQuery(queryName);

        filteredList = query.list();
        //session.flush();  Don't flush here this causes all sorts of hell on Oracle
        session.connection().commit();

    } catch (HibernateException hibernateException) {
        throw new LookupException("failed to findbynamedquery", hibernateException);
    } catch (SQLException sqlException) {
        throw new LookupException("failed to findbynamedquery", sqlException);
    }

    SessionFactory.getInstance().closeSession(session);

    return (filteredList);
}

From source file: org.babyfish.test.hibernate.model.setandref.DbTest.java

License: Open Source License

private static void testMergeDepartment(final boolean loadBeforeMerge) {
    Object[] arr = load();
    Action<Session> handler;
    final Department detachedDepartment1 = (Department) arr[0];
    final Department detachedDepartment2 = (Department) arr[1];
    final Employee detachedEmployee = (Employee) arr[2];

    handler = new Action<Session>() {
        @Override
        public void run(Session session) {

            session.setFlushMode(FlushMode.COMMIT);

            Department department1 = (Department) session.get(Department.class, 1L);
            Department department2 = (Department) session.get(Department.class, 2L);
            Employee employee = (Employee) session.get(Employee.class, 1L);

            detachedDepartment1.getEmployees().add(detachedEmployee);

            if (loadBeforeMerge) {
                assertCollection(department1.getEmployees());
                assertCollection(department2.getEmployees());
                assertReference(employee.getDepartment());
            }

            session.merge(detachedDepartment1);

            assertCollection(department1.getEmployees(), employee);
            assertCollection(department2.getEmployees());
            assertReference(employee.getDepartment(), department1);
        }
    };
    execute(handler);

    handler = new Action<Session>() {
        @Override
        public void run(Session session) {

            session.setFlushMode(FlushMode.COMMIT);

            Department department1 = (Department) session.get(Department.class, 1L);
            Department department2 = (Department) session.get(Department.class, 2L);
            Employee employee = (Employee) session.get(Employee.class, 1L);

            detachedDepartment2.getEmployees().add(detachedEmployee);

            if (loadBeforeMerge) {
                assertCollection(department1.getEmployees(), employee);
                assertCollection(department2.getEmployees());
                assertReference(employee.getDepartment(), department1);
            }

            session.merge(detachedDepartment2);

            assertCollection(department1.getEmployees());
            assertCollection(department2.getEmployees(), employee);
            assertReference(employee.getDepartment(), department2);
        }
    };
    execute(handler);

    handler = new Action<Session>() {
        @Override
        public void run(Session session) {

            session.setFlushMode(FlushMode.COMMIT);

            Department department1 = (Department) session.get(Department.class, 1L);
            Department department2 = (Department) session.get(Department.class, 2L);
            Employee employee = (Employee) session.get(Employee.class, 1L);

            detachedDepartment2.getEmployees().clear();

            if (loadBeforeMerge) {
                assertCollection(department1.getEmployees());
                assertCollection(department2.getEmployees(), employee);
                assertReference(employee.getDepartment(), department2);
            }

            session.merge(detachedDepartment2);

            assertCollection(department1.getEmployees());
            assertCollection(department2.getEmployees());
            assertReference(employee.getDepartment());
        }
    };
    execute(handler);
}

From source file: org.babyfish.test.hibernate.model.setandset.DbTest.java

License: Open Source License

private void testMergeStudent(final boolean loadBeforeMerge) {
    Object[] arr = load();
    final Student detachedStudent1 = (Student) arr[0];
    final Student detachedStudent2 = (Student) arr[1];
    final Course detachedCourse1 = (Course) arr[2];
    final Course detachedCourse2 = (Course) arr[3];

    Action<Session> handler;

    handler = new Action<Session>() {
        @Override
        public void run(Session session) {
            session.setFlushMode(FlushMode.COMMIT);

            Student student1 = (Student) session.get(Student.class, 1L);
            Student student2 = (Student) session.get(Student.class, 2L);
            Course course1 = (Course) session.get(Course.class, 1L);
            Course course2 = (Course) session.get(Course.class, 2L);
            detachedStudent1.getCourses().add(detachedCourse1);

            if (loadBeforeMerge) {
                assertCollection(student1.getCourses());
                assertCollection(student2.getCourses());
                assertCollection(course1.getStudents());
                assertCollection(course2.getStudents());
            }

            session.merge(detachedStudent1);

            assertCollection(student1.getCourses(), course1);
            assertCollection(student2.getCourses());
            assertCollection(course1.getStudents(), student1);
            assertCollection(course2.getStudents());
        }
    };
    execute(handler);

    handler = new Action<Session>() {
        @Override
        public void run(Session session) {
            session.setFlushMode(FlushMode.COMMIT);

            Student student1 = (Student) session.get(Student.class, 1L);
            Student student2 = (Student) session.get(Student.class, 2L);
            Course course1 = (Course) session.get(Course.class, 1L);
            Course course2 = (Course) session.get(Course.class, 2L);
            detachedStudent1.getCourses().add(detachedCourse2);

            if (loadBeforeMerge) {
                assertCollection(student1.getCourses(), course1);
                assertCollection(student2.getCourses());
                assertCollection(course1.getStudents(), student1);
                assertCollection(course2.getStudents());
            }

            session.merge(detachedStudent1);

            assertCollection(student1.getCourses(), course1, course2);
            assertCollection(student2.getCourses());
            assertCollection(course1.getStudents(), student1);
            assertCollection(course2.getStudents(), student1);
        }
    };
    execute(handler);

    handler = new Action<Session>() {
        @Override
        public void run(Session session) {
            session.setFlushMode(FlushMode.COMMIT);

            Student student1 = (Student) session.get(Student.class, 1L);
            Student student2 = (Student) session.get(Student.class, 2L);
            Course course1 = (Course) session.get(Course.class, 1L);
            Course course2 = (Course) session.get(Course.class, 2L);
            detachedStudent2.getCourses().add(detachedCourse1);

            if (loadBeforeMerge) {
                assertCollection(student1.getCourses(), course1, course2);
                assertCollection(student2.getCourses());
                assertCollection(course1.getStudents(), student1);
                assertCollection(course2.getStudents(), student1);
            }

            session.merge(detachedStudent2);

            assertCollection(student1.getCourses(), course1, course2);
            assertCollection(student2.getCourses(), course1);
            assertCollection(course1.getStudents(), student1, student2);
            assertCollection(course2.getStudents(), student1);
        }
    };
    execute(handler);

    handler = new Action<Session>() {
        @Override
        public void run(Session session) {
            session.setFlushMode(FlushMode.COMMIT);

            Student student1 = (Student) session.get(Student.class, 1L);
            Student student2 = (Student) session.get(Student.class, 2L);
            Course course1 = (Course) session.get(Course.class, 1L);
            Course course2 = (Course) session.get(Course.class, 2L);
            detachedStudent2.getCourses().add(detachedCourse2);

            if (loadBeforeMerge) {
                assertCollection(student1.getCourses(), course1, course2);
                assertCollection(student2.getCourses(), course1);
                assertCollection(course1.getStudents(), student1, student2);
                assertCollection(course2.getStudents(), student1);
            }

            session.merge(detachedStudent2);

            assertCollection(student1.getCourses(), course1, course2);
            assertCollection(student2.getCourses(), course1, course2);
            assertCollection(course1.getStudents(), student1, student2);
            assertCollection(course2.getStudents(), student1, student2);
        }
    };
    execute(handler);

    handler = new Action<Session>() {
        @Override
        public void run(Session session) {
            session.setFlushMode(FlushMode.COMMIT);

            Student student1 = (Student) session.get(Student.class, 1L);
            Student student2 = (Student) session.get(Student.class, 2L);
            Course course1 = (Course) session.get(Course.class, 1L);
            Course course2 = (Course) session.get(Course.class, 2L);
            detachedStudent1.getCourses().remove(detachedCourse1);

            if (loadBeforeMerge) {
                assertCollection(student1.getCourses(), course1, course2);
                assertCollection(student2.getCourses(), course1, course2);
                assertCollection(course1.getStudents(), student1, student2);
                assertCollection(course2.getStudents(), student1, student2);
            }

            session.merge(detachedStudent1);

            assertCollection(student1.getCourses(), course2);
            assertCollection(student2.getCourses(), course1, course2);
            assertCollection(course1.getStudents(), student2);
            assertCollection(course2.getStudents(), student1, student2);
        }
    };
    execute(handler);
}