Example usage for org.hibernate FlushMode MANUAL

List of usage examples for org.hibernate FlushMode MANUAL

Introduction

In this page you can find the example usage for org.hibernate FlushMode MANUAL.

Prototype

FlushMode MANUAL

To view the source code for org.hibernate FlushMode MANUAL, click the source link below.

Click Source Link

Document

The Session is only ever flushed when Session#flush is explicitly called by the application.

Usage

From source file:ome.server.utests.handlers.SessionHandlerMockHibernateTest.java

License:Open Source License

/**
 * Registers mock expectations that the session's flush mode will be set.
 * Called with no arguments, it expects the handler's default pair of
 * COMMIT then MANUAL (see ticket:557); otherwise it expects exactly the
 * supplied modes, each at least once.
 */
private void setsFlushMode(FlushMode... modes) {
    if (modes.length > 0) {
        for (FlushMode expected : modes) {
            expectFlushModeSet(expected);
        }
        return;
    }
    // Defaults applied by the handler; see ticket:557.
    expectFlushModeSet(FlushMode.COMMIT);
    expectFlushModeSet(FlushMode.MANUAL);
}

/** Expects one setFlushMode(mode) call on the mocked session. */
private void expectFlushModeSet(FlushMode mode) {
    mockSession.expects(atLeastOnce()).method("setFlushMode").with(eq(mode));
}

From source file:ome.services.db.SearchTest.java

License:Open Source License

/**
 * Re-indexes every {@link Experimenter} through a full-text session whose
 * flush mode is MANUAL and cache mode IGNORE, clearing the first-level
 * cache every batch, then verifies that querying for "root" by omeName
 * returns exactly one result.
 */
@Test
public void testReindexingExperimenter() throws Exception {
    final int batchSize = 10;

    HibernateTest ht = new HibernateTest();
    ht.setupSession();

    FullTextSession fullTextSession = Search.getFullTextSession(ht.s);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    Transaction transaction = fullTextSession.beginTransaction();
    // Scroll instead of list() so we never hold too many objects in memory.
    ScrollableResults results = fullTextSession.createCriteria(Experimenter.class)
            .scroll(ScrollMode.FORWARD_ONLY);
    int indexed = 0;
    while (results.next()) {
        indexed++;
        // Index each scrolled element individually.
        fullTextSession.index(results.get(0));
        if (indexed % batchSize == 0) {
            // Queue processed for this batch; release cached entities.
            ht.s.clear();
        }
    }
    ht.closeSession();

    ht.setupSession();
    List<Experimenter> list = query(ht, "root", Experimenter.class, "omeName");
    assertTrue(list.toString(), list.size() == 1);
    ht.closeSession();
}

From source file:ome.services.fulltext.FullTextIndexer.java

License:Open Source License

/**
 * Indexes objects batch-by-batch inside a Lucene transaction, repeating
 * {@link #doIndexingWithWorldRead} until {@link #doMore(int)} reports that
 * no further batches are needed, then logs a summary of the work done.
 */
@Transactional(readOnly = false, isolation = Isolation.SERIALIZABLE)
public Object doWork(Session session, ServiceFactory sf) {
    final long start = System.currentTimeMillis();
    int batch = 1;
    int lastBatchCount = 0;
    do {

        // ticket:1254 -
        // The following is non-portable and can later be refactored
        // for a more general solution.
        getSqlAction().deferConstraints();

        // s.execute("set statement_timeout=10000");
        // The Postgresql driver does not currently support the
        // "timeout" value on @Transactional, so if a query timeout
        // is required it must be set here instead.

        FullTextSession fullTextSession = Search.getFullTextSession(session);
        fullTextSession.setFlushMode(FlushMode.MANUAL);
        fullTextSession.setCacheMode(CacheMode.IGNORE);
        lastBatchCount = doIndexingWithWorldRead(sf, fullTextSession);
        batch++;
    } while (doMore(batch));
    if (lastBatchCount > 0) {
        log.info(String.format("INDEXED %s objects in %s batch(es) [%s ms.]", lastBatchCount, (batch - 1),
                (System.currentTimeMillis() - start)));
    } else {
        log.debug("No objects indexed");
    }
    return null;
}

From source file:ome.tools.hibernate.SessionStatus.java

License:Open Source License

/**
 * Runs a stateful service invocation with a Hibernate session whose flush
 * mode is pinned to COMMIT for the duration of the call. In the finally
 * block the session is either scheduled for close (when the invoked method
 * closes the service) or reset to MANUAL flush mode and disconnected, and
 * the thread-bound session is always cleaned up last.
 *
 * @param invocation the intercepted stateful service call to proceed with
 * @return whatever the underlying invocation returns
 * @throws Throwable anything thrown by the invocation or by cleanup
 */
private Object doStateful(final MethodInvocation invocation) throws Throwable {
    Object result = null;
    SessionStatus status = null;
    try {
        // Need to open even if "closing" because the service may need
        // to perform cleanup in its close() method.
        status = newOrRestoredSession(invocation);
        status.session.setFlushMode(FlushMode.COMMIT);
        // changing MANUAL to COMMIT for ticket:557. the appserver
        // won't allow us to commit here anyway, and setting to COMMIT
        // prevents Spring from automatically re-writing the flushMode
        // as AUTO
        result = invocation.proceed();
        return result;
    } finally {
        // TODO do we need to check for disconnected or closed session here?
        // The newOrRestoredSession method does not attempt to close the
        // session before throwing the dirty session exception. We must do
        // it here.
        try {
            if (isCloseSession(invocation)) {
                // Service is being closed: defer disconnect/close to a
                // cleanup message handled later by the context.
                ctx.publishMessage(new RegisterServiceCleanupMessage(this, invocation.getThis()) {
                    @Override
                    public void close() {
                        SessionStatus status = removeStatus(invocation);
                        status.session.disconnect();
                        status.session.close();
                    }

                });
            } else {
                // status may be null if newOrRestoredSession threw above.
                if (status != null) {
                    // Guarantee that no one has changed the FlushMode
                    status.session.setFlushMode(FlushMode.MANUAL);
                    status.session.disconnect();
                    status.calls--;
                }
            }
        } catch (Exception e) {

            log.error("Error while closing/disconnecting session.", e);

        } finally {

            // Always unbind the thread-local session, even if the
            // disconnect above failed.
            try {
                resetThreadSession();
            } catch (Exception e) {
                log.error("Could not cleanup thread session.", e);
                throw e;
            }

        }

    }
}

From source file:org.alfresco.repo.workflow.jbpm.JBPMEngine.java

License:Open Source License

/**
 * Cancels the given workflows in two flushed batches: first each process
 * instance is ended with a "cancelled" variable set, then each is converted
 * to a {@link WorkflowInstance} and deleted. The Hibernate session's cache
 * mode (GET) and flush mode (MANUAL) are overridden for the duration so the
 * cache is bypassed without being flushed, and the original modes are
 * restored in the finally block.
 *
 * @param workflowIds ids of the workflows to cancel
 * @return the cancelled workflow instances (instances that fail to convert
 *         are logged and skipped)
 */
@SuppressWarnings("unchecked")
public List<WorkflowInstance> cancelWorkflows(final List<String> workflowIds) {
    return (List<WorkflowInstance>) jbpmTemplate.execute(new JbpmCallback() {
        public Object doInJbpm(JbpmContext context) {
            // Bypass the cache making sure not to flush it
            Session session = context.getSession();
            CacheMode cacheMode = session.getCacheMode();
            FlushMode flushMode = session.getFlushMode();
            session.setCacheMode(CacheMode.GET);
            session.setFlushMode(FlushMode.MANUAL);
            try {
                List<WorkflowInstance> workflowInstances = new ArrayList<WorkflowInstance>(workflowIds.size());
                Map<String, ProcessInstance> processInstances = new HashMap<String, ProcessInstance>(
                        workflowIds.size() * 2);
                GraphSession graphSession = context.getGraphSession();

                // retrieve and cancel process instances
                for (String workflowId : workflowIds) {
                    try {
                        ProcessInstance processInstance = getProcessInstance(graphSession, workflowId);
                        processInstance.getContextInstance().setVariable("cancelled", true);
                        processInstance.end();
                        processInstances.put(workflowId, processInstance);
                    } catch (JbpmException e) {
                        String msg = messageService.getMessage(ERR_CANCEL_WORKFLOW, workflowId);
                        throw new WorkflowException(msg, JbpmAccessor.convertJbpmException(e));
                    }
                }

                // Flush at the end of the batch
                session.flush();

                for (String workflowId : workflowIds) {
                    try {
                        // retrieve process instance
                        ProcessInstance processInstance = processInstances.get(workflowId);
                        // TODO: Determine if this is the most appropriate way to cancel workflow...
                        //       It might be useful to record point at which it was cancelled etc
                        try {
                            workflowInstances.add(createWorkflowInstance(processInstance));
                        } catch (Exception ex) {
                            // Conversion failure is non-fatal: log and continue.
                            logger.warn("Unable to load workflow instance: '" + processInstance
                                    + "' due to exception.", ex);
                        }

                        // delete the process instance
                        graphSession.deleteProcessInstance(processInstance, true, true);
                    } catch (JbpmException e) {
                        String msg = messageService.getMessage(ERR_CANCEL_WORKFLOW, workflowId);
                        throw new WorkflowException(msg, JbpmAccessor.convertJbpmException(e));
                    }
                }

                // Flush at the end of the batch
                session.flush();
                return workflowInstances;
            } finally {
                // Restore the caller's cache and flush modes.
                session.setCacheMode(cacheMode);
                session.setFlushMode(flushMode);
            }
        }
    });
}

From source file:org.alfresco.repo.workflow.jbpm.JBPMEngine.java

License:Open Source License

/**
 * Loads the TokenVariableMaps for the given context instance ids with a
 * single named query and stores each in the supplied cache, keyed by the
 * id of its token.
 */
@SuppressWarnings({ "unchecked", "cast" })
private void cacheVariablesNoBatch(Session session, List<Long> contextInstanceIds,
        Map<Long, TokenVariableMap> variablesCache) {
    Query query = session.getNamedQuery("org.alfresco.repo.workflow.cacheInstanceVariables");
    query.setParameterList("ids", contextInstanceIds);
    // Write results into the cache without triggering a session flush.
    query.setFlushMode(FlushMode.MANUAL);
    query.setCacheMode(CacheMode.PUT);
    query.setCacheable(true);

    List<TokenVariableMap> variableMaps = (List<TokenVariableMap>) query.list();
    for (TokenVariableMap variableMap : variableMaps) {
        variablesCache.put(variableMap.getToken().getId(), variableMap);
    }
}

From source file:org.archiviststoolkit.mydomain.DomainAccessObjectImpl.java

License:Open Source License

/**
 * Loads every instance of this DAO's persistent class, ordered ascending
 * by the given fields, inside a transaction on a manually-flushed session.
 *
 * @param session    the session to query; closed on exit unless it is the
 *                   long-lived session
 * @param lockmode   currently unused (lock-mode handling was disabled;
 *                   kept for interface compatibility)
 * @param sortFields fields to order by, ascending, in the given order
 * @return all instances of the persistent class
 * @throws LookupException if the query fails
 */
private Collection findAllCommon(Session session, LockMode lockmode, String... sortFields)
        throws LookupException {
    List completeList;
    Transaction tx = null;
    try {
        // Read-only lookup: prevent auto-flush of pending changes.
        session.setFlushMode(FlushMode.MANUAL);

        tx = session.beginTransaction();
        Criteria criteria = session.createCriteria(getPersistentClass());
        for (String field : sortFields) {
            criteria.addOrder(Order.asc(field));
        }
        completeList = criteria.list();
        tx.commit();

    } catch (RuntimeException ex) {
        // Bug fix: tx is still null when beginTransaction() (or the
        // setFlushMode call) threw; the old unconditional tx.rollback()
        // raised an NPE that the HibernateException handler did not catch,
        // masking the original failure.
        if (tx != null) {
            try {
                tx.rollback();
            } catch (HibernateException e) {
                //todo log error
            }
        }
        throw new LookupException("failed to find all", ex);
    } finally {
        if (session != longSession) {
            session.close();
        }
    }

    return (completeList);
}

From source file:org.archiviststoolkit.mydomain.DomainAccessObjectImpl.java

License:Open Source License

/**
 * Return a collection which conforms to the named query.
 *
 * @param queryName the name of the query
 * @return the collection provided by the query
 * @throws LookupException fails if we cannot execute the query
 */
public final Collection findByNamedQuery(final String queryName) throws LookupException {
    List filteredList;

    Session session = SessionFactory.getInstance().openSession(getPersistentClass());

    try {
        // Read-only lookup: prevent auto-flush of pending changes.
        session.setFlushMode(FlushMode.MANUAL);

        Query query = session.getNamedQuery(queryName);

        filteredList = query.list();
        // Don't flush here: doing so causes all sorts of problems on Oracle.
        session.connection().commit();

    } catch (HibernateException hibernateException) {
        throw new LookupException("failed to findbynamedquery", hibernateException);
    } catch (SQLException sqlException) {
        throw new LookupException("failed to findbynamedquery", sqlException);
    } finally {
        // Bug fix: the session was previously closed only on the success
        // path, leaking it whenever the query or commit threw. Always
        // release it.
        SessionFactory.getInstance().closeSession(session);
    }

    return (filteredList);
}

From source file:org.babyfish.hibernate.collection.spi.persistence.MapBasePersistence.java

License:Open Source License

/**
 * Reads the element mapped to {@code key} directly from the database via a
 * criteria query, without initializing the whole collection and without
 * flushing the session (flush mode is temporarily forced to MANUAL and
 * restored afterwards).
 *
 * @param key the map key to look up; must not be null
 * @return a Ref wrapping the value read (possibly wrapping null), or null
 *         when no open, connected session is available
 */
@SuppressWarnings("unchecked")
public Ref<V> visionallyRead(K key) {

    Arguments.mustNotBeNull("key", key);
    String role = this.getNonNullRole();

    SessionImplementor session = this.getSession();
    if (session == null || !session.isOpen() || !session.isConnected()) {
        // No usable session: a database read is impossible.
        return null;
    }

    SessionFactoryImplementor sessionFactory = session.getFactory();
    QueryableCollection collection = (QueryableCollection) sessionFactory.getCollectionPersister(role);
    EntityPersister elementPersister = collection.getElementPersister();

    // Index columns identify the map key; fall back to index formulas when
    // the first column name is absent.
    String[] indexNames = collection.getIndexColumnNames();
    if (indexNames == null || indexNames[0] == null) {
        indexNames = collection.getIndexFormulas();
    }
    CriteriaImpl criteria = new CriteriaImpl(elementPersister.getEntityName(), session);

    //ownerKey, not ownerId
    Object ownerKey = collection.getCollectionType().getKeyOfOwner(this.getOwner(), session);
    //In Hibernate, isOneToMany means that there is no middle table
    //The @OneToMany of JPA with middle table is consider as many-to-many in Hibernate
    if (sessionFactory.getCollectionPersister(role).isOneToMany()) {
        // No middle table: restrict directly on the element table's
        // owner-key columns and index columns.
        String[] joinOwnerColumns = collection.getKeyColumnNames();
        StringBuilder sqlBuilder = new StringBuilder();
        for (int i = 0; i < joinOwnerColumns.length; i++) {
            if (i != 0) {
                sqlBuilder.append(" and ");
            }
            sqlBuilder.append("{alias}.").append(joinOwnerColumns[i]).append(" = ?");
        }
        criteria.add(Restrictions.sqlRestriction(sqlBuilder.toString(), ownerKey, collection.getKeyType()));

        sqlBuilder = new StringBuilder();
        for (int i = 0; i < indexNames.length; i++) {
            if (i != 0) {
                sqlBuilder.append(" and ");
            }
            sqlBuilder.append("{alias}.").append(indexNames[i]).append(" = ?");
        }
        criteria.add(Restrictions.sqlRestriction(sqlBuilder.toString(), key, collection.getIndexType()));
    } else {
        // Middle table present: build an EXISTS sub-query joining the
        // element table to the association table on the LHS columns.
        String lhsPropertyName = collection.getCollectionType().getLHSPropertyName();
        int lhsPropertyIndex = -1;
        if (lhsPropertyName != null) {
            String[] propertyNames = collection.getOwnerEntityPersister().getPropertyNames();
            for (int i = propertyNames.length - 1; i >= 0; i--) {
                if (propertyNames[i].equals(lhsPropertyName)) {
                    lhsPropertyIndex = i;
                    break;
                }
            }
        }
        String[] lhsColumnNames = JoinHelper.getLHSColumnNames(collection.getCollectionType(), lhsPropertyIndex,
                (OuterJoinLoadable) elementPersister, sessionFactory);
        String[] joinElementColumnNames = collection.getElementColumnNames();
        String[] joinOwnerColumnNames = collection.getKeyColumnNames();

        StringBuilder subQueryBuilder = new StringBuilder();
        subQueryBuilder.append("exists(select * from ").append(collection.getTableName()).append(" as ")
                .append(MIDDLE_TABLE_ALIAS).append(" where ");
        for (int i = 0; i < joinElementColumnNames.length; i++) {
            if (i != 0) {
                subQueryBuilder.append(" and ");
            }
            subQueryBuilder.append("{alias}.").append(lhsColumnNames[i]).append(" = ")
                    .append(MIDDLE_TABLE_ALIAS).append(".").append(joinElementColumnNames[i]);
        }
        for (int i = 0; i < joinOwnerColumnNames.length; i++) {
            subQueryBuilder.append(" and ").append(MIDDLE_TABLE_ALIAS).append('.')
                    .append(joinOwnerColumnNames[i]).append(" = ?");
        }
        for (int i = 0; i < indexNames.length; i++) {
            subQueryBuilder.append(" and ").append(MIDDLE_TABLE_ALIAS).append('.').append(indexNames[i])
                    .append(" = ?");
        }
        subQueryBuilder.append(')');
        criteria.add(Restrictions.sqlRestriction(subQueryBuilder.toString(), new Object[] { ownerKey, key },
                new Type[] { collection.getKeyType(), collection.getIndexType() }));
    }
    // Query without flushing pending changes; restore the caller's flush
    // mode afterwards.
    FlushMode oldFlushMode = session.getFlushMode();
    session.setFlushMode(FlushMode.MANUAL);
    try {
        return new Ref<V>((V) criteria.uniqueResult());
    } finally {
        session.setFlushMode(oldFlushMode);
    }
}

From source file:org.babyfish.hibernate.collection.spi.persistence.SetBasePersistence.java

License:Open Source License

/**
 * This method is used to replace
 * "org.hibernate.collection.AbstractPersistentCollection#readElementExistence(Object element)".
 * Reads the given element directly from the database via a criteria query,
 * without initializing the whole collection and without flushing the
 * session (flush mode is temporarily forced to MANUAL and restored).
 *
 * @param element The example element to be read; must not be null
 * @return The ref or read element
 * <ul>
 *  <li>NonNull: Read successfully, check the value of ref to check the read value is null or not</li>
 *  <li>Null: Read failed</li>
 * </ul>
 */
@SuppressWarnings("unchecked")
public Ref<E> visionallyRead(E element) {

    Arguments.mustNotBeNull("element", element);
    String role = this.getNonNullRole();

    SessionImplementor session = this.getSession();
    if (session == null || !session.isOpen() || !session.isConnected()) {
        // No usable session: a database read is impossible.
        return null;
    }

    SessionFactoryImplementor sessionFactory = session.getFactory();
    QueryableCollection collection = (QueryableCollection) sessionFactory.getCollectionPersister(role);
    EntityPersister elementPersister = collection.getElementPersister();
    Object elementId = elementPersister.getIdentifier(element, this.getSession());
    if (elementId == null) {
        // No identifier: element cannot exist in the database.
        return new Ref<>();
    }
    if (elementPersister.getEntityMetamodel().getIdentifierProperty().getUnsavedValue()
            .isUnsaved((Serializable) elementId)) {
        // Transient (unsaved) element: nothing to read.
        return new Ref<>();
    }

    CriteriaImpl criteria = new CriteriaImpl(elementPersister.getEntityName(), session);

    /*
     * Add the condition of element.
     */
    criteria.add(Restrictions.idEq(elementId));

    //ownerKey, not ownerId
    Object ownerKey = collection.getCollectionType().getKeyOfOwner(this.getOwner(), session);
    //In Hibernate, isOneToMany means that there is no middle table
    //The @OneToMany of JPA with middle table is consider as many-to-many in Hibernate
    if (sessionFactory.getCollectionPersister(role).isOneToMany()) {
        // No middle table: restrict directly on the element table's
        // owner-key columns.
        String[] joinOwnerColumns = collection.getKeyColumnNames();
        StringBuilder sqlBuilder = new StringBuilder();
        for (int i = 0; i < joinOwnerColumns.length; i++) {
            if (i != 0) {
                sqlBuilder.append(" and ");
            }
            sqlBuilder.append("{alias}.").append(joinOwnerColumns[i]).append(" = ?");
        }
        criteria.add(Restrictions.sqlRestriction(sqlBuilder.toString(), ownerKey, collection.getKeyType()));
    } else {
        // Middle table present: build an EXISTS sub-query joining the
        // element table to the association table on the LHS columns.
        String lhsPropertyName = collection.getCollectionType().getLHSPropertyName();
        int lhsPropertyIndex = -1;
        if (lhsPropertyName != null) {
            String[] propertyNames = collection.getOwnerEntityPersister().getPropertyNames();
            for (int i = propertyNames.length - 1; i >= 0; i--) {
                if (propertyNames[i].equals(lhsPropertyName)) {
                    lhsPropertyIndex = i;
                    break;
                }
            }
        }
        String[] lhsColumnNames = JoinHelper.getLHSColumnNames(collection.getCollectionType(), lhsPropertyIndex,
                (OuterJoinLoadable) elementPersister, sessionFactory);
        String[] joinElementColumnNames = collection.getElementColumnNames();
        String[] joinOwnerColumnNames = collection.getKeyColumnNames();
        StringBuilder subQueryBuilder = new StringBuilder();
        subQueryBuilder.append("exists(select * from ").append(collection.getTableName()).append(" as ")
                .append(MIDDLE_TABLE_ALIAS).append(" where ");
        for (int i = 0; i < joinElementColumnNames.length; i++) {
            if (i != 0) {
                subQueryBuilder.append(" and ");
            }
            subQueryBuilder.append("{alias}.").append(lhsColumnNames[i]).append(" = ")
                    .append(MIDDLE_TABLE_ALIAS).append('.').append(joinElementColumnNames[i]);
        }
        for (int i = 0; i < joinOwnerColumnNames.length; i++) {
            subQueryBuilder.append(" and ").append(MIDDLE_TABLE_ALIAS).append(".")
                    .append(joinOwnerColumnNames[i]).append(" = ?");
        }
        subQueryBuilder.append(')');
        criteria.add(
                Restrictions.sqlRestriction(subQueryBuilder.toString(), ownerKey, collection.getKeyType()));
    }
    // Query without flushing pending changes; restore the caller's flush
    // mode afterwards.
    FlushMode oldFlushMode = session.getFlushMode();
    session.setFlushMode(FlushMode.MANUAL);
    try {
        return new Ref<>((E) criteria.uniqueResult());
    } finally {
        session.setFlushMode(oldFlushMode);
    }
}