Example usage for org.hibernate FlushMode MANUAL

Introduction

On this page you can find example usage of org.hibernate FlushMode MANUAL.

Prototype

FlushMode MANUAL

Document

The Session is only ever flushed when Session#flush is explicitly called by the application.
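
A minimal sketch of the behavior described above (the sessionFactory and the Person entity here are assumed purely for illustration): with FlushMode.MANUAL the session tracks changes but writes nothing to the database until flush() is called explicitly.

Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();
try {
    // never flush automatically; the application decides when to write
    session.setFlushMode(FlushMode.MANUAL);

    Person person = (Person) session.get(Person.class, 1L); // hypothetical entity
    person.setName("Updated name"); // change is tracked, but no UPDATE is issued yet

    // even running a query does not trigger a flush in MANUAL mode
    session.createQuery("from Person").list();

    session.flush(); // only now is the pending UPDATE sent to the database
    tx.commit();
} finally {
    session.close();
}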

Usage

From source file:org.wintersleep.repository.TxTest.java

License:Apache License

@Test
public void testContextualSession() {
    try {
        // the call is expected to throw because no JTA transaction is active in this test
        FlushMode flushMode = personService.testContextualSession();
        assertEquals(FlushMode.MANUAL, flushMode);
        fail();
    } catch (HibernateException e) {
        assertEquals("Unable to locate current JTA transaction", e.getMessage());
    }
}

From source file:org.yes.cart.dao.impl.GenericDAOHibernateImpl.java

License:Apache License

public void fullTextSearchReindex(PK primaryKey, boolean purgeOnly) {
    if (persistentClassIndexble) {
        sessionFactory.getCache().evictEntity(getPersistentClass(), primaryKey);

        FullTextSession fullTextSession = Search.getFullTextSession(sessionFactory.getCurrentSession());
        fullTextSession.setFlushMode(FlushMode.MANUAL);
        fullTextSession.setCacheMode(CacheMode.IGNORE);
        fullTextSession.purge(getPersistentClass(), primaryKey);
        if (!purgeOnly) {
            T entity = findById(primaryKey);
            if (entity != null) {
                final T unproxied = (T) HibernateHelper.unproxy(entity);

                if (entityIndexingInterceptor != null) {
                    if (IndexingOverride.APPLY_DEFAULT == entityIndexingInterceptor.onUpdate(unproxied)) {
                        fullTextSession.index(unproxied);
                    }
                } else {
                    fullTextSession.index(unproxied);
                }
            }

        }
        fullTextSession.flushToIndexes(); //apply changes to indexes
        fullTextSession.clear(); //clear since the queue is processed

    }
}

From source file:org.yes.cart.dao.impl.GenericDAOHibernateImpl.java

License:Apache License

private Runnable createIndexingRunnable(final boolean async, final int batchSize, final IndexFilter<T> filter) {
    return new Runnable() {
        @Override
        public void run() {
            int index = 0;
            final Logger log = LOGFTQ;
            try {

                if (persistentClassIndexble) {

                    currentIndexingCount.set(0);

                    if (log.isInfoEnabled()) {
                        log.info("Full reindex for {} class", persistentClass);
                    }
                    FullTextSession fullTextSession = Search.getFullTextSession(
                            async ? sessionFactory.openSession() : sessionFactory.getCurrentSession());
                    fullTextSession.setFlushMode(FlushMode.MANUAL);
                    fullTextSession.setCacheMode(CacheMode.IGNORE);
                    if (filter == null) { // purge only on a global full reindex, since purgeAll clears all entries
                        fullTextSession.purgeAll(getPersistentClass());
                    }
                    ScrollableResults results = fullTextSession.createCriteria(persistentClass)
                            .setFetchSize(batchSize).scroll(ScrollMode.FORWARD_ONLY);

                    try {
                        while (results.next()) {

                            final T entity = (T) HibernateHelper.unproxy(results.get(0));

                            if (filter != null && filter.skipIndexing(entity)) {
                                continue; // skip this object
                            }

                            if (entityIndexingInterceptor != null) {
                                if (IndexingOverride.APPLY_DEFAULT == entityIndexingInterceptor
                                        .onUpdate(entity)) {
                                    fullTextSession.index(entity);
                                }
                            } else {
                                fullTextSession.index(entity);
                            }
                            index++;

                            if (index % batchSize == 0) {
                                fullTextSession.flushToIndexes(); //apply changes to indexes
                                fullTextSession.clear(); //clear since the queue is processed
                                if (log.isInfoEnabled()) {
                                    log.info("Indexed {} items of {} class", index, persistentClass);
                                }
                            }
                            currentIndexingCount.compareAndSet(index - 1, index);
                        }
                    } finally {
                        results.close();
                    }
                    fullTextSession.flushToIndexes(); //apply changes to indexes
                    fullTextSession.clear(); //clear since the queue is processed
                    if (log.isInfoEnabled()) {
                        log.info("Indexed {} items of {} class", index, persistentClass);
                    }
                    fullTextSession.getSearchFactory().optimize(getPersistentClass());
                }
            } catch (Exception exp) {
                LOGFTQ.error("Error during indexing", exp);
            } finally {
                asyncRunningState.set(COMPLETED);
                if (async) {
                    try {
                        if (persistentClassIndexble) {
                            sessionFactory.getCurrentSession().close();
                        }
                    } catch (Exception exp) {
                        // ignore failures while closing the async session
                    }
                }
                if (log.isInfoEnabled()) {
                    log.info("Full reindex for {} class ... COMPLETED", persistentClass);
                }
            }
        }
    };
}

From source file:org.zanata.model.validator.UniqueValidator.java

License:Open Source License

private int countRows(Object value) {
    // we need to use entityManager.unwrap because the injected session will
    // be a Weld proxy, and criteria.getExecutableCriteria will try
    // to cast it to SessionImplementor (ClassCastException)
    Session session = entityManager.unwrap(Session.class);
    ClassMetadata metadata = session.getSessionFactory().getClassMetadata(value.getClass());
    String idName = metadata.getIdentifierPropertyName();
    // FIXME was EntityMode.POJO
    Serializable id = metadata.getIdentifier(value, null);

    DetachedCriteria criteria = DetachedCriteria.forClass(value.getClass());
    for (String property : parameters.properties()) {
        // FIXME was EntityMode.POJO
        criteria.add(Restrictions.eq(property, metadata.getPropertyValue(value, property)));
    }

    // Id property
    if (id != null) {
        criteria.add(Restrictions.ne(idName, id));
    }
    criteria.setProjection(Projections.rowCount());

    // change the flush mode temporarily to perform the query or else
    // incomplete entities will try to get flushed
    // After the query, go back to the original mode
    FlushMode flushMode = session.getFlushMode();
    session.setFlushMode(FlushMode.MANUAL);
    List results = criteria.getExecutableCriteria(session).list();
    Number count = (Number) results.iterator().next();
    session.setFlushMode(flushMode);
    return count.intValue();
}

From source file:org.zanata.search.ClassIndexer.java

License:Open Source License

public void index(FullTextSession session) throws Exception {
    log.info("Setting manual-flush and ignore-cache for {}", entityType);
    session.setFlushMode(FlushMode.MANUAL);
    session.setCacheMode(CacheMode.IGNORE);
    indexingStrategy.invoke(handle, session);
    session.flushToIndexes(); // apply changes to indexes
    session.clear(); // clear since the queue is processed
}

From source file:org.zanata.search.HTextFlowTargetIndexingStrategy.java

License:Open Source License

private static void reindexScrollableResultSet(FullTextSession session, ScrollableResults scrollableResults,
        AsyncTaskHandle handle) {

    session.setFlushMode(FlushMode.MANUAL);
    session.setCacheMode(CacheMode.IGNORE);
    int rowNum = 0;
    try {
        while (scrollableResults.next()) {

            rowNum++;
            HTextFlowTarget entity = (HTextFlowTarget) scrollableResults.get(0);
            // TODO pahuang do I need to purge first then reindex?
            session.index(entity);
            if (handle != null) {
                handle.increaseProgress(1);
            }

            if (rowNum % sessionClearBatchSize == 0) {
                log.info("periodic flush and clear for HTextFlowTarget (n={})", rowNum);
                session.flushToIndexes(); // apply changes to indexes
                session.clear(); // clear since the queue is processed
            }
        }
    } finally {
        if (scrollableResults != null) {
            scrollableResults.close();
        }
    }
    session.flushToIndexes(); // apply changes to indexes
    session.clear(); // clear since the queue is processed
}

From source file:org.zanata.tmx.TMXParser.java

License:Open Source License

public void parseAndSaveTMX(InputStream input, TransMemory transMemory)
        throws TMXParseException, SecurityException, IllegalStateException, RollbackException,
        HeuristicMixedException, HeuristicRollbackException, SystemException, NotSupportedException {
    int handledTUs = 0;
    try {
        log.info("parsing started for: {}", transMemory.getSlug());
        session.setFlushMode(FlushMode.MANUAL);
        session.setCacheMode(CacheMode.IGNORE);
        XMLInputFactory factory = XMLInputFactory.newInstance();
        factory.setProperty(XMLInputFactory.SUPPORT_DTD, true);
        factory.setProperty(XMLInputFactory.IS_VALIDATING, true);
        factory.setXMLResolver(new TmxDtdResolver());
        @Cleanup
        XMLStreamReader reader = factory.createXMLStreamReader(input);

        QName tmx = new QName("tmx");

        while (reader.hasNext() && reader.next() != XMLStreamConstants.START_ELEMENT) {
        }
        if (!reader.hasNext())
            throw new TMXParseException("No root element");
        if (!reader.getName().equals(tmx))
            throw new TMXParseException("Wrong root element: expected tmx");

        // At this point, event = START_ELEMENT and name = tmx
        while (reader.hasNext()) {
            CommitBatch commitBatch = new CommitBatch(reader, 0, transMemory);
            TransactionUtil.get().runEx(commitBatch);
            handledTUs += commitBatch.handledTUs;
        }
    } catch (EntityExistsException e) {
        String msg = "Possible duplicate TU (duplicate tuid or duplicate" + "src content without tuid)";
        throw new TMXParseException(msg, e);
    } catch (Exception e) {
        Throwable rootCause = Throwables.getRootCause(e);
        if (rootCause instanceof TMXParseException) {
            throw (TMXParseException) rootCause;
        } else if (rootCause instanceof XMLStreamException) {
            throw new TMXParseException(rootCause);
        } else {
            throw Throwables.propagate(e);
        }
    } finally {
        log.info("parsing stopped for: {}, TU count={}", transMemory.getSlug(), handledTUs);
    }
}

From source file:org.zkoss.zkgrails.ZKGrailsOpenSessionInViewListener.java

License:Open Source License

public void cleanup(Execution exec, Execution parent, List errs) {
    if (parent == null) { //the root execution of a servlet request
        try {
            if (errs == null || errs.isEmpty()) {
                //if(sessionFactory.getCurrentSession().getTransaction().isActive()) {
                // Commit and cleanup
                log.debug("Committing the database transaction: " + exec);
                sessionFactory.getCurrentSession().getTransaction().commit();
                //}
            } else {
                final Throwable ex = (Throwable) errs.get(0);
                if (ex instanceof StaleObjectStateException) {
                    // the default implementation does not do any optimistic concurrency
                    // control; it simply rolls back the transaction.
                    handleStaleObjectStateException(exec, (StaleObjectStateException) ex);
                } else {
                    // the default implementation logs the stacktrace and then rolls back
                    // the transaction.
                    handleOtherException(exec, ex);
                }
            }
        } finally {
            Session session = sessionFactory.getCurrentSession();
            if (!FlushMode.MANUAL.equals(session.getFlushMode())) {
                session.flush();
            }
            session.close();
        }
    }
}

From source file:service.DatabaseService.java

@Transactional
@Override
public void saveEntity(Object entity) {
    SessionFactory sessionFactory = HibernateUtil.getSessionFactory();
    Session session = sessionFactory.openSession();
    session.setFlushMode(FlushMode.MANUAL);
    ManagedSessionContext.bind(session);
    Transaction tx = null;
    try {
        tx = session.beginTransaction();

        session.save(entity);
        ManagedSessionContext.unbind(sessionFactory);
        session.flush(); // explicit flush required because the flush mode is MANUAL
        tx.commit();

    } catch (RuntimeException e) {
        tx.rollback();
        throw e;
    } finally {
        session.close();
    }
}

From source file:service.DatabaseService.java

@Override
@Transactional
public Object getEntityByName(String column, String table, String login) {

    SessionFactory sessionFactory = HibernateUtil.getSessionFactory();

    Session session = sessionFactory.openSession();
    session.setFlushMode(FlushMode.MANUAL);
    ManagedSessionContext.bind(session);

    String table_capitalized = table.substring(0, 1).toUpperCase() + table.substring(1);
    String str_query = "from " + table_capitalized + " t where t." + column.toLowerCase() + " = :crit";
    Query query = session.createQuery(str_query);
    query.setParameter("crit", login);

    //ManagedSessionContext.unbind(sessionFactory);
    session.flush();

    List results = query.list();
    if (results.isEmpty())
        return null;
    return results.get(0);
}