Example usage for org.hibernate CacheMode IGNORE

List of usage examples for org.hibernate CacheMode IGNORE

Introduction

On this page you can find example usage of org.hibernate CacheMode IGNORE.

Prototype

CacheMode IGNORE

Click the Source Link below to view the source code for org.hibernate CacheMode IGNORE.

Document

The session will never interact with the cache, except to invalidate cache items when updates occur.

Usage

From source file:org.unitime.timetable.solver.studentsct.StudentSectioningDatabaseSaver.java

License:Open Source License

/**
 * Persists the current student sectioning solution to the database.
 * Runs with the second-level cache bypassed and manual flushing, commits a
 * single transaction, and always closes the session (this method may run on
 * a solver thread outside any request scope).
 */
public void save() {
    iProgress.setStatus("Saving solution ...");
    iTimeStamp = new Date();
    org.hibernate.Session db = null;
    Transaction transaction = null;
    try {
        db = SessionDAO.getInstance().getSession();
        // Bulk write: do not touch the second-level cache, flush explicitly.
        db.setCacheMode(CacheMode.IGNORE);
        db.setFlushMode(FlushMode.MANUAL);

        transaction = db.beginTransaction();

        Session acadSession = Session.getSessionUsingInitiativeYearTerm(iInitiative, iYear, iTerm);
        if (acadSession == null) {
            throw new Exception("Session " + iInitiative + " " + iTerm + iYear + " not found!");
        }

        save(acadSession, db);

        StudentSectioningQueue.sessionStatusChanged(db, null, acadSession.getUniqueId(), true);

        db.flush();

        transaction.commit();
        transaction = null; // committed; must not be rolled back below

    } catch (Exception e) {
        iProgress.fatal("Unable to save student schedule, reason: " + e.getMessage(), e);
        sLog.error(e.getMessage(), e);
        if (transaction != null) {
            transaction.rollback();
        }
    } finally {
        // Close the session explicitly since this code may run in a separate thread.
        if (db != null && db.isOpen()) {
            db.close();
        }
    }
}

From source file:org.unitime.timetable.solver.TimetableDatabaseLoader.java

License:Open Source License

/**
 * Loads the timetabling input data from the database inside a single
 * transaction, with the second-level cache bypassed and flushing deferred
 * until commit. The session is always closed because this method may run
 * on a solver thread.
 */
public void load() {
    org.hibernate.Session hibSession = null;
    Transaction tx = null;
    try {
        hibSession = TimetableManagerDAO.getInstance().getSession();
        hibSession.setCacheMode(CacheMode.IGNORE);
        hibSession.setFlushMode(FlushMode.COMMIT);

        tx = hibSession.beginTransaction();

        load(hibSession);

        tx.commit();
        tx = null; // committed; must not be rolled back in the catch block
    } catch (Exception e) {
        iProgress.message(msglevel("loadFailed", Progress.MSGLEVEL_FATAL),
                "Unable to load input data, reason:" + e.getMessage(), e);
        // FIX: tx is null when getSession()/beginTransaction() threw; the
        // unconditional rollback() would raise an NPE and mask the real error.
        if (tx != null)
            tx.rollback();
    } finally {
        // here we need to close the session since this code may run in a separate thread
        if (hibSession != null && hibSession.isOpen())
            hibSession.close();
    }
}

From source file:org.unitime.timetable.solver.TimetableDatabaseSaver.java

License:Open Source License

/**
 * Persists the current timetabling solution(s), optionally committing them
 * (uncommitting any previously committed solution of the same owner),
 * refreshes the affected solutions, and stores the solver log.
 * Always closes the session because this method may run on a solver thread.
 */
public void save() {
    org.hibernate.Session hibSession = null;
    Transaction tx = null;
    try {
        TimetableManagerDAO dao = new TimetableManagerDAO();
        hibSession = dao.getSession();
        // Bulk write: skip the second-level cache, flush only on commit.
        hibSession.setCacheMode(CacheMode.IGNORE);
        hibSession.setFlushMode(FlushMode.COMMIT);

        tx = hibSession.beginTransaction();
        Long[] solutionIds = save(hibSession);
        tx.commit();
        tx = null; // committed; must not be rolled back in the catch block

        // Ids of previously committed solutions that were uncommitted here
        // and therefore also need a refresh below.
        HashSet<Long> refreshIds = new HashSet<Long>();
        if (iCommitSolution && solutionIds != null) {
            HashSet<Solution> touchedSolutions = new HashSet<Solution>();
            // Use a fresh session for the commit phase.
            if (hibSession != null && hibSession.isOpen())
                hibSession.close();
            hibSession = dao.getSession();

            iProgress.setPhase("Committing solution ...", 2 * solutionIds.length);
            tx = hibSession.beginTransaction();
            // Pass 1: uncommit any currently committed solution of each owner.
            for (int i = 0; i < solutionIds.length; i++) {
                Solution solution = (new SolutionDAO()).get(solutionIds[i]);
                Solution committedSolution = solution.getOwner().getCommittedSolution();
                if (committedSolution != null) {
                    committedSolution.uncommitSolution(hibSession,
                            getModel().getProperties().getProperty("General.OwnerPuid"));
                    refreshIds.add(committedSolution.getUniqueId());
                    touchedSolutions.add(committedSolution);
                }
                touchedSolutions.add(solution);
                iProgress.incProgress();
            }
            // Pass 2: commit the newly saved solutions.
            for (int i = 0; i < solutionIds.length; i++) {
                Solution solution = (new SolutionDAO()).get(solutionIds[i]);
                List<String> messages = new ArrayList<String>();
                solution.commitSolution(messages, hibSession,
                        getModel().getProperties().getProperty("General.OwnerPuid"));
                touchedSolutions.add(solution);
                for (String m : messages) {
                    iProgress.error("Unable to commit: " + m);
                }
                hibSession.update(solution);
                iProgress.incProgress();
            }
            tx.commit();
            tx = null;
            String className = ApplicationProperty.ExternalActionSolutionCommit.value();
            if (className != null && className.trim().length() > 0) {
                ExternalSolutionCommitAction commitAction = (ExternalSolutionCommitAction) (Class
                        .forName(className).newInstance());
                commitAction.performExternalSolutionCommitAction(touchedSolutions, hibSession);
            }
        }

        // FIX: the refresh phase previously dereferenced solutionIds.length
        // before the null check, causing an NPE when save() returned null.
        if (solutionIds != null) {
            iProgress.setPhase("Refreshing solution ...", solutionIds.length + refreshIds.size());
            for (Long solutionId : refreshIds) {
                try {
                    // FIX: this call used to sit outside an empty try block, so a
                    // refresh failure aborted the whole save instead of being logged.
                    refreshCourseSolution(solutionId);
                } catch (Exception e) {
                    iProgress.warn("Unable to refresh solution " + solutionId + ", reason:" + e.getMessage(), e);
                }
                iProgress.incProgress();
            }
            for (int i = 0; i < solutionIds.length; i++) {
                try {
                    refreshCourseSolution(solutionIds[i]);
                } catch (Exception e) {
                    iProgress.warn("Unable to refresh solution " + solutionIds[i] + ", reason:" + e.getMessage(),
                            e);
                }
                iProgress.incProgress();
            }

            getModel().getProperties().setProperty("General.SolutionId", solutionIds);
            iProgress.info("Solution successfully saved.");

            // Fresh session for persisting the solver log.
            if (hibSession != null && hibSession.isOpen())
                hibSession.close();
            hibSession = dao.getSession();

            for (int i = 0; i < solutionIds.length; i++) {
                tx = hibSession.beginTransaction();
                Solution solution = (new SolutionDAO()).get(solutionIds[i]);
                LogInfo lInfo = new LogInfo();
                lInfo.setLog(iProgress.getLog());
                SolutionInfo logInfo = new SolutionInfo();
                logInfo.setDefinition(SolverInfoDef.findByName(hibSession, "LogInfo"));
                logInfo.setOpt(null);
                logInfo.setSolution(solution);
                logInfo.setInfo(lInfo, getFileProxy());
                hibSession.save(logInfo);
                tx.commit();
                tx = null;
            }
        }
    } catch (Exception e) {
        iProgress.fatal("Unable to save timetable, reason: " + e.getMessage(), e);
        sLog.error(e.getMessage(), e);
        // FIX: tx may be null (nothing to roll back) -- the unconditional
        // rollback() raised an NPE that masked the original exception.
        if (tx != null)
            tx.rollback();
    } finally {
        // here we need to close the session since this code may run in a separate thread
        if (hibSession != null && hibSession.isOpen())
            hibSession.close();
    }
}

From source file:org.wallride.service.SystemService.java

License:Apache License

@Async
@Transactional(propagation = Propagation.SUPPORTS)
public void reIndex() throws Exception {
    logger.info("Re-Index started");

    FullTextSession fullTextSession = Search.getFullTextSession((entityManager.unwrap(Session.class)));

    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);

    for (Class persistentClass : fullTextSession.getSearchFactory().getIndexedTypes()) {
        Transaction transaction = fullTextSession.beginTransaction();

        // Scrollable results will avoid loading too many objects in memory
        ScrollableResults results = fullTextSession.createCriteria(persistentClass).setFetchSize(BATCH_SIZE)
                .scroll(ScrollMode.FORWARD_ONLY);
        int index = 0;
        while (results.next()) {
            index++;//from  ww  w .  ja v  a2 s. c  o m
            fullTextSession.index(results.get(0)); //index each element
            if (index % BATCH_SIZE == 0) {
                fullTextSession.flushToIndexes(); //apply changes to indexes
                fullTextSession.clear(); //free memory since the queue is processed
            }
        }
        transaction.commit();
    }
    logger.info("Re-Index finished");
}

From source file:org.webical.dao.hibernateImpl.CalendarDaoHibernateImpl.java

License:Open Source License

@Transaction(readOnly = false)
public void updateAllEntities() throws DaoException {
    getSession().setCacheMode(CacheMode.IGNORE);
    @SuppressWarnings("unchecked")
    List<Calendar> calendars = loadAll(Calendar.class);
    saveOrUpdateAll(calendars);/* w  w w .  j av  a  2 s .c o m*/
}

From source file:org.yes.cart.dao.impl.GenericDAOHibernateImpl.java

License:Apache License

/**
 * Re-indexes (or, with {@code purgeOnly}, merely purges) a single entity in
 * the full-text index. No-op when the persistent class is not indexable.
 * The entity is also evicted from the second-level cache first so the
 * re-index sees fresh state.
 */
public void fullTextSearchReindex(PK primaryKey, boolean purgeOnly) {
    if (!persistentClassIndexble) {
        return;
    }
    sessionFactory.getCache().evictEntity(getPersistentClass(), primaryKey);

    final FullTextSession ftSession = Search.getFullTextSession(sessionFactory.getCurrentSession());
    ftSession.setFlushMode(FlushMode.MANUAL);
    ftSession.setCacheMode(CacheMode.IGNORE);
    ftSession.purge(getPersistentClass(), primaryKey);

    if (!purgeOnly) {
        final T entity = findById(primaryKey);
        if (entity != null) {
            final T unproxied = (T) HibernateHelper.unproxy(entity);
            // The interceptor (when present) may veto indexing of this entity.
            final boolean shouldIndex = entityIndexingInterceptor == null
                    || IndexingOverride.APPLY_DEFAULT == entityIndexingInterceptor.onUpdate(unproxied);
            if (shouldIndex) {
                ftSession.index(unproxied);
            }
        }
    }
    ftSession.flushToIndexes(); // apply changes to indexes
    ftSession.clear(); // clear since the queue is processed
}

From source file:org.yes.cart.dao.impl.GenericDAOHibernateImpl.java

License:Apache License

/**
 * Builds a Runnable that re-indexes every entity of {@code persistentClass}
 * in the full-text index, flushing and clearing the session every
 * {@code batchSize} entities to keep memory usage flat.
 *
 * @param async     when true, a new Hibernate session is opened for the work
 *                  (presumably because the runnable executes on its own
 *                  thread -- TODO confirm with the caller)
 * @param batchSize number of indexed entities between flush/clear cycles
 * @param filter    optional per-entity filter; when null the whole index for
 *                  the class is purged first (global full reindex)
 */
private Runnable createIndexingRunnable(final boolean async, final int batchSize, final IndexFilter<T> filter) {
    return new Runnable() {
        @Override
        public void run() {
            int index = 0;
            final Logger log = LOGFTQ;
            try {

                if (persistentClassIndexble) {

                    currentIndexingCount.set(0);

                    if (log.isInfoEnabled()) {
                        log.info("Full reindex for {} class", persistentClass);
                    }
                    FullTextSession fullTextSession = Search.getFullTextSession(
                            async ? sessionFactory.openSession() : sessionFactory.getCurrentSession());
                    // Manual flushing + cache bypass keep the heap flat while scrolling.
                    fullTextSession.setFlushMode(FlushMode.MANUAL);
                    fullTextSession.setCacheMode(CacheMode.IGNORE);
                    if (filter == null) { // only purge global full reindex because this clears all entries
                        fullTextSession.purgeAll(getPersistentClass());
                    }
                    ScrollableResults results = fullTextSession.createCriteria(persistentClass)
                            .setFetchSize(batchSize).scroll(ScrollMode.FORWARD_ONLY);

                    try {
                        while (results.next()) {

                            final T entity = (T) HibernateHelper.unproxy(results.get(0));

                            // NOTE: skipped entities advance neither index nor
                            // currentIndexingCount -- the count tracks indexed items only.
                            if (filter != null && filter.skipIndexing(entity)) {
                                continue; // skip this object
                            }

                            // The interceptor (when present) may veto indexing of this entity.
                            if (entityIndexingInterceptor != null) {
                                if (IndexingOverride.APPLY_DEFAULT == entityIndexingInterceptor
                                        .onUpdate(entity)) {
                                    fullTextSession.index(entity);
                                }
                            } else {
                                fullTextSession.index(entity);
                            }
                            index++;

                            if (index % batchSize == 0) {
                                fullTextSession.flushToIndexes(); //apply changes to indexes
                                fullTextSession.clear(); //clear since the queue is processed
                                if (log.isInfoEnabled()) {
                                    log.info("Indexed {} items of {} class", index, persistentClass);
                                }
                            }
                            currentIndexingCount.compareAndSet(index - 1, index);
                        }
                    } finally {
                        results.close();
                    }
                    // Flush whatever remains in the queue after the last partial batch.
                    fullTextSession.flushToIndexes(); //apply changes to indexes
                    fullTextSession.clear(); //clear since the queue is processed
                    if (log.isInfoEnabled()) {
                        log.info("Indexed {} items of {} class", index, persistentClass);
                    }
                    fullTextSession.getSearchFactory().optimize(getPersistentClass());
                }
            } catch (Exception exp) {
                LOGFTQ.error("Error during indexing", exp);
            } finally {
                asyncRunningState.set(COMPLETED);
                if (async) {
                    try {
                        if (persistentClassIndexble) {
                            // NOTE(review): openSession() was used above, but the *current*
                            // session is closed here -- confirm they refer to the same session.
                            sessionFactory.getCurrentSession().close();
                        }
                    } catch (Exception exp) {
                        // Best-effort close: failures here are deliberately ignored.
                    }
                }
                if (log.isInfoEnabled()) {
                    log.info("Full reindex for {} class ... COMPLETED", persistentClass);
                }
            }
        }
    };
}

From source file:org.zanata.search.ClassIndexer.java

License:Open Source License

/**
 * Re-indexes all entities of {@code entityType} via the configured
 * {@code indexingStrategy}, then applies the pending index changes and
 * clears the session to free memory.
 *
 * @param session the full-text session to index through; its flush mode is
 *                switched to MANUAL and its cache mode to IGNORE first
 * @throws Exception if the indexing strategy fails
 */
public void index(FullTextSession session) throws Exception {
    log.info("Setting manual-flush and ignore-cache for {}", entityType);
    session.setFlushMode(FlushMode.MANUAL);
    session.setCacheMode(CacheMode.IGNORE);
    indexingStrategy.invoke(handle, session);
    session.flushToIndexes(); // apply changes to indexes
    session.clear(); // clear since the queue is processed
}

From source file:org.zanata.search.HTextFlowTargetIndexingStrategy.java

License:Open Source License

/**
 * Re-indexes every HTextFlowTarget delivered by the scrollable result set,
 * flushing the index queue and clearing the session every
 * {@code sessionClearBatchSize} rows, and closing the cursor when done.
 */
private static void reindexScrollableResultSet(FullTextSession session, ScrollableResults scrollableResults,
        AsyncTaskHandle handle) {

    session.setFlushMode(FlushMode.MANUAL);
    session.setCacheMode(CacheMode.IGNORE);

    int processed = 0;
    try {
        while (scrollableResults.next()) {
            processed++;
            HTextFlowTarget target = (HTextFlowTarget) scrollableResults.get(0);
            // TODO pahuang do I need to purge first then reindex?
            session.index(target);
            if (handle != null) {
                handle.increaseProgress(1);
            }

            boolean atBatchBoundary = (processed % sessionClearBatchSize == 0);
            if (atBatchBoundary) {
                log.info("periodic flush and clear for HTextFlowTarget (n={})", processed);
                session.flushToIndexes(); // apply changes to indexes
                session.clear(); // clear since the queue is processed
            }
        }
    } finally {
        if (scrollableResults != null) {
            scrollableResults.close();
        }
    }
    // Flush the final partial batch.
    session.flushToIndexes(); // apply changes to indexes
    session.clear(); // clear since the queue is processed
}

From source file:org.zanata.tmx.TMXParser.java

License:Open Source License

/**
 * Parses a TMX stream and saves its translation units into the given
 * translation memory, committing translation units in batches.
 *
 * @param input       the TMX document to parse
 * @param transMemory the translation memory the units are stored into
 * @throws TMXParseException if the document is not valid TMX (no/wrong root
 *         element, parse failure, or a likely duplicate translation unit)
 */
public void parseAndSaveTMX(InputStream input, TransMemory transMemory)
        throws TMXParseException, SecurityException, IllegalStateException, RollbackException,
        HeuristicMixedException, HeuristicRollbackException, SystemException, NotSupportedException {
    int handledTUs = 0;
    try {
        log.info("parsing started for: {}", transMemory.getSlug());
        // Bulk import: flush manually and bypass the second-level cache.
        session.setFlushMode(FlushMode.MANUAL);
        session.setCacheMode(CacheMode.IGNORE);
        XMLInputFactory factory = XMLInputFactory.newInstance();
        factory.setProperty(XMLInputFactory.SUPPORT_DTD, true);
        factory.setProperty(XMLInputFactory.IS_VALIDATING, true);
        factory.setXMLResolver(new TmxDtdResolver());
        @Cleanup
        XMLStreamReader reader = factory.createXMLStreamReader(input);

        QName tmx = new QName("tmx");

        // Advance to the first START_ELEMENT (the root element).
        while (reader.hasNext() && reader.next() != XMLStreamConstants.START_ELEMENT) {
        }
        if (!reader.hasNext())
            throw new TMXParseException("No root element");
        if (!reader.getName().equals(tmx))
            throw new TMXParseException("Wrong root element: expected tmx");

        // At this point, event = START_ELEMENT and name = tmx
        while (reader.hasNext()) {
            CommitBatch commitBatch = new CommitBatch(reader, 0, transMemory);
            TransactionUtil.get().runEx(commitBatch);
            handledTUs += commitBatch.handledTUs;
        }
    } catch (EntityExistsException e) {
        String msg = "Possible duplicate TU (duplicate tuid or duplicate" + "src content without tuid)";
        throw new TMXParseException(msg, e);
    } catch (Exception e) {
        Throwable rootCause = Throwables.getRootCause(e);
        if (rootCause instanceof TMXParseException) {
            // FIX: the original cast the *wrapper* exception e, which throws
            // ClassCastException whenever the TMXParseException is nested
            // inside another exception; cast the root cause instead.
            throw (TMXParseException) rootCause;
        } else if (rootCause instanceof XMLStreamException) {
            throw new TMXParseException(rootCause);
        } else {
            throw Throwables.propagate(e);
        }
    } finally {
        log.info("parsing stopped for: {}, TU count={}", transMemory.getSlug(), handledTUs);
    }
}