Example usage for org.hibernate CacheMode IGNORE

List of usage examples for org.hibernate CacheMode IGNORE

Introduction

In this page you can find the example usage for org.hibernate CacheMode IGNORE.

Prototype

CacheMode IGNORE

Use the Source Link below to view the source code for org.hibernate CacheMode IGNORE.

Click Source Link

Document

The session will never interact with the cache, except to invalidate cache items when updates occur.

Usage

From source file:edu.harvard.med.screensaver.io.screenresults.ScreenResultReporter.java

License:Open Source License

/**
 * Create a study of the "Confirmed Positives" for all the pool SilencingReagents in the DB
 * (re: {@link DataType#CONFIRMED_POSITIVE_INDICATOR}).
 * <ul>
 * For RNAi:
 * <li>Count of follow-up screens for well
 * <li>M+1 columns named "N duplexes confirming positive", where 0 &lt;= N &lt;= M, and M is the
 * max number of duplexes per pool in any library (currently 4). The value in each column is the
 * number of follow-up screens that confirmed the well as a positive with N duplexes.
 * </ul>
 * see [#2610] Confirmed Positives study creator<br>
 *
 * @param study the Screen entity that will hold the generated annotation types/values
 * @return total count of confirmed positives considered in this study (informational)
 */
public int createSilencingReagentConfirmedPositiveSummary(Screen study) {
    log.info("Get all of the pool reagents...");
    // Scroll FORWARD_ONLY with the cache ignored to keep memory flat over a large result set.
    ScrollableResults sr = _dao.runScrollQuery(new edu.harvard.med.screensaver.db.ScrollQuery() {
        public ScrollableResults execute(Session session) {
            HqlBuilder builder = new HqlBuilder();
            builder.select("pr").from(Library.class, "l").from("l", Library.wells, "w", JoinType.INNER)
                    .from("w", Well.latestReleasedReagent, "pr", JoinType.INNER)
                    .where("l", "pool", Operator.EQUAL, Boolean.TRUE);
            return builder.toQuery(session, true).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
        }
    });

    log.info("Create the annotation types for the study.");
    AnnotationType averageConfirmedPositivesPerScreen = study.createAnnotationType(
            DEFAULT_ANNOTATION_NAME_WEIGHTED_AVERAGE, DEFAULT_ANNOTATION_TITLE_WEIGHTED_AVERAGE, true);
    _dao.persistEntity(averageConfirmedPositivesPerScreen);
    AnnotationType numberOfScreensAT = study.createAnnotationType(DEFAULT_ANNOTATION_NAME_NUMBER_OF_SCREENS,
            DEFAULT_ANNOTATION_TITLE_NUMBER_OF_SCREENS, true);
    _dao.persistEntity(numberOfScreensAT);
    // Create the bin-count annotation types (for "screens confirming # duplexes...")
    Map<Integer, AnnotationType> binToAnnotationTypeMap = Maps.newHashMap();
    for (int i = 0; i <= 4; i++) // TODO: make this a dynamic cardinality
    {
        AnnotationType screenCounter = study.createAnnotationType(
                DEFAULT_ANNOTATION_NAME_COUNT_OF_SCREENS_N.format(i),
                DEFAULT_ANNOTATION_TITLE_COUNT_OF_SCREENS_N.format(i), true);
        binToAnnotationTypeMap.put(i, screenCounter);
        _dao.persistEntity(screenCounter);
    }
    _dao.flush();
    _dao.clear();

    log.info("scroll through the pool reagents...");
    int countOfDuplexReagentsConfirmed = 0;
    int count = 0;

    while (sr.next()) {
        SilencingReagent poolReagent = (SilencingReagent) sr.get(0);

        ConfirmationReport report = getDuplexReconfirmationReport(poolReagent);

        int[] binToScreenCount = report.getBinToScreenCount(poolReagent);
        int numberOfScreens = 0;
        for (int bin = 0; bin < binToScreenCount.length; bin++) {
            int screenCount = binToScreenCount[bin];

            AnnotationType at = binToAnnotationTypeMap.get(bin);
            // note: for memory performance, we're side-stepping the AnnotationType.createAnnotationValue() method
            AnnotationValue av = new AnnotationValue(at, poolReagent, null, (double) screenCount);
            _dao.saveOrUpdateEntity(av);

            numberOfScreens += screenCount;
            countOfDuplexReagentsConfirmed += screenCount * bin;
        }

        if (numberOfScreens > 0) {
            // note: for memory performance, we're side-stepping the AnnotationType.createAnnotationValue() method.
            // Fixed: avoid the deprecated new Double(String) string round-trip; autobox the value directly.
            AnnotationValue av = new AnnotationValue(averageConfirmedPositivesPerScreen, poolReagent, null,
                    (double) report.getWeightedAverage());
            _dao.saveOrUpdateEntity(av);

        }
        // note: for memory performance, we're side-stepping the AnnotationType.createAnnotationValue() method
        AnnotationValue av = new AnnotationValue(numberOfScreensAT, poolReagent, null,
                (double) numberOfScreens);
        _dao.saveOrUpdateEntity(av);

        // for memory performance clear the session every ROWS_TO_CACHE number of iterations
        if (count++ % AbstractDAO.ROWS_TO_CACHE == 0) {
            log.debug("clearing & flushing session");
            _dao.flush();
            _dao.clear();
        }
        if (count % 1000 == 0) {
            log.info("" + count + " reagents processed");
        }
    }
    log.info("" + count + " reagents processed");
    _dao.flush();
    _dao.clear();

    log.info("countOfDuplexReagentsConfirmed: " + countOfDuplexReagentsConfirmed);
    log.info("populateStudyReagentLinkTable");
    _screenDao.populateStudyReagentLinkTable(study.getScreenId());
    log.info("Study created: " + study.getTitle() + ", reagents: " + countOfDuplexReagentsConfirmed);
    return countOfDuplexReagentsConfirmed;
}

From source file:edu.harvard.med.screensaver.io.screenresults.ScreenResultReporter.java

License:Open Source License

/**
 * for [#2268] new column to display # overlapping screens.
 * Computes, per reagent, the overall number of screened assay wells and the number of
 * positive assay wells, storing each as an AnnotationValue on the given study.
 *
 * @param screenType             restrict to wells from libraries of this screen type
 * @param study                  the study Screen the annotations belong to
 * @param positiveAnnotationType annotation type for the positive counts
 * @param overallAnnotationType  annotation type for the overall counts
 * @return number of reagents linked to the study
 */
@Transactional
public int createScreenedReagentCounts(final ScreenType screenType, Screen study,
        AnnotationType positiveAnnotationType, AnnotationType overallAnnotationType) {
    // Break this into two separate queries because of an apparent Hibernate bug:
    // when using the "group by" clause with a full object (as opposed to an attribute of the object/table),
    // Hibernate is requiring that every attribute of the object be specified in a "group by" and not
    // just the object itself.  so the workaround is to query once to get the id's then once again to
    // get the objects.

    log.info("1. get the reagent id's for the positive counts");
    ScrollableResults sr = _dao.runScrollQuery(new edu.harvard.med.screensaver.db.ScrollQuery() {
        public ScrollableResults execute(Session session) {
            HqlBuilder builder = new HqlBuilder();
            builder.select("r", "id").selectExpression("count(*)").from(AssayWell.class, "aw")
                    .from("aw", AssayWell.libraryWell, "w", JoinType.INNER)
                    .from("w", Well.latestReleasedReagent, "r", JoinType.INNER)
                    .from("w", Well.library, "l", JoinType.INNER)
                    .where("l", "screenType", Operator.EQUAL, screenType)
                    .where("w", "libraryWellType", Operator.EQUAL, LibraryWellType.EXPERIMENTAL);
            builder.where("aw", "positive", Operator.EQUAL, Boolean.TRUE);
            builder.groupBy("r", "id");
            log.debug("hql: " + builder.toHql());
            return builder.toQuery(session, true).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
        }
    });

    // reagent id -> count of positive assay wells
    Map<Integer, Long> positivesMap = Maps.newHashMap();
    while (sr.next()) {
        Object[] row = sr.get();
        positivesMap.put((Integer) row[0], (Long) row[1]);
    }

    log.info("2. get the reagent id's for the overall counts");
    sr = _dao.runScrollQuery(new edu.harvard.med.screensaver.db.ScrollQuery() {
        public ScrollableResults execute(Session session) {
            HqlBuilder builder = new HqlBuilder();
            builder.select("r", "id").selectExpression("count(*)").from(AssayWell.class, "aw")
                    .from("aw", AssayWell.libraryWell, "w", JoinType.INNER)
                    .from("w", Well.library, "l", JoinType.INNER)
                    .from("w", Well.latestReleasedReagent, "r", JoinType.INNER)
                    .where("l", "screenType", Operator.EQUAL, screenType)
                    .where("w", "libraryWellType", Operator.EQUAL, LibraryWellType.EXPERIMENTAL)
                    .groupBy("r", "id");
            log.debug("hql: " + builder.toHql());
            return builder.toQuery(session, true).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
        }
    });

    log.info("begin assigning values to the study");
    int overallCount = 0;
    while (sr.next()) {
        Object[] row = sr.get();
        Integer reagentId = (Integer) row[0];
        Long overallPerReagent = (Long) row[1];
        Reagent r = _dao.findEntityById(Reagent.class, reagentId, true);
        // note: for memory performance, we're side-stepping the AnnotationType.createAnnotationValue() method
        AnnotationValue av = new AnnotationValue(overallAnnotationType, r, null,
                overallPerReagent.doubleValue());
        _dao.persistEntity(av);
        Long positiveCount = positivesMap.get(reagentId);
        if (positiveCount != null) {
            // note: for memory performance, we're side-stepping the AnnotationType.createAnnotationValue() method.
            // Fixed: use doubleValue() rather than truncating through intValue().
            av = new AnnotationValue(positiveAnnotationType, r, null, positiveCount.doubleValue());
            _dao.persistEntity(av);
        }
        // Note: due to memory performance, we will build the study_reagent_link later
        overallCount++;
        // BUG FIX: the original incremented the boxed per-reagent row count ("count++"), so the
        // flush cadence depended on data values; batch on the iteration counter instead.
        if (overallCount % AbstractDAO.ROWS_TO_CACHE == 0) {
            log.debug("flushing");
            _dao.flush();
            _dao.clear();
        }
        if (overallCount % 10000 == 0) {
            log.info("" + overallCount + " reagents processed");
        }
    }

    log.info("save the study");
    // unnecessary since study is already persisted, and the reagents will be linked by the populateStudyReagentLinkTable - sde4
    _dao.flush();
    log.info("populateStudyReagentLinkTable");
    int reagentCount = _screenDao.populateStudyReagentLinkTable(study.getScreenId());
    log.info("done: positives: " + positivesMap.size() + ", reagents: " + overallCount);
    return reagentCount;
}

From source file:edu.utah.further.ds.impl.executor.db.hibernate.criteria.HibernateCriteriaScrollableResultsExecutor.java

License:Apache License

/**
 * Execute Hibernate criteria query and return a scrollable result set.
 *
 * @param criteria
 *            Hibernate criteria
 * @return scrollable result set, or <code>null</code> if scrolling failed
 * @see https://jira.chpc.utah.edu/browse/FUR-1274
 */
@HibernateExecutor
private ScrollableResults getResultListFromHibernate(final GenericCriteria criteria) {
    ScrollableResults results = null;
    try {
        // INSENSITIVE scroll mode is required per FUR-1300 and
        // http://www.coderanch.com/t/301684/JDBC/java/Unsupported-syntax-refreshRow
        results = criteria.setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.SCROLL_INSENSITIVE);
    } catch (final HibernateException e) {
        log.error("An exception occurred while scrolling results", e);
    }

    // A streaming result set's size is normally unknown up-front; print it here,
    // where it is known, to ease debugging of the subsequent paging sub-chain.
    if (log.isDebugEnabled()) {
        log.debug("Result set = " + StringUtil.getNullSafeToString(results));
    }
    return results;
}

From source file:es.logongas.ix3.dao.impl.GenericDAOImplHibernate.java

License:Apache License

@Override
final public EntityType readOriginalByNaturalKey(DataSession dataSession, Object naturalKey)
        throws BusinessException {
    Session session = (Session) dataSession.getDataBaseSessionAlternativeImpl();
    try {
        // Bypass the cache so the load reflects the database state.
        // NOTE(review): the IGNORE cache mode is never restored on this session — confirm intended.
        session.setCacheMode(CacheMode.IGNORE);
        EntityType original = (EntityType) session.bySimpleNaturalId(getEntityMetaData().getType())
                .load(naturalKey);
        if (original == null) {
            return null;
        }
        // Detach so later session changes don't touch this snapshot.
        session.evict(original);
        return original;
    } catch (javax.validation.ConstraintViolationException cve) {
        throw new BusinessException(exceptionTranslator.getBusinessMessages(cve));
    } catch (org.hibernate.exception.ConstraintViolationException cve) {
        throw new BusinessException(exceptionTranslator.getBusinessMessages(cve));
    } catch (RuntimeException ex) {
        throw ex;
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }

}

From source file:es.logongas.ix3.dao.impl.GenericDAOImplHibernate.java

License:Apache License

@Override
final public EntityType readOriginal(DataSession dataSession, PrimaryKeyType id) throws BusinessException {
    Session session = (Session) dataSession.getDataBaseSessionAlternativeImpl();
    try {
        // Bypass the cache so the load reflects the database state.
        // NOTE(review): the IGNORE cache mode is never restored on this session — confirm intended.
        session.setCacheMode(CacheMode.IGNORE);
        EntityType original = (EntityType) session.get(getEntityMetaData().getType(), id);
        if (original == null) {
            return null;
        }
        // Detach so later session changes don't touch this snapshot.
        session.evict(original);
        return original;
    } catch (javax.validation.ConstraintViolationException cve) {
        throw new BusinessException(exceptionTranslator.getBusinessMessages(cve));
    } catch (org.hibernate.exception.ConstraintViolationException cve) {
        throw new BusinessException(exceptionTranslator.getBusinessMessages(cve));
    } catch (RuntimeException ex) {
        throw ex;
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }
}

From source file:fr.mael.microrss.dao.impl.UserArticleDaoImpl.java

License:Open Source License

/**
 * Rebuilds the full-text index for {@code UserArticle}: purges all existing
 * index entries, then re-indexes via {@code manageResults}.
 */
public void reindex() {
    // MANUAL flush + IGNORE cache: standard session settings for mass indexing.
    FullTextSession searchSession = Search.getFullTextSession(getSessionFactory().getCurrentSession());
    searchSession.setFlushMode(FlushMode.MANUAL);
    searchSession.setCacheMode(CacheMode.IGNORE);
    searchSession.purgeAll(UserArticle.class);
    manageResults(UserArticle.class, searchSession);
}

From source file:gov.nih.nci.indexgen.Indexer.java

License:BSD License

/**
 * Generates lucene documents/* w ww .  j  a  v  a 2  s .co  m*/
 */
public void run() {

    System.out.println("Started " + entity.getEntityName());

    long start = System.currentTimeMillis();

    try {
        fullTextSession.setFlushMode(FlushMode.MANUAL);
        fullTextSession.setCacheMode(CacheMode.IGNORE);
        Transaction transaction = fullTextSession.beginTransaction();

        // Scrollable results will avoid loading too many objects in memory
        ScrollableResults results = fullTextSession.createQuery("from " + entity.getEntityName())
                .scroll(ScrollMode.FORWARD_ONLY);

        int i = 0;
        while (results.next()) {
            fullTextSession.index(results.get(0));
            if (++i % batchSize == 0)
                fullTextSession.clear();
        }

        transaction.commit();
    }

    finally {
        fullTextSession.close();
    }

    long end = System.currentTimeMillis();
    System.out.println("Completed " + entity.getEntityName() + " in " + (end - start) + " ms");
}

From source file:info.jtrac.hibernate.HibernateJtracDao.java

License:Apache License

/**
 * Loads the batch of Items whose ids lie in [firstResult, firstResult + batchSize),
 * with history eagerly fetched and the second-level cache bypassed.
 */
public List<Item> findAllItems(final int firstResult, final int batchSize) {
    return getHibernateTemplate().executeFind(new HibernateCallback() {
        public Object doInHibernate(Session session) {
            // Start from a clean session so previously loaded entities don't accumulate.
            session.clear();
            long lowerBound = firstResult;
            long upperBound = lowerBound + batchSize;
            Criteria batchQuery = session.createCriteria(Item.class)
                    .setCacheMode(CacheMode.IGNORE)
                    .setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY)
                    .setFetchMode("history", FetchMode.JOIN)
                    .add(Restrictions.ge("id", lowerBound))
                    .add(Restrictions.lt("id", upperBound));
            return batchQuery.list();
        }
    });
}

From source file:info.jtrac.hibernate.HibernateJtracDao.java

License:Apache License

/**
 * Fetches and increments the next sequence number for the given space sequence.
 */
public long loadNextSequenceNum(final long spaceSequenceId) {
    return (Long) getHibernateTemplate().execute(new HibernateCallback() {
        public Object doInHibernate(Session session) {
            // Flush pending work first, then bypass the cache so the counter
            // is read straight from the database.
            session.flush();
            session.setCacheMode(CacheMode.IGNORE);
            SpaceSequence sequence = (SpaceSequence) session.get(SpaceSequence.class, spaceSequenceId);
            long nextNum = sequence.getAndIncrement();
            // Persist the incremented counter immediately.
            session.update(sequence);
            session.flush();
            return nextNum;
        }
    });
}

From source file:info.jtrac.repository.HibernateJtracDao.java

License:Apache License

/**
 * Loads the batch of Items whose ids lie in [firstResult, firstResult + batchSize),
 * with history eagerly fetched and the second-level cache bypassed.
 */
@SuppressWarnings("unchecked")
@Override
@Transactional(propagation = Propagation.SUPPORTS)
public List<Item> findAllItems(final int firstResult, final int batchSize) {
    // Drop first-level cache state before running the batch query.
    entityManager.clear();
    long lowerBound = firstResult;
    long upperBound = lowerBound + batchSize;
    Criteria batchQuery = getSession().createCriteria(Item.class);
    batchQuery.setCacheMode(CacheMode.IGNORE);
    batchQuery.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
    batchQuery.setFetchMode("history", FetchMode.JOIN);
    batchQuery.add(Restrictions.ge("id", lowerBound));
    batchQuery.add(Restrictions.lt("id", upperBound));
    return batchQuery.list();
}