Example usage for org.hibernate FlushMode MANUAL

List of usage examples for org.hibernate FlushMode MANUAL

Introduction

On this page you can find usage examples for org.hibernate FlushMode MANUAL.

Prototype

FlushMode MANUAL

Document

The Session is only ever flushed when Session#flush is explicitly called by the application.
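
As a quick illustration of what this means in practice, here is a minimal sketch. The SomeEntity class, its name property, and the surrounding sessionFactory/transaction setup are hypothetical; the point is only that with MANUAL flush mode neither queries nor transaction commit push pending changes to the database, so the write happens only where Session#flush is called.

Session session = sessionFactory.openSession();
session.setFlushMode(FlushMode.MANUAL);
Transaction tx = session.beginTransaction();
try {
    SomeEntity e = (SomeEntity) session.get(SomeEntity.class, 1L);
    e.setName("updated");                          // change is held in memory only
    session.createQuery("from SomeEntity").list(); // does not trigger an automatic flush
    session.flush();                               // the UPDATE is issued here, and only here
    tx.commit();
} finally {
    session.close();
}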

Usage

From source file:ubic.gemma.persistence.persister.AbstractPersister.java

License:Apache License

@Override
@Transactional
public boolean isTransient(Object entity) {
    if (entity == null)
        return true;
    Long id = EntityUtils.getId(entity);

    if (id == null)
        return true; // assume.

    /*
     * We normally won't get past this point; the case where it might is when the transaction has been rolled back
     * and is being retried.
     */

    if (EntityUtils.isProxy(entity)) {
        if (AbstractPersister.log.isDebugEnabled())
            AbstractPersister.log.debug("Object is a proxy: " + entity.getClass().getSimpleName() + ":" + id);
        return false;
    }

    org.hibernate.Session session = this.getSessionFactory().getCurrentSession();
    if (session.contains(entity)) {
        if (AbstractPersister.log.isDebugEnabled())
            AbstractPersister.log
                    .debug("Found object in session: " + entity.getClass().getSimpleName() + ":" + id);
        return false;
    }

    //noinspection SynchronizationOnLocalVariableOrMethodParameter // Getting desperate ...
    synchronized (entity) {
        Session sess = this.getSessionFactory().openSession();
        sess.setFlushMode(FlushMode.MANUAL);
        Object pe;
        try {
            pe = sess.get(entity.getClass(), id);
        } finally {
            sess.close(); // ensure the extra session is closed even if the lookup fails
        }
        if (pe != null) {
            // Common case.
            if (AbstractPersister.log.isDebugEnabled())
                AbstractPersister.log
                        .debug("Found object in store: " + entity.getClass().getSimpleName() + ":" + id);
            return false;
        }
    }

    /*
     * Hibernate has a method that, pretty much, does what we've done so far ... but probably does it better.
     */
    String bestGuessEntityName = ((SessionImplementor) session).bestGuessEntityName(entity);
    if (ForeignKeys.isNotTransient(bestGuessEntityName, entity, null, (SessionImplementor) session)) {
        AbstractPersister.log.info("Hibernate says object is not transient: " + bestGuessEntityName + ":" + id);
        return false;
    }

    /*
     * The ID is filled in, but it probably is a survivor of a rolled-back transaction. It doesn't matter what we
     * return, it's not guaranteed to be right.
     */
    AbstractPersister.log.info("Object has ID but we can't tell if it is persistent: "
            + entity.getClass().getSimpleName() + ":" + id);
    return true;

}

From source file:ubic.gemma.persistence.service.AbstractDao.java

License:Apache License

/**
 * Finds a single entity whose given property matches the given value.
 *
 * @param  propertyName  the name of the property to be matched.
 * @param  propertyValue the value to look for.
 * @return               the matching entity, or null if none was found.
 */
@SuppressWarnings("unchecked")
protected T findOneByProperty(String propertyName, Object propertyValue) {

    /*
     * Disable flushing to avoid premature NonNullability constraint failures, etc., when this runs during object
     * creation. This effectively makes the method read-only even in a read-write context. (The same setup might be
     * needed for other methods.)
     */
    FlushMode fm = this.getSessionFactory().getCurrentSession().getFlushMode();
    this.getSessionFactory().getCurrentSession().setFlushMode(FlushMode.MANUAL);
    Criteria criteria = this.getSessionFactory().getCurrentSession().createCriteria(this.elementClass);
    criteria.add(Restrictions.eq(propertyName, propertyValue));
    criteria.setMaxResults(1);

    //noinspection unchecked
    T result = (T) criteria.uniqueResult();
    this.getSessionFactory().getCurrentSession().setFlushMode(fm);
    return result;
}

From source file:ubic.gemma.persistence.service.analysis.expression.diff.DifferentialExpressionResultDaoImpl.java

License:Apache License

@Override
public Map<Long, Map<Long, DiffExprGeneSearchResult>> findDiffExAnalysisResultIdsInResultSets(
        Collection<DiffExResultSetSummaryValueObject> resultSets, Collection<Long> geneIds) {

    Map<Long, Map<Long, DiffExprGeneSearchResult>> results = new HashMap<>();

    Session session = this.getSessionFactory().getCurrentSession();

    Map<Long, DiffExResultSetSummaryValueObject> resultSetIdsMap = EntityUtils.getIdMap(resultSets,
            "getResultSetId");

    Map<Long, Collection<Long>> foundInCache = this.fillFromCache(results, resultSetIdsMap.keySet(), geneIds);

    if (!foundInCache.isEmpty()) {
        AbstractDao.log.info("Results for " + foundInCache.size() + " resultsets found in cache");
    } else {
        AbstractDao.log.info("No results were in the cache");
    }

    Collection<Long> resultSetsNeeded = this.stripUnneededResultSets(foundInCache, resultSetIdsMap.keySet(),
            geneIds);

    // Are we finished?
    if (resultSetsNeeded.isEmpty()) {
        AbstractDao.log.info("All results were in the cache.");
        return results;
    }

    AbstractDao.log.info(foundInCache.size() + "/" + resultSetIdsMap.size()
            + " resultSets had at least some cached results; still need to query " + resultSetsNeeded.size());

    assert !resultSetsNeeded.isEmpty();

    org.hibernate.SQLQuery queryObject = session.createSQLQuery(
            DifferentialExpressionResultDaoImpl.fetchBatchDifferentialExpressionAnalysisResultsByResultSetsAndGeneQuery);

    /*
     * These values have been tweaked to probe for performance issues.
     */
    int resultSetBatchSize = 50;
    int geneBatchSize = 100;

    if (resultSetsNeeded.size() > geneIds.size()) {
        resultSetBatchSize = Math.min(500, resultSetsNeeded.size());
        AbstractDao.log.info("Batching by result sets (" + resultSetsNeeded.size() + " resultSets); "
                + geneIds.size() + " genes; batch size=" + resultSetBatchSize);

    } else {
        geneBatchSize = Math.min(200, geneIds.size());
        AbstractDao.log.info("Batching by genes (" + geneIds.size() + " genes); " + resultSetsNeeded.size()
                + " resultSets; batch size=" + geneBatchSize);
    }

    // use floating-point division so the ceiling is not lost to integer truncation
    final int numResultSetBatches = (int) Math.ceil((double) resultSetsNeeded.size() / resultSetBatchSize);

    queryObject.setFlushMode(FlushMode.MANUAL);

    StopWatch timer = new StopWatch();
    timer.start();
    int numResults = 0;
    long timeForFillingNonSig = 0;

    Map<Long, Map<Long, DiffExprGeneSearchResult>> resultsFromDb = new HashMap<>();

    int numResultSetBatchesDone = 0;

    // Iterate over batches of resultSets
    for (Collection<Long> resultSetIdBatch : new BatchIterator<>(resultSetsNeeded, resultSetBatchSize)) {

        if (AbstractDao.log.isDebugEnabled())
            AbstractDao.log.debug("Starting batch of resultsets: "
                    + StringUtils.abbreviate(StringUtils.join(resultSetIdBatch, ","), 100));

        /*
         * Get the probes using the CommonQueries gene2cs. Otherwise we (in effect) end up doing this over and over
         * again.
         */
        Map<Long, Collection<Long>> cs2GeneIdMap = this.getProbesForGenesInResultSetBatch(session, geneIds,
                resultSetIdsMap, resultSetIdBatch);

        queryObject.setParameterList("rs_ids", resultSetIdBatch);

        int numGeneBatchesDone = 0;
        final int numGeneBatches = (int) Math.ceil((double) cs2GeneIdMap.size() / geneBatchSize);

        StopWatch innerQt = new StopWatch();

        // iterate over batches of probes (genes)
        for (Collection<Long> probeBatch : new BatchIterator<>(cs2GeneIdMap.keySet(), geneBatchSize)) {

            if (AbstractDao.log.isDebugEnabled())
                AbstractDao.log.debug("Starting batch of probes: "
                        + StringUtils.abbreviate(StringUtils.join(probeBatch, ","), 100));

            // would it help to sort the probeBatch?
            List<Long> pbL = new Vector<>(probeBatch);
            Collections.sort(pbL);

            queryObject.setParameterList("probe_ids", pbL);

            innerQt.start();
            List<?> queryResult = queryObject.list();
            innerQt.stop();

            if (innerQt.getTime() > 2000) {
                // show the actual query with params.
                AbstractDao.log.info("Query time: " + innerQt.getTime() + "ms:\n "
                        + queryObject.getQueryString().replace(":probe_ids", StringUtils.join(probeBatch, ","))
                                .replace(":rs_ids", StringUtils.join(resultSetIdBatch, ",")));
            }
            innerQt.reset();

            /*
             * Each query tuple is (probe, result, resultSet, qvalue, pvalue).
             */
            for (Object o : queryResult) {
                // Long resultSetId = ( ( BigInteger )((Object[])o)[2] ).longValue();
                // if (!resultSetId.equals)
                numResults += this.processResultTuple(o, resultsFromDb, cs2GeneIdMap);
            }

            if (timer.getTime() > 5000 && AbstractDao.log.isInfoEnabled()) {
                AbstractDao.log.info("Batch time: " + timer.getTime() + "ms; Fetched DiffEx " + numResults
                        + " results so far. " + numResultSetBatchesDone + "/" + numResultSetBatches
                        + " resultset batches completed. " + numGeneBatchesDone + "/" + numGeneBatches
                        + " gene batches done.");
                timer.reset();
                timer.start();
            }

            // Check if task was cancelled.
            if (Thread.currentThread().isInterrupted()) {
                throw new TaskCancelledException("Search was cancelled");
            }

            numGeneBatchesDone++;

            if (DifferentialExpressionResultDaoImpl.CORRECTED_PVALUE_THRESHOLD_TO_BE_CONSIDERED_DIFF_EX < 1.0) {
                timeForFillingNonSig += this.fillNonSignificant(pbL, resultSetIdsMap, resultsFromDb,
                        resultSetIdBatch, cs2GeneIdMap, session);
            }
        } // over probes.

        // Check if task was cancelled.
        if (Thread.currentThread().isInterrupted()) {
            throw new TaskCancelledException("Search was cancelled");
        }

        numResultSetBatchesDone++;

    }

    if (timer.getTime() > 1000 && AbstractDao.log.isInfoEnabled()) {
        AbstractDao.log.info("Fetching DiffEx from DB took total of " + timer.getTime() + " ms : geneIds="
                + StringUtils.abbreviate(StringUtils.join(geneIds, ","), 50) + " result set="
                + StringUtils.abbreviate(StringUtils.join(resultSetsNeeded, ","), 50));
        if (timeForFillingNonSig > 100) {
            AbstractDao.log.info("Filling in non-significant values: " + timeForFillingNonSig + "ms in total");
        }
    }

    // Add the DB results to the cached results.
    this.addToCache(resultsFromDb, resultSetsNeeded, geneIds);

    for (Long resultSetId : resultsFromDb.keySet()) {
        Map<Long, DiffExprGeneSearchResult> geneResults = resultsFromDb.get(resultSetId);
        if (results.containsKey(resultSetId)) {
            results.get(resultSetId).putAll(geneResults);
        } else {
            results.put(resultSetId, geneResults);
        }
    }

    return results;
}

From source file:ubic.gemma.persistence.service.common.auditAndSecurity.UserDaoImpl.java

License:Apache License

@Override
public User findByUserName(final String userName) {
    Session session = this.getSessionFactory().getCurrentSession();

    //noinspection unchecked
    List<User> users = session.createCriteria(User.class).setFlushMode(FlushMode.MANUAL)
            .add(Restrictions.eq("userName", userName)).list();

    if (users.isEmpty()) {
        return null;
    } else if (users.size() > 1) {
        throw new IllegalStateException("Multiple users with name=" + userName);
    }
    User u = users.get(0);
    session.setReadOnly(u, true); // TESTING
    return u;
}

From source file:ubic.gemma.persistence.service.expression.bioAssayData.BioAssayDimensionDaoImpl.java

License:Apache License

@Override
public BioAssayDimension find(BioAssayDimension bioAssayDimension) {

    if (bioAssayDimension.getBioAssays().isEmpty()) {
        throw new IllegalArgumentException("BioAssayDimension had no BioAssays");
    }

    Criteria queryObject = this.getSessionFactory().getCurrentSession().createCriteria(BioAssayDimension.class);
    queryObject.setReadOnly(true);
    queryObject.setFlushMode(FlushMode.MANUAL);

    if (StringUtils.isNotBlank(bioAssayDimension.getName())) {
        queryObject.add(Restrictions.eq("name", bioAssayDimension.getName()));
    }

    if (StringUtils.isNotBlank(bioAssayDimension.getDescription())) {
        queryObject.add(Restrictions.eq("description", bioAssayDimension.getDescription()));
    }

    queryObject.add(Restrictions.sizeEq("bioAssays", bioAssayDimension.getBioAssays().size()));

    Collection<String> names = new HashSet<>();
    for (BioAssay bioAssay : bioAssayDimension.getBioAssays()) {
        names.add(bioAssay.getName());
    }
    queryObject.createCriteria("bioAssays").add(Restrictions.in("name", names));

    BioAssayDimension candidate = (BioAssayDimension) queryObject.uniqueResult();

    if (candidate == null)
        return null;

    // Now check that the bioassays and order are exactly the same.
    Collection<BioAssay> desiredBioAssays = bioAssayDimension.getBioAssays();
    Collection<BioAssay> candidateBioAssays = candidate.getBioAssays();

    assert desiredBioAssays.size() == candidateBioAssays.size();

    Iterator<BioAssay> dit = desiredBioAssays.iterator();
    Iterator<BioAssay> cit = candidateBioAssays.iterator();

    while (dit.hasNext()) {
        BioAssay d = dit.next();
        BioAssay c = cit.next();
        if (!c.equals(d))
            return null;
    }

    return candidate;

}

From source file:ubic.gemma.persistence.service.expression.bioAssayData.DesignElementDataVectorDaoImpl.java

License:Apache License

/**
 * @param  ee      the id of the expression experiment.
 * @param  cs2gene Map of probes to genes.
 * @return         map of vectors to gene ids.
 */
Map<T, Collection<Long>> getVectorsForProbesInExperiments(Long ee, Map<Long, Collection<Long>> cs2gene) {

    // Do not use an IN clause for experiments, as it cannot use the indices
    //language=HQL
    String queryString = "select dedv, dedv.designElement.id from ProcessedExpressionDataVector dedv fetch all properties"
            + " where dedv.designElement.id in ( :cs ) and dedv.expressionExperiment.id = :eeId ";

    Session session = this.getSessionFactory().getCurrentSession();
    org.hibernate.Query queryObject = session.createQuery(queryString);
    queryObject.setReadOnly(true);
    queryObject.setFlushMode(FlushMode.MANUAL);

    Map<T, Collection<Long>> dedv2genes = new HashMap<>();
    StopWatch timer = new StopWatch();
    timer.start();

    queryObject.setLong("eeId", ee);

    int batchSize = 100;
    for (Collection<Long> batch : new BatchIterator<>(cs2gene.keySet(), batchSize)) {
        this.getVectorsBatch(cs2gene, queryObject, dedv2genes, batch);
    }

    if (timer.getTime() > Math.max(200, 20 * dedv2genes.size())) {
        AbstractDao.log.info("Fetched " + dedv2genes.size() + " vectors for " + cs2gene.size() + " probes in "
                + timer.getTime() + "ms\n" + "Vector query was: " + queryString);

    }
    return dedv2genes;
}

From source file:ubic.gemma.persistence.service.expression.bioAssayData.DesignElementDataVectorDaoImpl.java

License:Apache License

Map<T, Collection<Long>> getVectorsForProbesInExperiments(Map<Long, Collection<Long>> cs2gene) {

    //language=HQL
    String queryString = "select dedv, dedv.designElement.id from ProcessedExpressionDataVector dedv fetch all properties"
            + " where dedv.designElement.id in ( :cs ) ";

    Session session = this.getSessionFactory().getCurrentSession();
    org.hibernate.Query queryObject = session.createQuery(queryString);
    queryObject.setReadOnly(true);
    queryObject.setFlushMode(FlushMode.MANUAL);

    Map<T, Collection<Long>> dedv2genes = new HashMap<>();
    StopWatch timer = new StopWatch();
    timer.start();

    int batchSize = 100;
    for (Collection<Long> batch : new BatchIterator<>(cs2gene.keySet(), batchSize)) {
        this.getVectorsBatch(cs2gene, queryObject, dedv2genes, batch);
    }

    if (timer.getTime() > Math.max(200, 20 * dedv2genes.size())) {
        AbstractDao.log.info("Fetched " + dedv2genes.size() + " vectors for " + cs2gene.size() + " probes in "
                + timer.getTime() + "ms\n" + "Vector query was: " + queryString);

    }
    return dedv2genes;
}

From source file:ubic.gemma.persistence.service.expression.bioAssayData.DesignElementDataVectorDaoImpl.java

License:Apache License

private void getVectorsBatch(Map<Long, Collection<Long>> cs2gene, org.hibernate.Query queryObject,
        Map<T, Collection<Long>> dedv2genes, Collection<Long> batch) {
    queryObject.setParameterList("cs", batch);
    queryObject.setFlushMode(FlushMode.MANUAL);
    queryObject.setReadOnly(true);
    ScrollableResults results = queryObject.scroll(ScrollMode.FORWARD_ONLY);

    while (results.next()) {
        @SuppressWarnings("unchecked")
        T dedv = (T) results.get(0);
        Long cs = (Long) results.get(1);
        Collection<Long> associatedGenes = cs2gene.get(cs);
        if (!dedv2genes.containsKey(dedv)) {
            dedv2genes.put(dedv, associatedGenes);
        } else {
            Collection<Long> mappedGenes = dedv2genes.get(dedv);
            mappedGenes.addAll(associatedGenes);
        }
    }

    results.close();
}

From source file:ubic.gemma.persistence.service.expression.bioAssayData.ProcessedExpressionDataVectorDaoImpl.java

License:Apache License

/**
 * @param limit if non-null and positive, you will get a random set of vectors for the experiment
 * @param ee    the expression experiment
 * @return processed data vectors
 */
private Collection<ProcessedExpressionDataVector> getProcessedVectors(ExpressionExperiment ee, Integer limit) {

    if (limit == null || limit < 0) {
        return this.getProcessedVectors(ee);
    }

    StopWatch timer = new StopWatch();
    timer.start();
    List<ProcessedExpressionDataVector> result;

    Integer availableVectorCount = ee.getNumberOfDataVectors();
    if (availableVectorCount == null || availableVectorCount == 0) {
        AbstractDao.log.info("Experiment does not have vector count populated.");
        // cannot fix this here, because we're read-only.
    }

    Query q = this.getSessionFactory().getCurrentSession()
            .createQuery(" from ProcessedExpressionDataVector dedv where dedv.expressionExperiment.id = :ee");
    q.setParameter("ee", ee.getId(), LongType.INSTANCE);
    q.setMaxResults(limit);
    if (availableVectorCount != null && availableVectorCount > limit) {
        q.setFirstResult(new Random().nextInt(availableVectorCount - limit));
    }

    // we should already be read-only, so this is probably pointless.
    q.setReadOnly(true);

    // and so this probably doesn't do anything useful.
    q.setFlushMode(FlushMode.MANUAL);

    //noinspection unchecked
    result = q.list();
    if (timer.getTime() > 1000)
        AbstractDao.log
                .info("Fetch " + limit + " vectors from " + ee.getShortName() + ": " + timer.getTime() + "ms");

    if (result.isEmpty()) {
        AbstractDao.log.warn("Experiment does not have any processed data vectors");
        return result;
    }

    this.thaw(result); // needed?
    return result;
}

From source file:ubic.gemma.persistence.service.genome.biosequence.BioSequenceDaoImpl.java

License:Apache License

@SuppressWarnings("unchecked")
@Override
public BioSequence find(BioSequence bioSequence) {

    BusinessKey.checkValidKey(bioSequence);

    Criteria queryObject = BusinessKey.createQueryObject(this.getSessionFactory().getCurrentSession(),
            bioSequence);
    queryObject.setReadOnly(true);
    queryObject.setFlushMode(FlushMode.MANUAL);
    /*
     * this initially matches on name and taxon only.
     */
    java.util.List<?> results = queryObject.list();
    Object result = null;
    if (results != null) {
        if (results.size() > 1) {
            this.debug(bioSequence, results);

            // Try to find the best match. See BusinessKey for more
            // explanation of why this is needed.
            BioSequence match = null;
            for (BioSequence res : (Collection<BioSequence>) results) {
                if (res.equals(bioSequence)) {
                    if (match != null) {
                        AbstractDao.log.warn("More than one sequence in the database matches " + bioSequence
                                + ", returning arbitrary match: " + match);
                        break;
                    }
                    match = res;
                }
            }

            return match;

        } else if (results.size() == 1) {
            result = results.iterator().next();
        }
    }
    return (BioSequence) result;
}