Example usage for org.hibernate ScrollableResults get

List of usage examples for org.hibernate ScrollableResults get

Introduction

On this page you can find example usages of org.hibernate ScrollableResults.get.

Prototype

Object get(int i);

Source Link

Document

Get the ith object in the current row of results, without initializing any other results in the row.

Usage

From source file:org.squashtest.tm.service.internal.repository.hibernate.HibernateMilestoneDao.java

License:Open Source License

@Override
public void unbindAllObjects(long milestoneId) {

    // Entity names that may hold a reference to a milestone; each is expected
    // to declare a "<Entity>.findAllBoundToMilestone" named query.
    final String[] entities = { "TestCase", "RequirementVersion", "Campaign" };

    Session session = currentSession();

    for (String entity : entities) {
        LOGGER.info("About to fetch entities {}", entity);

        String namedQuery = entity + ".findAllBoundToMilestone";
        LOGGER.debug("Fetching bound entities with query named {}", namedQuery);
        Query query = session.getNamedQuery(namedQuery);
        query.setParameter(MILESTONE_ID, milestoneId);
        ScrollableResults holders = scrollableResults(query);

        int count = 0;

        try {
            while (holders.next()) {
                MilestoneHolder holder = (MilestoneHolder) holders.get(0);
                holder.unbindMilestone(milestoneId);
                if (++count % BATCH_UPDATE_SIZE == 0) {
                    // flush a batch of updates and release memory:
                    session.flush();
                    session.clear();
                }
            }
        } finally {
            // release the underlying JDBC cursor even if unbinding fails;
            // the original code never closed the ScrollableResults
            holders.close();
        }
    }
}

From source file:org.wallride.service.SystemService.java

License:Apache License

@Async
@Transactional(propagation = Propagation.SUPPORTS)
public void reIndex() throws Exception {
    logger.info("Re-Index started");

    FullTextSession fullTextSession = Search.getFullTextSession((entityManager.unwrap(Session.class)));

    // Manual flushing plus no second-level-cache interaction keeps memory flat
    // while scanning large tables.
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);

    for (Class persistentClass : fullTextSession.getSearchFactory().getIndexedTypes()) {
        Transaction transaction = fullTextSession.beginTransaction();

        // Scrollable results will avoid loading too many objects in memory
        ScrollableResults results = fullTextSession.createCriteria(persistentClass).setFetchSize(BATCH_SIZE)
                .scroll(ScrollMode.FORWARD_ONLY);
        int index = 0;
        try {
            while (results.next()) {
                index++;
                fullTextSession.index(results.get(0)); //index each element
                if (index % BATCH_SIZE == 0) {
                    fullTextSession.flushToIndexes(); //apply changes to indexes
                    fullTextSession.clear(); //free memory since the queue is processed
                }
            }
        } finally {
            // the cursor was previously never closed; release it even on failure
            results.close();
        }
        // flush the last, possibly partial, batch before committing
        fullTextSession.flushToIndexes();
        transaction.commit();
    }
    logger.info("Re-Index finished");
}

From source file:org.xerela.provider.scheduler.Scheduler.java

License:Mozilla Public License

/** {@inheritDoc} */
public PageData getExecutionData(PageData pageData, String sortColumn, boolean descending) {
    Session session = SchedulerActivator.getSessionFactory().getCurrentSession();

    Criteria criteria = session.createCriteria(ExecutionData.class).add(Restrictions.isNotNull("startTime"))
            .setFirstResult(pageData.getOffset()).setMaxResults(pageData.getPageSize());

    if (pageData.getOffset() == 0) {
        // Set the total result size into the page data (computed only on the first page).
        criteria.setProjection(Projections.count("id"));

        Integer total = (Integer) criteria.uniqueResult();
        pageData.setTotal(total);

        // reset the projection so the same criteria can fetch the actual rows
        criteria.setProjection(null);
    }

    if (sortColumn != null) {
        criteria.addOrder((descending ? Order.desc(sortColumn.trim()) : Order.asc(sortColumn.trim())));
    }

    List<ExecutionData> list = new ArrayList<ExecutionData>();
    ScrollableResults scroll = criteria.scroll(ScrollMode.SCROLL_INSENSITIVE);
    try {
        while (scroll.next()) {
            list.add((ExecutionData) scroll.get(0));
        }
    } finally {
        // close in a finally block so a failure mid-scroll cannot leak the cursor
        scroll.close();
    }

    pageData.setExecutionData(list);

    return pageData;
}

From source file:org.yes.cart.dao.impl.GenericDAOHibernateImpl.java

License:Apache License

/**
 * Builds a {@link Runnable} that fully re-indexes {@code persistentClass} in
 * Hibernate Search using a forward-only scrollable cursor.
 *
 * @param async     when true a new session is opened for the scan and the
 *                  current session is closed best-effort in the finally block
 * @param batchSize cursor fetch size and the interval at which the index queue
 *                  is flushed and the session cleared to bound memory use
 * @param filter    optional entity filter; when non-null the index is NOT
 *                  purged first (partial reindex) and rejected entities are skipped
 * @return the indexing task; exceptions are logged, never rethrown
 */
private Runnable createIndexingRunnable(final boolean async, final int batchSize, final IndexFilter<T> filter) {
    return new Runnable() {
        @Override
        public void run() {
            int index = 0;
            final Logger log = LOGFTQ;
            try {

                if (persistentClassIndexble) {

                    // reset the externally visible progress counter for this run
                    currentIndexingCount.set(0);

                    if (log.isInfoEnabled()) {
                        log.info("Full reindex for {} class", persistentClass);
                    }
                    FullTextSession fullTextSession = Search.getFullTextSession(
                            async ? sessionFactory.openSession() : sessionFactory.getCurrentSession());
                    // manual flushing and no cache interaction keep the scan cheap
                    fullTextSession.setFlushMode(FlushMode.MANUAL);
                    fullTextSession.setCacheMode(CacheMode.IGNORE);
                    if (filter == null) { // only purge global full reindex because this clears all entries
                        fullTextSession.purgeAll(getPersistentClass());
                    }
                    ScrollableResults results = fullTextSession.createCriteria(persistentClass)
                            .setFetchSize(batchSize).scroll(ScrollMode.FORWARD_ONLY);

                    try {
                        while (results.next()) {

                            // unwrap Hibernate proxies so filter/interceptor/index
                            // all see the concrete entity
                            final T entity = (T) HibernateHelper.unproxy(results.get(0));

                            if (filter != null && filter.skipIndexing(entity)) {
                                continue; // skip this object
                            }

                            // honour per-entity interceptor decisions when configured
                            if (entityIndexingInterceptor != null) {
                                if (IndexingOverride.APPLY_DEFAULT == entityIndexingInterceptor
                                        .onUpdate(entity)) {
                                    fullTextSession.index(entity);
                                }
                            } else {
                                fullTextSession.index(entity);
                            }
                            index++;

                            if (index % batchSize == 0) {
                                fullTextSession.flushToIndexes(); //apply changes to indexes
                                fullTextSession.clear(); //clear since the queue is processed
                                if (log.isInfoEnabled()) {
                                    log.info("Indexed {} items of {} class", index, persistentClass);
                                }
                            }
                            // lock-free progress update; advances by exactly one step
                            currentIndexingCount.compareAndSet(index - 1, index);
                        }
                    } finally {
                        results.close();
                    }
                    // flush and clear the last, possibly partial, batch
                    fullTextSession.flushToIndexes(); //apply changes to indexes
                    fullTextSession.clear(); //clear since the queue is processed
                    if (log.isInfoEnabled()) {
                        log.info("Indexed {} items of {} class", index, persistentClass);
                    }
                    fullTextSession.getSearchFactory().optimize(getPersistentClass());
                }
            } catch (Exception exp) {
                LOGFTQ.error("Error during indexing", exp);
            } finally {
                asyncRunningState.set(COMPLETED);
                if (async) {
                    try {
                        if (persistentClassIndexble) {
                            sessionFactory.getCurrentSession().close();
                        }
                    } catch (Exception exp) {
                        // best-effort close; the session may already be closed
                    }
                }
                if (log.isInfoEnabled()) {
                    log.info("Full reindex for {} class ... COMPLETED", persistentClass);
                }
            }
        }
    };
}

From source file:org.zanata.search.HTextFlowTargetIndexingStrategy.java

License:Open Source License

/**
 * Re-indexes every HTextFlowTarget produced by {@code scrollableResults},
 * flushing the index queue and clearing the session every
 * {@code sessionClearBatchSize} rows to keep memory bounded.
 *
 * @param session           full-text session used for indexing; switched to
 *                          manual flush / cache-ignore mode for the scan
 * @param scrollableResults cursor over HTextFlowTarget rows; closed before
 *                          returning; when null the method is a no-op
 * @param handle            optional progress handle, incremented once per row
 */
private static void reindexScrollableResultSet(FullTextSession session, ScrollableResults scrollableResults,
        AsyncTaskHandle handle) {

    session.setFlushMode(FlushMode.MANUAL);
    session.setCacheMode(CacheMode.IGNORE);

    // The original only null-checked in the finally block, but the loop below
    // would already have thrown NPE on a null cursor; guard consistently here.
    if (scrollableResults == null) {
        return;
    }

    int rowNum = 0;
    try {
        while (scrollableResults.next()) {

            rowNum++;
            HTextFlowTarget entity = (HTextFlowTarget) scrollableResults.get(0);
            // TODO pahuang do I need to purge first then reindex?
            session.index(entity);
            if (handle != null) {
                handle.increaseProgress(1);
            }

            if (rowNum % sessionClearBatchSize == 0) {
                log.info("periodic flush and clear for HTextFlowTarget (n={})", rowNum);
                session.flushToIndexes(); // apply changes to indexes
                session.clear(); // clear since the queue is processed
            }
        }
    } finally {
        scrollableResults.close();
    }
    session.flushToIndexes(); // apply changes to indexes
    session.clear(); // clear since the queue is processed
}

From source file:se.vgregion.webbisar.svc.impl.WebbisDaoHibernate.java

License:Open Source License

/**
 * {@inheritDoc}
 */
public void reindex() {
    getJpaTemplate().execute(new JpaCallback() {
        public Object doInJpa(EntityManager em) throws PersistenceException {
            FullTextSession fullTextSession = getFullTextSession(getHibernateSession(em));
            fullTextSession.purgeAll(Webbis.class);

            // Do not update the second level cache. It will just slow things down.
            fullTextSession.setCacheMode(CacheMode.GET);

            // Read 5000 entries at a time.
            final int BATCH_SIZE = 5000;

            // Due to a bug in Hibernate (HHH-1283) a join does not work here.
            // See http://opensource.atlassian.com/projects/hibernate/browse/HHH-1283
            ScrollableResults results = fullTextSession.createQuery("from Webbis w").scroll();

            int index = 0;
            try {
                while (results.next()) {
                    index++;
                    fullTextSession.index(results.get(0)); // index each element
                    if (index % BATCH_SIZE == 0) {
                        fullTextSession.flushToIndexes(); // apply changes to indexes
                        fullTextSession.clear(); // clear since the queue is processed
                    }
                }
            } finally {
                // the cursor was previously never closed; release it even on failure
                results.close();
            }
            // flush whatever remains in the last, partial batch
            fullTextSession.flushToIndexes();
            return null;
        }
    });
}

From source file:ubic.gemma.persistence.service.association.phenotype.PhenotypeAssociationDaoImpl.java

License:Apache License

/**
 * Finds the category terms currently used in the database by evidence.
 *
 * @return distinct (categoryUri, category) pairs wrapped in value objects,
 *         ordered by the natural ordering of CharacteristicValueObject
 */
@Override
public Collection<CharacteristicValueObject> findEvidenceCategoryTerms() {

    Collection<CharacteristicValueObject> mgedCategory = new TreeSet<>();

    String queryString = "SELECT DISTINCT CATEGORY_URI, category FROM PHENOTYPE_ASSOCIATION "
            + "JOIN INVESTIGATION ON PHENOTYPE_ASSOCIATION.EXPERIMENT_FK = INVESTIGATION.ID "
            + "JOIN CHARACTERISTIC ON CHARACTERISTIC.INVESTIGATION_FK= INVESTIGATION.ID";
    org.hibernate.SQLQuery queryObject = this.getSessionFactory().getCurrentSession()
            .createSQLQuery(queryString);

    ScrollableResults results = queryObject.scroll(ScrollMode.FORWARD_ONLY);
    try {
        while (results.next()) {

            // -1L: transient value object, not backed by a persistent id
            CharacteristicValueObject characteristicValueObject = new CharacteristicValueObject(-1L);
            characteristicValueObject.setCategoryUri((String) results.get(0));
            characteristicValueObject.setCategory((String) results.get(1));
            mgedCategory.add(characteristicValueObject);
        }
    } finally {
        // close in finally so an exception mid-scan cannot leak the cursor
        results.close();
    }

    return mgedCategory;
}

From source file:ubic.gemma.persistence.service.association.phenotype.PhenotypeAssociationDaoImpl.java

License:Apache License

/**
 * Returns the list of the owners (ACL principals) that have evidence in the system.
 *
 * @return distinct owner principal names
 */
@Override
public Collection<String> findEvidenceOwners() {

    Set<String> owners = new HashSet<>();

    String sqlQuery = "SELECT DISTINCT sid.PRINCIPAL FROM ACLOBJECTIDENTITY aoi JOIN ACLENTRY ace ON ace.OBJECTIDENTITY_FK = "
            + "aoi.ID JOIN ACLSID sid ON sid.ID = aoi.OWNER_SID_FK WHERE aoi.OBJECT_CLASS " + "IN  "
            + PhenotypeAssociationDaoImpl.DISCRIMINATOR_CLAUSE;

    SQLQuery queryObject = this.getSessionFactory().getCurrentSession().createSQLQuery(sqlQuery);

    ScrollableResults results = queryObject.scroll(ScrollMode.FORWARD_ONLY);

    try {
        while (results.next()) {
            String owner = (String) results.get(0);
            owners.add(owner);
        }
    } finally {
        // the cursor was never closed before; always release it
        results.close();
    }

    return owners;
}

From source file:ubic.gemma.persistence.service.association.phenotype.PhenotypeAssociationDaoImpl.java

License:Apache License

/**
 * Finds ids of private evidence, most (or least) recently updated first.
 *
 * @param taxonId optional taxon restriction; ignored when null
 * @param limit   maximum number of ids; a negative value means "oldest first"
 *                and its absolute value is used as the limit
 * @return ids of matching phenotype associations
 */
@Override
public Set<Long> findPrivateEvidenceId(Long taxonId, Integer limit) {

    String limitAbs;
    String orderBy;

    if (limit < 0) {
        // negative limit: scan the oldest entries first, limited by |limit|
        limitAbs = "limit " + limit * -1;
        orderBy = "order by LAST_UPDATED asc ";
    } else {
        orderBy = "order by LAST_UPDATED desc ";
        limitAbs = "limit " + limit;
    }

    Set<Long> ids = new HashSet<>();

    String sqlQuery = "select distinct phen.ID ";
    sqlQuery += this.getPhenotypesGenesAssociationsBeginQuery(false);

    if (!SecurityUtil.isUserAdmin()) { // admins have no restrictions.
        if (!sqlQuery.trim().endsWith("where")) {
            sqlQuery += " AND ";
        }
        sqlQuery += EntityUtils.addGroupAndUserNameRestriction(true, false);
    }

    if (taxonId != null) {
        if (!sqlQuery.trim().endsWith("where")) {
            sqlQuery += " AND ";
        }
        sqlQuery += " tax.ID = :taxonId ";
    }

    sqlQuery += orderBy + limitAbs;

    SQLQuery queryObject = this.getSessionFactory().getCurrentSession().createSQLQuery(sqlQuery);

    if (taxonId != null) {
        queryObject.setParameter("taxonId", taxonId);
    }

    EntityUtils.addUserAndGroupParameters(queryObject, this.getSessionFactory());

    ScrollableResults results = queryObject.scroll(ScrollMode.FORWARD_ONLY);

    try {
        while (results.next()) {
            // native query returns BIGINT as BigInteger; convert to Long
            Long phenotypeId = ((BigInteger) results.get(0)).longValue();
            ids.add(phenotypeId);
        }
    } finally {
        // close in finally so an exception mid-scan cannot leak the cursor
        results.close();
    }
    return ids;
}

From source file:ubic.gemma.persistence.service.association.phenotype.PhenotypeAssociationDaoImpl.java

License:Apache License

/**
 * Executes the given native query and populates the phenotype --&gt; genes map.
 *
 * @param queryObject query whose rows are (geneNcbiId, valueUri) pairs
 * @return map from phenotype value URI to the set of gene NCBI ids
 */
private Map<String, Set<Integer>> populateGenesAssociations(SQLQuery queryObject) {
    Map<String, Set<Integer>> phenotypesGenesAssociations = new HashMap<>();
    ScrollableResults results = queryObject.scroll(ScrollMode.FORWARD_ONLY);
    try {
        while (results.next()) {

            Integer geneNcbiId = (Integer) results.get(0);
            String valueUri = (String) results.get(1);

            EntityUtils.populateMapSet(phenotypesGenesAssociations, valueUri, geneNcbiId);
        }
    } finally {
        // close in finally so a mapping failure cannot leak the cursor
        results.close();
    }
    return phenotypesGenesAssociations;
}