List of usage examples for org.hibernate ScrollableResults get
Object get(int i);
From source file:easycare.load.util.db.loader.OrganisationDataLoader.java
/**
 * Loads locations for every non-ResMed organisation, scrolling through organisations
 * so the whole result set is never held in memory at once.
 * <p>
 * Standard organisations get a growing number of locations each (the quota increases by
 * one per standard org); all other organisations get the fixed small-org location set.
 * The persistence context is flushed and cleared in chunks to bound memory usage.
 *
 * @param context configuration of the current data load (location counts, patients per location)
 */
@Transactional
public void buildAllLocations(ContextOfCurrentLoad context) {
    log.info("starting to load locations");
    Session session = (Session) em.getDelegate();
    // Scroll instead of list() to avoid materialising every Organisation at once.
    ScrollableResults scrollableResults = session
            .createQuery("from Organisation where name != 'ResMed' order by accountNumber").scroll();
    int numberOfLoadedLocations = 0;
    int numberOfLoadedStdOrgLocations = 0;
    int numberOfLocationsToCreateInStandardOrg = context.getInitialNumOfLocationsInStdOrg();
    int numberOfUpdatedOrganisations = 0;
    int numberOfPatientsPlacedIntoLocations = 0;
    while (scrollableResults.next()) {
        Organisation organisation = (Organisation) scrollableResults.get(0);
        if (isStandardOrg(context, numberOfUpdatedOrganisations)) {
            log.info("creating locations for org " + (numberOfUpdatedOrganisations + 1));
            for (int i = 0; i < numberOfLocationsToCreateInStandardOrg
                    && numberOfLoadedStdOrgLocations < context.getTotalNumberOfLocationsInStandardOrgs(); i++) {
                int oldPatientsInOrg = numberOfPatientsPlacedIntoLocations;
                numberOfPatientsPlacedIntoLocations += context.getNumberOfPatientsInStandardOrgLocation();
                // BUGFIX: original message lacked a space before "with patients from",
                // producing e.g. "creating location 1with patients from ...".
                log.info("creating location " + (i + 1) + " with patients from " + (oldPatientsInOrg + 1)
                        + " to " + (numberOfPatientsPlacedIntoLocations + 1));
                createAndAddLocationToOrganisation(context, i, organisation);
                numberOfLoadedStdOrgLocations++;
                numberOfLoadedLocations++;
            }
            session.update(organisation);
            em.flush();
            em.clear();
            // each subsequent standard org receives one more location than the previous one
            numberOfLocationsToCreateInStandardOrg++;
        } else {
            int oldPatientsInOrg = numberOfPatientsPlacedIntoLocations;
            numberOfPatientsPlacedIntoLocations += context.getNumberOfPatientsPerSmallOrg();
            log.info("creating org " + (numberOfUpdatedOrganisations + 1)
                    + " as a small org with patients from " + (oldPatientsInOrg + 1) + " to "
                    + (numberOfPatientsPlacedIntoLocations + 1));
            createAndAddLocationToOrganisation(context, ContextOfCurrentLoad.NUMBER_OF_LOCATIONS_IN_SMALL_ORG,
                    organisation);
            numberOfLoadedLocations++;
        }
        numberOfUpdatedOrganisations++;
        // Periodically flush & clear the persistence context to keep the heap bounded.
        if (numberOfLoadedLocations % SMALL_CHUNK == 0) {
            em.flush();
            em.clear();
            // BUGFIX: "50the location" -> "50th location"
            log.debug("finished small chunk of locations up to " + numberOfLoadedLocations + "th location");
        }
    }
    log.info("finished to load locations ");
}
From source file:easycare.load.util.db.loader.UserDataLoader.java
@Transactional public void buildAllUsers(ContextOfCurrentLoad context) { log.info("starting to load users "); Session session = (Session) em.getDelegate(); ScrollableResults scrollableResults = session .createQuery("from Organisation where name != 'ResMed' order by accountNumber").scroll(); int numberOfOrganisationsWithAddedUsers = 0; while (scrollableResults.next()) { Organisation organisation = (Organisation) scrollableResults.get(0); String organisationNumber = StringUtils.remove(organisation.getAccountNumber(), "APT"); loadOrgUsers(context, organisation, organisationNumber); if (++numberOfOrganisationsWithAddedUsers % SMALL_CHUNK == 0) { em.flush();//from w ww. j a v a 2 s .c o m em.clear(); log.debug("finished small chunk of orgs with users up to " + numberOfOrganisationsWithAddedUsers + "org"); } log.info("Finished users for org " + organisation); } log.info("Finished to load users "); }
From source file:edu.harvard.med.screensaver.io.screenresults.ScreenResultReporter.java
License:Open Source License
/** * Create a study of the "Confirmed Positives" for all the pool SilencingReagents in the DB. * (re: {@link DataType#CONFIRMED_POSITIVE_INDICATOR} ) <br> * <ul>/* ww w. j av a2s . com*/ * For RNAi * <li>Count of follow-up screens for well * <li>M+1 columns named "N duplexes confirming positive", where 0 <= N <= M, and M is the max number of duplexes per * pool in any library, currently = 4). The value in each column is the number of follow-up screens that confirmed the * well as a positive with N duplexes * </ul> * see [#2610] Confirmed Positives study creator<br> * * @return total count of confirmed positives considered in this study (informational) */ public int createSilencingReagentConfirmedPositiveSummary(Screen study) { log.info("Get all of the pool reagents..."); ScrollableResults sr = _dao.runScrollQuery(new edu.harvard.med.screensaver.db.ScrollQuery() { public ScrollableResults execute(Session session) { HqlBuilder builder = new HqlBuilder(); builder.select("pr").from(Library.class, "l").from("l", Library.wells, "w", JoinType.INNER) .from("w", Well.latestReleasedReagent, "pr", JoinType.INNER) .where("l", "pool", Operator.EQUAL, Boolean.TRUE); return builder.toQuery(session, true).setCacheMode(CacheMode.IGNORE) .scroll(ScrollMode.FORWARD_ONLY); } }); log.info("Create the annotation types for the study."); AnnotationType averageConfirmedPositivesPerScreen = study.createAnnotationType( DEFAULT_ANNOTATION_NAME_WEIGHTED_AVERAGE, DEFAULT_ANNOTATION_TITLE_WEIGHTED_AVERAGE, true); _dao.persistEntity(averageConfirmedPositivesPerScreen); AnnotationType numberOfScreensAT = study.createAnnotationType(DEFAULT_ANNOTATION_NAME_NUMBER_OF_SCREENS, DEFAULT_ANNOTATION_TITLE_NUMBER_OF_SCREENS, true); _dao.persistEntity(numberOfScreensAT); // Create the bin-count annotation types (for "screens confirming # duplexes...") Map<Integer, AnnotationType> binToAnnotationTypeMap = Maps.newHashMap(); for (int i = 0; i <= 4; i++) // todo: make this a dynamic cardinality { 
AnnotationType screenCounter = study.createAnnotationType( DEFAULT_ANNOTATION_NAME_COUNT_OF_SCREENS_N.format(i), DEFAULT_ANNOTATION_TITLE_COUNT_OF_SCREENS_N.format(i), true); binToAnnotationTypeMap.put(i, screenCounter); _dao.persistEntity(screenCounter); } _dao.flush(); _dao.clear(); log.info("scroll through the pool reagents..."); int countOfDuplexReagentsConfirmed = 0; int count = 0; while (sr.next()) { SilencingReagent poolReagent = (SilencingReagent) sr.get(0); ConfirmationReport report = getDuplexReconfirmationReport(poolReagent); int[] binToScreenCount = report.getBinToScreenCount(poolReagent); int numberOfScreens = 0; for (int bin = 0; bin < binToScreenCount.length; bin++) { int screenCount = binToScreenCount[bin]; AnnotationType at = binToAnnotationTypeMap.get(bin); // note: for memory performance, we're side-stepping the AnnotationType.createAnnotationValue() method AnnotationValue av = new AnnotationValue(at, poolReagent, null, (double) screenCount); _dao.saveOrUpdateEntity(av); numberOfScreens += screenCount; countOfDuplexReagentsConfirmed += screenCount * bin; } if (numberOfScreens > 0) { // note: for memory performance, we're side-stepping the AnnotationType.createAnnotationValue() method AnnotationValue av = new AnnotationValue(averageConfirmedPositivesPerScreen, poolReagent, null, new Double("" + report.getWeightedAverage())); _dao.saveOrUpdateEntity(av); } // note: for memory performance, we're side-stepping the AnnotationType.createAnnotationValue() method AnnotationValue av = new AnnotationValue(numberOfScreensAT, poolReagent, null, (double) numberOfScreens); _dao.saveOrUpdateEntity(av); // for memory performance clear the session every CACHE_SIZE number of iterations if (count++ % AbstractDAO.ROWS_TO_CACHE == 0) { log.debug("clearing & flushing session"); _dao.flush(); _dao.clear(); } if (count % 1000 == 0) { log.info("" + count + " reagents processed"); } } log.info("" + count + " reagents processed"); _dao.flush(); _dao.clear(); 
log.info("countOfDuplexReagentsConfirmed: " + countOfDuplexReagentsConfirmed); log.info("populateStudyReagentLinkTable"); _screenDao.populateStudyReagentLinkTable(study.getScreenId()); log.info("Study created: " + study.getTitle() + ", reagents: " + countOfDuplexReagentsConfirmed); return countOfDuplexReagentsConfirmed; }
From source file:edu.utah.further.core.data.util.HibernateUtil.java
License:Apache License
/** * @param <T>/* ww w. j ava 2s.c o m*/ * @param results * @return */ public static <T> List<T> asList(final ScrollableResults results) { final List<T> resultList = CollectionUtil.newList(); while (results.next()) { resultList.add((T) results.get(0)); } return resultList; }
From source file:fr.gael.dhus.service.KeyStoreService.java
License:Open Source License
/** * Retrieves entries of a keyStore, oldest first. * @param keyStoreName the name of the keyStore * @return an Iterator of KeyStoreEntry/* w w w.ja v a 2s. co m*/ */ public Iterator<KeyStoreEntry> getOldestEntries(String keyStoreName) { final ScrollableResults entries = keyStoreEntryDao.readOldestEntries(keyStoreName); return new Iterator<KeyStoreEntry>() { @Override public boolean hasNext() { return entries.next(); } @Override public KeyStoreEntry next() { return (KeyStoreEntry) entries.get(0); } @Override public void remove() { throw new UnsupportedOperationException("Remove not supported."); } }; }
From source file:fr.mael.microrss.dao.impl.UserArticleDaoImpl.java
License:Open Source License
private void manageResults(Class clazz, FullTextSession searchSession) { ScrollableResults results = searchSession.createCriteria(clazz).setFetchMode("article", FetchMode.JOIN) .setFetchSize(100).scroll(ScrollMode.FORWARD_ONLY); int index = 0; while (results.next()) { index++;/* w w w.ja v a 2 s . c om*/ searchSession.index(results.get(0)); if (index % 100 == 0) { searchSession.flushToIndexes(); searchSession.clear(); } } searchSession.flushToIndexes(); searchSession.clear(); }
From source file:gov.nih.nci.indexgen.Indexer.java
License:BSD License
/** * Generates lucene documents//from w w w .ja va 2s .c o m */ public void run() { System.out.println("Started " + entity.getEntityName()); long start = System.currentTimeMillis(); try { fullTextSession.setFlushMode(FlushMode.MANUAL); fullTextSession.setCacheMode(CacheMode.IGNORE); Transaction transaction = fullTextSession.beginTransaction(); // Scrollable results will avoid loading too many objects in memory ScrollableResults results = fullTextSession.createQuery("from " + entity.getEntityName()) .scroll(ScrollMode.FORWARD_ONLY); int i = 0; while (results.next()) { fullTextSession.index(results.get(0)); if (++i % batchSize == 0) fullTextSession.clear(); } transaction.commit(); } finally { fullTextSession.close(); } long end = System.currentTimeMillis(); System.out.println("Completed " + entity.getEntityName() + " in " + (end - start) + " ms"); }
From source file:gr.abiss.calipso.service.impl.UserServiceImpl.java
License:Open Source License
@Override @Transactional(readOnly = false)// ww w . j a v a2 s . co m public void expireResetPasswordTokens() { // get a hibernate session suitable for read-only access to large datasets StatelessSession session = ((Session) this.repository.getEntityManager().getDelegate()).getSessionFactory() .openStatelessSession(); Date yesterday = DateUtils.addDays(new Date(), -1); // send email notifications for account confirmation tokens that expired org.hibernate.Query query = session.createQuery( "SELECT new gr.abiss.calipso.model.UserDTO(u.id, u.firstName, u.lastName,u.username, u.email, u.emailHash) FROM User u " + "WHERE u.password IS NULL and u.resetPasswordTokenCreated IS NOT NULL and u.resetPasswordTokenCreated < :yesterday"); query.setParameter("yesterday", yesterday); query.setFetchSize(Integer.valueOf(1000)); query.setReadOnly(true); query.setLockMode("a", LockMode.NONE); ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY); while (results.next()) { UserDTO dto = (UserDTO) results.get(0); // TODO: send expiration email this.emailService.sendAccountConfirmationExpired(new User(dto)); } results.close(); session.close(); // expire tokens, including password reset requests this.repository.expireResetPasswordTokens(yesterday); }
From source file:it.jugpadova.blo.EventBo.java
License:Apache License
public void regenerateLuceneIndexes() { Session session = this.eventDao.getHibernateTemplate().getSessionFactory().getCurrentSession(); FullTextSession fullTextSession = Search.createFullTextSession(session); fullTextSession.setFlushMode(FlushMode.MANUAL); fullTextSession.setCacheMode(CacheMode.IGNORE); ScrollableResults results = fullTextSession.createCriteria(Event.class).scroll(ScrollMode.FORWARD_ONLY); int index = 0; while (results.next()) { index++;// w ww .jav a2s. c om fullTextSession.index(results.get(0)); //index each element if (index % 50 == 0) { fullTextSession.clear(); //clear every batchSize since the queue is processed } } }
From source file:kr.debop4j.data.ogm.dao.HibernateOgmDao.java
License:Apache License
@Override public void indexAll(Class<?> clazz, int batchSize) { if (isDebugEnabled) log.debug("[{}]? ? ??? ...", clazz); clearIndex(clazz);//www . j a v a 2s . co m if (batchSize < DEFAUALT_BATCH_SIZE) batchSize = DEFAUALT_BATCH_SIZE; FullTextSession fts = getFullTextSession(); FlushMode currentFlushMode = fts.getFlushMode(); CacheMode currentCacheMode = fts.getCacheMode(); fts.setFlushMode(FlushMode.MANUAL); fts.setCacheMode(CacheMode.IGNORE); try { Transaction tx = fts.beginTransaction(); ScrollableResults results = fts.createCriteria(clazz).scroll(ScrollMode.FORWARD_ONLY); int index = 0; while (results.next()) { fts.index(results.get(0)); if (++index % batchSize == 0) { fts.flushToIndexes(); fts.clear(); if (isTraceEnabled) log.trace("?? . index=[{}]", index); } } fts.flushToIndexes(); tx.commit(); log.info("[{}]? [{}] ??? !!!", clazz, index); } finally { fts.setFlushMode(currentFlushMode); fts.setCacheMode(currentCacheMode); } }