ScrollMode.FORWARD_ONLY
Usage examples for org.hibernate.ScrollMode.FORWARD_ONLY, collected from open-source projects. Each entry names the source file and license it was taken from.
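The examples below all follow the same basic pattern: build a query, open a forward-only cursor with scroll(ScrollMode.FORWARD_ONLY), walk it with next()/get(), and periodically clear the session so the persistence context stays small. Here is a minimal, self-contained sketch of that pattern; the entity name "MyEntity" and the batch size are placeholders, not taken from any example below:

    import org.hibernate.ScrollMode;
    import org.hibernate.ScrollableResults;
    import org.hibernate.Session;

    // Sketch of the shared forward-only scrolling pattern ("MyEntity" is a placeholder).
    public final class ForwardOnlyScrollSketch {

        private static final int BATCH_SIZE = 100;

        public static void processAll(final Session session) {
            final ScrollableResults cursor = session.createQuery("from MyEntity")
                    .setFetchSize(BATCH_SIZE) // hint to the JDBC driver to fetch rows in batches
                    .scroll(ScrollMode.FORWARD_ONLY); // forward-only cursor: cheapest scroll mode
            try {
                int count = 0;
                while (cursor.next()) {
                    final Object entity = cursor.get(0); // one entity per row in this query
                    // ... process the entity ...
                    if (++count % BATCH_SIZE == 0) {
                        session.clear(); // detach processed entities so memory use stays flat
                    }
                }
            } finally {
                cursor.close(); // release the underlying JDBC result set
            }
        }
    }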
From source file:compecon.engine.dao.hibernate.impl.MarketOrderDAOImpl.java
License:Open Source License
@Override
public Iterator<MarketOrder> getIterator(final Currency currency, final GoodType goodType) {
    final String queryString = "FROM MarketOrderImpl m "
            + "WHERE m.currency = :currency AND m.goodType = :goodType "
            + "ORDER BY m.pricePerUnit ASC";
    final ScrollableResults itemCursor = getSession().createQuery(queryString)
            .setParameter("currency", currency)
            .setParameter("goodType", goodType)
            .scroll(ScrollMode.FORWARD_ONLY);
    return new HibernateIteratorImpl<MarketOrder>(itemCursor);
}
From source file:compecon.engine.dao.hibernate.impl.MarketOrderDAOImpl.java
License:Open Source License
@Override
public Iterator<MarketOrder> getIterator(final Currency currency, final Currency commodityCurrency) {
    final String queryString = "FROM MarketOrderImpl m "
            + "WHERE m.currency = :currency AND m.commodityCurrency = :commodityCurrency "
            + "ORDER BY m.pricePerUnit ASC";
    final ScrollableResults itemCursor = getSession().createQuery(queryString)
            .setParameter("currency", currency)
            .setParameter("commodityCurrency", commodityCurrency)
            .scroll(ScrollMode.FORWARD_ONLY);
    return new HibernateIteratorImpl<MarketOrder>(itemCursor);
}
From source file:compecon.engine.dao.hibernate.impl.MarketOrderDAOImpl.java
License:Open Source License
@Override
public Iterator<MarketOrder> getIterator(final Currency currency, final Class<? extends Property> propertyClass) {
    final String queryString = "FROM MarketOrderImpl m "
            + "WHERE m.currency = :currency AND m.property.class = :propertyClass "
            + "ORDER BY m.pricePerUnit ASC";
    final ScrollableResults itemCursor = getSession().createQuery(queryString)
            .setParameter("currency", currency)
            .setParameter("propertyClass", propertyClass.getSimpleName())
            .scroll(ScrollMode.FORWARD_ONLY);
    return new HibernateIteratorImpl<MarketOrder>(itemCursor);
}
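All three methods above hand the open cursor to a HibernateIteratorImpl, which is not shown on this page. Below is a minimal sketch of what such an adapter could look like, assuming each row holds a single entity; only the class name and generic use are taken from the calls above, the body is illustrative:

    import java.util.Iterator;
    import java.util.NoSuchElementException;
    import org.hibernate.ScrollableResults;

    // Illustrative sketch: adapts a forward-only ScrollableResults cursor to
    // java.util.Iterator, assuming one entity per row at column index 0.
    public class HibernateIteratorImpl<T> implements Iterator<T> {

        private final ScrollableResults itemCursor;
        private Boolean hasNext; // cached result of the last cursor advance

        public HibernateIteratorImpl(final ScrollableResults itemCursor) {
            this.itemCursor = itemCursor;
        }

        @Override
        public boolean hasNext() {
            if (hasNext == null) {
                hasNext = itemCursor.next(); // advance the cursor exactly once per element
                if (!hasNext) {
                    itemCursor.close(); // release the JDBC cursor once exhausted
                }
            }
            return hasNext;
        }

        @Override
        @SuppressWarnings("unchecked")
        public T next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            hasNext = null; // force a fresh advance on the following hasNext() call
            return (T) itemCursor.get(0);
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }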
From source file:de.powerstaff.business.service.impl.ProfileIndexerServiceImpl.java
License:Open Source License
/**
 * Run the indexer.
 */
@Transactional
public void runIndexer() {
    if (!systemParameterService.isIndexingEnabled()) {
        LOGGER.info("Indexing disabled");
        return;
    }
    if (running) {
        LOGGER.info("Indexing already running");
        return; // bail out so a second indexing run is not started concurrently
    }
    running = true;

    LOGGER.info("Running indexing");

    readerFactory.initialize();

    serviceLogger.logStart(SERVICE_ID, "");

    // Now it is running
    long theStartTime = System.currentTimeMillis();
    try {
        int theFetchSize = 100;
        int theLogCount = theFetchSize * 10;

        Session theHibernateSession = sessionFactory.getCurrentSession();
        FullTextSession theFT = Search.getFullTextSession(theHibernateSession);

        org.hibernate.Query theQuery = theHibernateSession.createQuery("from Freelancer");
        theQuery.setFetchSize(theFetchSize);

        ScrollableResults theResults = theQuery.scroll(ScrollMode.FORWARD_ONLY);
        int counter = 0;
        while (theResults.next()) {
            Freelancer theFreelancer = (Freelancer) theResults.get(0);

            boolean needsToUpdate = true;

            TermQuery theTermQuery = new TermQuery(new Term("id", "" + theFreelancer.getId()));
            FullTextQuery theHibernateQuery = theFT.createFullTextQuery(theTermQuery, Freelancer.class);
            theHibernateQuery.setProjection(FullTextQuery.DOCUMENT);

            for (Object theSingleEntity : theHibernateQuery.list()) {
                needsToUpdate = false;

                Object[] theRow = (Object[]) theSingleEntity;
                Document theDocument = (Document) theRow[0];

                long theNumberOfProfiles = Long.parseLong(theDocument.get(ProfileIndexerService.NUM_PROFILES));
                List<FreelancerProfile> theProfiles = profileSearchService.loadProfilesFor(theFreelancer);

                if (theNumberOfProfiles != theProfiles.size()) {
                    LOGGER.info("Updating freelancer " + theFreelancer.getId()
                            + " as the number of profiles changed from " + theNumberOfProfiles
                            + " to " + theProfiles.size());
                    needsToUpdate = true;
                } else {
                    for (int i = 1; i <= theNumberOfProfiles; i++) {
                        String theFileName = theDocument.get(ProfileIndexerService.PROFILE_PATH_PREFIX + i);
                        File theFileOnServer = new File(theFileName);
                        if (theFileOnServer.exists()) {
                            long theModification = Long.parseLong(
                                    theDocument.get(ProfileIndexerService.PROFILE_MODIFICATION_PREFIX + i));
                            long theLastModified = theFileOnServer.lastModified() / 1000;
                            if (theModification != theLastModified) {
                                LOGGER.info("Updating freelancer " + theFreelancer.getId() + " as profile "
                                        + theFileOnServer + " was modified");
                                needsToUpdate = true;
                            }
                        } else {
                            LOGGER.info("Updating freelancer " + theFreelancer.getId() + " as profile "
                                    + theFileOnServer + " seems to be deleted");
                            needsToUpdate = true;
                        }
                    }
                }
            }

            if (needsToUpdate) {
                theFT.index(theFreelancer);
            }

            if (counter % theLogCount == 0) {
                LOGGER.info("Processing record " + counter);
            }
            if (counter % theFetchSize == 0) {
                LOGGER.debug("Flushing session and index");
                theFT.flushToIndexes();
                theFT.clear();
                theHibernateSession.clear();
            }
            counter++;
        }
    } catch (Exception ex) {
        LOGGER.error("Error on indexing", ex);
    } finally {
        theStartTime = System.currentTimeMillis() - theStartTime;
        LOGGER.info("Indexing finished");
        serviceLogger.logEnd(SERVICE_ID, "Dauer = " + theStartTime + "ms");
        running = false;
    }
}
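Stripped of the project-specific profile-freshness checks, the core of the method above is the standard Hibernate Search batch-reindexing loop: scroll the entities forward-only, index each one, and flush/clear in batches. A sketch of just that skeleton, with the entity name and batch size as placeholders:

    import org.hibernate.ScrollMode;
    import org.hibernate.ScrollableResults;
    import org.hibernate.Session;
    import org.hibernate.search.FullTextSession;
    import org.hibernate.search.Search;

    // Sketch of the core reindexing loop ("MyEntity" is a placeholder).
    public final class ReindexSketch {

        private static final int BATCH_SIZE = 100;

        public static void reindexAll(final Session session) {
            final FullTextSession fullTextSession = Search.getFullTextSession(session);
            final ScrollableResults results = session.createQuery("from MyEntity")
                    .setFetchSize(BATCH_SIZE)
                    .scroll(ScrollMode.FORWARD_ONLY);
            int index = 0;
            while (results.next()) {
                fullTextSession.index(results.get(0)); // enqueue the entity for (re)indexing
                if (++index % BATCH_SIZE == 0) {
                    fullTextSession.flushToIndexes(); // push pending index work to Lucene
                    fullTextSession.clear();          // detach indexed entities to free memory
                }
            }
            results.close();
        }
    }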
From source file:de.powerstaff.business.service.impl.WrongDataServiceImpl.java
License:Open Source License
private void processFreelancer(File aReportFile) throws FileNotFoundException, ParseException {

    File theDBOhneProfil = new File(aReportFile, "Freiberufler_mit_Code_ohne_Profil.csv");
    File theFreelancerOhneNewsletter = new File(aReportFile, "Freiberufler_ohne_Newsletter.csv");
    File theFreelancerMitHomepageOhneKontakt = new File(aReportFile, "Freiberufler_mit_Homepage_ohne_Kontakt.csv");
    File theFreelancerForNewsletter = new File(aReportFile, "Freiberufler_für_Newsletter.csv");
    File theProfileOhneDB = new File(aReportFile, "Profile_ohne_Datenbankeintrag.csv");
    File theProfileDoppelterCode = new File(aReportFile, "Profile_Kodierung_doppelt.csv");

    PrintWriter theDBOhneProfilWriter = null;
    PrintWriter theFreelancerOhneNewsletterWriter = null;
    PrintWriter theFreelancerMitHomepageOhneKontaktWriter = null;
    PrintWriter theFreelancerForNewsletterWriter = null;
    PrintWriter theProfileOhneDBWriter = null;
    PrintWriter theProfileDoppelterCodeWriter = null;

    FreelancerBackingBeanDataModel theModel = new FreelancerBackingBeanDataModel();

    try {
        theProfileDoppelterCodeWriter = new PrintWriter(theProfileDoppelterCode);
        theDBOhneProfilWriter = new PrintWriter(theDBOhneProfil);
        theFreelancerOhneNewsletterWriter = new PrintWriter(theFreelancerOhneNewsletter);
        theFreelancerMitHomepageOhneKontaktWriter = new PrintWriter(theFreelancerMitHomepageOhneKontakt);
        theFreelancerForNewsletterWriter = new PrintWriter(theFreelancerForNewsletter);
        theProfileOhneDBWriter = new PrintWriter(theProfileOhneDB);

        theDBOhneProfilWriter.println("Kodierung;Name;Vorname;Kreditor");
        theFreelancerOhneNewsletterWriter.println("Kodierung;Name;Vorname;Mail");
        theFreelancerMitHomepageOhneKontaktWriter.println("Kodierung;Name;Vorname;Homepage");
        theFreelancerForNewsletterWriter.println(
                "Kürzel;Name;Vorname;Titel;eMail;Eintrag in Kreditor;Verfügbarkeit;Homepage;letzter Kontakt;Status;Xing;Gulp");
        theProfileOhneDBWriter.println("Kodierung;Dateinamen");
        theProfileDoppelterCodeWriter.println("Kodierung;Dateinamen");

        boolean newsletterEnabled = systemParameterService.isNewsletterEnabled();
        Set<String> theMails = new HashSet<String>();
        Date theStartDate = null;
        DateFormat theDateFormat = new SimpleDateFormat("dd.MM.yyyy");
        if (newsletterEnabled) {
            theStartDate = theDateFormat.parse(systemParameterService.getStartDateForNotInNewsletter());
            for (NewsletterMail theMail : websiteDao.getConfirmedMails()) {
                theMails.add(theMail.getMail().toLowerCase());
            }
        }

        Session theSession = sessionFactory.getCurrentSession();

        int theFetchSize = 100;
        int theLogCount = theFetchSize * 10;

        Query theQuery = theSession.createQuery("from Freelancer");
        theQuery.setFetchSize(theFetchSize);

        ScrollableResults theResults = theQuery.scroll(ScrollMode.FORWARD_ONLY);
        int counter = 0;

        Set<String> theKnownCodes = new HashSet<String>();

        while (theResults.next()) {
            Freelancer theFreelancer = (Freelancer) theResults.get(0);

            String theCode = theFreelancer.getCode();
            if (!StringUtils.isEmpty(theCode)) {
                theCode = theCode.toLowerCase();
                theKnownCodes.add(theCode);

                Set<File> theFiles = fsCache.getFilesForCode(theCode);
                if ((theFiles == null || theFiles.size() == 0)) {
                    theDBOhneProfilWriter.println(theCode + ";" + saveString(theFreelancer.getName1()) + ";"
                            + saveString(theFreelancer.getName2()) + ";"
                            + saveString(theFreelancer.getKreditorNr()));
                }
            }

            List<FreelancerContact> theMailContacts = theFreelancer.getEMailContacts();
            List<FreelancerContact> theWebContacts = theFreelancer.getWebContacts();
            Date theLastContact = theFreelancer.getLastContactDate();

            if (!theFreelancer.isContactforbidden()) {
                String theMail = null;
                for (FreelancerContact theContact : theMailContacts) {
                    if (StringUtils.isEmpty(theMail)
                            && "eMail".equalsIgnoreCase(theContact.getType().getDescription())) {
                        theMail = theContact.getValue();
                    }
                }
                String theWeb = "";
                for (FreelancerContact theContact : theWebContacts) {
                    if (StringUtils.isEmpty(theWeb)
                            && "Web".equalsIgnoreCase(theContact.getType().getDescription())) {
                        theWeb = theContact.getValue();
                    }
                }
                String theGulp = "";
                for (FreelancerContact theContact : theWebContacts) {
                    // take the first Gulp contact
                    if (StringUtils.isEmpty(theGulp)
                            && "Gulp".equalsIgnoreCase(theContact.getType().getDescription())) {
                        theGulp = theContact.getValue();
                    }
                }
                String theXing = "";
                for (FreelancerContact theContact : theWebContacts) {
                    // take the first Xing contact
                    if (StringUtils.isEmpty(theXing)
                            && "Xing".equalsIgnoreCase(theContact.getType().getDescription())) {
                        theXing = theContact.getValue();
                    }
                }
                String theAvailable = "";
                Date theAvailability = theFreelancer.getAvailabilityAsDate();
                if (theAvailability != null) {
                    theAvailable = theDateFormat.format(theAvailability);
                }

                theFreelancerForNewsletterWriter.print(saveString(theFreelancer.getCode()));
                theFreelancerForNewsletterWriter.print(";");
                theFreelancerForNewsletterWriter.print(saveString(theFreelancer.getName1()));
                theFreelancerForNewsletterWriter.print(";");
                theFreelancerForNewsletterWriter.print(saveString(theFreelancer.getName2()));
                theFreelancerForNewsletterWriter.print(";");
                theFreelancerForNewsletterWriter.print(saveString(theFreelancer.getTitel()));
                theFreelancerForNewsletterWriter.print(";");
                theFreelancerForNewsletterWriter.print(saveString(theMail));
                theFreelancerForNewsletterWriter.print(";");
                theFreelancerForNewsletterWriter.print(saveString(theFreelancer.getKreditorNr()));
                theFreelancerForNewsletterWriter.print(";");
                theFreelancerForNewsletterWriter.print(saveString(theAvailable));
                theFreelancerForNewsletterWriter.print(";");
                theFreelancerForNewsletterWriter.print(saveString(theWeb));
                theFreelancerForNewsletterWriter.print(";");
                theFreelancerForNewsletterWriter.print(saveString(theLastContact));
                theFreelancerForNewsletterWriter.print(";");
                theFreelancerForNewsletterWriter
                        .print(saveString(theModel.getStatusAsString(theFreelancer.getStatus())));
                theFreelancerForNewsletterWriter.print(";");
                theFreelancerForNewsletterWriter.print(saveString(theXing));
                theFreelancerForNewsletterWriter.print(";");
                theFreelancerForNewsletterWriter.print(saveString(theGulp));
                theFreelancerForNewsletterWriter.println();
            }

            if (newsletterEnabled) {
                if (theLastContact != null && !theFreelancer.isContactforbidden()) {
                    String theMail = "";
                    boolean hasMail = false;
                    for (FreelancerContact theContact : theMailContacts) {
                        theMail = theContact.getValue();
                        if (theMails.contains(theMail.toLowerCase())) {
                            hasMail = true;
                        }
                    }
                    if (!hasMail) {
                        theFreelancerOhneNewsletterWriter.println(theFreelancer.getCode() + ";"
                                + theFreelancer.getName1() + ";" + theFreelancer.getName2() + ";" + theMail);
                    }
                }
            }

            if (theLastContact == null) {
                boolean hasHomepage = false;
                String theHomepage = null;
                for (FreelancerContact theContact : theWebContacts) {
                    theHomepage = theContact.getValue();
                    hasHomepage = true;
                }
                if (hasHomepage) {
                    theFreelancerMitHomepageOhneKontaktWriter.println(theFreelancer.getCode() + ";"
                            + theFreelancer.getName1() + ";" + theFreelancer.getName2() + ";" + theHomepage);
                }
            }

            if (counter % theLogCount == 0) {
                LOGGER.info("Processing record " + counter);
            }
            if (counter % theFetchSize == 0) {
                LOGGER.debug("Flushing session");
                theSession.clear();
            }
            counter++;
        }

        Set<String> theCodesFromFiles = new HashSet<String>();
        theCodesFromFiles.addAll(fsCache.getKnownCodes());
        for (String theCode : theCodesFromFiles) {
            Set<File> theFiles = fsCache.getFilesForCode(theCode);
            if (theFiles != null && theFiles.size() > 1) {
                // duplicate code
                StringBuilder theBuilder = new StringBuilder();
                for (File theFile : theFiles) {
                    if (theBuilder.length() > 0) {
                        theBuilder.append(";");
                    }
                    theBuilder.append(theFile.toString());
                }
                theProfileDoppelterCodeWriter.println(theCode + ";" + theBuilder);
            }
        }

        theCodesFromFiles.removeAll(theKnownCodes);
        for (String theCode : theCodesFromFiles) {
            Set<File> theFiles = fsCache.getFilesForCode(theCode);
            if (theFiles != null) {
                for (File theFile : theFiles) {
                    theProfileOhneDBWriter.println(theCode + ";" + theFile);
                }
            }
        }
    } catch (Exception e) {
        LOGGER.error("Error processing freelancer", e);
    } finally {
        IOUtils.closeQuietly(theDBOhneProfilWriter);
        IOUtils.closeQuietly(theFreelancerOhneNewsletterWriter);
        IOUtils.closeQuietly(theFreelancerMitHomepageOhneKontaktWriter);
        IOUtils.closeQuietly(theFreelancerForNewsletterWriter);
        IOUtils.closeQuietly(theProfileOhneDBWriter);
        IOUtils.closeQuietly(theProfileDoppelterCodeWriter);
    }
}
From source file:de.tudarmstadt.ukp.lmf.transform.DBToXMLTransformer.java
License:Apache License
protected void doTransform(boolean includeAxes, final Lexicon... includeLexicons) throws SAXException {
    final int bufferSize = 100;
    commitCounter = 1;

    writeStartElement(lexicalResource);

    // Iterate over all lexicons
    if (includeLexicons == null || includeLexicons.length > 0) {
        for (Lexicon lexicon : lexicalResource.getLexicons()) {
            String lexiconName = lexicon.getName();

            // Check if we want to include this lexicon.
            if (includeLexicons != null) {
                boolean found = false;
                for (Lexicon l : includeLexicons) {
                    if (lexiconName.equals(l.getName())) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    continue;
                }
            }

            logger.info("Processing lexicon: " + lexiconName);
            writeStartElement(lexicon);

            // Iterate over all possible sub-elements of this Lexicon and
            // write them to the XML
            Class<?>[] lexiconClassesToSave = { LexicalEntry.class, SubcategorizationFrame.class,
                    SubcategorizationFrameSet.class, SemanticPredicate.class, Synset.class,
                    SynSemCorrespondence.class,
                    //ConstraintSet.class
            };

            // "Unfortunately, MySQL does not treat large offset values efficiently by default
            // and will still read all the rows prior to an offset value. It is common to see
            // a query with an offset above 100,000 take over 20 times longer than an offset
            // of zero!"
            // http://www.numerati.com/2012/06/26/reading-large-result-sets-with-hibernate-and-mysql/
            for (Class<?> clazz : lexiconClassesToSave) {
                /*DetachedCriteria criteria = DetachedCriteria.forClass(clazz)
                        .add(Restrictions.sqlRestriction("lexiconId = '" + lexicon.getId() + "'"));
                CriteriaIterator<Object> iter = new CriteriaIterator<Object>(criteria, sessionFactory, bufferSize);
                while (iter.hasNext()) {
                    Object obj = iter.next();
                    writeElement(obj);
                    session.evict(obj);
                    commitCounter++;
                    if (commitCounter % 1000 == 0)
                        logger.info("progress: " + commitCounter + " class instances written to file");
                }*/
                Session lookupSession = sessionFactory.openSession();
                Query query = lookupSession.createQuery("FROM " + clazz.getSimpleName()
                        + " WHERE lexiconId = '" + lexicon.getId() + "' ORDER BY id");
                query.setReadOnly(true);
                if (DBConfig.MYSQL.equals(dbConfig.getDBType())) {
                    query.setFetchSize(Integer.MIN_VALUE); // MIN_VALUE gives hint to JDBC driver to stream results
                } else {
                    query.setFetchSize(1000);
                }
                ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
                while (results.next()) {
                    // For streamed query results, no further queries are allowed (incl. lazy proxy queries!)
                    // Detach the object from the lookup session and reload it using the "official" session.
                    Object[] rows = results.get();
                    Object row = rows[0];
                    lookupSession.evict(row);
                    lookupSession.evict(rows);
                    rows = null;
                    row = session.get(row.getClass(), ((IHasID) row).getId());
                    writeElement(row);
                    session.evict(row);
                    row = null;
                    commitCounter++;
                    if (commitCounter % 1000 == 0) {
                        logger.info("progress: " + commitCounter + " class instances written to file");
                    }
                    if (commitCounter % 10000 == 0) {
                        closeSession();
                        openSession();
                    }
                }
                results.close();
                lookupSession.close();
            }
            writeEndElement(lexicon);
        }
    }

    // Iterate over SenseAxes and write them to XML when not only
    // lexicons should be converted
    if (includeAxes) {
        logger.info("Processing sense axes");
        DetachedCriteria criteria = DetachedCriteria.forClass(SenseAxis.class)
                .add(Restrictions.sqlRestriction("lexicalResourceId = '" + lexicalResource.getName() + "'"));
        CriteriaIterator<Object> iter = new CriteriaIterator<Object>(criteria, sessionFactory, bufferSize);
        while (iter.hasNext()) {
            Object obj = iter.next();
            writeElement(obj);
            session.evict(obj);
            commitCounter++;
            if (commitCounter % 1000 == 0) {
                logger.info("progress: " + commitCounter + " class instances written to file");
            }
        }

        logger.info("Processing predicate argument axes");
        DetachedCriteria criteria2 = DetachedCriteria.forClass(PredicateArgumentAxis.class)
                .add(Restrictions.sqlRestriction("lexicalResourceId = '" + lexicalResource.getName() + "'"));
        CriteriaIterator<Object> iter2 = new CriteriaIterator<Object>(criteria2, sessionFactory, bufferSize);
        while (iter2.hasNext()) {
            Object obj = iter2.next();
            writeElement(obj);
            session.evict(obj);
            commitCounter++;
            if (commitCounter % 1000 == 0) {
                logger.info("progress: " + commitCounter + " class instances written to file");
            }
        }
    }
    writeEndElement(lexicalResource);
    writeEndDocument();
}
From source file:edu.harvard.med.screensaver.db.ScreenResultsDAOImpl.java
License:Open Source License
public int createScreenedReagentCounts(final ScreenType screenType, Screen study,
        AnnotationType positiveAnnotationType, AnnotationType overallAnnotationType) {
    // Break this into two separate queries because of Hibernate bug
    // (http://opensource.atlassian.com/projects/hibernate/browse/HHH-1615):
    // when using the "group by" clause with a full object (as opposed to an attribute of the
    // object/table), Hibernate is requiring that every attribute of the object be specified
    // in a "group by" and not just the object itself. So the workaround is to query once to
    // get the id's, then once again to get the objects.

    log.info("1. get the reagent id's for the positive counts");
    ScrollableResults sr = runScrollQuery(new edu.harvard.med.screensaver.db.ScrollQuery() {
        public ScrollableResults execute(Session session) {
            HqlBuilder builder = new HqlBuilder();
            builder.select("r", "id").selectExpression("count(*)").from(AssayWell.class, "aw")
                    .from("aw", AssayWell.libraryWell, "w", JoinType.INNER)
                    .from("w", Well.latestReleasedReagent, "r", JoinType.INNER)
                    .from("w", Well.library, "l", JoinType.INNER)
                    .where("l", "screenType", Operator.EQUAL, screenType)
                    .where("w", "libraryWellType", Operator.EQUAL, LibraryWellType.EXPERIMENTAL);
            builder.where("aw", "positive", Operator.EQUAL, Boolean.TRUE);
            builder.groupBy("r", "id");
            log.debug("hql: " + builder.toHql());
            return builder.toQuery(session, true).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
        }
    });
    Map<Integer, Long> positivesMap = Maps.newHashMap();
    while (sr.next()) {
        Object[] row = sr.get();
        positivesMap.put((Integer) row[0], (Long) row[1]);
    }

    log.info("2. get the reagent id's for the overall counts");
    sr = runScrollQuery(new edu.harvard.med.screensaver.db.ScrollQuery() {
        public ScrollableResults execute(Session session) {
            HqlBuilder builder = new HqlBuilder();
            builder.select("r", "id").selectExpression("count(*)").from(AssayWell.class, "aw")
                    .from("aw", AssayWell.libraryWell, "w", JoinType.INNER)
                    .from("w", Well.library, "l", JoinType.INNER)
                    .from("w", Well.latestReleasedReagent, "r", JoinType.INNER)
                    .where("l", "screenType", Operator.EQUAL, screenType)
                    .where("w", "libraryWellType", Operator.EQUAL, LibraryWellType.EXPERIMENTAL)
                    .groupBy("r", "id");
            log.debug("hql: " + builder.toHql());
            return builder.toQuery(session, true).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
        }
    });
    Map<Integer, Long> overallMap = Maps.newHashMap();
    while (sr.next()) {
        Object[] row = sr.get();
        overallMap.put((Integer) row[0], (Long) row[1]);
    }

    log.info("3. get the Reagents");
    sr = runScrollQuery(new edu.harvard.med.screensaver.db.ScrollQuery() {
        public ScrollableResults execute(Session session) {
            HqlBuilder builder = new HqlBuilder();
            builder.select("r").distinctProjectionValues().from(AssayWell.class, "aw")
                    .from("aw", AssayWell.libraryWell, "w", JoinType.INNER)
                    .from("w", Well.library, "l", JoinType.INNER)
                    .from("w", Well.latestReleasedReagent, "r", JoinType.INNER)
                    .where("l", "screenType", Operator.EQUAL, screenType)
                    .where("w", "libraryWellType", Operator.EQUAL, LibraryWellType.EXPERIMENTAL);
            log.debug("hql: " + builder.toHql());
            return builder.toQuery(session, true).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
        }
    });

    log.info("4. build the Study: positives: " + positivesMap.size() + ", reagents: " + overallMap.size());
    int count = 0;
    while (sr.next()) {
        Reagent r = (Reagent) sr.get()[0];
        AnnotationValue av = new AnnotationValue(overallAnnotationType, r, null,
                (double) overallMap.get(r.getReagentId()).intValue());
        _dao.saveOrUpdateEntity(av);
        Long positiveCount = positivesMap.get(r.getReagentId());
        if (positiveCount != null) {
            av = new AnnotationValue(positiveAnnotationType, r, null, (double) positiveCount.intValue());
            _dao.saveOrUpdateEntity(av);
        }
        // Note: due to memory performance, we will build the study_reagent_link later
        if (count++ % ROWS_TO_CACHE == 0) {
            log.debug("flushing");
            _dao.flush();
            _dao.clear();
        }
        if (count % 10000 == 0) {
            log.info("" + count + " reagents processed");
        }
    }
    log.info("save the study");
    _dao.saveOrUpdateEntity(study);
    _dao.flush();

    log.info("populateStudyReagentLinkTable");
    int reagentCount = populateStudyReagentLinkTable(study.getScreenId());
    log.info("done: positives: " + positivesMap.size() + ", reagents: " + overallMap.size());
    return reagentCount;
}
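The HqlBuilder used above is a Screensaver-internal helper. The same "ids first, entities second" workaround for the group-by limitation can be expressed in plain HQL; the following is a rough sketch, with entity and property names as placeholders:

    import java.util.HashMap;
    import java.util.Map;

    import org.hibernate.CacheMode;
    import org.hibernate.ScrollMode;
    import org.hibernate.ScrollableResults;
    import org.hibernate.Session;

    // Sketch of the "ids first, entities second" workaround in plain HQL.
    // "Reagent" and its id type are placeholders for any mapped entity.
    public final class GroupByWorkaroundSketch {

        public static Map<Integer, Long> countPerReagentId(final Session session) {
            // Step 1: group by the id only, not the full entity, to avoid the
            // group-by-entity limitation (HHH-1615).
            final ScrollableResults sr = session
                    .createQuery("select r.id, count(*) from Reagent r group by r.id")
                    .setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
            final Map<Integer, Long> counts = new HashMap<Integer, Long>();
            while (sr.next()) {
                final Object[] row = sr.get();
                counts.put((Integer) row[0], (Long) row[1]);
            }
            sr.close();
            // Step 2 (not shown): scroll the entities themselves and look the
            // counts up by id, as the example above does.
            return counts;
        }
    }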
From source file:edu.harvard.med.screensaver.io.screenresults.ScreenResultReporter.java
License:Open Source License
/**
 * Create a study of the "Confirmed Positives" for all the pool SilencingReagents in the DB
 * (re: {@link DataType#CONFIRMED_POSITIVE_INDICATOR}).<br>
 * <ul>
 * For RNAi
 * <li>Count of follow-up screens for well
 * <li>M+1 columns named "N duplexes confirming positive", where 0 <= N <= M, and M is the max
 * number of duplexes per pool in any library (currently = 4). The value in each column is the
 * number of follow-up screens that confirmed the well as a positive with N duplexes
 * </ul>
 * see [#2610] Confirmed Positives study creator<br>
 *
 * @return total count of confirmed positives considered in this study (informational)
 */
public int createSilencingReagentConfirmedPositiveSummary(Screen study) {
    log.info("Get all of the pool reagents...");
    ScrollableResults sr = _dao.runScrollQuery(new edu.harvard.med.screensaver.db.ScrollQuery() {
        public ScrollableResults execute(Session session) {
            HqlBuilder builder = new HqlBuilder();
            builder.select("pr").from(Library.class, "l").from("l", Library.wells, "w", JoinType.INNER)
                    .from("w", Well.latestReleasedReagent, "pr", JoinType.INNER)
                    .where("l", "pool", Operator.EQUAL, Boolean.TRUE);
            return builder.toQuery(session, true).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
        }
    });

    log.info("Create the annotation types for the study.");
    AnnotationType averageConfirmedPositivesPerScreen = study.createAnnotationType(
            DEFAULT_ANNOTATION_NAME_WEIGHTED_AVERAGE, DEFAULT_ANNOTATION_TITLE_WEIGHTED_AVERAGE, true);
    _dao.persistEntity(averageConfirmedPositivesPerScreen);
    AnnotationType numberOfScreensAT = study.createAnnotationType(DEFAULT_ANNOTATION_NAME_NUMBER_OF_SCREENS,
            DEFAULT_ANNOTATION_TITLE_NUMBER_OF_SCREENS, true);
    _dao.persistEntity(numberOfScreensAT);

    // Create the bin-count annotation types (for "screens confirming # duplexes...")
    Map<Integer, AnnotationType> binToAnnotationTypeMap = Maps.newHashMap();
    for (int i = 0; i <= 4; i++) { // todo: make this a dynamic cardinality
        AnnotationType screenCounter = study.createAnnotationType(
                DEFAULT_ANNOTATION_NAME_COUNT_OF_SCREENS_N.format(i),
                DEFAULT_ANNOTATION_TITLE_COUNT_OF_SCREENS_N.format(i), true);
        binToAnnotationTypeMap.put(i, screenCounter);
        _dao.persistEntity(screenCounter);
    }
    _dao.flush();
    _dao.clear();

    log.info("scroll through the pool reagents...");
    int countOfDuplexReagentsConfirmed = 0;
    int count = 0;
    while (sr.next()) {
        SilencingReagent poolReagent = (SilencingReagent) sr.get(0);

        ConfirmationReport report = getDuplexReconfirmationReport(poolReagent);

        int[] binToScreenCount = report.getBinToScreenCount(poolReagent);
        int numberOfScreens = 0;
        for (int bin = 0; bin < binToScreenCount.length; bin++) {
            int screenCount = binToScreenCount[bin];
            AnnotationType at = binToAnnotationTypeMap.get(bin);
            // note: for memory performance, we're side-stepping the
            // AnnotationType.createAnnotationValue() method
            AnnotationValue av = new AnnotationValue(at, poolReagent, null, (double) screenCount);
            _dao.saveOrUpdateEntity(av);
            numberOfScreens += screenCount;
            countOfDuplexReagentsConfirmed += screenCount * bin;
        }

        if (numberOfScreens > 0) {
            // note: for memory performance, we're side-stepping the
            // AnnotationType.createAnnotationValue() method
            AnnotationValue av = new AnnotationValue(averageConfirmedPositivesPerScreen, poolReagent, null,
                    new Double("" + report.getWeightedAverage()));
            _dao.saveOrUpdateEntity(av);
        }

        // note: for memory performance, we're side-stepping the
        // AnnotationType.createAnnotationValue() method
        AnnotationValue av = new AnnotationValue(numberOfScreensAT, poolReagent, null, (double) numberOfScreens);
        _dao.saveOrUpdateEntity(av);

        // for memory performance clear the session every CACHE_SIZE number of iterations
        if (count++ % AbstractDAO.ROWS_TO_CACHE == 0) {
            log.debug("clearing & flushing session");
            _dao.flush();
            _dao.clear();
        }
        if (count % 1000 == 0) {
            log.info("" + count + " reagents processed");
        }
    }
    log.info("" + count + " reagents processed");
    _dao.flush();
    _dao.clear();

    log.info("countOfDuplexReagentsConfirmed: " + countOfDuplexReagentsConfirmed);
    log.info("populateStudyReagentLinkTable");
    _screenDao.populateStudyReagentLinkTable(study.getScreenId());

    log.info("Study created: " + study.getTitle() + ", reagents: " + countOfDuplexReagentsConfirmed);
    return countOfDuplexReagentsConfirmed;
}
From source file:edu.harvard.med.screensaver.io.screenresults.ScreenResultReporter.java
License:Open Source License
/**
 * for [#2268] new column to display # overlapping screens
 */
@Transactional
public int createScreenedReagentCounts(final ScreenType screenType, Screen study,
        AnnotationType positiveAnnotationType, AnnotationType overallAnnotationType) {
    // Break this into two separate queries because of an apparent Hibernate bug:
    // when using the "group by" clause with a full object (as opposed to an attribute of the
    // object/table), Hibernate is requiring that every attribute of the object be specified
    // in a "group by" and not just the object itself. So the workaround is to query once to
    // get the id's, then once again to get the objects.

    // study = _dao.mergeEntity(study);
    // positiveAnnotationType = _dao.mergeEntity(positiveAnnotationType);
    // overallAnnotationType = _dao.mergeEntity(overallAnnotationType);
    // _dao.flush();

    log.info("1. get the reagent id's for the positive counts");
    ScrollableResults sr = _dao.runScrollQuery(new edu.harvard.med.screensaver.db.ScrollQuery() {
        public ScrollableResults execute(Session session) {
            HqlBuilder builder = new HqlBuilder();
            builder.select("r", "id").selectExpression("count(*)").from(AssayWell.class, "aw")
                    .from("aw", AssayWell.libraryWell, "w", JoinType.INNER)
                    .from("w", Well.latestReleasedReagent, "r", JoinType.INNER)
                    .from("w", Well.library, "l", JoinType.INNER)
                    .where("l", "screenType", Operator.EQUAL, screenType)
                    .where("w", "libraryWellType", Operator.EQUAL, LibraryWellType.EXPERIMENTAL);
            builder.where("aw", "positive", Operator.EQUAL, Boolean.TRUE);
            builder.groupBy("r", "id");
            log.debug("hql: " + builder.toHql());
            return builder.toQuery(session, true).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
        }
    });
    Map<Integer, Long> positivesMap = Maps.newHashMap();
    while (sr.next()) {
        Object[] row = sr.get();
        positivesMap.put((Integer) row[0], (Long) row[1]);
    }

    log.info("2. get the reagent id's for the overall counts");
    sr = _dao.runScrollQuery(new edu.harvard.med.screensaver.db.ScrollQuery() {
        public ScrollableResults execute(Session session) {
            HqlBuilder builder = new HqlBuilder();
            builder.select("r", "id").selectExpression("count(*)").from(AssayWell.class, "aw")
                    .from("aw", AssayWell.libraryWell, "w", JoinType.INNER)
                    .from("w", Well.library, "l", JoinType.INNER)
                    .from("w", Well.latestReleasedReagent, "r", JoinType.INNER)
                    .where("l", "screenType", Operator.EQUAL, screenType)
                    .where("w", "libraryWellType", Operator.EQUAL, LibraryWellType.EXPERIMENTAL)
                    .groupBy("r", "id");
            log.debug("hql: " + builder.toHql());
            return builder.toQuery(session, true).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
        }
    });

    log.info("begin assigning values to the study");
    int overallCount = 0;
    Map<Integer, Long> overallMap = Maps.newHashMap();
    while (sr.next()) {
        Object[] row = sr.get();
        Integer r_id = (Integer) row[0];
        Long count = (Long) row[1];
        Reagent r = _dao.findEntityById(Reagent.class, r_id, true);
        // note: for memory performance, we're side-stepping the
        // AnnotationType.createAnnotationValue() method
        AnnotationValue av = new AnnotationValue(overallAnnotationType, r, null, (double) count);
        _dao.persistEntity(av);
        Long positiveCount = positivesMap.get(r_id);
        if (positiveCount != null) {
            // note: for memory performance, we're side-stepping the
            // AnnotationType.createAnnotationValue() method
            av = new AnnotationValue(positiveAnnotationType, r, null, (double) positiveCount.intValue());
            _dao.persistEntity(av);
        }
        // Note: due to memory performance, we will build the study_reagent_link later.
        // Flush in batches based on the row counter ("count" above is the aggregate
        // value from the query, not an iteration index).
        if (overallCount % AbstractDAO.ROWS_TO_CACHE == 0) {
            log.debug("flushing");
            _dao.flush();
            _dao.clear();
        }
        if (++overallCount % 10000 == 0) {
            log.info("" + overallCount + " reagents processed");
        }
    }
    log.info("save the study");
    // unnecessary since study is already persisted, and the reagents will be linked
    // by the populateStudyReagentLinkTable - sde4
    // _dao.mergeEntity(study);
    _dao.flush();

    log.info("populateStudyReagentLinkTable");
    int reagentCount = _screenDao.populateStudyReagentLinkTable(study.getScreenId());
    log.info("done: positives: " + positivesMap.size() + ", reagents: " + overallCount);
    return reagentCount;
}
From source file:edu.scripps.fl.pubchem.app.AssayDownloader.java
License:Apache License
protected Set<Long> getAIDsfromLocalDB() {
    SQLQuery query = PubChemDB.getSession().createSQLQuery("select assay_aid from pcassay");
    ScrollableResults scroll = query.scroll(ScrollMode.FORWARD_ONLY);
    Iterator<Long> iterator = new ScrollableResultsIterator<Long>(Long.class, scroll);
    Set<Long> set = new HashSet<Long>();
    while (iterator.hasNext()) {
        set.add(iterator.next());
    }
    return set;
}
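ScrollableResultsIterator is a helper from the Scripps codebase, not shown on this page. It plays the same role as the HibernateIteratorImpl sketch earlier, with an added conversion step, since a native SQL column may come back as Integer or BigDecimal rather than Long. A minimal illustrative sketch, with the Number handling as an assumption:

    import java.util.Iterator;
    import java.util.NoSuchElementException;

    import org.hibernate.ScrollableResults;

    // Illustrative sketch: iterates the first column of a forward-only cursor,
    // converting each scalar row value to the requested type.
    public class ScrollableResultsIterator<T> implements Iterator<T> {

        private final Class<T> type;
        private final ScrollableResults results;
        private Boolean hasNext; // cached result of the last cursor advance

        public ScrollableResultsIterator(final Class<T> type, final ScrollableResults results) {
            this.type = type;
            this.results = results;
        }

        public boolean hasNext() {
            if (hasNext == null) {
                hasNext = results.next();
                if (!hasNext) {
                    results.close(); // release the JDBC cursor once exhausted
                }
            }
            return hasNext;
        }

        public T next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            hasNext = null; // force a fresh advance on the following hasNext() call
            Object value = results.get(0);
            if (value instanceof Number && Long.class.equals(type)) {
                // native SQL may return Integer or BigDecimal; normalize to Long
                value = Long.valueOf(((Number) value).longValue());
            }
            return type.cast(value);
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }
    }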