List of usage examples for org.hibernate ScrollableResults get
Object get(int i);
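get(int i) returns the value of the i-th column of the current cursor row; for a query that selects a single entity, get(0) yields that entity. Below is a minimal usage sketch, not taken from the examples that follow: the sessionFactory field, the Person entity, and the process helper are illustrative assumptions.

// Minimal sketch (illustrative names): iterate a forward-only cursor and read
// the first column of each row with ScrollableResults.get(0).
Session session = sessionFactory.openSession();   // 'sessionFactory' is an assumed field
Transaction tx = session.beginTransaction();
ScrollableResults results = session.createQuery("from Person p") // 'Person' is an assumed entity
        .setCacheMode(CacheMode.IGNORE)
        .scroll(ScrollMode.FORWARD_ONLY);
try {
    int count = 0;
    while (results.next()) {
        // get(0) returns the first (and only) projected column: the Person entity itself
        Person person = (Person) results.get(0);
        process(person); // 'process' stands in for application logic
        if (++count % 50 == 0) {
            session.clear(); // keep the persistence context small on large result sets
        }
    }
} finally {
    results.close();
}
tx.commit();
session.close();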
From source file:net.mlw.vlh.adapter.hibernate3.HibernateAdapter.java
License:Open Source License
/**
 * @see net.mlw.vlh.ValueListAdapter#getValueList(java.lang.String, net.mlw.vlh.ValueListInfo)
 */
public ValueList getValueList(String name, ValueListInfo info) {
    LOGGER.debug("getValueList(String, ValueListInfo) - start");
    if (info.getSortingColumn() == null) {
        info.setPrimarySortColumn(getDefaultSortColumn());
        info.setPrimarySortDirection(getDefaultSortDirectionInteger());
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("The default sort column '" + getDefaultSortColumn() + "' with direction '"
                    + getDefaultSortDirectionInteger() + "' was set.");
        }
    }
    int numberPerPage = info.getPagingNumberPer();
    if (numberPerPage == Integer.MAX_VALUE) {
        numberPerPage = getDefaultNumberPerPage();
        info.setPagingNumberPer(numberPerPage);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("The paging number per page '" + numberPerPage + "' was set.");
        }
    }
    Session session = SessionFactoryUtils.getSession(getSessionFactory(), allowCreate);
    try {
        Query query;
        boolean doFocus = ((getAdapterType() & DO_FOCUS) == 0) && info.isFocusEnabled() && info.isDoFocus()
                && (namedQuery == null);
        if (doFocus) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Start to focusing adapterName '" + name + "', ValueListInfo info = " + info + "'");
            }
            ScrollableResults results = getScrollableResults(getQueryForFocus(info, session), info);
            results.beforeFirst();
            doFocusFor(info, results);
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Focusing finished for adapterName '" + name + "', ValueListInfo info '" + info + "'");
            }
        }
        query = getQuery(info, session);
        boolean doPaging = ((getAdapterType() & DO_PAGE) == 0);
        List list;
        if (doPaging) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("getValueList(String adapterName = " + name + ", ValueListInfo info = " + info
                        + ") - Start to paging result set");
            }
            list = new ArrayList(numberPerPage);
            ScrollableResults results = getScrollableResults(query, info);
            results.last();
            int lastRowNumber = results.getRowNumber();
            info.setTotalNumberOfEntries(lastRowNumber + 1);
            if (numberPerPage == 0) {
                numberPerPage = getDefaultNumberPerPage();
            }
            int pageNumber = info.getPagingPage();
            boolean isResult;
            if (pageNumber > 1) {
                if ((pageNumber - 1) * numberPerPage > lastRowNumber) {
                    pageNumber = (lastRowNumber / numberPerPage) + 1;
                    info.setPagingPage(pageNumber);
                }
            }
            // fixed by liujuan 2008.6.5
            isResult = results.first();
            if (pageNumber > 1) {
                // isResult = results.scroll((pageNumber - 1) * numberPerPage - lastRowNumber);
                isResult = results.scroll((pageNumber - 1) * numberPerPage);
            }
            /*else {
                isResult = results.first();
            }*/
            for (int i = 0; i < numberPerPage && isResult; i++) {
                list.add(results.get(0));
                isResult = results.next();
            }
            LOGGER.debug("Sorting finished.");
        } else {
            LOGGER.debug("Retrieving a list directly from the query.");
            list = query.list();
            info.setTotalNumberOfEntries(list.size());
        }
        ValueList returnValueList = getListBackedValueList(info, list);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Retrieved list was wrapped in valuelist, info=" + info);
        }
        return returnValueList;
    } catch (HibernateException e) {
        LOGGER.error("Error getting data in adapter '" + name + "' with info = '" + info + "'", e);
        throw SessionFactoryUtils.convertHibernateAccessException(e);
    } catch (Exception e) {
        LOGGER.fatal("Fatal error getting data in adapter '" + name + "' with info = '" + info + "'", e);
        return null;
    } finally {
        SessionFactoryUtils.releaseSession(session, getSessionFactory());
    }
}
From source file:net.mlw.vlh.adapter.hibernate3.HibernateAdapter.java
License:Open Source License
/**
 * @param info
 * @param results
 * @throws HibernateException
 * @throws NoSuchMethodException
 * @throws InvocationTargetException
 * @throws IllegalAccessException
 */
private void doFocusFor(ValueListInfo info, ScrollableResults results) throws HibernateException {
    info.setFocusStatus(ValueListInfo.FOCUS_NOT_FOUND);
    int currentRow;
    if (isFocusOptimalization()) {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Focusing only property '" + info.getFocusProperty() + "' == '" + info.getFocusValue()
                    + "'.");
        }
        for (currentRow = 0; ((results.next()) && (currentRow < maxRowsForFocus)); currentRow++) {
            String value = results.get(0).toString();
            if (value.equalsIgnoreCase(info.getFocusValue())) {
                if (LOGGER.isInfoEnabled()) {
                    LOGGER.info("Focus property '" + info.getFocusProperty() + "' in row '" + currentRow + "'.");
                }
                info.setPagingPageFromRowNumber(results.getRowNumber());
                info.setFocusedRowNumberInTable(results.getRowNumber());
                info.setFocusStatus(ValueListInfo.FOCUS_FOUND);
                break;
            }
        }
    } else {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Focusing object with the property '" + info.getFocusProperty() + "' == '"
                    + info.getFocusValue() + "'.");
        }
        for (currentRow = 0; ((results.next()) && (currentRow < maxRowsForFocus)); currentRow++) {
            Object value;
            try {
                value = PropertyUtils.getProperty(results.get(0), info.getFocusProperty());
            } catch (HibernateException e) {
                LOGGER.error("Error getting focus property '" + info.getFocusProperty() + "'", e);
                throw e;
            } catch (Exception e) {
                LOGGER.warn("Ignoring error while getting focus property '" + info.getFocusProperty() + "'", e);
                continue;
            }
            if (value.toString().equalsIgnoreCase(info.getFocusValue())) {
                if (LOGGER.isInfoEnabled()) {
                    LOGGER.info("Focus object's property '" + info.getFocusProperty()
                            + "' was found in the row '" + currentRow + "'.");
                }
                info.setPagingPageFromRowNumber(results.getRowNumber());
                info.setFocusedRowNumberInTable(results.getRowNumber());
                info.setFocusStatus(ValueListInfo.FOCUS_FOUND);
                break;
            }
        }
    }
    if (currentRow == maxRowsForFocus) {
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Focus for property '" + info.getFocusProperty() + "' exceeded maximum rows for focus '"
                    + maxRowsForFocus + "'.");
        }
        info.setFocusStatus(ValueListInfo.FOCUS_TOO_MANY_ITEMS);
    }
}
From source file:nl.strohalm.cyclos.utils.lucene.IndexOperationRunner.java
License:Open Source License
/**
 * Recreates an index. If the force parameter is false, execute only if the index is corrupt or missing
 */
private void rebuild(final Class<? extends Indexable> entityType, final boolean force, final boolean createAlert) {
    boolean execute = true;
    // When not forced, run only
    if (!force) {
        final IndexStatus status = indexHandler.getIndexStatus(entityType);
        execute = status != IndexStatus.CORRUPT && status != IndexStatus.MISSING;
    }
    if (!execute) {
        return;
    }
    if (createAlert) {
        // Create the alert for index rebuilding
        createAlert(SystemAlert.Alerts.INDEX_REBUILD_START, entityType);
    }
    IndexWriter indexWriter = cachedWriters.get(entityType);
    if (indexWriter != null) {
        try {
            indexWriter.close();
        } catch (final Exception e) {
            // Silently ignore
        }
        cachedWriters.remove(entityType);
    }
    // Remove all files and recreate the directory
    final File dir = indexHandler.getIndexDir(entityType);
    try {
        FileUtils.deleteDirectory(dir);
    } catch (final IOException e) {
        // Silently ignore
    }
    dir.mkdirs();
    final DocumentMapper documentMapper = indexHandler.getDocumentMapper(entityType);
    final IndexWriter writer = getWriter(entityType);
    // Now, we should add all entities to the index
    boolean success = readonlyTransactionTemplate.execute(new TransactionCallback<Boolean>() {
        public Boolean doInTransaction(final TransactionStatus status) {
            Session session = getSession();
            ScrollableResults scroll = session.createQuery(resolveHql(entityType))
                    .scroll(ScrollMode.FORWARD_ONLY);
            try {
                int index = 0;
                while (scroll.next()) {
                    Indexable entity = (Indexable) scroll.get(0);
                    Document document = documentMapper.map(entity);
                    try {
                        writer.addDocument(document);
                    } catch (CorruptIndexException e) {
                        handleIndexCorrupted(entityType);
                        return false;
                    } catch (IOException e) {
                        LOG.error("Error while adding document to index after rebuilding "
                                + ClassHelper.getClassName(entityType), e);
                        return false;
                    }
                    // Every batch, clear the session and commit the writer
                    if (++index % 30 == 0) {
                        session.clear();
                        commit(entityType, writer);
                    }
                }
                return true;
            } finally {
                scroll.close();
            }
        }
    });
    // Finish the writer operation
    try {
        if (success) {
            commit(entityType, writer);
        } else {
            rollback(entityType, writer);
        }
    } finally {
        if (createAlert) {
            // Create the alert for index rebuilding
            createAlert(SystemAlert.Alerts.INDEX_REBUILD_END, entityType);
        }
    }
}
From source file:nl.strohalm.cyclos.utils.lucene.IndexOperationRunner.java
License:Open Source License
private boolean rebuildMemberAds(final Long userId, final Analyzer analyzer, final Session session) {
    final Class<? extends Indexable> entityType = Ad.class;
    final IndexWriter writer = getWriter(entityType);
    boolean success = false;
    DocumentMapper documentMapper = indexHandler.getDocumentMapper(entityType);
    try {
        writer.deleteDocuments(new Term("owner", userId.toString()));
    } catch (CorruptIndexException e) {
        handleIndexCorrupted(entityType);
        success = false;
    } catch (IOException e) {
        LOG.error("Error while reindexing a member's advertisements", e);
        success = false;
    }
    ScrollableResults scroll = session
            .createQuery("from Ad a where a.deleteDate is null and a.owner.id = " + userId)
            .scroll(ScrollMode.FORWARD_ONLY);
    try {
        int index = 0;
        while (scroll.next()) {
            Indexable entity = (Indexable) scroll.get(0);
            Document document = documentMapper.map(entity);
            try {
                writer.addDocument(document, analyzer);
            } catch (CorruptIndexException e) {
                handleIndexCorrupted(entityType);
                success = false;
                break;
            } catch (IOException e) {
                LOG.error("Error while adding advertisements to index", e);
                success = false;
                break;
            }
            // Every batch, clear the session and commit the writer
            if (++index % 30 == 0) {
                session.clear();
            }
        }
        success = true;
    } finally {
        scroll.close();
    }
    // Finish the writer operation
    if (success) {
        commit(entityType, writer);
        return true;
    } else {
        rollback(entityType, writer);
        return false;
    }
}
From source file:nl.strohalm.cyclos.utils.lucene.IndexOperationRunner.java
License:Open Source License
private boolean rebuildMemberRecords(final Long userId, final Analyzer analyzer, final Session session) {
    final Class<? extends Indexable> entityType = MemberRecord.class;
    final IndexWriter writer = getWriter(entityType);
    boolean success = false;
    DocumentMapper documentMapper = indexHandler.getDocumentMapper(entityType);
    try {
        writer.deleteDocuments(new Term("element", userId.toString()));
    } catch (CorruptIndexException e) {
        handleIndexCorrupted(entityType);
        success = false;
    } catch (IOException e) {
        LOG.error("Error while reindexing a user's records", e);
        success = false;
    }
    ScrollableResults scroll = session.createQuery("from MemberRecord mr where mr.element.id = " + userId)
            .scroll(ScrollMode.FORWARD_ONLY);
    try {
        int index = 0;
        while (scroll.next()) {
            Indexable entity = (Indexable) scroll.get(0);
            Document document = documentMapper.map(entity);
            try {
                writer.addDocument(document, analyzer);
            } catch (CorruptIndexException e) {
                handleIndexCorrupted(entityType);
                success = false;
                break;
            } catch (IOException e) {
                LOG.error("Error while adding member records to index", e);
                success = false;
                break;
            }
            // Every batch, clear the session and commit the writer
            if (++index % 30 == 0) {
                session.clear();
            }
        }
        success = true;
    } finally {
        scroll.close();
    }
    // Finish the writer operation
    if (success) {
        commit(entityType, writer);
        return true;
    } else {
        rollback(entityType, writer);
        return false;
    }
}
From source file:ome.services.db.SearchTest.java
License:Open Source License
@Test
public void testReindexingExperimenter() throws Exception {
    HibernateTest ht = new HibernateTest();
    ht.setupSession();
    FullTextSession fullTextSession = Search.getFullTextSession(ht.s);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    Transaction transaction = fullTextSession.beginTransaction();
    // Scrollable results will avoid loading too many objects in memory
    ScrollableResults results = fullTextSession.createCriteria(Experimenter.class)
            .scroll(ScrollMode.FORWARD_ONLY);
    int index = 0;
    int batchSize = 10;
    while (results.next()) {
        index++;
        fullTextSession.index(results.get(0)); // index each element
        if (index % batchSize == 0) {
            ht.s.clear(); // clear every batchSize since the queue is processed
        }
    }
    ht.closeSession();
    ht.setupSession();
    List<Experimenter> list = query(ht, "root", Experimenter.class, "omeName");
    assertTrue(list.toString(), list.size() == 1);
    ht.closeSession();
}
From source file:onl.netfishers.netshot.work.tasks.CheckGroupComplianceTask.java
License:Open Source License
@Override
public void run() {
    logger.debug("Starting check compliance task for group {}.", deviceGroup.getId());
    this.logIt(String.format("Check compliance task for group %s.", deviceGroup.getName()), 5);
    Session session = Database.getSession();
    try {
        @SuppressWarnings("unchecked")
        List<Policy> policies = session.createCriteria(Policy.class).list();
        session.beginTransaction();
        session.createQuery(
                "delete from CheckResult c where c.key.device.id in (select d.id as id from DeviceGroup g1 join g1.cachedDevices d where g1.id = :id)")
                .setLong("id", deviceGroup.getId()).executeUpdate();
        for (Policy policy : policies) {
            ScrollableResults devices = session.createQuery(
                    "from Device d join fetch d.lastConfig where d.id in (select d.id as id from DeviceGroup g1 join g1.cachedDevices d join d.ownerGroups g2 join g2.appliedPolicies p where g1.id = :id and p.id = :pid)")
                    .setLong("id", deviceGroup.getId()).setLong("pid", policy.getId())
                    .setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.FORWARD_ONLY);
            while (devices.next()) {
                Device device = (Device) devices.get(0);
                policy.check(device, session);
                session.flush();
                session.evict(device);
            }
        }
        session.getTransaction().commit();
        this.status = Status.SUCCESS;
    } catch (Exception e) {
        try {
            session.getTransaction().rollback();
        } catch (Exception e1) {
        }
        logger.error("Error while checking compliance.", e);
        this.logIt("Error while checking compliance: " + e.getMessage(), 2);
        this.status = Status.FAILURE;
        return;
    } finally {
        session.close();
    }
}
From source file:onl.netfishers.netshot.work.tasks.CheckGroupSoftwareTask.java
License:Open Source License
@Override
public void run() {
    logger.debug("Starting check software compliance and hardware support status task for group {}.",
            deviceGroup.getId());
    this.logIt(String.format("Check software compliance task for group %s.", deviceGroup.getName()), 5);
    Session session = Database.getSession();
    try {
        logger.debug("Retrieving the software rules");
        @SuppressWarnings("unchecked")
        List<SoftwareRule> softwareRules = session.createCriteria(SoftwareRule.class)
                .addOrder(Property.forName("priority").asc()).list();
        logger.debug("Retrieving the hardware rules");
        @SuppressWarnings("unchecked")
        List<HardwareRule> hardwareRules = session.createCriteria(HardwareRule.class).list();
        session.beginTransaction();
        ScrollableResults devices = session
                .createQuery("select d from DeviceGroup g join g.cachedDevices d where g.id = :id")
                .setLong("id", deviceGroup.getId()).setCacheMode(CacheMode.IGNORE)
                .scroll(ScrollMode.FORWARD_ONLY);
        while (devices.next()) {
            Device device = (Device) devices.get(0);
            device.setSoftwareLevel(ConformanceLevel.UNKNOWN);
            for (SoftwareRule rule : softwareRules) {
                rule.check(device);
                if (device.getSoftwareLevel() != ConformanceLevel.UNKNOWN) {
                    break;
                }
            }
            device.resetEoX();
            for (HardwareRule rule : hardwareRules) {
                rule.check(device);
            }
            session.save(device);
            session.flush();
            session.evict(device);
        }
        session.getTransaction().commit();
        this.status = Status.SUCCESS;
    } catch (Exception e) {
        try {
            session.getTransaction().rollback();
        } catch (Exception e1) {
        }
        logger.error("Error while checking compliance.", e);
        this.logIt("Error while checking compliance: " + e.getMessage(), 2);
        this.status = Status.FAILURE;
        return;
    } finally {
        session.close();
    }
}
From source file:onl.netfishers.netshot.work.tasks.PurgeDatabaseTask.java
License:Open Source License
@Override
public void run() {
    logger.debug("Starting cleanup process.");
    {
        Session session = Database.getSession();
        try {
            session.beginTransaction();
            logger.trace("Cleaning up tasks finished more than {} days ago...", days);
            this.logIt(String.format("Cleaning up tasks more than %d days ago...", days), 5);
            Calendar when = Calendar.getInstance();
            when.add(Calendar.DATE, -1 * days);
            ScrollableResults tasks = session
                    .createQuery("from Task t where (t.status = :cancelled or t.status = :failure "
                            + "or t.status = :success) and (t.executionDate < :when)")
                    .setParameter("cancelled", Task.Status.CANCELLED)
                    .setParameter("failure", Task.Status.FAILURE).setParameter("success", Task.Status.SUCCESS)
                    .setDate("when", when.getTime()).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
            int count = 0;
            while (tasks.next()) {
                Task task = (Task) tasks.get(0);
                session.delete(task);
                if (++count % 50 == 0) {
                    session.flush();
                    session.clear();
                }
            }
            session.getTransaction().commit();
            logger.trace("Cleaning up done on tasks, {} entries affected.", count);
            this.logIt(String.format("Cleaning up done on tasks, %d entries affected.", count), 5);
        } catch (HibernateException e) {
            try {
                session.getTransaction().rollback();
            } catch (Exception e1) {
            }
            logger.error("Database error while purging the old tasks from the database.", e);
            this.logIt("Database error during the task purge.", 1);
            this.status = Status.FAILURE;
            return;
        } catch (Exception e) {
            try {
                session.getTransaction().rollback();
            } catch (Exception e1) {
            }
            logger.error("Error while purging the old tasks from the database.", e);
            this.logIt("Error during the task purge.", 1);
            this.status = Status.FAILURE;
            return;
        } finally {
            session.close();
        }
    }
    if (configDays > 0) {
        Session session = Database.getSession();
        try {
            session.beginTransaction();
            logger.trace("Cleaning up configurations taken more than {} days ago...", configDays);
            this.logIt(String.format("Cleaning up configurations older than %d days...", configDays), 5);
            Calendar when = Calendar.getInstance();
            when.add(Calendar.DATE, -1 * configDays);
            Query query;
            if (configSize > 0) {
                query = session.createQuery(
                        "select c from Config c join c.attributes a where (a.class = ConfigLongTextAttribute) group by c.id having (max(length(a.longText.text)) > :size) and (c.changeDate < :when) order by c.device asc, c.changeDate desc")
                        .setInteger("size", configSize * 1024);
            } else {
                query = session.createQuery(
                        "from Config c where (c.changeDate < :when) order by c.device asc, c.changeDate desc");
            }
            ScrollableResults configs = query.setCalendar("when", when).setCacheMode(CacheMode.IGNORE)
                    .scroll(ScrollMode.FORWARD_ONLY);
            long dontDeleteDevice = -1;
            Date dontDeleteBefore = null;
            int count = 0;
            while (configs.next()) {
                try {
                    Config config = (Config) configs.get(0);
                    if ((config.getDevice().getLastConfig() != null
                            && config.getDevice().getLastConfig().getId() == config.getId())
                            || (dontDeleteBefore != null && config.getChangeDate().before(dontDeleteBefore))
                            || (configKeepDays > 0 && dontDeleteDevice != config.getDevice().getId())) {
                        if (configKeepDays > 0) {
                            Calendar limitCalendar = Calendar.getInstance();
                            limitCalendar.setTime(config.getChangeDate());
                            limitCalendar.add(Calendar.DATE, -1 * configKeepDays);
                            dontDeleteBefore = limitCalendar.getTime();
                        }
                    } else {
                        session.delete(config);
                        if (++count % 30 == 0) {
                            session.flush();
                            session.clear();
                        }
                    }
                    dontDeleteDevice = config.getDevice().getId();
                } catch (NullPointerException e1) {
                }
            }
            session.getTransaction().commit();
            logger.trace("Cleaning up done on configurations, {} entries affected.", count);
            this.logIt(String.format("Cleaning up done on configurations, %d entries affected.", count), 5);
        } catch (HibernateException e) {
            try {
                session.getTransaction().rollback();
            } catch (Exception e1) {
            }
            logger.error("Database error while purging the old configurations from the database.", e);
            this.logIt("Database error during the configuration purge.", 1);
            this.status = Status.FAILURE;
            return;
        } catch (Exception e) {
            try {
                session.getTransaction().rollback();
            } catch (Exception e1) {
            }
            logger.error("Error while purging the old configurations from the database.", e);
            this.logIt("Error during the configuration purge.", 1);
            this.status = Status.FAILURE;
            return;
        } finally {
            session.close();
        }
    }
    this.status = Status.SUCCESS;
    logger.trace("Cleaning up process finished.");
}
From source file:org.candlepin.model.DetachedCandlepinQuery.java
License:Open Source License
/**
 * {@inheritDoc}
 */
@Override
@Transactional
@SuppressWarnings("unchecked")
public int forEach(int column, boolean evict, ResultProcessor<T> processor) {
    if (processor == null) {
        throw new IllegalArgumentException("processor is null");
    }
    Criteria executable = this.getExecutableCriteria();
    // We always override the cache mode here to ensure we don't evict things that may be in
    // cache from another request.
    if (evict) {
        executable.setCacheMode(CacheMode.GET);
    }
    ScrollableResults cursor = executable.scroll(ScrollMode.FORWARD_ONLY);
    int count = 0;
    try {
        boolean cont = true;
        if (evict) {
            while (cont && cursor.next()) {
                T result = (T) cursor.get(column);
                cont = processor.process(result);
                this.session.evict(result);
                ++count;
            }
        } else {
            while (cont && cursor.next()) {
                cont = processor.process((T) cursor.get(column));
                ++count;
            }
        }
    } finally {
        cursor.close();
    }
    return count;
}