List of usage examples for org.hibernate ScrollMode FORWARD_ONLY
ScrollMode FORWARD_ONLY
To view the source code for org.hibernate ScrollMode FORWARD_ONLY, click the Source Link.
From source file:org.eurekastreams.server.persistence.mappers.cache.PersonCacheLoader.java
License:Apache License
/** * Query the database for all people, only requesting the fields that we're caching, paging for effeciency. *///from www .j ava 2 s . com private void queryAllPeople() { Criteria criteria = personQueryStrategy.getCriteria(getHibernateSession()); // page the data criteria.setFetchSize(FETCH_SIZE); criteria.setCacheMode(CacheMode.IGNORE); ScrollableResults scroll = criteria.scroll(ScrollMode.FORWARD_ONLY); // loop through the results and store in cache long recordCounter = 0; while (scroll.next()) { if (++recordCounter % FETCH_SIZE == 0) { log.info("Loading " + recordCounter + "th person record, clearing session."); getHibernateSession().clear(); } PersonModelView result = (PersonModelView) scroll.get(0); getCache().set(CacheKeys.PERSON_BY_ID + result.getEntityId(), result); getCache().set(CacheKeys.PERSON_BY_ACCOUNT_ID + result.getAccountId(), result.getEntityId()); getCache().set(CacheKeys.PERSON_BY_OPEN_SOCIAL_ID + result.getOpenSocialId(), result.getEntityId()); } }
From source file:org.gbif.portal.dao.DAOUtils.java
License:Open Source License
/**
 * Process results, scrolling through each record in turn so that only that
 * specific record is loaded at a time.
 *
 * @param resultsOutputter the outputter (possibly the head of a chain) to write each record to
 * @param session the Hibernate session the query belongs to
 * @param query the query to scroll over; made read-only here
 * @param associationTraverser traverser reset after processing; may be null
 * @param batchSize fetch size hint for the JDBC driver
 * @throws IOException if the outputter fails to write
 */
public static void scrollResults(final ResultsOutputter resultsOutputter, Session session, Query query,
        AssociationTraverser associationTraverser, int batchSize) throws IOException {
    query.setReadOnly(true);
    query.setFetchSize(batchSize);
    // Using scrollable results to prevent initiation of all model objects
    ScrollableResults sr = query.scroll(ScrollMode.FORWARD_ONLY);
    // go to beginning of resultset
    boolean isNotEmpty = sr.first();
    if (!isNotEmpty) {
        // this is necessary due to a bug with ScrollableResults
        // allowing continuous scrolling over an empty resultset. genius.
        // NOTE(review): this early return leaves sr unclosed — confirm whether the
        // underlying cursor should also be closed on this path.
        return;
    }
    // iterate through the results
    processScrollableResults(resultsOutputter, session, sr, associationTraverser, batchSize);
    // check for a chain of outputters; each chained outputter re-reads the result set
    if (resultsOutputter instanceof ChainableResultsOutputter) {
        ChainableResultsOutputter cro = (ChainableResultsOutputter) resultsOutputter;
        ResultsOutputter nextResultsOutputter = cro.getNextResultsOutputter();
        while (nextResultsOutputter != null && !cro.isChainInOnePass()) {
            // back to the start
            // NOTE(review): rewinding with first() on a FORWARD_ONLY cursor may not be
            // supported by all drivers — verify this works with the configured dialect.
            sr.first();
            processScrollableResults(nextResultsOutputter, session, sr, associationTraverser, batchSize);
            // NOTE(review): cro is always re-derived from resultsOutputter (the chain head),
            // not from nextResultsOutputter — confirm this is intended and cannot loop forever.
            if (resultsOutputter instanceof ChainableResultsOutputter) {
                cro = (ChainableResultsOutputter) resultsOutputter;
                if (!cro.isChainInOnePass())
                    nextResultsOutputter = cro.getNextResultsOutputter();
                else
                    nextResultsOutputter = null;
            } else {
                nextResultsOutputter = null;
            }
        }
    }
    if (associationTraverser != null)
        associationTraverser.reset();
    // close the results set
    sr.close();
}
From source file:org.geolatte.featureserver.dbase.StandardFeatureReader.java
License:Open Source License
/**
 * Opens a forward-only scrollable cursor over the criteria results,
 * storing it in the {@code results} field (fetch size 1024).
 */
private void scroll(Criteria crit) {
    results = crit.setFetchSize(1024).scroll(ScrollMode.FORWARD_ONLY);
}
From source file:org.glite.security.voms.admin.persistence.dao.VOMSUserDAO.java
License:Apache License
/**
 * Returns a forward-only, cache-bypassing cursor over all VOMS users
 * ordered by surname (ascending). The caller is responsible for closing
 * the returned {@link ScrollableResults}.
 */
public ScrollableResults findAllWithCursor() {
    Query q = HibernateFactory.getSession().createQuery("select u from VOMSUser u order by u.surname asc");
    // Bug fix: the original opened a FORWARD_ONLY cursor and discarded it, then
    // returned a second, default-mode cursor from q.scroll(). Return the single,
    // properly configured cursor instead.
    return q.setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.FORWARD_ONLY);
}
From source file:org.glite.security.voms.admin.persistence.tools.Print.java
License:Apache License
@Override public void execute(CommandLine line) { AuditSearchDAO dao = DAOFactory.instance().getAuditSearchDAO(); AuditLogSearchParams params = addConstraintsFromCommandLine(line); Integer maxResults = params.getMaxResults(); if (maxResults == null) { maxResults = DEFAULT_MAX_RESULTS; }//from w ww . j a v a 2 s. co m ScrollableAuditLogSearchResults results = dao.scrollEventsMatchingParams(params, ScrollMode.FORWARD_ONLY); ScrollableResults scroll = results.getResults(); int count = 0; while (scroll.next()) { count++; AuditEvent e = (AuditEvent) scroll.get()[0]; printAuditLogEvent(e); HibernateFactory.getSession().evict(e); } if (count == 0) { System.out.println("No events found."); } else { System.out.format("%d events printed\n", count); } }
From source file:org.goobi.production.chart.HibernateProjectTaskList.java
License:Open Source License
private synchronized void calculate(Project inProject, List<IProjectTask> myTaskList, Boolean countImages, Integer inMax) {//from w w w . j av a 2 s. c o m Session session = Helper.getHibernateSession(); Criteria crit = session.createCriteria(Task.class); crit.addOrder(Order.asc("ordering")); crit.createCriteria("process", "proz"); crit.add(Restrictions.eq("proz.template", Boolean.FALSE)); crit.add(Restrictions.eq("proz.project", inProject)); ScrollableResults list = crit.setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.FORWARD_ONLY); while (list.next()) { Task step = (Task) list.get(0); String shorttitle = (step.getTitle().length() > 60 ? step.getTitle().substring(0, 60) + "..." : step.getTitle()); IProjectTask pt = null; for (IProjectTask task : myTaskList) { if (task.getTitle().equals(shorttitle)) { pt = task; break; } } if (pt == null) { pt = new ProjectTask(shorttitle, 0, 0); myTaskList.add(pt); } if (step.getProcessingStatusEnum() == TaskStatus.DONE) { if (countImages) { pt.setStepsCompleted(pt.getStepsCompleted() + step.getProcess().getSortHelperImages()); } else { pt.setStepsCompleted(pt.getStepsCompleted() + 1); } } if (countImages) { pt.setStepsMax(pt.getStepsMax() + step.getProcess().getSortHelperImages()); } else { pt.setStepsMax(pt.getStepsMax() + 1); } } }
From source file:org.grouter.common.hibernatesearch.FullIndexHandler.java
License:Apache License
/** * Creates index// ww w.j a v a 2 s . c om * * @param batchSize batch index parameter * @param session the Hibernate session * @param theIndexClass classes to index */ public void doFullIndex(int batchSize, Session session, Class... theIndexClass) { FullTextSession fullTextSession = Search.createFullTextSession(session); fullTextSession.setFlushMode(FlushMode.MANUAL); fullTextSession.setCacheMode(CacheMode.IGNORE); //Scrollable results will avoid loading too many objects in memory for (Class theClass : theIndexClass) { ScrollableResults results = fullTextSession.createCriteria(theClass).scroll(ScrollMode.FORWARD_ONLY); int index = 0; while (results.next()) { index++; fullTextSession.index(results.get(0)); //index each element if (index % batchSize == 0) { session.clear(); //clear every batchSize since the queue is processed } } } }
From source file:org.grouter.domain.dao.spring.SystemDAOImpl.java
License:Apache License
public void doFullIndex(int batchSize, Class theIndexClass, Session session)// Class... clazzes) { FullTextSession fullTextSession = Search.createFullTextSession(session); fullTextSession.setFlushMode(FlushMode.MANUAL); fullTextSession.setCacheMode(CacheMode.IGNORE); //Transaction transaction = fullTextSession.beginTransaction(); //Scrollable results will avoid loading too many objects in memory ScrollableResults results = fullTextSession.createCriteria(theIndexClass).scroll(ScrollMode.FORWARD_ONLY); int index = 0; while (results.next()) { index++;/* www . j a va 2 s. c o m*/ fullTextSession.index(results.get(0)); //index each element if (index % batchSize == 0) { session.clear(); //clear every batchSize since the queue is processed } } //transaction.commit(); }
From source file:org.headsupdev.agile.app.search.Reindex.java
License:Open Source License
/**
 * Re-indexes every mapped entity class for full-text search. For each class a
 * transaction is opened, all instances are scrolled through with a forward-only
 * cursor, Indexed-annotated objects are indexed, and the index queue is flushed
 * and the session cleared every BATCH_SIZE records. A ReindexTask is registered
 * with the Manager for the duration of the run.
 */
public void run() {
    Task reindex = new ReindexTask();
    Manager.getInstance().addTask(reindex);

    try {
        for (String className : HibernateUtil.getEntityClassNames()) {
            Session session = HibernateUtil.getCurrentSession();
            FullTextSession fullTextSession = org.hibernate.search.Search
                    .createFullTextSession(((SessionProxy) session).getRealSession());
            Transaction tx = fullTextSession.beginTransaction();
            // MANUAL flush + IGNORE cache: bulk read-only pass for indexing.
            fullTextSession.setFlushMode(FlushMode.MANUAL);
            fullTextSession.setCacheMode(CacheMode.IGNORE);
            Manager.getLogger(getClass().getName()).debug(" object type " + className);

            // Scrollable results will avoid loading too many objects in memory
            ScrollableResults results = fullTextSession.createCriteria(className).setFetchSize(BATCH_SIZE)
                    .scroll(ScrollMode.FORWARD_ONLY);
            int index = 0;
            while (results.next()) {
                Object o = results.get(0);
                index++;
                // Only index classes actually annotated for search.
                if (o.getClass().isAnnotationPresent(Indexed.class)) {
                    if (HeadsUpConfiguration.isDebug()) {
                        System.out.print(".");
                    }
                    fullTextSession.index(o); // index each element
                }
                if (index % BATCH_SIZE == 0) {
                    fullTextSession.flushToIndexes(); // apply changes to indexes
                    fullTextSession.clear(); // clear since the queue is processed
                }
            }
            // NOTE(review): results is never closed and the final partial batch is not
            // flushed before commit — confirm whether tx.commit() covers the flush here.
            tx.commit();
            if (HeadsUpConfiguration.isDebug()) {
                System.out.println();
            }
        }
    } catch (Exception e) {
        Manager.getLogger(getClass().getName()).error("Failed to reindex search data", e);
    }
    Manager.getInstance().removeTask(reindex);
}
From source file:org.inbio.neoportal.index.Importer.java
License:Open Source License
/**
 * Imports Darwin Core occurrence rows from the ImportDwc staging table into
 * OccurrenceDwc entities, resolving or creating the associated Taxon and
 * Location, and flushing/clearing the session every BATCH_SIZE rows.
 */
@Transactional
public void indexOccurrences() {
    Session session = sessionFactory.getCurrentSession();
    // FullTextSession fullTextSession = Search.getFullTextSession(session);

    // config session for bash job
    session.setFlushMode(FlushMode.MANUAL);
    // fullTextSession.setFlushMode(FlushMode.MANUAL);

    logger.log(org.apache.log4j.Level.DEBUG, "Starting importOccurrences process");

    // get current date for dateLastModified field
    DateFormat dateFormat = new SimpleDateFormat("dd/MM/yyyy");
    Date date = new Date();
    String dateLastModified = dateFormat.format(date);

    // source dates arrive like "01-Jan-12"
    DateFormat sourceDateFormat = new SimpleDateFormat("dd-MMM-yy", Locale.ENGLISH);

    int firstResult = 0;
    int setCounter = 0; // every 100 (the jdbc batch size) call flush

    DataProvider dp = dataProviderDAO.findAll().get(0);

    logger.log(org.apache.log4j.Level.DEBUG, "importOccurrences Begin Transaction");

    ScrollableResults scroll = session.createCriteria(ImportDwc.class).setFetchSize(BATCH_SIZE)
            .setCacheMode(CacheMode.IGNORE).setReadOnly(true).scroll(ScrollMode.FORWARD_ONLY);

    boolean update;
    int batch = 0;
    int rowsCounter = 0;

    while (scroll.next()) {
        batch++;
        rowsCounter++;
        ImportDwc importDwc = (ImportDwc) scroll.get(0);
        logger.trace("ImportDwc after scroll.get");

        try {
            // avoid repeated occurrenceId
            OccurrenceDwc occurrence = occurrenceDAO
                    .findByCatalogNumberHql(importDwc.getCatalogNumber().replace("\"", ""));
            logger.trace("OccurrenceDwc after findByCatalogNumber "
                    + importDwc.getCatalogNumber().replace("\"", ""));

            if (occurrence != null) {
                update = true;
                // continue;
            } else {
                update = false;
                occurrence = new OccurrenceDwc();
            }
            Taxon taxon = null;

            // check if taxonId is empty (unidentify specimens)
            if (importDwc.getTaxonId().isEmpty()) {
                taxon = null;
            } else {
                // check if is the same taxon already associated with the occurrence
                if (update && occurrence.getTaxonId().equals(importDwc.getTaxonId().replace("\"", ""))) {
                    taxon = occurrence.getTaxon();
                    // NOTE(review): this string literal was split across lines in the scraped
                    // source; reconstructed as a single-line message — verify against the repo.
                    logger.trace("Occurrence update with same taxon");
                } else {
                    // find taxon entity
                    // taxon = taxonNewDAO.findById(new BigDecimal(importDwc.getTaxonId().replace("\"", "")));
                    List<Taxon> taxonList = taxonDAO.findByDefaultName(importDwc.getScientificName());
                    logger.trace("Taxon after findByDefaultName");

                    // Exactly one match: use it; several: disambiguate by kingdom.
                    if (taxonList.size() == 1)
                        taxon = taxonList.get(0);
                    else if (taxonList.size() > 1) {
                        for (Taxon taxon2 : taxonList) {
                            if (taxon2.getKingdom().equals(importDwc.getKingdom())) {
                                taxon = taxon2;
                                break;
                            }
                        }
                    }
                }
            }

            // TODO: fix, use specimenId instead
            // NOTE(review): occurrenceId is overwritten again below with
            // importDwc.getOccurrenceId() — confirm which value should win.
            occurrence.setOccurrenceId(importDwc.getCatalogNumber().replace("\"", ""));
            occurrence.setDataProvider(dp);
            occurrence.setTaxon(taxon);

            // find or create location
            Location location = locationDAO.findById(new BigDecimal(importDwc.getLocationId()));
            logger.trace("Location after findById");

            if (location == null) {
                location = new Location(new BigDecimal(importDwc.getLocationId()));
                location.setHigherGeographyId(importDwc.getHigherGeographyId());
                location.setHigherGeography(importDwc.getHigherGeography());
                location.setContinent(importDwc.getContinent());
                location.setWaterBody(importDwc.getWaterBody());
                location.setIslandGroup(importDwc.getIslandGroup());
                location.setIsland(importDwc.getIsland());
                location.setCountry(importDwc.getCountry());
                location.setCountryCode(importDwc.getCountryCode());
                location.setStateProvince(importDwc.getStateProvince());
                location.setCounty(importDwc.getCounty());
                location.setMunicipality(importDwc.getMunicipality());
                location.setLocality(importDwc.getLocality());
                location.setVerbatimLocality(importDwc.getVerbatimLocality());
                location.setVerbatimElevation(importDwc.getVerbatimElevation());
                location.setMinimumElevationInMeters(importDwc.getMinimumElevationInMeters());
                location.setMaximumElevationInMeters(importDwc.getMaximumElevationInMeters());
                location.setVerbatimDepth(importDwc.getVerbatimDepth());
                location.setMinimumDepthInMeters(importDwc.getMinimumDepthInMeters());
                location.setMaximumDepthInMeters(importDwc.getMaximumDepthInMeters());
                location.setMinimumDistanceAboveSurfaceInMeters(
                        importDwc.getMinimumDistanceAboveSurfaceInMeters());
                location.setMaximumDistanceAboveSurfaceInMeters(
                        importDwc.getMaximumDistanceAboveSurfaceInMeters());
                location.setLocationAccordingTo(importDwc.getLocationAccordingTo());
                location.setLocationRemarks(importDwc.getLocationRemarks());
                location.setVerbatimCoordinates(importDwc.getVerbatimCoordinates());
                location.setVerbatimLatitude(importDwc.getVerbatimLatitude());
                location.setVerbatimLongitude(importDwc.getVerbatimLongitude());
                location.setVerbatimCoordinateSystem(importDwc.getVerbatimCoordinateSystem());
                location.setVerbatimSRS(importDwc.getVerbatimSRS());
                // Decimal coordinates are optional in the source data.
                if (!importDwc.getDecimalLatitude().isEmpty())
                    location.setDecimalLatitude(Double.valueOf(importDwc.getDecimalLatitude()));
                if (!importDwc.getDecimalLongitude().isEmpty())
                    location.setDecimalLongitude(Double.valueOf(importDwc.getDecimalLongitude()));
                location.setGeodeticDatum(importDwc.getGeodeticDatum());
                location.setCoordinateUncertaintyInMeters(importDwc.getCoordinateUncertaintyInMeters());
                location.setCoordinatePrecision(importDwc.getCoordinatePrecision());
                location.setPointRadiusSpatialFit(importDwc.getPointRadiusSpatialFit());
                location.setFootprintWKT(importDwc.getFootprintWKT());
                location.setFootprintSRS(importDwc.getFootprintSRS());
                location.setFootprintSpatialFit(importDwc.getFootprintSpatialFit());
                location.setGeoreferencedBy(importDwc.getGeoreferencedBy());
                location.setGeoreferencedDate(importDwc.getGeoreferencedDate());
                location.setGeoreferenceProtocol(importDwc.getGeoreferenceProtocol());
                location.setGeoreferenceSources(importDwc.getGeoreferenceSources());
                location.setGeoreferenceVerificationStatus(importDwc.getGeoreferenceVerificationStatus());
                location.setGeoreferenceRemarks(importDwc.getGeoreferenceRemarks());
                locationDAO.create(location);
                // increment batch because location should be inserted
                batch++;
            }

            occurrence.setLocation(location);
            occurrence.setType(importDwc.getType());
            occurrence.setModified(importDwc.getModified());
            occurrence.setLanguage(importDwc.getLanguage());
            occurrence.setRights(importDwc.getRights());
            occurrence.setRightsHolder(importDwc.getRightsHolder());
            occurrence.setAccessRights(importDwc.getAccessRights());
            occurrence.setBibliographicCitation(importDwc.getBibliographicCitation());
            occurrence.setReferences(importDwc.getReferences());
            occurrence.setInstitutionId(importDwc.getInstitutionId());
            occurrence.setCollectionId(importDwc.getCollectionId());
            occurrence.setDatasetId(importDwc.getDatasetId());
            occurrence.setInstitutionCode(importDwc.getInstitutionCode());
            occurrence.setCollectionCode(importDwc.getCollectionCode());
            occurrence.setDatasetName(importDwc.getDatasetName());
            occurrence.setOwnerInstitutionCode(importDwc.getOwnerInstitutionCode());
            occurrence.setBasisOfRecord(importDwc.getBasisOfRecord());
            occurrence.setInformationWithheld(importDwc.getInformationWithheld());
            occurrence.setDataGeneralizations(importDwc.getDataGeneralizations());
            occurrence.setDynamicProperties(importDwc.getDynamicProperties());
            occurrence.setOccurrenceId(importDwc.getOccurrenceId().toString());
            occurrence.setCatalogNumber(importDwc.getCatalogNumber());
            occurrence.setOccurrenceRemarks(importDwc.getOccurrenceRemarks());
            occurrence.setRecordNumber(importDwc.getRecordNumber());
            occurrence.setRecordedBy(importDwc.getRecordedBy());
            occurrence.setIndividualId(importDwc.getIndividualId());
            occurrence.setIndividualCount(importDwc.getIndividualCount());
            occurrence.setSex(importDwc.getSex());
            occurrence.setLifeStage(importDwc.getLifeStage());
            occurrence.setReproductiveCondition(importDwc.getReproductiveCondition());
            occurrence.setBehavior(importDwc.getBehavior());
            occurrence.setEstablishmentMeans(importDwc.getEstablishmentMeans());
            occurrence.setOccurrenceStatus(importDwc.getOccurrenceStatus());
            occurrence.setPreparations(importDwc.getPreparations());
            occurrence.setDisposition(importDwc.getDisposition());
            occurrence.setOtherCatalogNumbers(importDwc.getOtherCatalogNumbers());
            occurrence.setPreviousIdentifications(importDwc.getPreviousIdentifications());
            occurrence.setAssociatedMedia(importDwc.getAssociatedMedia());
            occurrence.setAssociatedReferences(importDwc.getAssociatedReferences());
            occurrence.setAssociatedOccurrences(importDwc.getAssociatedOccurrences());
            occurrence.setAssociatedSequences(importDwc.getAssociatedSequences());
            occurrence.setAssociatedTaxa(importDwc.getAssociatedTaxa());
            occurrence.setEventId(importDwc.getEventId());
            occurrence.setSamplingProtocol(importDwc.getSamplingProtocol());
            occurrence.setSamplingEffort(importDwc.getSamplingEffort());
            occurrence.setEventDate(importDwc.getEventDate());
            occurrence.setEventTime(importDwc.getEventTime());
            occurrence.setStartDayOfYear(importDwc.getStartDayOfYear());
            occurrence.setEndDayOfYear(importDwc.getEndDayOfYear());
            occurrence.setYear(importDwc.getYear());
            occurrence.setMonth(importDwc.getMonth());
            occurrence.setDay(importDwc.getDay());
            occurrence.setVerbatimEventDate(importDwc.getVerbatimEventDate());
            occurrence.setHabitat(importDwc.getHabitat());
            // NOTE(review): setFieldNotes is called twice; the first call passes
            // getFieldNumber() and is immediately overwritten — this looks like it
            // should be setFieldNumber(importDwc.getFieldNumber()). Confirm.
            occurrence.setFieldNotes(importDwc.getFieldNumber());
            occurrence.setFieldNotes(importDwc.getFieldNotes());
            occurrence.setEventRemarks(importDwc.getEventRemarks());
            occurrence.setGeologicalContextId(importDwc.getGeologicalContextId());
            occurrence.setEarliestEonOrLowestEonothem(importDwc.getEarliestEonOrLowestEonothem());
            occurrence.setLatestEonOrHighestEonothem(importDwc.getLatestEonOrHighestEonothem());
            occurrence.setEarliestEraOrLowestErathem(importDwc.getEarliestEraOrLowestErathem());
            occurrence.setLatestEraOrHighestErathem(importDwc.getLatestEraOrHighestErathem());
            occurrence.setEarliestPeriodOrLowestSystem(importDwc.getEarliestPeriodOrLowestSystem());
            occurrence.setLatestPeriodOrHighestSystem(importDwc.getLatestPeriodOrHighestSystem());
            occurrence.setEarliestEpochOrLowestSeries(importDwc.getEarliestEpochOrLowestSeries());
            occurrence.setLatestEpochOrHighestSeries(importDwc.getLatestEpochOrHighestSeries());
            occurrence.setEarliestAgeOrLowestStage(importDwc.getEarliestAgeOrLowestStage());
            occurrence.setLatestAgeOrHighestStage(importDwc.getLatestAgeOrHighestStage());
            occurrence.setLowestBiostratigraphicZone(importDwc.getLowestBiostratigraphicZone());
            occurrence.setHighestBiostratigraphicZone(importDwc.getHighestBiostratigraphicZone());
            occurrence.setLithostratigraphicTerms(importDwc.getLithostratigraphicTerms());
            occurrence.setGroup(importDwc.getGroup());
            occurrence.setFormation(importDwc.getFormation());
            occurrence.setMember(importDwc.getMember());
            occurrence.setBed(importDwc.getBed());
            occurrence.setIdentificationId(importDwc.getIdentificationId());
            occurrence.setIdentifiedBy(importDwc.getIdentifiedBy());
            // dateIdentified is optional; parse only when present.
            if (importDwc.getDateIdentified() != null && importDwc.getDateIdentified().length() > 0)
                occurrence.setDateIdentified(sourceDateFormat.parse(importDwc.getDateIdentified()));
            occurrence.setIdentificationReferences(importDwc.getIdentificationReferences());
            occurrence.setIdentificationVerificationStatus(importDwc.getIdentificationVerificationStatus());
            occurrence.setIdentificationRemarks(importDwc.getIdentificationRemarks());
            occurrence.setIdentificationQualifier(importDwc.getIdentificationQualifier());
            occurrence.setTypeStatus(importDwc.getTypeStatus());
            occurrence.setTaxonId(importDwc.getTaxonId());
            occurrence.setScientificNameId(importDwc.getScientificNameId());
            occurrence.setAcceptedNameUsageId(importDwc.getAcceptedNameUsageId());
            occurrence.setParentNameUsageId(importDwc.getParentNameUsageId());
            occurrence.setOriginalNameUsageId(importDwc.getOriginalNameUsageId());
            occurrence.setNameAccordingToId(importDwc.getNameAccordingToId());
            occurrence.setNamePublishedInId(importDwc.getNamePublishedInId());
            occurrence.setTaxonConceptId(importDwc.getTaxonConceptId());
            occurrence.setScientificName(importDwc.getScientificName());
            occurrence.setAcceptedNameUsage(importDwc.getAcceptedNameUsage());
            occurrence.setParentNameUsage(importDwc.getParentNameUsage());
            occurrence.setOriginalNameUsage(importDwc.getOriginalNameUsage());
            occurrence.setNameAccordingTo(importDwc.getNameAccordingTo());
            occurrence.setNamePublishedIn(importDwc.getNamePublishedIn());
            occurrence.setNamePublishedInYear(importDwc.getNamePublishedInYear());
            occurrence.setHigherClassification(importDwc.getHigherClassification());
            occurrence.setKingdom(importDwc.getKingdom());
            occurrence.setPhylum(importDwc.getPhylum());
            occurrence.setClass_(importDwc.getClass_());
            occurrence.setOrder(importDwc.getOrder());
            occurrence.setFamily(importDwc.getFamily());
            occurrence.setGenus(importDwc.getGenus());
            occurrence.setSubgenus(importDwc.getSubgenus());
            occurrence.setSpecificEpithet(importDwc.getSpecificEpithet());
            occurrence.setInfraspecificEpithet(importDwc.getInfraspecificEpithet());
            occurrence.setTaxonRank(importDwc.getTaxonRank());
            occurrence.setVerbatimTaxonRank(importDwc.getVerbatimTaxonRank());
            occurrence.setScientificNameAuthorship(importDwc.getScientificNameAuthorship());
            occurrence.setVernacularName(importDwc.getVernacularName());
            occurrence.setNomenclaturalCode(importDwc.getNomenclaturalCode());
            occurrence.setTaxonomicStatus(importDwc.getTaxonomicStatus());
            occurrence.setNomenclaturalStatus(importDwc.getNomenclaturalStatus());
            occurrence.setTaxonRemarks(importDwc.getTaxonRemarks());

            if (!update)
                occurrenceDAO.create(occurrence);
            else
                occurrenceDAO.update(occurrence);

            // clear objects
            occurrence.setImages(null);
            occurrence = null;
            taxon = null;
            location = null;
        } catch (NonUniqueResultException ex) {
            logger.warn("NonUniqueResultException occurrenceId " + importDwc.getCatalogNumber());
        } catch (NumberFormatException ex) {
            logger.error("NumberFormatException occurrenceId " + importDwc.getCatalogNumber());
            ex.printStackTrace();
            System.exit(-1);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        // } // end for, 1000 importDwc rows

        session.evict(importDwc);

        // Flush and clear every BATCH_SIZE rows to keep the session small.
        if (batch >= BATCH_SIZE) {
            batch = 0;
            SessionStatistics statistics = session.getStatistics();
            logger.trace("Entities before flush: " + String.valueOf(statistics.getEntityCount()));
            session.flush();
            logger.trace("Entities before clear: " + String.valueOf(statistics.getEntityCount()));
            // fullTextSession.flushToIndexes();
            session.clear();
            logger.trace("Entities after clear: " + String.valueOf(statistics.getEntityCount()));
            // System.exit(1);
        }

        // Periodic progress logging.
        if (rowsCounter % maxResults == 0) {
            // fullTextSession.flushToIndexes();
            logger.info("Occurrences added " + rowsCounter);
            SessionStatistics statistics = session.getStatistics();
            logger.debug("Entities: " + String.valueOf(statistics.getEntityCount()));
            logger.debug("Collections: " + String.valueOf(statistics.getCollectionCount()));
        }
        // ******* for debug only ***********
        // if(rowsCounter == 1) {
        //     session.getTransaction().rollback();
        //     scroll.close();
        //     System.exit(1);
        // }
        // firstResult += maxResults;
        // occurrencesDwcList =
        //     importDwcDAO.scrollAll(ImportDwc.class,
        //     maxResults,
        //     firstResult);
    } // end while, no more importDwc rows

    scroll.close();
    // transactionManager.commit(status);
    session.flush();
    session.clear();

    logger.info("Total occurrences processed " + rowsCounter);
    // session.getTransaction().commit();
    // session.close();
}