List of usage examples for org.hibernate.FlushMode.MANUAL
FlushMode.MANUAL
With FlushMode.MANUAL, the Session is only synchronized with the database when Session.flush() is called explicitly; Hibernate never flushes it automatically. The examples below, collected from open-source projects, show how this mode is used for read-mostly sessions, temporary flush-mode switches around queries, and batch import or indexing jobs.
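A minimal, self-contained sketch of the pattern the examples share (the class name, helper method, and session-factory parameter are illustrative placeholders, not taken from the projects below):

import org.hibernate.FlushMode;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

public class ManualFlushSketch {

    // Hypothetical helper: run read-mostly or batch work without automatic
    // flushing, then push any pending changes in a single explicit flush.
    public static void runWithManualFlush(SessionFactory sessionFactory) {
        Session session = sessionFactory.openSession();
        try {
            // MANUAL: Hibernate will not flush on query execution or commit;
            // only an explicit flush() writes pending changes.
            session.setFlushMode(FlushMode.MANUAL);

            // ... queries and entity modifications go here ...

            session.flush(); // synchronize with the database exactly once
        } finally {
            session.close();
        }
    }
}

Restoring the previous flush mode, or closing the session, in a finally block (as most of the examples below do) keeps the MANUAL setting from leaking into unrelated work.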
From source file:org.hyperic.hq.hibernate.SessionManager.java
License:Open Source License
private void runInSessionInternal(final SessionRunner r) throws Exception {
    boolean participate = false;
    try {
        if (TransactionSynchronizationManager.hasResource(getSessionFactory())) {
            // Do not modify the Session: just set the participate flag.
            participate = true;
        } else {
            Session session = SessionFactoryUtils.getSession(getSessionFactory(), true);
            session.setFlushMode(FlushMode.MANUAL);
            TransactionSynchronizationManager.bindResource(getSessionFactory(), new SessionHolder(session));
        }
        HibernateTemplate template = getHibernateTemplate();
        template.execute(new HibernateCallback() {
            public Object doInHibernate(Session session) throws HibernateException, SQLException {
                try {
                    r.run();
                } catch (Exception e) {
                    throw new HibernateException(e);
                }
                return null;
            }
        });
    } finally {
        if (!participate) {
            // single session mode
            SessionHolder sessionHolder = (SessionHolder) TransactionSynchronizationManager
                    .unbindResource(getSessionFactory());
            SessionFactoryUtils.closeSession(sessionHolder.getSession());
        }
    }
}
From source file:org.hyperic.hq.measurement.server.session.MeasurementDAO.java
License:Open Source License
/**
 * Look up a Measurement, allowing for the query to return a stale copy (for
 * efficiency reasons).
 *
 * @param tid The MeasurementTemplate id
 * @param iid The instance id
 * @param allowStale <code>true</code> to allow stale copies of an alert
 *        definition in the query results; <code>false</code> to never allow
 *        stale copies, potentially always forcing a sync with the database.
 * @return The Measurement or <code>null</code>.
 */
Measurement findByTemplateForInstance(Integer tid, Integer iid, boolean allowStale) {
    Session session = getSession();
    FlushMode oldFlushMode = session.getFlushMode();
    try {
        if (allowStale) {
            session.setFlushMode(FlushMode.MANUAL);
        }
        String sql = "select distinct m from Measurement m " + "join m.template t "
                + "where t.id=? and m.instanceId=?";
        return (Measurement) getSession().createQuery(sql).setInteger(0, tid.intValue())
                .setInteger(1, iid.intValue()).setCacheable(true)
                .setCacheRegion("Measurement.findByTemplateForInstance").uniqueResult();
    } finally {
        session.setFlushMode(oldFlushMode);
    }
}
From source file:org.inbio.neoportal.index.Importer.java
License:Open Source License
@Transactional
public void indexOccurrences() {
    Session session = sessionFactory.getCurrentSession();
    // FullTextSession fullTextSession = Search.getFullTextSession(session);

    // configure session for batch job
    session.setFlushMode(FlushMode.MANUAL);
    // fullTextSession.setFlushMode(FlushMode.MANUAL);

    logger.log(org.apache.log4j.Level.DEBUG, "Starting importOccurrences process");

    // get current date for dateLastModified field
    DateFormat dateFormat = new SimpleDateFormat("dd/MM/yyyy");
    Date date = new Date();
    String dateLastModified = dateFormat.format(date);

    DateFormat sourceDateFormat = new SimpleDateFormat("dd-MMM-yy", Locale.ENGLISH);

    int firstResult = 0;
    int setCounter = 0; // every 100 (the jdbc batch size) call flush

    DataProvider dp = dataProviderDAO.findAll().get(0);

    logger.log(org.apache.log4j.Level.DEBUG, "importOccurrences Begin Transaction");

    ScrollableResults scroll = session.createCriteria(ImportDwc.class).setFetchSize(BATCH_SIZE)
            .setCacheMode(CacheMode.IGNORE).setReadOnly(true).scroll(ScrollMode.FORWARD_ONLY);

    boolean update;
    int batch = 0;
    int rowsCounter = 0;

    while (scroll.next()) {
        batch++;
        rowsCounter++;
        ImportDwc importDwc = (ImportDwc) scroll.get(0);
        logger.trace("ImportDwc after scroll.get");

        try {
            // avoid repeated occurrenceId
            OccurrenceDwc occurrence = occurrenceDAO
                    .findByCatalogNumberHql(importDwc.getCatalogNumber().replace("\"", ""));
            logger.trace("OccurrenceDwc after findByCatalogNumber "
                    + importDwc.getCatalogNumber().replace("\"", ""));

            if (occurrence != null) {
                update = true;
                // continue;
            } else {
                update = false;
                occurrence = new OccurrenceDwc();
            }

            Taxon taxon = null;

            // check if taxonId is empty (unidentified specimens)
            if (importDwc.getTaxonId().isEmpty()) {
                taxon = null;
            } else {
                // check if it is the same taxon already associated with the occurrence
                if (update && occurrence.getTaxonId().equals(importDwc.getTaxonId().replace("\"", ""))) {
                    taxon = occurrence.getTaxon();
                    logger.trace("Occurrence update with same taxon");
                } else {
                    // find taxon entity
                    // taxon = taxonNewDAO.findById(new BigDecimal(importDwc.getTaxonId().replace("\"", "")));
                    List<Taxon> taxonList = taxonDAO.findByDefaultName(importDwc.getScientificName());
                    logger.trace("Taxon after findByDefaultName");

                    if (taxonList.size() == 1)
                        taxon = taxonList.get(0);
                    else if (taxonList.size() > 1) {
                        for (Taxon taxon2 : taxonList) {
                            if (taxon2.getKingdom().equals(importDwc.getKingdom())) {
                                taxon = taxon2;
                                break;
                            }
                        }
                    }
                }
            }

            // TODO: fix, use specimenId instead
            occurrence.setOccurrenceId(importDwc.getCatalogNumber().replace("\"", ""));
            occurrence.setDataProvider(dp);
            occurrence.setTaxon(taxon);

            // find or create location
            Location location = locationDAO.findById(new BigDecimal(importDwc.getLocationId()));
            logger.trace("Location after findById");

            if (location == null) {
                location = new Location(new BigDecimal(importDwc.getLocationId()));
                location.setHigherGeographyId(importDwc.getHigherGeographyId());
                location.setHigherGeography(importDwc.getHigherGeography());
                location.setContinent(importDwc.getContinent());
                location.setWaterBody(importDwc.getWaterBody());
                location.setIslandGroup(importDwc.getIslandGroup());
                location.setIsland(importDwc.getIsland());
                location.setCountry(importDwc.getCountry());
                location.setCountryCode(importDwc.getCountryCode());
                location.setStateProvince(importDwc.getStateProvince());
                location.setCounty(importDwc.getCounty());
                location.setMunicipality(importDwc.getMunicipality());
                location.setLocality(importDwc.getLocality());
                location.setVerbatimLocality(importDwc.getVerbatimLocality());
                location.setVerbatimElevation(importDwc.getVerbatimElevation());
                location.setMinimumElevationInMeters(importDwc.getMinimumElevationInMeters());
                location.setMaximumElevationInMeters(importDwc.getMaximumElevationInMeters());
                location.setVerbatimDepth(importDwc.getVerbatimDepth());
                location.setMinimumDepthInMeters(importDwc.getMinimumDepthInMeters());
                location.setMaximumDepthInMeters(importDwc.getMaximumDepthInMeters());
                location.setMinimumDistanceAboveSurfaceInMeters(importDwc.getMinimumDistanceAboveSurfaceInMeters());
                location.setMaximumDistanceAboveSurfaceInMeters(importDwc.getMaximumDistanceAboveSurfaceInMeters());
                location.setLocationAccordingTo(importDwc.getLocationAccordingTo());
                location.setLocationRemarks(importDwc.getLocationRemarks());
                location.setVerbatimCoordinates(importDwc.getVerbatimCoordinates());
                location.setVerbatimLatitude(importDwc.getVerbatimLatitude());
                location.setVerbatimLongitude(importDwc.getVerbatimLongitude());
                location.setVerbatimCoordinateSystem(importDwc.getVerbatimCoordinateSystem());
                location.setVerbatimSRS(importDwc.getVerbatimSRS());
                if (!importDwc.getDecimalLatitude().isEmpty())
                    location.setDecimalLatitude(Double.valueOf(importDwc.getDecimalLatitude()));
                if (!importDwc.getDecimalLongitude().isEmpty())
                    location.setDecimalLongitude(Double.valueOf(importDwc.getDecimalLongitude()));
                location.setGeodeticDatum(importDwc.getGeodeticDatum());
                location.setCoordinateUncertaintyInMeters(importDwc.getCoordinateUncertaintyInMeters());
                location.setCoordinatePrecision(importDwc.getCoordinatePrecision());
                location.setPointRadiusSpatialFit(importDwc.getPointRadiusSpatialFit());
                location.setFootprintWKT(importDwc.getFootprintWKT());
                location.setFootprintSRS(importDwc.getFootprintSRS());
                location.setFootprintSpatialFit(importDwc.getFootprintSpatialFit());
                location.setGeoreferencedBy(importDwc.getGeoreferencedBy());
                location.setGeoreferencedDate(importDwc.getGeoreferencedDate());
                location.setGeoreferenceProtocol(importDwc.getGeoreferenceProtocol());
                location.setGeoreferenceSources(importDwc.getGeoreferenceSources());
                location.setGeoreferenceVerificationStatus(importDwc.getGeoreferenceVerificationStatus());
                location.setGeoreferenceRemarks(importDwc.getGeoreferenceRemarks());

                locationDAO.create(location);
                // increment batch because location should be inserted
                batch++;
            }

            occurrence.setLocation(location);
            occurrence.setType(importDwc.getType());
            occurrence.setModified(importDwc.getModified());
            occurrence.setLanguage(importDwc.getLanguage());
            occurrence.setRights(importDwc.getRights());
            occurrence.setRightsHolder(importDwc.getRightsHolder());
            occurrence.setAccessRights(importDwc.getAccessRights());
            occurrence.setBibliographicCitation(importDwc.getBibliographicCitation());
            occurrence.setReferences(importDwc.getReferences());
            occurrence.setInstitutionId(importDwc.getInstitutionId());
            occurrence.setCollectionId(importDwc.getCollectionId());
            occurrence.setDatasetId(importDwc.getDatasetId());
            occurrence.setInstitutionCode(importDwc.getInstitutionCode());
            occurrence.setCollectionCode(importDwc.getCollectionCode());
            occurrence.setDatasetName(importDwc.getDatasetName());
            occurrence.setOwnerInstitutionCode(importDwc.getOwnerInstitutionCode());
            occurrence.setBasisOfRecord(importDwc.getBasisOfRecord());
            occurrence.setInformationWithheld(importDwc.getInformationWithheld());
            occurrence.setDataGeneralizations(importDwc.getDataGeneralizations());
            occurrence.setDynamicProperties(importDwc.getDynamicProperties());
            occurrence.setOccurrenceId(importDwc.getOccurrenceId().toString());
            occurrence.setCatalogNumber(importDwc.getCatalogNumber());
            occurrence.setOccurrenceRemarks(importDwc.getOccurrenceRemarks());
            occurrence.setRecordNumber(importDwc.getRecordNumber());
            occurrence.setRecordedBy(importDwc.getRecordedBy());
            occurrence.setIndividualId(importDwc.getIndividualId());
            occurrence.setIndividualCount(importDwc.getIndividualCount());
            occurrence.setSex(importDwc.getSex());
            occurrence.setLifeStage(importDwc.getLifeStage());
            occurrence.setReproductiveCondition(importDwc.getReproductiveCondition());
            occurrence.setBehavior(importDwc.getBehavior());
            occurrence.setEstablishmentMeans(importDwc.getEstablishmentMeans());
            occurrence.setOccurrenceStatus(importDwc.getOccurrenceStatus());
            occurrence.setPreparations(importDwc.getPreparations());
            occurrence.setDisposition(importDwc.getDisposition());
            occurrence.setOtherCatalogNumbers(importDwc.getOtherCatalogNumbers());
            occurrence.setPreviousIdentifications(importDwc.getPreviousIdentifications());
            occurrence.setAssociatedMedia(importDwc.getAssociatedMedia());
            occurrence.setAssociatedReferences(importDwc.getAssociatedReferences());
            occurrence.setAssociatedOccurrences(importDwc.getAssociatedOccurrences());
            occurrence.setAssociatedSequences(importDwc.getAssociatedSequences());
            occurrence.setAssociatedTaxa(importDwc.getAssociatedTaxa());
            occurrence.setEventId(importDwc.getEventId());
            occurrence.setSamplingProtocol(importDwc.getSamplingProtocol());
            occurrence.setSamplingEffort(importDwc.getSamplingEffort());
            occurrence.setEventDate(importDwc.getEventDate());
            occurrence.setEventTime(importDwc.getEventTime());
            occurrence.setStartDayOfYear(importDwc.getStartDayOfYear());
            occurrence.setEndDayOfYear(importDwc.getEndDayOfYear());
            occurrence.setYear(importDwc.getYear());
            occurrence.setMonth(importDwc.getMonth());
            occurrence.setDay(importDwc.getDay());
            occurrence.setVerbatimEventDate(importDwc.getVerbatimEventDate());
            occurrence.setHabitat(importDwc.getHabitat());
            // note: in the original source setFieldNotes() is called twice; the first call
            // (passing getFieldNumber()) is immediately overwritten and looks unintentional.
            occurrence.setFieldNotes(importDwc.getFieldNumber());
            occurrence.setFieldNotes(importDwc.getFieldNotes());
            occurrence.setEventRemarks(importDwc.getEventRemarks());
            occurrence.setGeologicalContextId(importDwc.getGeologicalContextId());
            occurrence.setEarliestEonOrLowestEonothem(importDwc.getEarliestEonOrLowestEonothem());
            occurrence.setLatestEonOrHighestEonothem(importDwc.getLatestEonOrHighestEonothem());
            occurrence.setEarliestEraOrLowestErathem(importDwc.getEarliestEraOrLowestErathem());
            occurrence.setLatestEraOrHighestErathem(importDwc.getLatestEraOrHighestErathem());
            occurrence.setEarliestPeriodOrLowestSystem(importDwc.getEarliestPeriodOrLowestSystem());
            occurrence.setLatestPeriodOrHighestSystem(importDwc.getLatestPeriodOrHighestSystem());
            occurrence.setEarliestEpochOrLowestSeries(importDwc.getEarliestEpochOrLowestSeries());
            occurrence.setLatestEpochOrHighestSeries(importDwc.getLatestEpochOrHighestSeries());
            occurrence.setEarliestAgeOrLowestStage(importDwc.getEarliestAgeOrLowestStage());
            occurrence.setLatestAgeOrHighestStage(importDwc.getLatestAgeOrHighestStage());
            occurrence.setLowestBiostratigraphicZone(importDwc.getLowestBiostratigraphicZone());
            occurrence.setHighestBiostratigraphicZone(importDwc.getHighestBiostratigraphicZone());
            occurrence.setLithostratigraphicTerms(importDwc.getLithostratigraphicTerms());
            occurrence.setGroup(importDwc.getGroup());
            occurrence.setFormation(importDwc.getFormation());
            occurrence.setMember(importDwc.getMember());
            occurrence.setBed(importDwc.getBed());
            occurrence.setIdentificationId(importDwc.getIdentificationId());
            occurrence.setIdentifiedBy(importDwc.getIdentifiedBy());
            if (importDwc.getDateIdentified() != null && importDwc.getDateIdentified().length() > 0)
                occurrence.setDateIdentified(sourceDateFormat.parse(importDwc.getDateIdentified()));
            occurrence.setIdentificationReferences(importDwc.getIdentificationReferences());
            occurrence.setIdentificationVerificationStatus(importDwc.getIdentificationVerificationStatus());
            occurrence.setIdentificationRemarks(importDwc.getIdentificationRemarks());
            occurrence.setIdentificationQualifier(importDwc.getIdentificationQualifier());
            occurrence.setTypeStatus(importDwc.getTypeStatus());
            occurrence.setTaxonId(importDwc.getTaxonId());
            occurrence.setScientificNameId(importDwc.getScientificNameId());
            occurrence.setAcceptedNameUsageId(importDwc.getAcceptedNameUsageId());
            occurrence.setParentNameUsageId(importDwc.getParentNameUsageId());
            occurrence.setOriginalNameUsageId(importDwc.getOriginalNameUsageId());
            occurrence.setNameAccordingToId(importDwc.getNameAccordingToId());
            occurrence.setNamePublishedInId(importDwc.getNamePublishedInId());
            occurrence.setTaxonConceptId(importDwc.getTaxonConceptId());
            occurrence.setScientificName(importDwc.getScientificName());
            occurrence.setAcceptedNameUsage(importDwc.getAcceptedNameUsage());
            occurrence.setParentNameUsage(importDwc.getParentNameUsage());
            occurrence.setOriginalNameUsage(importDwc.getOriginalNameUsage());
            occurrence.setNameAccordingTo(importDwc.getNameAccordingTo());
            occurrence.setNamePublishedIn(importDwc.getNamePublishedIn());
            occurrence.setNamePublishedInYear(importDwc.getNamePublishedInYear());
            occurrence.setHigherClassification(importDwc.getHigherClassification());
            occurrence.setKingdom(importDwc.getKingdom());
            occurrence.setPhylum(importDwc.getPhylum());
            occurrence.setClass_(importDwc.getClass_());
            occurrence.setOrder(importDwc.getOrder());
            occurrence.setFamily(importDwc.getFamily());
            occurrence.setGenus(importDwc.getGenus());
            occurrence.setSubgenus(importDwc.getSubgenus());
            occurrence.setSpecificEpithet(importDwc.getSpecificEpithet());
            occurrence.setInfraspecificEpithet(importDwc.getInfraspecificEpithet());
            occurrence.setTaxonRank(importDwc.getTaxonRank());
            occurrence.setVerbatimTaxonRank(importDwc.getVerbatimTaxonRank());
            occurrence.setScientificNameAuthorship(importDwc.getScientificNameAuthorship());
            occurrence.setVernacularName(importDwc.getVernacularName());
            occurrence.setNomenclaturalCode(importDwc.getNomenclaturalCode());
            occurrence.setTaxonomicStatus(importDwc.getTaxonomicStatus());
            occurrence.setNomenclaturalStatus(importDwc.getNomenclaturalStatus());
            occurrence.setTaxonRemarks(importDwc.getTaxonRemarks());

            if (!update)
                occurrenceDAO.create(occurrence);
            else
                occurrenceDAO.update(occurrence);

            // clear objects
            occurrence.setImages(null);
            occurrence = null;
            taxon = null;
            location = null;
        } catch (NonUniqueResultException ex) {
            logger.warn("NonUniqueResultException occurrenceId " + importDwc.getCatalogNumber());
        } catch (NumberFormatException ex) {
            logger.error("NumberFormatException occurrenceId " + importDwc.getCatalogNumber());
            ex.printStackTrace();
            System.exit(-1);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        // } // end for, 1000 importDwc rows

        session.evict(importDwc);

        if (batch >= BATCH_SIZE) {
            batch = 0;
            SessionStatistics statistics = session.getStatistics();
            logger.trace("Entities before flush: " + String.valueOf(statistics.getEntityCount()));
            session.flush();
            logger.trace("Entities before clear: " + String.valueOf(statistics.getEntityCount()));
            // fullTextSession.flushToIndexes();
            session.clear();
            logger.trace("Entities after clear: " + String.valueOf(statistics.getEntityCount()));
            // System.exit(1);
        }

        if (rowsCounter % maxResults == 0) {
            // fullTextSession.flushToIndexes();
            logger.info("Occurrences added " + rowsCounter);
            SessionStatistics statistics = session.getStatistics();
            logger.debug("Entities: " + String.valueOf(statistics.getEntityCount()));
            logger.debug("Collections: " + String.valueOf(statistics.getCollectionCount()));
        }

        // ******* for debug only ***********
        // if (rowsCounter == 1) {
        //     session.getTransaction().rollback();
        //     scroll.close();
        //     System.exit(1);
        // }

        // firstResult += maxResults;
        // occurrencesDwcList = importDwcDAO.scrollAll(ImportDwc.class, maxResults, firstResult);
    } // end while, no more importDwc rows

    scroll.close();

    // transactionManager.commit(status);
    session.flush();
    session.clear();

    logger.info("Total occurrences processed " + rowsCounter);

    // session.getTransaction().commit();
    // session.close();
}
From source file:org.j2free.jpa.Controller.java
License:Apache License
/**
 * It is critical that batchSize matches the hibernate.search.worker.batch_size you set
 *
 * @param <T>
 * @param entityClass
 * @param batchSize
 */
public <T> void hibernateSearchIndex(Class<T> entityClass, int batchSize) {
    FullTextSession fullTextSession = org.hibernate.search.Search.getFullTextSession(getSession());
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);

    ScrollableResults results = fullTextSession.createCriteria(entityClass).setFetchSize(batchSize)
            .scroll(ScrollMode.FORWARD_ONLY);
    try {
        int index = 0;
        while (results.next()) {
            index++;
            fullTextSession.index(results.get(0)); // index each element

            // clear every batchSize since the queue is processed
            if (index % batchSize == 0) {
                fullTextSession.flushToIndexes();
                fullTextSession.clear();
            }
        }
    } finally {
        results.close();
    }
}
From source file:org.jahia.modules.external.id.ExternalProviderInitializerServiceImpl.java
License:Open Source License
@Override
public Integer getProviderId(String providerKey) throws RepositoryException {
    ExternalProviderID providerId = null;
    SessionFactory hibernateSession = getHibernateSessionFactory();
    Session session = null;
    try {
        session = hibernateSession.openSession();
        List<?> list = session.createQuery("from ExternalProviderID where providerKey=:providerKey")
                .setString("providerKey", providerKey).setReadOnly(true).setFlushMode(FlushMode.MANUAL).list();
        if (list.size() > 0) {
            providerId = (ExternalProviderID) list.get(0);
        } else {
            // not registered yet -> generate ID and store it
            providerId = new ExternalProviderID();
            providerId.setProviderKey(providerKey);
            try {
                session.beginTransaction();
                session.save(providerId);
                session.getTransaction().commit();
            } catch (Exception e) {
                session.getTransaction().rollback();
                throw new RepositoryException(
                        "Issue when storing external provider ID for provider " + providerId, e);
            }
        }
    } catch (HibernateException e) {
        throw new RepositoryException("Issue when obtaining external provider ID for provider " + providerId, e);
    } finally {
        if (session != null) {
            session.close();
        }
    }
    return providerId.getId();
}
From source file:org.jasig.ssp.service.impl.ScheduledTaskWrapperServiceImpl.java
License:Apache License
protected Runnable withHibernateSession(final Runnable work) {
    return new Runnable() {
        @Override
        public void run() {
            // Basically a copy/paste of Spring's
            // OpenSessionInViewFilter#doFilterInternal, with the
            // web-specific stuff removed
            boolean participate = false;
            try {
                if (TransactionSynchronizationManager.hasResource(sessionFactory)) {
                    // Do not modify the Session: just set the participate flag.
                    LOGGER.debug("Scheduled task joining existing Hibernate session/transaction");
                    participate = true;
                } else {
                    LOGGER.debug("Scheduled task creating new Hibernate session");
                    Session session = sessionFactory.openSession();
                    session.setFlushMode(FlushMode.MANUAL);
                    SessionHolder sessionHolder = new SessionHolder(session);
                    TransactionSynchronizationManager.bindResource(sessionFactory, sessionHolder);
                }
                work.run();
            } finally {
                if (!participate) {
                    SessionHolder sessionHolder = (SessionHolder) TransactionSynchronizationManager
                            .unbindResource(sessionFactory);
                    LOGGER.debug("Scheduled task closing Hibernate session");
                    SessionFactoryUtils.closeSession(sessionHolder.getSession());
                } else {
                    LOGGER.debug(
                            "Scheduled task joined existing Hibernate session/transaction so skipping that cleanup step");
                }
            }
        }
    };
}
From source file:org.jboss.as.test.compat.jpa.hibernate.transformer.AbstractVerifyHibernate51CompatibilityTestCase.java
License:Open Source License
@Test
public void testORA5_3_1_Compatibility_getFlushModeFromSession() {
    // setup Configuration and SessionFactory
    sfsb.setupConfig();
    try {
        assertEquals("can handle Hibernate ORM 5.1 call to Session.getFlushMode()", FlushMode.MANUAL,
                sfsb.getFlushModeFromSessionTest(FlushMode.MANUAL));
    } finally {
        sfsb.cleanup();
    }
}
From source file:org.jboss.as.test.compat.jpa.hibernate.transformer.AbstractVerifyHibernate51CompatibilityTestCase.java
License:Open Source License
@Test
public void testORA5_3_1_Compatibility_getFlushModeFromQuery() {
    // setup Configuration and SessionFactory
    sfsb.setupConfig();
    try {
        assertEquals("can handle Hibernate ORM 5.1 call to Query.getFlushMode()", FlushMode.MANUAL,
                sfsb.getFlushModeFromQueryTest(FlushMode.MANUAL));
    } finally {
        sfsb.cleanup();
    }
}
From source file:org.jboss.seam.persistence.hibernate.HibernateManagedSessionProxyHandler.java
License:Open Source License
private void changeFushMode(FlushModeType flushModeType) {
    switch (flushModeType) {
    case AUTO:
        delegate.setFlushMode(FlushMode.AUTO);
        break;
    case MANUAL:
        delegate.setFlushMode(FlushMode.MANUAL);
        break;
    case COMMIT:
        delegate.setFlushMode(FlushMode.COMMIT);
        break;
    default:
        throw new RuntimeException("Unknown flush mode: " + flushModeType);
    }
}
From source file:org.jboss.seam.persistence.hibernate.test.ManagedHibernateSessionFlushModeTestBase.java
License:Open Source License
@Test
public void testHibernateSessionDefaultFlushMode() throws NotSupportedException, SystemException,
        SecurityException, IllegalStateException, RollbackException, HeuristicMixedException,
        HeuristicRollbackException {
    manager.setFlushModeType(FlushModeType.MANUAL);
    Assert.assertEquals(FlushMode.MANUAL, session.getFlushMode());
}