List of usage examples for the org.hibernate.Session#getStatistics() method
SessionStatistics getStatistics();
From source file: com.amalto.core.storage.hibernate.HibernateStorage.java
License:Open Source License
@Override public void update(Iterable<DataRecord> records) { assertPrepared();//www . ja va2 s. c om Session session = this.getCurrentSession(); try { storageClassLoader.bind(Thread.currentThread()); DataRecordConverter<Object> converter = new ObjectDataRecordConverter(storageClassLoader, session); for (DataRecord currentDataRecord : records) { TypeMapping mapping = mappingRepository.getMappingFromUser(currentDataRecord.getType()); Wrapper o = (Wrapper) converter.convert(currentDataRecord, mapping); if (session.contains(o) && session.isReadOnly(o)) { // A read only instance for an update? // Session#setReadOnly(...) does not always work as expected (especially in case of compound keys // see TMDM-7014). session.evict(o); o = (Wrapper) converter.convert(currentDataRecord, mapping); } DataRecordMetadata recordMetadata = currentDataRecord.getRecordMetadata(); Map<String, String> recordProperties = recordMetadata.getRecordProperties(); if (!ObjectUtils.equals(recordMetadata.getTaskId(), o.taskId())) { o.taskId(recordMetadata.getTaskId()); } for (Map.Entry<String, String> currentProperty : recordProperties.entrySet()) { String key = currentProperty.getKey(); String value = currentProperty.getValue(); ComplexTypeMetadata database = mapping.getDatabase(); if (database.hasField(key)) { Object convertedValue = StorageMetadataUtils.convert(value, database.getField(key)); if (!ObjectUtils.equals(convertedValue, o.get(key))) { o.set(key, convertedValue); } } else { throw new IllegalArgumentException("Can not store value '" + key //$NON-NLS-1$ + "' because there is no database field '" + key + "' in type '" + mapping.getName() //$NON-NLS-1$ //$NON-NLS-2$ + "' (storage is '" + toString() + "')"); //$NON-NLS-1$ //$NON-NLS-2$ } } session.saveOrUpdate(o); if (FLUSH_ON_LOAD && session.getStatistics().getEntityCount() % batchSize == 0) { // Periodically flush objects to avoid using too much memory. 
session.flush(); } } } catch (ConstraintViolationException e) { throw new com.amalto.core.storage.exception.ConstraintViolationException(e); } catch (PropertyValueException e) { throw new RuntimeException("Invalid value in record to update.", e); //$NON-NLS-1$ } catch (NonUniqueObjectException e) { throw new RuntimeException("Attempted to update multiple times same record within same transaction.", //$NON-NLS-1$ e); } catch (Exception e) { throw new RuntimeException("Exception occurred during update.", e); //$NON-NLS-1$ } finally { this.releaseSession(); storageClassLoader.unbind(Thread.currentThread()); } }
From source file:com.amalto.core.storage.hibernate.HibernateStorageTransaction.java
License:Open Source License
/** * Dumps all current entities in <code>session</code> using data model information from <code>storage</code>. * * @param session The Hibernate session that failed to be committed. * @param storage A {@link com.amalto.core.storage.hibernate.HibernateStorage} that can be used to retrieve metadata information for all objects in * <code>session</code>. */// w ww . j av a 2 s . co m private static void dumpTransactionContent(Session session, HibernateStorage storage) { Level currentLevel = Level.INFO; if (LOGGER.isEnabledFor(currentLevel)) { Set<EntityKey> failedKeys = new HashSet<>(session.getStatistics().getEntityKeys()); // Copy content to avoid concurrent modification issues. int i = 1; ObjectDataRecordReader reader = new ObjectDataRecordReader(); MappingRepository mappingRepository = storage.getTypeEnhancer().getMappings(); StorageClassLoader classLoader = storage.getClassLoader(); DataRecordXmlWriter writer = new DataRecordXmlWriter(); ResettableStringWriter xmlContent = new ResettableStringWriter(); for (EntityKey failedKey : failedKeys) { String entityTypeName = StringUtils.substringAfterLast(failedKey.getEntityName(), "."); //$NON-NLS-1$ LOGGER.log(currentLevel, "Entity #" + i++ + " (type=" + entityTypeName + ", id=" + failedKey.getIdentifier() + ")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ try { storage.getClassLoader().bind(Thread.currentThread()); Wrapper o = (Wrapper) ((SessionImpl) session).getPersistenceContext().getEntity(failedKey); if (!session.isReadOnly(o)) { if (o != null) { ComplexTypeMetadata type = classLoader .getTypeFromClass(classLoader.loadClass(failedKey.getEntityName())); if (type != null) { DataRecord record = reader.read(mappingRepository.getMappingFromDatabase(type), o); writer.write(record, xmlContent); LOGGER.log(currentLevel, xmlContent + "\n(taskId='" + o.taskId() + "', timestamp='" //$NON-NLS-1$//$NON-NLS-2$ + o.timestamp() + "')"); //$NON-NLS-1$ } else { LOGGER.warn("Could not find data model type for object " + 
o); //$NON-NLS-1$ } } else { LOGGER.warn("Could not find an object for entity " + failedKey); //$NON-NLS-1$ } } } catch (ObjectNotFoundException missingRefException) { LOGGER.log(currentLevel, "Can not log entity: contains a unresolved reference to '" //$NON-NLS-1$ + missingRefException.getEntityName() + "' with id '" //$NON-NLS-1$ + missingRefException.getIdentifier() + "'"); //$NON-NLS-1$ } catch (Exception serializationException) { LOGGER.log(currentLevel, "Failed to log entity content for type " + entityTypeName //$NON-NLS-1$ + " (enable DEBUG for exception details)."); //$NON-NLS-1$ if (LOGGER.isDebugEnabled()) { LOGGER.debug("Serialization exception occurred.", serializationException); //$NON-NLS-1$ } } finally { xmlContent.reset(); storage.getClassLoader().unbind(Thread.currentThread()); } if (i > TRANSACTION_DUMP_MAX) { if (!LOGGER.isDebugEnabled()) { int more = failedKeys.size() - i; if (more > 0) { LOGGER.log(currentLevel, "and " + more + " more... (enable DEBUG for full dump)"); //$NON-NLS-1$ //$NON-NLS-2$ } return; } else { currentLevel = Level.DEBUG; // Continue the dump but with a DEBUG level } } } } }
From source file:com.gemstone.gemfire.modules.HibernateJUnitTest.java
License:Apache License
@Test
public void testInsert() {
    // Open a fresh session and capture the region size before any persistence work.
    Session session = getSessionFactory(null).openSession();
    Region personRegion = GemFireCacheImpl.getExisting().getRegion(Person.class.getCanonicalName());
    int initialRegionSize = personRegion.size();

    // First transaction: insert a brand-new Person.
    session.beginTransaction();
    log.info("SWAP: Saving Person");
    Person person = new Person();
    person.setId(10L);
    person.setFirstname("foo");
    person.setLastname("bar");
    session.saveOrUpdate("Person", person);
    session.getTransaction().commit();

    // The session tracks exactly one entity and the region grew by one.
    assertEquals(1, session.getStatistics().getEntityCount());
    assertEquals(initialRegionSize + 1, personRegion.size());

    // Second transaction: update the same entity; the entity count stays at one.
    session.beginTransaction();
    person.setAge(1);
    session.saveOrUpdate(person);
    session.getTransaction().commit();
    assertEquals(1, session.getStatistics().getEntityCount());
}
From source file:edu.harvard.med.screensaver.db.GenericEntityDAOTest.java
License:Open Source License
public void testFlushAndClearSession() {
    genericEntityDao.runQuery(new Query() {
        public List execute(Session session) {
            // Build a library containing a single well and persist it.
            Library lib = new Library(_adminUser, "library", "library", ScreenType.SMALL_MOLECULE,
                    LibraryType.COMMERCIAL, 1, 1, PlateSize.WELLS_384);
            lib.createWell(new WellKey(1, "A01"), LibraryWellType.EMPTY);
            genericEntityDao.saveOrUpdateEntity(lib);
            genericEntityDao.flush();
            // After the flush the library is managed by the session.
            assertTrue(session.contains(lib));
            assertEquals(3, session.getStatistics().getEntityCount());
            assertEquals(lib.getLibraryId(), session.getIdentifier(lib));
            // Clearing the session detaches every entity.
            genericEntityDao.clear();
            assertEquals(0, session.getStatistics().getEntityCount());
            assertFalse(session.contains(lib));
            return null;
        }
    });
    // The flushed data survived the clear and is reachable from a new query.
    assertNotNull(genericEntityDao.findEntityByProperty(Library.class, "libraryName", "library"));
    assertNotNull(genericEntityDao.findEntityById(Well.class, "00001:A01"));
}
From source file:gov.nih.nci.cabig.caaers.dao.CaaersDao.java
License:BSD License
/**
 * Clears all content (entities and collections) from the current Hibernate session,
 * logging the session statistics before and after the clear when DEBUG is enabled.
 *
 * @see org.hibernate.Session#clear()
 */
@Transactional(readOnly = false)
public void clearSession() {
    Session session = getHibernateTemplate().getSessionFactory().getCurrentSession();
    logSessionCounts(session, "before clear()");
    session.clear();
    logSessionCounts(session, "after clear()");
}

/**
 * Logs the entity and collection counts of <code>session</code> at DEBUG level.
 * Extracted to remove the duplicated before/after logging blocks and to fetch
 * {@link SessionStatistics} only once per call instead of once per log line.
 *
 * @param session the session whose statistics are logged
 * @param when    label appended to the message, e.g. "before clear()"
 */
private void logSessionCounts(Session session, String when) {
    if (log.isDebugEnabled()) {
        SessionStatistics statistics = session.getStatistics();
        log.debug("Entity Count " + when + " : " + statistics.getEntityCount());
        log.debug("Collection Count " + when + " : " + statistics.getCollectionCount());
    }
}
From source file:org.inbio.neoportal.index.Importer.java
License:Open Source License
/**
 * Batch-imports all {@code ImportDwc} rows into {@code OccurrenceDwc} entities, creating or
 * updating occurrences (matched by catalog number), resolving taxa by scientific name, and
 * creating missing locations. Uses a forward-only scroll with MANUAL flush mode, flushing and
 * clearing the session every BATCH_SIZE processed rows to keep memory bounded.
 */
@Transactional
public void indexOccurrences() {
    Session session = sessionFactory.getCurrentSession();
    // FullTextSession fullTextSession = Search.getFullTextSession(session);
    // Configure session for batch job: no automatic flushing.
    session.setFlushMode(FlushMode.MANUAL);
    // fullTextSession.setFlushMode(FlushMode.MANUAL);
    logger.log(org.apache.log4j.Level.DEBUG, "Starting importOccurrences process");
    // Current date for the dateLastModified field.
    DateFormat dateFormat = new SimpleDateFormat("dd/MM/yyyy");
    Date date = new Date();
    String dateLastModified = dateFormat.format(date);
    // Format of dateIdentified values in the source data (e.g. "03-Jan-12").
    DateFormat sourceDateFormat = new SimpleDateFormat("dd-MMM-yy", Locale.ENGLISH);
    int firstResult = 0;
    int setCounter = 0; // every 100 (the jdbc batch size) call flush
    DataProvider dp = dataProviderDAO.findAll().get(0);
    logger.log(org.apache.log4j.Level.DEBUG, "importOccurrences Begin Transaction");
    ScrollableResults scroll = session.createCriteria(ImportDwc.class).setFetchSize(BATCH_SIZE)
            .setCacheMode(CacheMode.IGNORE).setReadOnly(true).scroll(ScrollMode.FORWARD_ONLY);
    boolean update;
    int batch = 0;        // rows since last flush/clear
    int rowsCounter = 0;  // total rows processed
    while (scroll.next()) {
        batch++;
        rowsCounter++;
        ImportDwc importDwc = (ImportDwc) scroll.get(0);
        logger.trace("ImportDwc after scroll.get");
        try {
            // Avoid repeated occurrenceId: reuse an existing occurrence when the catalog
            // number (quotes stripped) is already known.
            OccurrenceDwc occurrence = occurrenceDAO
                    .findByCatalogNumberHql(importDwc.getCatalogNumber().replace("\"", ""));
            logger.trace("OccurrenceDwc after findByCatalogNumber "
                    + importDwc.getCatalogNumber().replace("\"", ""));
            if (occurrence != null) {
                update = true;
                // continue;
            } else {
                update = false;
                occurrence = new OccurrenceDwc();
            }
            Taxon taxon = null;
            // Check if taxonId is empty (unidentified specimens).
            if (importDwc.getTaxonId().isEmpty()) {
                taxon = null;
            } else {
                // Check if it is the same taxon already associated with the occurrence.
                if (update && occurrence.getTaxonId().equals(importDwc.getTaxonId().replace("\"", ""))) {
                    taxon = occurrence.getTaxon();
                    logger.trace("Occurrence update with same taxon");
                } else {
                    // Find taxon entity by default (scientific) name; on multiple hits,
                    // disambiguate by kingdom.
                    // taxon = taxonNewDAO.findById(new BigDecimal(importDwc.getTaxonId().replace("\"", "")));
                    List<Taxon> taxonList = taxonDAO.findByDefaultName(importDwc.getScientificName());
                    logger.trace("Taxon after findByDefaultName");
                    if (taxonList.size() == 1)
                        taxon = taxonList.get(0);
                    else if (taxonList.size() > 1) {
                        for (Taxon taxon2 : taxonList) {
                            if (taxon2.getKingdom().equals(importDwc.getKingdom())) {
                                taxon = taxon2;
                                break;
                            }
                        }
                    }
                }
            }
            // TODO: fix, use specimenId instead
            // NOTE(review): this occurrenceId is later overwritten by
            // setOccurrenceId(importDwc.getOccurrenceId().toString()) below — confirm which
            // value is intended.
            occurrence.setOccurrenceId(importDwc.getCatalogNumber().replace("\"", ""));
            occurrence.setDataProvider(dp);
            occurrence.setTaxon(taxon);
            // Find or create location.
            Location location = locationDAO.findById(new BigDecimal(importDwc.getLocationId()));
            logger.trace("Location after findById");
            if (location == null) {
                location = new Location(new BigDecimal(importDwc.getLocationId()));
                location.setHigherGeographyId(importDwc.getHigherGeographyId());
                location.setHigherGeography(importDwc.getHigherGeography());
                location.setContinent(importDwc.getContinent());
                location.setWaterBody(importDwc.getWaterBody());
                location.setIslandGroup(importDwc.getIslandGroup());
                location.setIsland(importDwc.getIsland());
                location.setCountry(importDwc.getCountry());
                location.setCountryCode(importDwc.getCountryCode());
                location.setStateProvince(importDwc.getStateProvince());
                location.setCounty(importDwc.getCounty());
                location.setMunicipality(importDwc.getMunicipality());
                location.setLocality(importDwc.getLocality());
                location.setVerbatimLocality(importDwc.getVerbatimLocality());
                location.setVerbatimElevation(importDwc.getVerbatimElevation());
                location.setMinimumElevationInMeters(importDwc.getMinimumElevationInMeters());
                location.setMaximumElevationInMeters(importDwc.getMaximumElevationInMeters());
                location.setVerbatimDepth(importDwc.getVerbatimDepth());
                location.setMinimumDepthInMeters(importDwc.getMinimumDepthInMeters());
                location.setMaximumDepthInMeters(importDwc.getMaximumDepthInMeters());
                location.setMinimumDistanceAboveSurfaceInMeters(
                        importDwc.getMinimumDistanceAboveSurfaceInMeters());
                location.setMaximumDistanceAboveSurfaceInMeters(
                        importDwc.getMaximumDistanceAboveSurfaceInMeters());
                location.setLocationAccordingTo(importDwc.getLocationAccordingTo());
                location.setLocationRemarks(importDwc.getLocationRemarks());
                location.setVerbatimCoordinates(importDwc.getVerbatimCoordinates());
                location.setVerbatimLatitude(importDwc.getVerbatimLatitude());
                location.setVerbatimLongitude(importDwc.getVerbatimLongitude());
                location.setVerbatimCoordinateSystem(importDwc.getVerbatimCoordinateSystem());
                location.setVerbatimSRS(importDwc.getVerbatimSRS());
                // Decimal coordinates are optional in the source; only parse when present.
                if (!importDwc.getDecimalLatitude().isEmpty())
                    location.setDecimalLatitude(Double.valueOf(importDwc.getDecimalLatitude()));
                if (!importDwc.getDecimalLongitude().isEmpty())
                    location.setDecimalLongitude(Double.valueOf(importDwc.getDecimalLongitude()));
                location.setGeodeticDatum(importDwc.getGeodeticDatum());
                location.setCoordinateUncertaintyInMeters(importDwc.getCoordinateUncertaintyInMeters());
                location.setCoordinatePrecision(importDwc.getCoordinatePrecision());
                location.setPointRadiusSpatialFit(importDwc.getPointRadiusSpatialFit());
                location.setFootprintWKT(importDwc.getFootprintWKT());
                location.setFootprintSRS(importDwc.getFootprintSRS());
                location.setFootprintSpatialFit(importDwc.getFootprintSpatialFit());
                location.setGeoreferencedBy(importDwc.getGeoreferencedBy());
                location.setGeoreferencedDate(importDwc.getGeoreferencedDate());
                location.setGeoreferenceProtocol(importDwc.getGeoreferenceProtocol());
                location.setGeoreferenceSources(importDwc.getGeoreferenceSources());
                location.setGeoreferenceVerificationStatus(importDwc.getGeoreferenceVerificationStatus());
                location.setGeoreferenceRemarks(importDwc.getGeoreferenceRemarks());
                locationDAO.create(location);
                // Increment batch because the location insert also counts toward the JDBC batch.
                batch++;
            }
            occurrence.setLocation(location);
            occurrence.setType(importDwc.getType());
            occurrence.setModified(importDwc.getModified());
            occurrence.setLanguage(importDwc.getLanguage());
            occurrence.setRights(importDwc.getRights());
            occurrence.setRightsHolder(importDwc.getRightsHolder());
            occurrence.setAccessRights(importDwc.getAccessRights());
            occurrence.setBibliographicCitation(importDwc.getBibliographicCitation());
            occurrence.setReferences(importDwc.getReferences());
            occurrence.setInstitutionId(importDwc.getInstitutionId());
            occurrence.setCollectionId(importDwc.getCollectionId());
            occurrence.setDatasetId(importDwc.getDatasetId());
            occurrence.setInstitutionCode(importDwc.getInstitutionCode());
            occurrence.setCollectionCode(importDwc.getCollectionCode());
            occurrence.setDatasetName(importDwc.getDatasetName());
            occurrence.setOwnerInstitutionCode(importDwc.getOwnerInstitutionCode());
            occurrence.setBasisOfRecord(importDwc.getBasisOfRecord());
            occurrence.setInformationWithheld(importDwc.getInformationWithheld());
            occurrence.setDataGeneralizations(importDwc.getDataGeneralizations());
            occurrence.setDynamicProperties(importDwc.getDynamicProperties());
            occurrence.setOccurrenceId(importDwc.getOccurrenceId().toString());
            occurrence.setCatalogNumber(importDwc.getCatalogNumber());
            occurrence.setOccurrenceRemarks(importDwc.getOccurrenceRemarks());
            occurrence.setRecordNumber(importDwc.getRecordNumber());
            occurrence.setRecordedBy(importDwc.getRecordedBy());
            occurrence.setIndividualId(importDwc.getIndividualId());
            occurrence.setIndividualCount(importDwc.getIndividualCount());
            occurrence.setSex(importDwc.getSex());
            occurrence.setLifeStage(importDwc.getLifeStage());
            occurrence.setReproductiveCondition(importDwc.getReproductiveCondition());
            occurrence.setBehavior(importDwc.getBehavior());
            occurrence.setEstablishmentMeans(importDwc.getEstablishmentMeans());
            occurrence.setOccurrenceStatus(importDwc.getOccurrenceStatus());
            occurrence.setPreparations(importDwc.getPreparations());
            occurrence.setDisposition(importDwc.getDisposition());
            occurrence.setOtherCatalogNumbers(importDwc.getOtherCatalogNumbers());
            occurrence.setPreviousIdentifications(importDwc.getPreviousIdentifications());
            occurrence.setAssociatedMedia(importDwc.getAssociatedMedia());
            occurrence.setAssociatedReferences(importDwc.getAssociatedReferences());
            occurrence.setAssociatedOccurrences(importDwc.getAssociatedOccurrences());
            occurrence.setAssociatedSequences(importDwc.getAssociatedSequences());
            occurrence.setAssociatedTaxa(importDwc.getAssociatedTaxa());
            occurrence.setEventId(importDwc.getEventId());
            occurrence.setSamplingProtocol(importDwc.getSamplingProtocol());
            occurrence.setSamplingEffort(importDwc.getSamplingEffort());
            occurrence.setEventDate(importDwc.getEventDate());
            occurrence.setEventTime(importDwc.getEventTime());
            occurrence.setStartDayOfYear(importDwc.getStartDayOfYear());
            occurrence.setEndDayOfYear(importDwc.getEndDayOfYear());
            occurrence.setYear(importDwc.getYear());
            occurrence.setMonth(importDwc.getMonth());
            occurrence.setDay(importDwc.getDay());
            occurrence.setVerbatimEventDate(importDwc.getVerbatimEventDate());
            occurrence.setHabitat(importDwc.getHabitat());
            // NOTE(review): suspected bug — setFieldNotes is called twice; the first call with
            // getFieldNumber() is immediately overwritten. Probably meant setFieldNumber(...).
            occurrence.setFieldNotes(importDwc.getFieldNumber());
            occurrence.setFieldNotes(importDwc.getFieldNotes());
            occurrence.setEventRemarks(importDwc.getEventRemarks());
            occurrence.setGeologicalContextId(importDwc.getGeologicalContextId());
            occurrence.setEarliestEonOrLowestEonothem(importDwc.getEarliestEonOrLowestEonothem());
            occurrence.setLatestEonOrHighestEonothem(importDwc.getLatestEonOrHighestEonothem());
            occurrence.setEarliestEraOrLowestErathem(importDwc.getEarliestEraOrLowestErathem());
            occurrence.setLatestEraOrHighestErathem(importDwc.getLatestEraOrHighestErathem());
            occurrence.setEarliestPeriodOrLowestSystem(importDwc.getEarliestPeriodOrLowestSystem());
            occurrence.setLatestPeriodOrHighestSystem(importDwc.getLatestPeriodOrHighestSystem());
            occurrence.setEarliestEpochOrLowestSeries(importDwc.getEarliestEpochOrLowestSeries());
            occurrence.setLatestEpochOrHighestSeries(importDwc.getLatestEpochOrHighestSeries());
            occurrence.setEarliestAgeOrLowestStage(importDwc.getEarliestAgeOrLowestStage());
            occurrence.setLatestAgeOrHighestStage(importDwc.getLatestAgeOrHighestStage());
            occurrence.setLowestBiostratigraphicZone(importDwc.getLowestBiostratigraphicZone());
            occurrence.setHighestBiostratigraphicZone(importDwc.getHighestBiostratigraphicZone());
            occurrence.setLithostratigraphicTerms(importDwc.getLithostratigraphicTerms());
            occurrence.setGroup(importDwc.getGroup());
            occurrence.setFormation(importDwc.getFormation());
            occurrence.setMember(importDwc.getMember());
            occurrence.setBed(importDwc.getBed());
            occurrence.setIdentificationId(importDwc.getIdentificationId());
            occurrence.setIdentifiedBy(importDwc.getIdentifiedBy());
            if (importDwc.getDateIdentified() != null && importDwc.getDateIdentified().length() > 0)
                occurrence.setDateIdentified(sourceDateFormat.parse(importDwc.getDateIdentified()));
            occurrence.setIdentificationReferences(importDwc.getIdentificationReferences());
            occurrence.setIdentificationVerificationStatus(importDwc.getIdentificationVerificationStatus());
            occurrence.setIdentificationRemarks(importDwc.getIdentificationRemarks());
            occurrence.setIdentificationQualifier(importDwc.getIdentificationQualifier());
            occurrence.setTypeStatus(importDwc.getTypeStatus());
            occurrence.setTaxonId(importDwc.getTaxonId());
            occurrence.setScientificNameId(importDwc.getScientificNameId());
            occurrence.setAcceptedNameUsageId(importDwc.getAcceptedNameUsageId());
            occurrence.setParentNameUsageId(importDwc.getParentNameUsageId());
            occurrence.setOriginalNameUsageId(importDwc.getOriginalNameUsageId());
            occurrence.setNameAccordingToId(importDwc.getNameAccordingToId());
            occurrence.setNamePublishedInId(importDwc.getNamePublishedInId());
            occurrence.setTaxonConceptId(importDwc.getTaxonConceptId());
            occurrence.setScientificName(importDwc.getScientificName());
            occurrence.setAcceptedNameUsage(importDwc.getAcceptedNameUsage());
            occurrence.setParentNameUsage(importDwc.getParentNameUsage());
            occurrence.setOriginalNameUsage(importDwc.getOriginalNameUsage());
            occurrence.setNameAccordingTo(importDwc.getNameAccordingTo());
            occurrence.setNamePublishedIn(importDwc.getNamePublishedIn());
            occurrence.setNamePublishedInYear(importDwc.getNamePublishedInYear());
            occurrence.setHigherClassification(importDwc.getHigherClassification());
            occurrence.setKingdom(importDwc.getKingdom());
            occurrence.setPhylum(importDwc.getPhylum());
            occurrence.setClass_(importDwc.getClass_());
            occurrence.setOrder(importDwc.getOrder());
            occurrence.setFamily(importDwc.getFamily());
            occurrence.setGenus(importDwc.getGenus());
            occurrence.setSubgenus(importDwc.getSubgenus());
            occurrence.setSpecificEpithet(importDwc.getSpecificEpithet());
            occurrence.setInfraspecificEpithet(importDwc.getInfraspecificEpithet());
            occurrence.setTaxonRank(importDwc.getTaxonRank());
            occurrence.setVerbatimTaxonRank(importDwc.getVerbatimTaxonRank());
            occurrence.setScientificNameAuthorship(importDwc.getScientificNameAuthorship());
            occurrence.setVernacularName(importDwc.getVernacularName());
            occurrence.setNomenclaturalCode(importDwc.getNomenclaturalCode());
            occurrence.setTaxonomicStatus(importDwc.getTaxonomicStatus());
            occurrence.setNomenclaturalStatus(importDwc.getNomenclaturalStatus());
            occurrence.setTaxonRemarks(importDwc.getTaxonRemarks());
            if (!update)
                occurrenceDAO.create(occurrence);
            else
                occurrenceDAO.update(occurrence);
            // Clear object references to help the batch stay memory-light.
            occurrence.setImages(null);
            occurrence = null;
            taxon = null;
            location = null;
        } catch (NonUniqueResultException ex) {
            logger.warn("NonUniqueResultException occurrenceId " + importDwc.getCatalogNumber());
        } catch (NumberFormatException ex) {
            // Unparseable numeric field is treated as fatal for the whole import.
            logger.error("NumberFormatException occurrenceId " + importDwc.getCatalogNumber());
            ex.printStackTrace();
            System.exit(-1);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        // } // end for, 1000 importDwc rows
        // Evict the read-only source row so it does not accumulate in the session.
        session.evict(importDwc);
        if (batch >= BATCH_SIZE) {
            batch = 0;
            SessionStatistics statistics = session.getStatistics();
            logger.trace("Entities before flush: " + String.valueOf(statistics.getEntityCount()));
            session.flush();
            logger.trace("Entities before clear: " + String.valueOf(statistics.getEntityCount()));
            // fullTextSession.flushToIndexes();
            session.clear();
            logger.trace("Entities after clear: " + String.valueOf(statistics.getEntityCount()));
            // System.exit(1);
        }
        if (rowsCounter % maxResults == 0) {
            // fullTextSession.flushToIndexes();
            logger.info("Occurrences added " + rowsCounter);
            SessionStatistics statistics = session.getStatistics();
            logger.debug("Entities: " + String.valueOf(statistics.getEntityCount()));
            logger.debug("Collections: " + String.valueOf(statistics.getCollectionCount()));
        }
        // ******* for debug only ***********
        // if(rowsCounter == 1) {
        // session.getTransaction().rollback();
        // scroll.close();
        // System.exit(1);
        // }
        // firstResult += maxResults;
        // occurrencesDwcList =
        // importDwcDAO.scrollAll(ImportDwc.class,
        // maxResults,
        // firstResult);
    } // end while, no more importDwc rows
    scroll.close();
    // transactionManager.commit(status);
    // Final flush/clear for the tail of the last (partial) batch.
    session.flush();
    session.clear();
    logger.info("Total occurrences processed " + rowsCounter);
    // session.getTransaction().commit();
    // session.close();
}
From source file:org.jboss.as.test.integration.hibernate.SFSBHibernate2LcacheStats.java
License:Open Source License
public Planet prepareData(String planetName, String galaxyName, String starName, Set<Satellite> satellites, Integer id) {/*from w ww. j av a 2 s .c o m*/ Session session = sessionFactory.openSession(); Planet planet = new Planet(); planet.setPlanetId(id); planet.setPlanetName(planetName); planet.setGalaxy(galaxyName); planet.setStar(starName); // Transaction trans = session.beginTransaction(); try { session.save(planet); if (satellites != null && satellites.size() > 0) { Iterator<Satellite> itrSat = satellites.iterator(); while (itrSat.hasNext()) { Satellite sat = itrSat.next(); session.save(sat); } planet.setSatellites(new HashSet<Satellite>()); planet.getSatellites().addAll(satellites); } session.saveOrUpdate(planet); SessionStatistics stats = session.getStatistics(); assertEquals(2, stats.getEntityKeys().size()); assertEquals(2, stats.getEntityCount()); // session.flush(); // session.close(); } catch (Exception e) { e.printStackTrace(); throw new RuntimeException("transactional failure while persisting planet entity", e); } // trans.commit(); session.close(); return planet; }
From source file:org.jpos.transaction.DebugDB.java
License:Open Source License
/**
 * Logs every entity (with its current lock mode) and every collection tracked by the
 * Hibernate session attached to this transaction context, then votes PREPARED.
 */
@Override
public int prepare(long id, Serializable context) {
    Context ctx = (Context) context;
    DB db = (DB) ctx.get(TxnConstants.DB);
    Session session = db.session();
    SessionStatistics statistics = session.getStatistics();

    // Entities: one line per entity key, including its lock mode.
    ctx.log(String.format("ENTITIES: (%d)", statistics.getEntityCount()));
    for (EntityKey entityKey : statistics.getEntityKeys()) {
        Object entity = session.get(entityKey.getEntityName(), entityKey.getIdentifier());
        LockMode lockMode = session.getCurrentLockMode(entity);
        ctx.log(String.format("[%s] %s %s", entityKey.getIdentifier(), entityKey.getEntityName(), lockMode));
    }

    // Collections: one line per collection key with its role.
    ctx.log("==== COLLECTIONS ====");
    for (CollectionKey collectionKey : statistics.getCollectionKeys()) {
        ctx.log(String.format("[%s] %s", collectionKey.getKey(), collectionKey.getRole()));
    }
    ctx.log("=====================");
    return PREPARED | READONLY | NO_JOIN;
}