List of usage examples for the org.hibernate Session method evict(Object), which detaches an instance from the session cache.
void evict(Object object);
From source file:org.eurocarbdb.dataaccess.HibernateEntityManager.java
License:Open Source License
/**
 * Detaches the given entity from the current Hibernate session, so any further
 * in-memory changes to it are no longer tracked or flushed to the database.
 *
 * @param entity the persistent instance to detach
 */
public <T> void revert(T entity) {
    getHibernateSession().evict(entity);
}
From source file:org.gbif.portal.dao.DAOUtils.java
License:Open Source License
/** * /*from www. j ava 2s . c om*/ * @param resultsOutputter * @param session * @param sr * @param associationTraverser * @param batchSize * @throws IOException */ public static void processScrollableResults(final ResultsOutputter resultsOutputter, Session session, ScrollableResults sr, AssociationTraverser associationTraverser, int batchSize) throws IOException { //indicate end of resultset boolean eor = false; int batchNo = 0; do { if (logger.isDebugEnabled()) { logger.debug("Running batch: " + (batchNo++)); } if (associationTraverser != null) associationTraverser.batchPreprocess(batchSize, sr, session); //process in batches for (int i = 0; i < batchSize && !eor; i++) { Object record = sr.get(); Map beanMap = null; //assemble all required model objects for rendering a single row if (associationTraverser != null) { beanMap = associationTraverser.traverse(record, session); } else { beanMap = new HashMap<String, Object>(); if (record != null && record instanceof Object[] && ((Object[]) record).length > 0) { beanMap.put("record", ((Object[]) record)[0]); } else { beanMap.put("record", record); } } //write out result resultsOutputter.write(beanMap); if (beanMap != null) { //evict from the session to keep memory footprint down for (Object recordElement : beanMap.entrySet()) { session.evict(recordElement); } beanMap = null; } //check to see if this is the last element in resultset if (sr.isLast()) { eor = true; } else { sr.next(); } } //post process if (associationTraverser != null) associationTraverser.batchPostprocess(batchSize, sr, session); //flush between batches - to remove objects from the session session.flush(); session.clear(); } while (!eor); }
From source file:org.gbif.portal.dao.occurrence.OccurrenceAssociationTraverser.java
License:Open Source License
/**
 * Pre-fetches, for the next batch of occurrence records in the scrollable result
 * set, the associated image, type-status and identifier records, caching them in
 * instance maps keyed by occurrence record id (orImageUrlMap, typeStatusMap,
 * identifiersMap) so per-row traversal needs no extra queries.
 *
 * Scrolls forward over at most (batchSize - 1) rows to collect their ids, then
 * scrolls back by exactly the number of rows advanced so the caller's cursor
 * position is unaffected.
 *
 * @see org.gbif.portal.dao.AssociationTraverser#batchPreprocess(int, org.hibernate.ScrollableResults, org.hibernate.Session)
 */
public void batchPreprocess(int batchSize, ScrollableResults scrollableResults, Session session) {
    // nothing to pre-fetch unless identifier retrieval was requested
    if (!retrieveIdentifiers)
        return;
    if (logger.isDebugEnabled())
        logger.debug("Current row number:" + scrollableResults.getRowNumber());
    List<Long> occurrenceRecordIds = new ArrayList<Long>();
    boolean eor = false; // end-of-resultset flag
    int numberScrolled = 0;
    // Collect ids of the next (batchSize - 1) rows. On the last row no next()
    // is issued, and the numberScrolled-- cancels that iteration's increment,
    // so numberScrolled always equals the number of next() calls made.
    for (numberScrolled = 0; numberScrolled < batchSize - 1 && !eor; numberScrolled++) {
        // retrieve the id (first column of the projection)
        Long recordId = (Long) scrollableResults.get(0);
        occurrenceRecordIds.add(recordId);
        if (scrollableResults.isLast()) {
            eor = true;
            numberScrolled--;
        } else {
            scrollableResults.next();
        }
    }
    // rewind to the row the caller left the cursor on
    scrollableResults.scroll(-numberScrolled);
    if (logger.isDebugEnabled()) {
        logger.debug("Number scrolled through: " + numberScrolled);
        logger.debug("Scrolled back to: " + scrollableResults.getRowNumber());
    }
    // retrieve image records for this batch and index them by occurrence record id
    List<ORImage> orImageList = imageRecordDAO.getImageRecordsForOccurrenceRecords(occurrenceRecordIds);
    this.orImageUrlMap = new HashMap<Long, String>();
    for (ORImage orImage : orImageList) {
        // only the first image url found per occurrence record is stored
        if (this.orImageUrlMap.get(orImage.getOccurrenceRecordId()) == null) {
            this.orImageUrlMap.put(orImage.getOccurrenceRecordId(), orImage.getUrl());
        }
        // evict immediately to keep the session's memory footprint down
        session.evict(orImage);
    }
    if (logger.isDebugEnabled())
        logger.debug("Number of images found for batch: " + this.orImageUrlMap.size());
    // retrieve type status records for this batch
    List<TypeStatus> typeStatusList = typificationRecordDAO
            .getTypeStatusForOccurrenceRecords(occurrenceRecordIds);
    this.typeStatusMap = new HashMap<Long, String>();
    for (TypeStatus typeStatus : typeStatusList) {
        // only the first type status found per occurrence record is stored
        if (this.typeStatusMap.get(typeStatus.getOccurrenceRecordId()) == null) {
            this.typeStatusMap.put(typeStatus.getOccurrenceRecordId(),
                    typeStatus.getTypeStatus());
        }
        session.evict(typeStatus);
    }
    if (logger.isDebugEnabled())
        logger.debug("Number of type status found for batch: " + this.typeStatusMap.size());
    // retrieve identifier records for this batch, grouped per occurrence record
    List<IdentifierRecord> identifierList = identifierRecordDAO
            .getIdentifierRecordsForOccurrenceRecords(occurrenceRecordIds);
    this.identifiersMap = new HashMap<Long, List<IdentifierRecord>>();
    for (IdentifierRecord ir : identifierList) {
        List<IdentifierRecord> irs = this.identifiersMap.get(ir.getOccurrenceRecordId());
        if (irs == null) {
            irs = new ArrayList<IdentifierRecord>();
            irs.add(ir);
            this.identifiersMap.put(ir.getOccurrenceRecordId(), irs);
        } else {
            irs.add(ir);
        }
        session.evict(ir);
    }
    if (logger.isDebugEnabled())
        logger.debug("Number of identifiers found for batch: " + this.identifiersMap.size());
}
From source file:org.grails.orm.hibernate.GrailsHibernateTemplate.java
License:Apache License
/**
 * Evicts the given entity from the Hibernate session associated with this
 * template, executing inside the template's standard callback machinery.
 *
 * @param entity the persistent instance to detach
 * @throws DataAccessException if the underlying Hibernate operation fails
 */
public void evict(final Object entity) throws DataAccessException {
    final HibernateCallback<Object> evictAction = new HibernateCallback<Object>() {
        public Object doInHibernate(Session hibernateSession) throws HibernateException {
            hibernateSession.evict(entity);
            return null;
        }
    };
    doExecute(evictAction, true);
}
From source file:org.hyperic.hibernate.CacheInitializingLocalSessionFactoryBean.java
License:Open Source License
@SuppressWarnings("unchecked") private void preloadCache() { Session session = SessionFactoryUtils.getSession(getSessionFactory(), true); for (String className : classesForCache) { Class<?> clazz;/* w w w.jav a2 s .c om*/ className = className.trim(); if (className.length() == 0 || className.startsWith("#")) { continue; } try { clazz = Class.forName(className); } catch (Exception e) { log.warn("Unable to find preload cache for class [" + className + "]", e); continue; } long start = System.currentTimeMillis(); Collection<Object> vals = session.createCriteria(clazz).list(); long end = System.currentTimeMillis(); log.info("Preloaded " + vals.size() + " [" + clazz.getName() + "] in " + (end - start) + " millis"); // Evict, to avoid dirty checking everything in the inventory for (Object val : vals) { session.evict(val); } } }
From source file:org.inbio.neoportal.index.Importer.java
License:Open Source License
/** * /* w ww .j a v a2 s.c o m*/ */ @Transactional public void indexOccurrences() { Session session = sessionFactory.getCurrentSession(); // FullTextSession fullTextSession = Search.getFullTextSession(session); // config session for bash job session.setFlushMode(FlushMode.MANUAL); // fullTextSession.setFlushMode(FlushMode.MANUAL); logger.log(org.apache.log4j.Level.DEBUG, "Starting importOccurrences process"); //get current date for dateLastModified field DateFormat dateFormat = new SimpleDateFormat("dd/MM/yyyy"); Date date = new Date(); String dateLastModified = dateFormat.format(date); DateFormat sourceDateFormat = new SimpleDateFormat("dd-MMM-yy", Locale.ENGLISH); int firstResult = 0; int setCounter = 0; //every 100 (the jdbc batch size) call flush DataProvider dp = dataProviderDAO.findAll().get(0); logger.log(org.apache.log4j.Level.DEBUG, "importOccurrences Begin Transaction"); ScrollableResults scroll = session.createCriteria(ImportDwc.class).setFetchSize(BATCH_SIZE) .setCacheMode(CacheMode.IGNORE).setReadOnly(true).scroll(ScrollMode.FORWARD_ONLY); boolean update; int batch = 0; int rowsCounter = 0; while (scroll.next()) { batch++; rowsCounter++; ImportDwc importDwc = (ImportDwc) scroll.get(0); logger.trace("ImportDwc after scroll.get"); try { //avoid repeated occurrenceId OccurrenceDwc occurrence = occurrenceDAO .findByCatalogNumberHql(importDwc.getCatalogNumber().replace("\"", "")); logger.trace("OccurrenceDwc after findByCatalogNumber " + importDwc.getCatalogNumber().replace("\"", "")); if (occurrence != null) { update = true; // continue; } else { update = false; occurrence = new OccurrenceDwc(); } Taxon taxon = null; //check if taxonId is empty (unidentify specimens) if (importDwc.getTaxonId().isEmpty()) { taxon = null; } else { // check if is the same taxon already associated with the occurrence if (update && occurrence.getTaxonId().equals(importDwc.getTaxonId().replace("\"", ""))) { taxon = occurrence.getTaxon(); logger.trace("Occurrence update with 
same taxon"); } else { // find taxon entity // taxon = taxonNewDAO.findById(new BigDecimal(importDwc.getTaxonId().replace("\"", ""))); List<Taxon> taxonList = taxonDAO.findByDefaultName(importDwc.getScientificName()); logger.trace("Taxon after findByDefaultName"); if (taxonList.size() == 1) taxon = taxonList.get(0); else if (taxonList.size() > 1) { for (Taxon taxon2 : taxonList) { if (taxon2.getKingdom().equals(importDwc.getKingdom())) { taxon = taxon2; break; } } } } } // TODO: fix, use specimenId instead occurrence.setOccurrenceId(importDwc.getCatalogNumber().replace("\"", "")); occurrence.setDataProvider(dp); occurrence.setTaxon(taxon); //find or create location Location location = locationDAO.findById(new BigDecimal(importDwc.getLocationId())); logger.trace("Location after findById"); if (location == null) { location = new Location(new BigDecimal(importDwc.getLocationId())); location.setHigherGeographyId(importDwc.getHigherGeographyId()); location.setHigherGeography(importDwc.getHigherGeography()); location.setContinent(importDwc.getContinent()); location.setWaterBody(importDwc.getWaterBody()); location.setIslandGroup(importDwc.getIslandGroup()); location.setIsland(importDwc.getIsland()); location.setCountry(importDwc.getCountry()); location.setCountryCode(importDwc.getCountryCode()); location.setStateProvince(importDwc.getStateProvince()); location.setCounty(importDwc.getCounty()); location.setMunicipality(importDwc.getMunicipality()); location.setLocality(importDwc.getLocality()); location.setVerbatimLocality(importDwc.getVerbatimLocality()); location.setVerbatimElevation(importDwc.getVerbatimElevation()); location.setMinimumElevationInMeters(importDwc.getMinimumElevationInMeters()); location.setMaximumElevationInMeters(importDwc.getMaximumElevationInMeters()); location.setVerbatimDepth(importDwc.getVerbatimDepth()); location.setMinimumDepthInMeters(importDwc.getMinimumDepthInMeters()); location.setMaximumDepthInMeters(importDwc.getMaximumDepthInMeters()); 
location.setMinimumDistanceAboveSurfaceInMeters( importDwc.getMinimumDistanceAboveSurfaceInMeters()); location.setMaximumDistanceAboveSurfaceInMeters( importDwc.getMaximumDistanceAboveSurfaceInMeters()); location.setLocationAccordingTo(importDwc.getLocationAccordingTo()); location.setLocationRemarks(importDwc.getLocationRemarks()); location.setVerbatimCoordinates(importDwc.getVerbatimCoordinates()); location.setVerbatimLatitude(importDwc.getVerbatimLatitude()); location.setVerbatimLongitude(importDwc.getVerbatimLongitude()); location.setVerbatimCoordinateSystem(importDwc.getVerbatimCoordinateSystem()); location.setVerbatimSRS(importDwc.getVerbatimSRS()); if (!importDwc.getDecimalLatitude().isEmpty()) location.setDecimalLatitude(Double.valueOf(importDwc.getDecimalLatitude())); if (!importDwc.getDecimalLongitude().isEmpty()) location.setDecimalLongitude(Double.valueOf(importDwc.getDecimalLongitude())); location.setGeodeticDatum(importDwc.getGeodeticDatum()); location.setCoordinateUncertaintyInMeters(importDwc.getCoordinateUncertaintyInMeters()); location.setCoordinatePrecision(importDwc.getCoordinatePrecision()); location.setPointRadiusSpatialFit(importDwc.getPointRadiusSpatialFit()); location.setFootprintWKT(importDwc.getFootprintWKT()); location.setFootprintSRS(importDwc.getFootprintSRS()); location.setFootprintSpatialFit(importDwc.getFootprintSpatialFit()); location.setGeoreferencedBy(importDwc.getGeoreferencedBy()); location.setGeoreferencedDate(importDwc.getGeoreferencedDate()); location.setGeoreferenceProtocol(importDwc.getGeoreferenceProtocol()); location.setGeoreferenceSources(importDwc.getGeoreferenceSources()); location.setGeoreferenceVerificationStatus(importDwc.getGeoreferenceVerificationStatus()); location.setGeoreferenceRemarks(importDwc.getGeoreferenceRemarks()); locationDAO.create(location); // increment batch because location should be inserted batch++; } occurrence.setLocation(location); occurrence.setType(importDwc.getType()); 
occurrence.setModified(importDwc.getModified()); occurrence.setLanguage(importDwc.getLanguage()); occurrence.setRights(importDwc.getRights()); occurrence.setRightsHolder(importDwc.getRightsHolder()); occurrence.setAccessRights(importDwc.getAccessRights()); occurrence.setBibliographicCitation(importDwc.getBibliographicCitation()); occurrence.setReferences(importDwc.getReferences()); occurrence.setInstitutionId(importDwc.getInstitutionId()); occurrence.setCollectionId(importDwc.getCollectionId()); occurrence.setDatasetId(importDwc.getDatasetId()); occurrence.setInstitutionCode(importDwc.getInstitutionCode()); occurrence.setCollectionCode(importDwc.getCollectionCode()); occurrence.setDatasetName(importDwc.getDatasetName()); occurrence.setOwnerInstitutionCode(importDwc.getOwnerInstitutionCode()); occurrence.setBasisOfRecord(importDwc.getBasisOfRecord()); occurrence.setInformationWithheld(importDwc.getInformationWithheld()); occurrence.setDataGeneralizations(importDwc.getDataGeneralizations()); occurrence.setDynamicProperties(importDwc.getDynamicProperties()); occurrence.setOccurrenceId(importDwc.getOccurrenceId().toString()); occurrence.setCatalogNumber(importDwc.getCatalogNumber()); occurrence.setOccurrenceRemarks(importDwc.getOccurrenceRemarks()); occurrence.setRecordNumber(importDwc.getRecordNumber()); occurrence.setRecordedBy(importDwc.getRecordedBy()); occurrence.setIndividualId(importDwc.getIndividualId()); occurrence.setIndividualCount(importDwc.getIndividualCount()); occurrence.setSex(importDwc.getSex()); occurrence.setLifeStage(importDwc.getLifeStage()); occurrence.setReproductiveCondition(importDwc.getReproductiveCondition()); occurrence.setBehavior(importDwc.getBehavior()); occurrence.setEstablishmentMeans(importDwc.getEstablishmentMeans()); occurrence.setOccurrenceStatus(importDwc.getOccurrenceStatus()); occurrence.setPreparations(importDwc.getPreparations()); occurrence.setDisposition(importDwc.getDisposition()); 
occurrence.setOtherCatalogNumbers(importDwc.getOtherCatalogNumbers()); occurrence.setPreviousIdentifications(importDwc.getPreviousIdentifications()); occurrence.setAssociatedMedia(importDwc.getAssociatedMedia()); occurrence.setAssociatedReferences(importDwc.getAssociatedReferences()); occurrence.setAssociatedOccurrences(importDwc.getAssociatedOccurrences()); occurrence.setAssociatedSequences(importDwc.getAssociatedSequences()); occurrence.setAssociatedTaxa(importDwc.getAssociatedTaxa()); occurrence.setEventId(importDwc.getEventId()); occurrence.setSamplingProtocol(importDwc.getSamplingProtocol()); occurrence.setSamplingEffort(importDwc.getSamplingEffort()); occurrence.setEventDate(importDwc.getEventDate()); occurrence.setEventTime(importDwc.getEventTime()); occurrence.setStartDayOfYear(importDwc.getStartDayOfYear()); occurrence.setEndDayOfYear(importDwc.getEndDayOfYear()); occurrence.setYear(importDwc.getYear()); occurrence.setMonth(importDwc.getMonth()); occurrence.setDay(importDwc.getDay()); occurrence.setVerbatimEventDate(importDwc.getVerbatimEventDate()); occurrence.setHabitat(importDwc.getHabitat()); occurrence.setFieldNotes(importDwc.getFieldNumber()); occurrence.setFieldNotes(importDwc.getFieldNotes()); occurrence.setEventRemarks(importDwc.getEventRemarks()); occurrence.setGeologicalContextId(importDwc.getGeologicalContextId()); occurrence.setEarliestEonOrLowestEonothem(importDwc.getEarliestEonOrLowestEonothem()); occurrence.setLatestEonOrHighestEonothem(importDwc.getLatestEonOrHighestEonothem()); occurrence.setEarliestEraOrLowestErathem(importDwc.getEarliestEraOrLowestErathem()); occurrence.setLatestEraOrHighestErathem(importDwc.getLatestEraOrHighestErathem()); occurrence.setEarliestPeriodOrLowestSystem(importDwc.getEarliestPeriodOrLowestSystem()); occurrence.setLatestPeriodOrHighestSystem(importDwc.getLatestPeriodOrHighestSystem()); occurrence.setEarliestEpochOrLowestSeries(importDwc.getEarliestEpochOrLowestSeries()); 
occurrence.setLatestEpochOrHighestSeries(importDwc.getLatestEpochOrHighestSeries()); occurrence.setEarliestAgeOrLowestStage(importDwc.getEarliestAgeOrLowestStage()); occurrence.setLatestAgeOrHighestStage(importDwc.getLatestAgeOrHighestStage()); occurrence.setLowestBiostratigraphicZone(importDwc.getLowestBiostratigraphicZone()); occurrence.setHighestBiostratigraphicZone(importDwc.getHighestBiostratigraphicZone()); occurrence.setLithostratigraphicTerms(importDwc.getLithostratigraphicTerms()); occurrence.setGroup(importDwc.getGroup()); occurrence.setFormation(importDwc.getFormation()); occurrence.setMember(importDwc.getMember()); occurrence.setBed(importDwc.getBed()); occurrence.setIdentificationId(importDwc.getIdentificationId()); occurrence.setIdentifiedBy(importDwc.getIdentifiedBy()); if (importDwc.getDateIdentified() != null && importDwc.getDateIdentified().length() > 0) occurrence.setDateIdentified(sourceDateFormat.parse(importDwc.getDateIdentified())); occurrence.setIdentificationReferences(importDwc.getIdentificationReferences()); occurrence.setIdentificationVerificationStatus(importDwc.getIdentificationVerificationStatus()); occurrence.setIdentificationRemarks(importDwc.getIdentificationRemarks()); occurrence.setIdentificationQualifier(importDwc.getIdentificationQualifier()); occurrence.setTypeStatus(importDwc.getTypeStatus()); occurrence.setTaxonId(importDwc.getTaxonId()); occurrence.setScientificNameId(importDwc.getScientificNameId()); occurrence.setAcceptedNameUsageId(importDwc.getAcceptedNameUsageId()); occurrence.setParentNameUsageId(importDwc.getParentNameUsageId()); occurrence.setOriginalNameUsageId(importDwc.getOriginalNameUsageId()); occurrence.setNameAccordingToId(importDwc.getNameAccordingToId()); occurrence.setNamePublishedInId(importDwc.getNamePublishedInId()); occurrence.setTaxonConceptId(importDwc.getTaxonConceptId()); occurrence.setScientificName(importDwc.getScientificName()); occurrence.setAcceptedNameUsage(importDwc.getAcceptedNameUsage()); 
occurrence.setParentNameUsage(importDwc.getParentNameUsage()); occurrence.setOriginalNameUsage(importDwc.getOriginalNameUsage()); occurrence.setNameAccordingTo(importDwc.getNameAccordingTo()); occurrence.setNamePublishedIn(importDwc.getNamePublishedIn()); occurrence.setNamePublishedInYear(importDwc.getNamePublishedInYear()); occurrence.setHigherClassification(importDwc.getHigherClassification()); occurrence.setKingdom(importDwc.getKingdom()); occurrence.setPhylum(importDwc.getPhylum()); occurrence.setClass_(importDwc.getClass_()); occurrence.setOrder(importDwc.getOrder()); occurrence.setFamily(importDwc.getFamily()); occurrence.setGenus(importDwc.getGenus()); occurrence.setSubgenus(importDwc.getSubgenus()); occurrence.setSpecificEpithet(importDwc.getSpecificEpithet()); occurrence.setInfraspecificEpithet(importDwc.getInfraspecificEpithet()); occurrence.setTaxonRank(importDwc.getTaxonRank()); occurrence.setVerbatimTaxonRank(importDwc.getVerbatimTaxonRank()); occurrence.setScientificNameAuthorship(importDwc.getScientificNameAuthorship()); occurrence.setVernacularName(importDwc.getVernacularName()); occurrence.setNomenclaturalCode(importDwc.getNomenclaturalCode()); occurrence.setTaxonomicStatus(importDwc.getTaxonomicStatus()); occurrence.setNomenclaturalStatus(importDwc.getNomenclaturalStatus()); occurrence.setTaxonRemarks(importDwc.getTaxonRemarks()); if (!update) occurrenceDAO.create(occurrence); else occurrenceDAO.update(occurrence); // clear objects occurrence.setImages(null); occurrence = null; taxon = null; location = null; } catch (NonUniqueResultException ex) { logger.warn("NonUniqueResultException occurrenceId " + importDwc.getCatalogNumber()); } catch (NumberFormatException ex) { logger.error("NumberFormatException occurrenceId " + importDwc.getCatalogNumber()); ex.printStackTrace(); System.exit(-1); } catch (ParseException e) { e.printStackTrace(); } // } // end for, 1000 importDwc rows session.evict(importDwc); if (batch >= BATCH_SIZE) { batch = 0; 
SessionStatistics statistics = session.getStatistics(); logger.trace("Entities before flush: " + String.valueOf(statistics.getEntityCount())); session.flush(); logger.trace("Entities before clear: " + String.valueOf(statistics.getEntityCount())); // fullTextSession.flushToIndexes(); session.clear(); logger.trace("Entities after clear: " + String.valueOf(statistics.getEntityCount())); // System.exit(1); } if (rowsCounter % maxResults == 0) { // fullTextSession.flushToIndexes(); logger.info("Occurrences added " + rowsCounter); SessionStatistics statistics = session.getStatistics(); logger.debug("Entities: " + String.valueOf(statistics.getEntityCount())); logger.debug("Collections: " + String.valueOf(statistics.getCollectionCount())); } // ******* for debug only *********** // if(rowsCounter == 1) { // session.getTransaction().rollback(); // scroll.close(); // System.exit(1); // } // firstResult += maxResults; // occurrencesDwcList = // importDwcDAO.scrollAll(ImportDwc.class, // maxResults, // firstResult); } // end while, no more importDwc rows scroll.close(); // transactionManager.commit(status); session.flush(); session.clear(); logger.info("Total occurrences processed " + rowsCounter); // session.getTransaction().commit(); // session.close(); }
From source file:org.jadira.usertype.spi.repository.JpaBaseRepository.java
License:Apache License
/**
 * {@inheritDoc}
 *
 * Unwraps the Hibernate {@link Session} underlying the JPA EntityManager and
 * detaches the given entity from it, then hands the same instance back.
 */
public T evict(T entity) {
    final Session hibernateSession = (Session) getEntityManager().getDelegate();
    hibernateSession.evict(entity);
    return entity;
}
From source file:org.jasig.ssp.dao.AppointmentDaoTest.java
License:Apache License
@Test public void testSaveNew() throws ObjectNotFoundException { final Date startDate = new Date(1339419600000L); // "Mon JUN 11 09:00:00 EDT 2012" final Date endDate = new Date(1339423200000L); // "Mon JUN 11 09:00:00 EDT 2012" Appointment obj = new Appointment(); obj.setEndTime(endDate);/*from www.j ava 2s . c o m*/ obj.setStartTime(startDate); obj.setPerson(securityService.currentUser().getPerson()); obj = dao.save(obj); assertNotNull("Saved obj should not have been null", obj.getId()); final UUID saved = obj.getId(); // flush to storage, then clear out in-memory version final Session session = sessionFactory.getCurrentSession(); session.flush(); session.evict(obj); obj = dao.get(saved); LOGGER.debug("testSaveNew(): Saved " + obj.toString()); assertNotNull("Reloaded object should not have been null.", obj); assertNotNull("Reloaded ID should not have been null.", obj.getId()); final List<Appointment> all = (List<Appointment>) dao.getAll(ObjectStatus.ACTIVE).getRows(); assertNotNull("GetAll list should not have been null.", all); assertFalse("GetAll list should not have been empty.", all.isEmpty()); TestUtils.assertListDoesNotContainNullItems(all); dao.delete(obj); }
From source file:org.jasig.ssp.dao.EarlyAlertRoutingDaoTest.java
License:Apache License
/** * Test {@link EarlyAlertRoutingDao#save(EarlyAlertRouting)}, * {@link EarlyAlertRoutingDao#get(UUID)}, * {@link EarlyAlertRoutingDao#getAll(ObjectStatus)}, and * {@link EarlyAlertRoutingDao#delete(EarlyAlertRouting)}. * // w w w .j av a 2 s . c om * @throws ObjectNotFoundException * If saved instance could not be reloaded. */ @Test public void testSaveNew() throws ObjectNotFoundException { UUID saved; EarlyAlertRouting obj = new EarlyAlertRouting(); obj.setGroupName("new name"); obj.setObjectStatus(ObjectStatus.ACTIVE); obj.setCampus(campusService.get(CAMPUS_ID)); obj.setEarlyAlertReason(earlyAlertReasonService.get(EARLY_ALERT_REASON_ID)); obj.setPerson(securityService.currentUser().getPerson()); obj = dao.save(obj); assertNotNull("Saved object should not have been null.", obj.getId()); saved = obj.getId(); // flush to storage, then clear out in-memory version final Session session = sessionFactory.getCurrentSession(); session.flush(); session.evict(obj); obj = dao.get(saved); LOGGER.debug("testSaveNew(): Saved " + obj.toString()); assertNotNull("Reloaded object should not have been null.", obj); assertNotNull("Reloaded ID should not have been null.", obj.getId()); assertNotNull("Reloaded name should not have been null.", obj.getGroupName()); assertEquals("EarlyAlertReason names do not match.", EARLY_ALERT_REASON_NAME, obj.getEarlyAlertReason().getName()); final List<EarlyAlertRouting> all = (List<EarlyAlertRouting>) dao.getAll(ObjectStatus.ACTIVE).getRows(); assertNotNull("GetAll list should not have been null.", all); assertFalse("GetAll list should not have been empty.", all.isEmpty()); assertList(all); dao.delete(obj); }
From source file:org.jasig.ssp.dao.GoalDaoTest.java
License:Apache License
/** * Test {@link GoalDao#save(Goal)}, {@link GoalDao#get(UUID)}, * {@link GoalDao#getAll(ObjectStatus)}, and {@link GoalDao#delete(Goal)}. * /*from w w w .j av a2 s . co m*/ * @throws ObjectNotFoundException * If saved instance could not be reloaded. */ @Test public void testSaveNew() throws ObjectNotFoundException { UUID saved; Goal obj = new Goal(); obj.setName("new name"); obj.setObjectStatus(ObjectStatus.ACTIVE); obj.setConfidentialityLevel(testConfidentialityLevel); obj.setPerson(securityService.currentUser().getPerson()); obj = dao.save(obj); assertNotNull("Saved object should not have been null.", obj.getId()); saved = obj.getId(); // flush to storage, then clear out in-memory version final Session session = sessionFactory.getCurrentSession(); session.flush(); session.evict(obj); obj = dao.get(saved); LOGGER.debug("testSaveNew(): Saved " + obj.toString()); assertNotNull("Reloaded object should not have been null.", obj); assertNotNull("Reloaded ID should not have been null.", obj.getId()); assertNotNull("Reloaded name should not have been null.", obj.getName()); final List<Goal> all = (List<Goal>) dao.getAll(ObjectStatus.ACTIVE).getRows(); assertNotNull("GetAll list should not have been null.", all); assertFalse("GetAll list should not have been empty.", all.isEmpty()); TestUtils.assertListDoesNotContainNullItems(all); dao.delete(obj); }