List of usage examples for org.hibernate.StatelessSession.createCriteria(Class)
@Deprecated Criteria createCriteria(Class persistentClass);
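Before the per-project examples, here is a minimal sketch of the pattern they all share: open a StatelessSession, build a Criteria for an entity class, add restrictions, run the query, and close the session explicitly. It is not taken from any of the source files below; the SessionFactory field and the MyEntity class with its "name" property are hypothetical, used only for illustration. Note that createCriteria has been deprecated since Hibernate 5.2 in favour of the JPA Criteria API (see the sketch at the end of this list).

import org.hibernate.Criteria;
import org.hibernate.SessionFactory;
import org.hibernate.StatelessSession;
import org.hibernate.criterion.Restrictions;

public class StatelessCriteriaExample {

    private final SessionFactory sessionFactory; // assumed to be configured elsewhere

    public StatelessCriteriaExample(SessionFactory sessionFactory) {
        this.sessionFactory = sessionFactory;
    }

    /** Loads a single MyEntity (hypothetical entity) by name, or returns null if none matches. */
    public MyEntity findByName(String name) {
        // A StatelessSession has no first-level cache and no automatic dirty checking,
        // so entities come back detached and the session must be closed explicitly.
        StatelessSession session = sessionFactory.openStatelessSession();
        try {
            Criteria criteria = session.createCriteria(MyEntity.class);
            criteria.add(Restrictions.eq("name", name));
            criteria.setMaxResults(1);
            return (MyEntity) criteria.uniqueResult();
        } finally {
            session.close();
        }
    }
}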
From source file:au.org.theark.lims.model.dao.BiospecimenDao.java
License:Open Source License
protected BiospecimenUidSequence getBiospecimenUidSequence(Study study) {
    // Stateless sessions should be used to avoid locking the record for future update
    // by getSession(), which relies on the "open session filter" mechanism
    StatelessSession session = getStatelessSession();
    Criteria criteria = session.createCriteria(BiospecimenUidSequence.class);
    criteria.add(Restrictions.eq(Constants.SUBJECTUIDSEQ_STUDYNAMEID, study.getName()));
    criteria.setMaxResults(1);
    BiospecimenUidSequence result = (BiospecimenUidSequence) criteria.uniqueResult();
    session.close();
    return result;
}
From source file:au.org.theark.phenotypic.model.dao.PhenotypicDao.java
License:Open Source License
@Override
public boolean isPhenoDataSetCategoryUnique(String phenoDataSetCategoryName, Study study,
        PhenoDataSetCategory phenoDataSetCategoryToUpdate) {
    boolean isUnique = true;
    StatelessSession stateLessSession = getStatelessSession();
    Criteria criteria = stateLessSession.createCriteria(CustomFieldCategory.class);
    criteria.add(Restrictions.eq("name", phenoDataSetCategoryName));
    criteria.add(Restrictions.eq("study", study));
    criteria.add(Restrictions.eq("arkFunction", phenoDataSetCategoryToUpdate.getArkFunction()));
    criteria.setMaxResults(1);
    PhenoDataSetCategory existingPhenoDataSetCategory = (PhenoDataSetCategory) criteria.uniqueResult();
    if ((phenoDataSetCategoryToUpdate.getId() != null && phenoDataSetCategoryToUpdate.getId() > 0)) {
        if (existingPhenoDataSetCategory != null
                && !phenoDataSetCategoryToUpdate.getId().equals(existingPhenoDataSetCategory.getId())) {
            isUnique = false;
        }
    } else {
        if (existingPhenoDataSetCategory != null) {
            isUnique = false;
        }
    }
    stateLessSession.close();
    return isUnique;
}
From source file:au.org.theark.phenotypic.model.dao.PhenotypicDao.java
License:Open Source License
/**
 * Check the custom field category for data integrity.
 */
@Override
public boolean isPhenoDataSetCategoryAlreadyUsed(PhenoDataSetCategory phenoDataSetCategory) {
    /**
     * If a phenoDataSetCategory has been used by the system, it should appear in at least
     * one of these tables:
     * PickedPhenoDataSetCategory
     * LinkPhenoDataSetCategoryField
     * PhenoDataSetFieldDisplay
     */
    Boolean status1 = false, status2 = false, status3 = false;
    StatelessSession stateLessSessionOne = getStatelessSession();
    Criteria criteria = stateLessSessionOne.createCriteria(PickedPhenoDataSetCategory.class);
    ArkFunction arkFunction = iArkCommonService
            .getArkFunctionByName(au.org.theark.core.Constants.FUNCTION_KEY_VALUE_PHENO_COLLECTION);
    criteria.add(Restrictions.eq("arkFunction", arkFunction));
    criteria.add(Restrictions.eq("study", phenoDataSetCategory.getStudy()));
    criteria.add(Restrictions.eq("phenoDataSetCategory", phenoDataSetCategory));
    List<PickedPhenoDataSetCategory> phenoDataSetCategories = (List<PickedPhenoDataSetCategory>) criteria.list();
    if (phenoDataSetCategories.size() > 0) {
        status1 = true;
    } else {
        status1 = false;
    }
    StatelessSession stateLessSessionTwo = getStatelessSession();
    Criteria criteriaTwo = stateLessSessionTwo.createCriteria(LinkPhenoDataSetCategoryField.class);
    criteriaTwo.add(Restrictions.eq("arkFunction", arkFunction));
    criteriaTwo.add(Restrictions.eq("study", phenoDataSetCategory.getStudy()));
    criteriaTwo.add(Restrictions.eq("phenoDataSetCategory", phenoDataSetCategory));
    List<LinkPhenoDataSetCategoryField> linkPhenoDataSetCategoryFields = (List<LinkPhenoDataSetCategoryField>) criteriaTwo.list();
    if (linkPhenoDataSetCategoryFields.size() > 0) {
        status2 = true;
    } else {
        status2 = false;
    }
    StatelessSession stateLessSessionThree = getStatelessSession();
    Criteria criteriaThree = stateLessSessionThree.createCriteria(PhenoDataSetFieldDisplay.class);
    criteriaThree.createAlias("phenoDataSetGroup", "phenoDSG");
    criteriaThree.add(Restrictions.eq("phenoDSG.arkFunction", arkFunction));
    criteriaThree.add(Restrictions.eq("phenoDSG.study", phenoDataSetCategory.getStudy()));
    criteriaThree.add(Restrictions.eq("phenoDataSetCategory", phenoDataSetCategory));
    List<PhenoDataSetFieldDisplay> phenoDataSetFieldDisplays = (List<PhenoDataSetFieldDisplay>) criteriaThree.list();
    if (phenoDataSetFieldDisplays.size() > 0) {
        status3 = true;
    } else {
        status3 = false;
    }
    return status1 || status2 || status3;
}
From source file:au.org.theark.phenotypic.model.dao.PhenotypicDao.java
License:Open Source License
public boolean isPhenoDataSetFieldUnqiue(String phenoFieldName, Study study, PhenoDataSetField phenoFieldToUpdate) {
    boolean isUnique = true;
    StatelessSession stateLessSession = getStatelessSession();
    Criteria criteria = stateLessSession.createCriteria(PhenoDataSetField.class);
    criteria.add(Restrictions.eq("name", phenoFieldName));
    criteria.add(Restrictions.eq("study", study));
    criteria.add(Restrictions.eq("arkFunction", phenoFieldToUpdate.getArkFunction()));
    criteria.setMaxResults(1);
    PhenoDataSetField existingField = (PhenoDataSetField) criteria.uniqueResult();
    if ((phenoFieldToUpdate.getId() != null && phenoFieldToUpdate.getId() > 0)) {
        if (existingField != null && !phenoFieldToUpdate.getId().equals(existingField.getId())) {
            isUnique = false;
        }
    } else {
        if (existingField != null) {
            isUnique = false;
        }
    }
    stateLessSession.close();
    return isUnique;
}
From source file:au.org.theark.study.model.dao.StudyDao.java
License:Open Source License
protected SubjectUidSequence getSubjectUidSequence(Study study) {
    // log.info("Getting uid seq entity for study " + study.getName());
    // Stateless sessions should be used to avoid locking the record for future update
    // by getSession(), which relies on the "open session filter" mechanism
    StatelessSession session = getStatelessSession();
    Criteria criteria = session.createCriteria(SubjectUidSequence.class);
    criteria.add(Restrictions.eq(Constants.SUBJECTUIDSEQ_STUDYNAMEID, study.getName()));
    criteria.setMaxResults(1);
    SubjectUidSequence result = (SubjectUidSequence) criteria.uniqueResult();
    session.close();
    log.warn("and got entity with lock = " + result.getInsertLock() + " for study " + study.getName());
    return result;
}
From source file:au.org.theark.study.model.dao.StudyDao.java
License:Open Source License
public boolean isStudyCompUnique(String studyComponentName, Study study, StudyComp studyComponentToUpdate) {
    boolean isUnique = true;
    StatelessSession stateLessSession = getStatelessSession();
    Criteria criteria = stateLessSession.createCriteria(StudyComp.class);
    criteria.add(Restrictions.eq("name", studyComponentName));
    criteria.add(Restrictions.eq("study", study));
    criteria.setMaxResults(1);
    StudyComp existingComponent = (StudyComp) criteria.uniqueResult();
    if ((studyComponentToUpdate.getId() != null && studyComponentToUpdate.getId() > 0)) {
        if (existingComponent != null && !studyComponentToUpdate.getId().equals(existingComponent.getId())) {
            isUnique = false;
        }
    } else {
        if (existingComponent != null) {
            isUnique = false;
        }
    }
    stateLessSession.close();
    return isUnique;
}
From source file:com.hmsinc.epicenter.tools.reclassifier.Reclassifier.java
License:Open Source License
@Transactional
public void run() {
    setup();
    final String destinationTable = (arguments.length > 4) ? arguments[4] : DEFAULT_TABLE_NAME;
    final String query = new StringBuilder("INSERT INTO ").append(destinationTable)
            .append(INSERT_CLASSIFICATION).toString();

    final BatchSqlUpdate updater = new BatchSqlUpdate(modelDataSource, query);
    updater.declareParameter(new SqlParameter(Types.BIGINT));
    updater.declareParameter(new SqlParameter(Types.BIGINT));
    updater.setBatchSize(BATCH_SIZE);
    updater.compile();

    final StatelessSession ss = ((Session) entityManager.getDelegate()).getSessionFactory()
            .openStatelessSession();

    final Criteria c = ss.createCriteria(target.getInteractionClass())
            .add(Restrictions.eq("patientClass", target.getPatientClass())).addOrder(Order.asc("id"))
            .setCacheable(false);

    if (arguments.length > 2) {
        c.add(Restrictions.gt("id", Long.valueOf(arguments[2])));
    }

    if (arguments.length > 3) {
        c.add(Restrictions.lt("id", Long.valueOf(arguments[3])));
    }

    final ScrollableResults sr = c.scroll(ScrollMode.FORWARD_ONLY);
    int i = 0;
    while (sr.next()) {
        final Interaction interaction = (Interaction) sr.get(0);
        final Set<Classification> classifications = classificationService.classify(interaction, target);
        save(interaction, classifications, updater);
        i++;
        if (i % BATCH_SIZE == 0) {
            logger.info("Processed {} interactions (current id: {})", i, interaction.getId());
        }
        ((Session) entityManager.getDelegate()).evict(interaction);
    }

    sr.close();
    updater.flush();
}
From source file:com.twinsoft.convertigo.engine.billing.HibernateTicketManager.java
License:Open Source License
public synchronized Ticket peekTicket() throws BillingException {
    final Ticket[] ticket = { null };
    hibernateHelper.retry(new Runnable() {
        @Override
        public void run() {
            StatelessSession session = hibernateHelper.getSession();
            try {
                ticket[0] = (Ticket) session.createCriteria(Ticket.class).setMaxResults(1).uniqueResult();
                if (log.isDebugEnabled()) {
                    log.debug("(HibernateTicketManager) peekTicket " + ticket[0]);
                }
            } finally {
                session.close();
            }
        }
    });
    return ticket[0];
}
From source file:com.twinsoft.convertigo.engine.SecurityTokenManager.java
License:Open Source License
public synchronized SecurityToken consumeToken(final String tokenID)
        throws NoSuchSecurityTokenException, ExpiredSecurityTokenException {
    final SecurityToken[] token = { null };

    Engine.logSecurityTokenManager.debug("(SecurityTokenManager) Try to consume tokenID: '" + tokenID + "'");

    removeExpired();

    if (tokens != null) {
        token[0] = tokens.get(tokenID);
        if (Engine.logSecurityTokenManager.isDebugEnabled()) {
            Engine.logSecurityTokenManager
                    .debug("(SecurityTokenManager) Memory tokens manager retrieves: " + token[0]);
        }
    }

    if (hibernateHelper != null) {
        hibernateHelper.retry(new Runnable() {
            @Override
            public void run() {
                StatelessSession session = hibernateHelper.getSession();
                try {
                    token[0] = (SecurityToken) session.createCriteria(SecurityToken.class)
                            .add(Restrictions.eq("tokenID", tokenID)).uniqueResult();
                } finally {
                    session.close();
                }
            }
        });
        if (Engine.logSecurityTokenManager.isDebugEnabled()) {
            Engine.logSecurityTokenManager
                    .debug("(SecurityTokenManager) Database tokens manager retrieves: " + token[0]);
        }
    }

    if (token[0] == null) {
        Engine.logSecurityTokenManager.debug("(SecurityTokenManager) Not found tokenID: '" + tokenID + "'");
        throw new NoSuchSecurityTokenException(tokenID);
    }

    if (tokens != null) {
        tokens.remove(tokenID);
    }

    if (hibernateHelper != null) {
        hibernateHelper.delete(token[0]);
    }

    if (token[0].isExpired()) {
        Engine.logSecurityTokenManager.debug("(SecurityTokenManager) Expired tokenID: '" + tokenID + "'");
        throw new ExpiredSecurityTokenException(tokenID);
    }

    Engine.logSecurityTokenManager.debug("(SecurityTokenManager) The security token is: '" + token[0] + "'");
    return token[0];
}
From source file:magoffin.matt.dao.hbm.GenericHibernateDao.java
License:Open Source License
/**
 * Execute a batch callback against a StatelessSession using a Criteria built by the
 * supplied criteria builder.
 *
 * <p>The DELETE, UPDATE, and UPDATE_STOP {@link BatchCallbackResult}
 * values are not supported in this operation, and will throw an
 * <code>UnsupportedOperationException</code> if returned by the
 * {@link BatchCallback} instance passed to this method.</p>
 *
 * @param criteriaBuilder the criteria builder
 * @param callback the callback
 * @param options the options
 * @return the number of items processed
 */
@SuppressWarnings("unchecked")
protected Integer executeStatelessCriteriaBatchCallback(final CriteriaBuilder criteriaBuilder,
        final BatchCallback<T> callback, final BatchOptions options) {
    StatelessSession session = getHibernateTemplate().getSessionFactory().openStatelessSession();
    Transaction tx = session.beginTransaction();
    try {
        Criteria criteria = session.createCriteria(getType());
        criteria.setFetchSize(options.getBatchSize());
        criteriaBuilder.buildCriteria(criteria);
        ScrollableResults items = criteria.scroll(ScrollMode.FORWARD_ONLY);
        int count = 0;
        OUTER: while (items.next()) {
            T item = (T) items.get(0);
            BatchCallbackResult action = callback.handle(item);
            switch (action) {
            case DELETE:
            case UPDATE:
            case UPDATE_STOP:
                throw new UnsupportedOperationException("Action " + action + " not possible during "
                        + options.getMode() + " mode batch processing");
            case STOP:
                break OUTER;
            case CONTINUE:
                // nothing to do
                break;
            }
        }
        tx.commit();
        return count;
    } catch (RuntimeException e) {
        tx.rollback();
        throw e;
    } finally {
        if (session != null) {
            session.close();
        }
    }
}
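Since createCriteria is deprecated (Hibernate 5.2) and the legacy Criteria API was removed in Hibernate 6, a migration sketch may be useful. The version below is only an assumption-laden illustration, not code from any of the projects above: it reuses the SecurityToken entity and its "tokenID" property from the Convertigo example, and it assumes Hibernate 5.2+, where SessionFactory exposes getCriteriaBuilder() and StatelessSession accepts a JPA CriteriaQuery through createQuery.

import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;

import org.hibernate.SessionFactory;
import org.hibernate.StatelessSession;

public class StatelessJpaCriteriaSketch {

    /** Looks up a SecurityToken by tokenID without the deprecated createCriteria (sketch, Hibernate 5.2+ assumed). */
    public static SecurityToken findToken(SessionFactory sessionFactory, String tokenID) {
        // Build the query with the JPA CriteriaBuilder instead of the legacy Criteria API.
        CriteriaBuilder cb = sessionFactory.getCriteriaBuilder();
        CriteriaQuery<SecurityToken> query = cb.createQuery(SecurityToken.class);
        Root<SecurityToken> root = query.from(SecurityToken.class);
        query.select(root).where(cb.equal(root.get("tokenID"), tokenID));

        // Execute it on a StatelessSession, closing the session explicitly as in the examples above.
        StatelessSession session = sessionFactory.openStatelessSession();
        try {
            return session.createQuery(query).setMaxResults(1).uniqueResult();
        } finally {
            session.close();
        }
    }
}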