List of usage examples for org.hibernate Session evict
void evict(Object object);
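Before the per-project examples below, a minimal sketch (not taken from any of the listed projects) of what Session.evict does: it removes a single managed instance from the session's first-level cache, so later changes to that instance are no longer tracked or flushed. The Order entity, its setStatus method, and the HibernateUtil helper are hypothetical names used only for illustration.

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;

public class EvictSketch {
    public static void main(String[] args) {
        // Assumes the usual HibernateUtil helper that builds a SessionFactory.
        SessionFactory sessionFactory = HibernateUtil.getSessionFactory();
        Session session = sessionFactory.openSession();
        Transaction tx = session.beginTransaction();

        // Load a (hypothetical) Order entity; it is now managed in the first-level cache.
        Order order = (Order) session.get(Order.class, 1L);

        // Detach it: the session stops tracking this instance.
        session.evict(order);

        // This change is NOT picked up by dirty checking and is not flushed on commit.
        order.setStatus("CANCELLED");

        tx.commit();
        session.close();
        sessionFactory.close();
    }
}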
From source file:br.msf.commons.persistence.dao.AbstractEntityDaoBean.java
License:Open Source License
@Override
public T deleteById(final ID id) {
    ArgumentUtils.rejectIfNull(id);
    final Session session = getCurrentSession();
    final T entity = findById(id);
    if (entity != null) {
        // Detach the loaded copy before issuing the delete.
        session.evict(entity);
        session.delete(entity);
    }
    return entity;
}
From source file:com.amalto.core.storage.hibernate.HibernateStorage.java
License:Open Source License
@Override
public void update(Iterable<DataRecord> records) {
    assertPrepared();
    Session session = this.getCurrentSession();
    try {
        storageClassLoader.bind(Thread.currentThread());
        DataRecordConverter<Object> converter = new ObjectDataRecordConverter(storageClassLoader, session);
        for (DataRecord currentDataRecord : records) {
            TypeMapping mapping = mappingRepository.getMappingFromUser(currentDataRecord.getType());
            Wrapper o = (Wrapper) converter.convert(currentDataRecord, mapping);
            if (session.contains(o) && session.isReadOnly(o)) {
                // A read only instance for an update?
                // Session#setReadOnly(...) does not always work as expected
                // (especially in case of compound keys, see TMDM-7014).
                session.evict(o);
                o = (Wrapper) converter.convert(currentDataRecord, mapping);
            }
            DataRecordMetadata recordMetadata = currentDataRecord.getRecordMetadata();
            Map<String, String> recordProperties = recordMetadata.getRecordProperties();
            if (!ObjectUtils.equals(recordMetadata.getTaskId(), o.taskId())) {
                o.taskId(recordMetadata.getTaskId());
            }
            for (Map.Entry<String, String> currentProperty : recordProperties.entrySet()) {
                String key = currentProperty.getKey();
                String value = currentProperty.getValue();
                ComplexTypeMetadata database = mapping.getDatabase();
                if (database.hasField(key)) {
                    Object convertedValue = StorageMetadataUtils.convert(value, database.getField(key));
                    if (!ObjectUtils.equals(convertedValue, o.get(key))) {
                        o.set(key, convertedValue);
                    }
                } else {
                    throw new IllegalArgumentException("Can not store value '" + key //$NON-NLS-1$
                            + "' because there is no database field '" + key + "' in type '" + mapping.getName() //$NON-NLS-1$ //$NON-NLS-2$
                            + "' (storage is '" + toString() + "')"); //$NON-NLS-1$ //$NON-NLS-2$
                }
            }
            session.saveOrUpdate(o);
            if (FLUSH_ON_LOAD && session.getStatistics().getEntityCount() % batchSize == 0) {
                // Periodically flush objects to avoid using too much memory.
                session.flush();
            }
        }
    } catch (ConstraintViolationException e) {
        throw new com.amalto.core.storage.exception.ConstraintViolationException(e);
    } catch (PropertyValueException e) {
        throw new RuntimeException("Invalid value in record to update.", e); //$NON-NLS-1$
    } catch (NonUniqueObjectException e) {
        throw new RuntimeException("Attempted to update multiple times same record within same transaction.", //$NON-NLS-1$
                e);
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred during update.", e); //$NON-NLS-1$
    } finally {
        this.releaseSession();
        storageClassLoader.unbind(Thread.currentThread());
    }
}
From source file:com.anite.zebra.hivemind.impl.ClusterSafeLockManager.java
License:Apache License
/**
 * @param processInstance
 * @throws LockException
 */
public void aquireLock(IProcessInstance processInstance, Session session) throws LockException {
    boolean isLocked = false;
    while (!isLocked) {
        DatabaseLock lock;
        try {
            lock = (DatabaseLock) session.get(DatabaseLock.class, processInstance.getProcessInstanceId());
            if (lock != null) {
                session.evict(lock);
            }
        } catch (HibernateException e2) {
            log.error("Unable to test for lock", e2);
            throw new LockException(e2);
        }
        if (lock == null) {
            try {
                Class lockClazz = DatabaseLock.class;
                lock = (DatabaseLock) lockClazz.newInstance();
                lock.setProcessInstanceId(processInstance.getProcessInstanceId());
                Transaction t = session.beginTransaction();
                session.save(lock);
                t.commit();
                isLocked = true;
            } catch (HibernateException e) {
                // It is vaguely possible someone beat us to it
                try {
                    lock = null;
                    Thread.sleep(100);
                } catch (InterruptedException e1) {
                    log.error("Interupted while trying to lock - this should not occur", e1);
                    throw new LockException(e1);
                }
            } catch (InstantiationException e) {
                log.error("Unable to create lock class", e);
                throw new LockException(e);
            } catch (IllegalAccessException e) {
                log.error("Unable to create lock class", e);
                throw new LockException(e);
            }
        } else {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e1) {
                log.error("Interupted while trying to lock - this should not occur", e1);
                throw new LockException(e1);
            }
        }
    }
}
From source file:com.autentia.intra.util.HibernateUtil.java
License:Open Source License
/**
 * Fully evict an object (including its related objects) from the Hibernate cache.
 * This method is necessary since Session.evict() does not evict related objects,
 * only the instance that is passed in. Hibernate can be configured with
 * cascade="evict" to evict the full object graph, but we prefer to do it
 * programmatically rather than by configuration, which can lead to more problems.
 *
 * @param dto
 */
public static void evictFullObject(Object dto) {
    Session s = currentSession();
    s.evict(dto);
    try {
        Map props = BeanUtilsBean.getInstance().getPropertyUtils().describe(dto);
        Collection vals = props.values();
        for (Object val : vals) {
            if (val instanceof ITransferObject) {
                s.evict(val);
            }
        }
    } catch (Exception e) {
        log.error("evictFullObject - exception", e);
    }
}
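For contrast with the programmatic approach above, a minimal sketch of the configuration-based alternative the comment mentions (cascade="evict"), here expressed with Hibernate's @Cascade annotation; recent Hibernate versions use CascadeType.DETACH for the same purpose. Invoice and Customer are hypothetical entities used only for illustration; with this mapping, evicting an Invoice also detaches its associated Customer.

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.ManyToOne;

import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;

@Entity
public class Invoice {

    @Id
    private Long id;

    // Hibernate-specific cascade: Session.evict(invoice) also detaches the customer.
    @ManyToOne
    @Cascade(CascadeType.EVICT)
    private Customer customer;

    public Customer getCustomer() {
        return customer;
    }

    public void setCustomer(Customer customer) {
        this.customer = customer;
    }
}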
From source file:com.bahadirakin.dao.impl.BaseHibernateDAO.java
License:Apache License
public void detach(T entity) {
    if (entity == null) {
        throw new IllegalArgumentException("Entity Must not be null");
    }
    try {
        Session session = this.getCurrentSession();
        Transaction transaction = session.beginTransaction();
        session.evict(entity);
        transaction.commit();
    } catch (Exception e) {
        LOG.error("Error while detach Entity. M: " + e.getMessage() + " C: " + e.getCause(), e);
    }
}
From source file:com.db4o.drs.hibernate.impl.ObjectLifeCycleEventsListenerImpl.java
License:Open Source License
private void deleteObjectRef(ObjectReference ref) {
    Session s = getSession();
    try {
        PreparedStatement ps = s.connection().prepareStatement(DELETE_SQL);
        ps.setLong(1, ref.getUuid().getCreated());
        ps.setLong(2, ref.getUuid().getProvider().getId());
        int affected = ps.executeUpdate();
        if (affected != 1) {
            throw new RuntimeException("can't delete the ObjectRef " + ref);
        }
    } catch (SQLException e) {
        throw new RuntimeException(e);
    }
    s.evict(ref);
}
From source file:com.examples.hql.FirstLevelCacheExample.java
public static void main(String[] args) {
    SessionFactory sf = HibernateUtil.getSessionFactory();
    Session session = sf.openSession();
    //Transaction tx = session.beginTransaction();
    try {
        PurchaseOrderHeader po = (PurchaseOrderHeader) session.get(PurchaseOrderHeader.class, 2);
        System.out.println(po.getPoheaderId() + " | " + po.getPonumber() + " | " + po.getPodate() + " | "
                + po.getOrderValue());
        System.out.println("----------***********-------------");

        Session session1 = sf.openSession();
        PurchaseOrderHeader po1 = (PurchaseOrderHeader) session1.get(PurchaseOrderHeader.class, 2);
        System.out.println(po1.getPoheaderId() + " | " + po1.getPonumber() + " | " + po1.getPodate() + " | "
                + po1.getOrderValue());
        System.out.println("----------***********-------------");

        // Query q = session.createQuery("update PurchaseOrderHeader set orderValue=:value where poheaderid=:id");
        // q.setParameter("value", new BigDecimal(1020124));
        // q.setParameter("id", 2);
        //
        // q.executeUpdate();
        //
        // session.beginTransaction().commit();

        Session session2 = sf.openSession();
        PurchaseOrderHeader po2 = (PurchaseOrderHeader) session2.get(PurchaseOrderHeader.class, 2);
        System.out.println(po2.getPoheaderId() + " | " + po2.getPonumber() + " | " + po2.getPodate() + " | "
                + po2.getOrderValue());
        System.out.println("----------***********-------------");

        // tx.commit();
        session.evict(po);
        session1.evict(po1);
        session2.evict(po2);
        session.close();
        session1.close();
        session2.close();
        sf.close();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
From source file:com.globalsight.everest.page.pageexport.ExportEventObserverLocal.java
License:Apache License
/**
 * Updates the exporting page in the specified Export Batch.
 */
private boolean updateExportingPage(long p_batchId, String p_pageId, HttpServletRequest p_request)
        throws ExportEventObserverException {
    ExportingPage page = null;
    Session session = null;
    Transaction transaction = null;
    try {
        s_logger.debug("Looking up export batch id: " + p_batchId + ", pageId: " + p_pageId);
        String responseType = p_request.getParameter(ExportConstants.RESPONSE_TYPE);
        String newState = responseType.equals(ExportConstants.SUCCESS) ? ExportingPage.EXPORTED
                : ExportingPage.EXPORT_FAIL;
        long endTime = Long.parseLong(p_request.getParameter(ExportConstants.EXPORTED_TIME));
        String details = p_request.getParameter(ExportConstants.RESPONSE_DETAILS);
        String exportPath = p_request.getParameter(ExportConstants.ABSOLUTE_EXPORT_PATH);
        String isComponentPage = p_request.getParameter(ExportConstants.IS_COMPONENT_PAGE);
        page = getExportingPageById(p_batchId, Long.parseLong(p_pageId), false);
        if (page != null) {
            session = HibernateUtil.getSession();
            transaction = session.beginTransaction();
            session.evict(page);
            page = (ExportingPage) session.get(ExportingPage.class, page.getIdAsLong());
            page.setEndTime(endTime);
            page.setErrorMessage(details);
            page.setState(newState);
            page.setExportPath(exportPath);
            char isComp = (isComponentPage == null || isComponentPage.equalsIgnoreCase("false")) ? 'N' : 'Y';
            page.setComponentPage(isComp);
            session.saveOrUpdate(page);
            transaction.commit();
            s_logger.debug("updateExportingPage: ExportBatchId " + p_batchId
                    + " was found and the results were recorded " + "for pageId " + p_pageId);
            return true;
        } else {
            s_logger.warn("updateExportingPage: PageId " + p_pageId + " in ExportBatch " + p_batchId
                    + " was NOT found. Either the BatchId or the PageId are not valid.");
            return false;
        }
    } catch (Exception ex) {
        if (transaction != null) {
            transaction.rollback();
        }
        // just warn, but do not throw an exception
        s_logger.warn("updateExportingPage: PageId " + p_pageId + "in ExportBatch " + p_batchId
                + " was NOT found. Either the BatchId or the PageId are not valid.");
    } finally {
        if (session != null) {
            // session.close();
        }
    }
    return false;
}
From source file:com.globalsight.everest.workflowmanager.WorkflowManagerLocal.java
License:Apache License
public void dispatch(Job p_job) throws RemoteException, WorkflowManagerException {
    JobImpl jobClone = null;
    Session session = HibernateUtil.getSession();
    Transaction transaction = null;
    JbpmContext ctx = null;
    try {
        transaction = HibernateUtil.getTransaction();
        jobClone = (JobImpl) session.get(JobImpl.class, new Long(p_job.getId()));
        if (jobClone != null) {
            // refresh job object in the session
            session.evict(jobClone);
            jobClone = (JobImpl) session.get(JobImpl.class, new Long(p_job.getId()));
        }
        Iterator it = jobClone.getWorkflows().iterator();
        // a Map containing task id as key and workflow as value.
        // This is used for possible creation of STF.
        HashMap<Long, Workflow> map = new HashMap<Long, Workflow>(1);
        HashMap<Long, String> etfMap = new HashMap<Long, String>(1);
        Date startDate = new Date();
        ExecutorService pool = Executors.newFixedThreadPool(MAX_THREAD);
        while (it.hasNext()) {
            Workflow wf = (Workflow) it.next();
            if (WF_READY.equals(wf.getState()) || Workflow.PENDING.equals(wf.getState())) {
                Workflow wfClone = (Workflow) session.get(WorkflowImpl.class, wf.getIdAsLong());
                TaskEmailInfo emailInfo = createTaskEmailInfo(jobClone, wfClone);
                ArrayList returnValue = dispatchWorkflow(wfClone, session, startDate, emailInfo);
                long taskId = ((Long) returnValue.get(0)).longValue();
                if (taskId != -1) {
                    Object actionType = returnValue.get(3);
                    if (actionType != null) {
                        etfMap.put(taskId, (String) actionType);
                    }
                    Task task = (Task) wfClone.getTasks().get(taskId);
                    long jobId = task.getJobId();
                    L10nProfile l10nProfile = ServerProxy.getJobHandler().getL10nProfileByJobId(jobId);
                    long wfStatePostId = l10nProfile.getWfStatePostId();
                    if (wfStatePostId != -1) {
                        WfStatePostThread myTask = new WfStatePostThread(task, null, true);
                        pool.execute(myTask);
                    }
                    // For sla issue
                    if (wfClone.isEstimatedTranslateCompletionDateOverrided()) {
                        updateEstimatedTranslateCompletionDate(wfClone.getId(),
                                wfClone.getEstimatedTranslateCompletionDate());
                    }
                    // prepare the map for possible creation of secondary target files
                    if (((Boolean) returnValue.get(1)).booleanValue()) {
                        map.put(new Long(taskId), wfClone);
                    }
                }
                session.saveOrUpdate(wfClone);
            }
        }
        pool.shutdown();
        jobClone.setState(WF_DISPATCHED);
        updatePageState(session, jobClone.getSourcePages(), PG_ACTIVE_JOB);
        session.saveOrUpdate(jobClone);
        HibernateUtil.commit(transaction);
        String pmId = p_job.getL10nProfile().getProject().getProjectManagerId();
        if (map.size() > 0) {
            Object[] keys = map.keySet().toArray();
            for (int i = 0; i < keys.length; i++) {
                Long stfTaskId = (Long) keys[i];
                Workflow wf = map.get(stfTaskId);
                exportForStfCreation(stfTaskId, wf, pmId);
            }
        }
        // GBS-3002
        if (etfMap.size() > 0) {
            Object[] keys = etfMap.keySet().toArray();
            for (int i = 0; i < keys.length; i++) {
                Long taskId = (Long) keys[i];
                String actionType = etfMap.get(taskId);
                SystemActionPerformer.perform(actionType, taskId, pmId);
            }
        }
    } catch (Exception e2) {
        HibernateUtil.rollback(transaction);
        s_logger.error("Failed to dispatch: " + p_job.getJobName(), e2);
        String[] args = new String[1];
        args[0] = new Long(p_job.getId()).toString();
        throw new WorkflowManagerException(WorkflowManagerException.MSG_FAILED_TO_DISPATCH_WORKFLOW, args, e2,
                WorkflowManagerException.PROPERTY_FILE_NAME);
    } finally {
        if (ctx != null) {
            ctx.close();
        }
    }
    runJobCreationScript(p_job);
}
From source file:com.hazelcast.hibernate.HibernateStatisticsTestSupport.java
License:Open Source License
protected ArrayList<DummyEntity> getDummyEntities(SessionFactory sf, long untilId) {
    Session session = sf.openSession();
    ArrayList<DummyEntity> entities = new ArrayList<DummyEntity>();
    for (long i = 0; i < untilId; i++) {
        DummyEntity entity = (DummyEntity) session.get(DummyEntity.class, i);
        if (entity != null) {
            session.evict(entity);
            entities.add(entity);
        }
    }
    session.close();
    return entities;
}