List of usage examples for javax.persistence.EntityManager#persist
public void persist(Object entity);
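EntityManager.persist makes a transient entity instance managed: the instance is scheduled for insertion, and the corresponding SQL INSERT is issued when the persistence context is flushed, typically at transaction commit. Before the real-world examples below, here is a minimal sketch of the usual pattern with a resource-local transaction; the persistence-unit name "example-unit" and the Customer entity are placeholders, not taken from the examples that follow.

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityTransaction;
import javax.persistence.Persistence;

public class PersistExample {
    public static void main(String[] args) {
        // "example-unit" is a placeholder persistence-unit name defined in persistence.xml
        EntityManagerFactory emf = Persistence.createEntityManagerFactory("example-unit");
        EntityManager em = emf.createEntityManager();
        EntityTransaction tx = em.getTransaction();
        try {
            tx.begin();
            Customer customer = new Customer();   // Customer is a hypothetical mapped @Entity
            customer.setName("Alice");
            em.persist(customer);                 // instance becomes managed; INSERT happens at flush/commit
            tx.commit();
        } catch (RuntimeException e) {
            if (tx.isActive()) {
                tx.rollback();                    // undo the unit of work if anything failed
            }
            throw e;
        } finally {
            em.close();
            emf.close();
        }
    }
}

Each example below follows some variant of this begin/persist/commit/rollback/close pattern, usually wrapped in application-specific transaction or DAO helpers.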
From source file:com.hiperf.common.ui.server.storage.impl.PersistenceHelper.java
@Override
public String replaceFile(String fileClass, String fileNameField, String fileStorageField, String fileName,
        FileItem fileItem, String existingId) throws PersistenceException {
    String idField = getIdFieldFromFileStorageClass(fileClass);
    boolean error = false;
    TransactionContext tc = null;
    Object id = null;
    PropertyDescriptor[] pds = propertyDescriptorsByClassName.get(fileClass);
    try {
        id = getFileId(existingId, idField, pds);
        tc = createTransactionalContext();
        EntityManager em = tc.getEm();
        ITransaction tx = tc.getTx();
        tx.begin();
        Object o;
        Class<?> clazz = Class.forName(fileClass);
        o = em.find(clazz, id);
        for (PropertyDescriptor pd : pds) {
            if (fileNameField.equals(pd.getName())) {
                pd.getWriteMethod().invoke(o, fileName);
            } else if (fileStorageField.equals(pd.getName())) {
                pd.getWriteMethod().invoke(o, fileItem.get());
            }
        }
        em.merge(o);
        tx.commit();
        //idsByClassName.get(fileClass).iterator().next().getReadMethod().invoke(o, new Object[0]);
    } catch (Exception e) {
        error = true;
        logger.log(Level.SEVERE, "Exception in saveFile : " + e.getMessage(), e);
        try {
            if (tc != null)
                tc.rollback();
        } catch (Exception ee) {
        }
    } finally {
        if (tc != null) {
            if (!error)
                close(tc);
            else
                tc.close();
        }
    }
    if (error) {
        try {
            tc = createTransactionalContext();
            EntityManager em = tc.getEm();
            ITransaction tx = tc.getTx();
            tx.begin();
            Class<?> clazz = Class.forName(fileClass);
            em.createQuery("delete from " + fileClass + " o where o." + idField + " = :id")
                    .setParameter("id", id).executeUpdate();
            Object newDoc = clazz.newInstance();
            for (PropertyDescriptor pd : pds) {
                if (fileNameField.equals(pd.getName())) {
                    pd.getWriteMethod().invoke(newDoc, fileName);
                } else if (fileStorageField.equals(pd.getName())) {
                    pd.getWriteMethod().invoke(newDoc, fileItem.get());
                }
            }
            em.persist(newDoc);
            tx.commit();
            for (PropertyDescriptor pd : pds) {
                if (idField.equals(pd.getName())) {
                    existingId = pd.getReadMethod().invoke(newDoc, new String[0]).toString();
                    break;
                }
            }
        } catch (Exception e) {
            logger.log(Level.SEVERE, "Exception in saveFile : " + e.getMessage(), e);
            try {
                if (tc != null)
                    tc.rollback();
            } catch (Exception ee) {
            }
            throw new PersistenceException(e.getMessage(), e);
        } finally {
            if (tc != null)
                close(tc);
        }
    }
    return existingId;
}
From source file:org.opencastproject.serviceregistry.impl.ServiceRegistryJpaImpl.java
/**
 * Creates a job on a remote host.
 */
public Job createJob(String host, String serviceType, String operation, List<String> arguments, String payload,
        boolean dispatchable, Job parentJob) throws ServiceRegistryException {
    if (StringUtils.isBlank(host)) {
        throw new IllegalArgumentException("Host can't be null");
    }
    if (StringUtils.isBlank(serviceType)) {
        throw new IllegalArgumentException("Service type can't be null");
    }
    if (StringUtils.isBlank(operation)) {
        throw new IllegalArgumentException("Operation can't be null");
    }
    User currentUser = securityService.getUser();
    Organization currentOrganization = securityService.getOrganization();
    EntityManager em = null;
    EntityTransaction tx = null;
    try {
        em = emf.createEntityManager();
        tx = em.getTransaction();
        tx.begin();
        ServiceRegistrationJpaImpl creatingService = getServiceRegistration(em, serviceType, host);
        if (creatingService == null) {
            throw new ServiceRegistryException(
                    "No service registration exists for type '" + serviceType + "' on host '" + host + "'");
        }
        if (creatingService.getHostRegistration().isMaintenanceMode()) {
            logger.warn("Creating a job from {}, which is currently in maintenance mode.",
                    creatingService.getHost());
        } else if (!creatingService.getHostRegistration().isActive()) {
            logger.warn("Creating a job from {}, which is currently inactive.", creatingService.getHost());
        }
        JobJpaImpl job = new JobJpaImpl(currentUser, currentOrganization, creatingService, operation, arguments,
                payload, dispatchable);
        // Bind the given parent job to the new job
        if (parentJob != null) {
            // Get the JPA instance of the parent job
            JobJpaImpl jpaParentJob;
            if (parentJob instanceof JobJpaImpl)
                jpaParentJob = (JobJpaImpl) parentJob;
            else {
                try {
                    jpaParentJob = (JobJpaImpl) getJob(parentJob.getId());
                } catch (NotFoundException e) {
                    logger.error("{} not found in the persistence context", parentJob);
                    throw new ServiceRegistryException(e);
                }
            }
            job.setParentJob(jpaParentJob);
            // Get the JPA instance of the root job
            JobJpaImpl jpaRootJob;
            if (parentJob.getRootJobId() == -1L) {
                jpaRootJob = jpaParentJob;
            } else {
                try {
                    jpaRootJob = (JobJpaImpl) getJob(parentJob.getRootJobId());
                } catch (NotFoundException e) {
                    logger.error("job with id {} not found in the persistence context",
                            parentJob.getRootJobId());
                    throw new ServiceRegistryException(e);
                }
            }
            job.setRootJob(jpaRootJob);
        }
        // If this job is not dispatchable, it must be handled by the host that created it
        if (dispatchable) {
            job.setStatus(Status.QUEUED);
        } else {
            job.setProcessingHost(creatingService.getHost());
        }
        em.persist(job);
        tx.commit();
        setJobUri(job);
        return job;
    } catch (RollbackException e) {
        if (tx != null && tx.isActive()) {
            tx.rollback();
        }
        throw e;
    } finally {
        if (em != null)
            em.close();
    }
}
From source file:mil.navy.med.dzreg.dao.RegistriesManagerDAO.java
/**
 * Register a new registry profile.
 * @param profile
 * @return
 * @throws Exception
 */
public AckType register(PersonRegistryProfileType profile) {
    EntityManager em = null;
    PersistentServiceFactory psf = null;
    StringBuffer exceptionMsg = new StringBuffer();
    AckType ack = new AckType();
    ack.setResponseCode(_APPLICATION_ERROR);
    if (profile != null && profile.getPerson() != null && profile.getRegistry() != null
            && !profile.getRegistry().isEmpty() && profile.getDataSource() != null) {
        //------------------------------------------------------------------------
        // Registry type must be valid.
        //------------------------------------------------------------------------
        Map<Integer, DzType> toBeRegisterDzTypes = Collections.synchronizedMap(new HashMap<Integer, DzType>());
        for (RegistryType r : profile.getRegistry()) {
            try {
                DzType dzType = this.validRegistryType(r);
                if (dzType != null) {
                    toBeRegisterDzTypes.put(Integer.valueOf(dzType.getDztypeId()), dzType);
                } else {
                    exceptionMsg.append("Invalid/Unknown registry type specified - " + r.getRegistryId() + ";");
                }
            } catch (Exception ex) {
                exceptionMsg.append(ex.getMessage() + ";");
            }
        }
        //----------------------------------------------------------------------
        // Person info must have the following elements:
        //   1. Identifier
        //   2. Name
        //   3. Date of birth
        //   4. Data source
        //----------------------------------------------------------------------
        PersonType person = profile.getPerson();
        if (person.getName() == null || person.getName().isEmpty()
                || (person.getDataSource() == null && profile.getDataSource() == null)) {
            ack.setDetectedIssueText("Missing required metadata (person identifier or name or data source);");
            return ack;
        }
        //------------------------------------------------------------------------
        // Check to see if this person is already registered.
        //------------------------------------------------------------------------
        DzPatients registeredPatient = null;
        try {
            registeredPatient = validPerson(person);
        } catch (javax.persistence.NoResultException nre) {
        } catch (Exception ex) {
            ack.setDetectedIssueText("Failed to register patient - " + ex.getMessage());
            return ack;
        }
        try {
            psf = PersistentServiceFactory.getInstance(REGISTRY_MANAGER_PU);
            em = psf.getEntityManager();
            em.getTransaction().begin();
            // Get today's date using a Calendar object.
            Calendar cal = Calendar.getInstance();
            Timestamp today = new Timestamp(cal.getTimeInMillis());
            //----------------------------------------------------------------------
            // If yes, only need to add a record to table DZ_REG for each new
            // registry type.
            //----------------------------------------------------------------------
            if (registeredPatient != null) {
                // Remove any registry type (from the request) that is already assigned to this person
                Collection<DzReg> registries = registeredPatient.getDzRegCollection();
                for (DzReg r : registries) {
                    Integer intDzTypeId = Integer.valueOf(r.getDzType().getDztypeId());
                    if (toBeRegisterDzTypes.containsKey(intDzTypeId)) {
                        toBeRegisterDzTypes.remove(intDzTypeId);
                        log.debug("Already registered in Registry " + intDzTypeId + "!");
                        exceptionMsg.append("Already registered in Registry " + intDzTypeId + ";");
                    }
                }
                // What we have left are the new registry types to add to the person's registry profile
                Collection<DzType> toBeRegisterColl = toBeRegisterDzTypes.values();
                for (DzType d : toBeRegisterColl) {
                    // Only need to add a record to table DZ_REG
                    DzRegPK pk = new DzRegPK(person.getId(), d.getDztypeId());
                    DzReg newDzreg = new DzReg();
                    newDzreg.setDzRegPK(pk);
                    newDzreg.setActive(_ACTIVE);
                    newDzreg.setDataSource(person.getDataSource());
                    newDzreg.setRegisteredDt(today);
                    newDzreg.setInsertedDt(today);
                    em.persist(newDzreg);
                }
            }
            //----------------------------------------------------------------------
            // If no, need to insert a new record in the DZ_PATIENTS table and a new
            // record in table DZ_REG for each new registry type.
            //----------------------------------------------------------------------
            else {
                DzPatients newDzPatient = map(person);
                newDzPatient.setInsertedDt(today);
                newDzPatient.setUpdatedDt(today);
                if (person.getDataSource() == null) {
                    if (profile.getDataSource() != null) {
                        newDzPatient.setDataSource(profile.getDataSource());
                    } else {
                        // Cannot insert record
                        throw new Exception("Missing required metadata (data source);");
                    }
                } else {
                    newDzPatient.setDataSource(profile.getDataSource());
                }
                Collection<DzType> dzTypes = toBeRegisterDzTypes.values();
                Collection<DzReg> newDzregList = new ArrayList<DzReg>(dzTypes.size());
                for (DzType dzType : dzTypes) {
                    DzRegPK pk = new DzRegPK(person.getId(), dzType.getDztypeId());
                    DzReg newDzreg = new DzReg();
                    newDzreg.setDzRegPK(pk);
                    newDzreg.setActive(_ACTIVE);
                    newDzreg.setRegisteredDt(today);
                    newDzreg.setInsertedDt(today);
                    if (person.getDataSource() == null) {
                        newDzreg.setDataSource(profile.getDataSource());
                    } else {
                        newDzreg.setDataSource(person.getDataSource());
                    }
                    newDzregList.add(newDzreg);
                }
                newDzPatient.setDzRegCollection(newDzregList);
                em.persist(newDzPatient);
            }
            em.getTransaction().commit();
            ack.setResponseCode(_OK);
            ack.setDetectedIssueText(exceptionMsg.toString());
            return ack;
        } catch (Exception ex) {
            ex.printStackTrace();
            em.getTransaction().rollback();
            log.error("Failed to create new records in table DZ_PATIENTS/DZ_REG for profile=" + profile);
            ack.setDetectedIssueText(
                    "Failed to register patient " + profile.getPerson().getId() + "-" + ex.getMessage());
            return ack;
        } finally {
            em.close();
        }
    } else {
        ack.setDetectedIssueText("Invalid registry profile");
        return ack;
    }
}
From source file:org.broadleafcommerce.openadmin.server.service.persistence.module.provider.RuleFieldPersistenceProvider.java
protected boolean updateQuantityRule(EntityManager em, DataDTOToMVELTranslator translator, String entityKey,
        String fieldService, String jsonPropertyValue, Collection<QuantityBasedRule> criteriaList,
        Class<?> memberType, Object parent, String mappedBy, Property property) {
    boolean dirty = false;
    if (!StringUtils.isEmpty(jsonPropertyValue)) {
        // Avoid a lazy init exception on the criteria list for criteria created during an add
        criteriaList.size();
        DataWrapper dw = ruleFieldExtractionUtility.convertJsonToDataWrapper(jsonPropertyValue);
        if (dw != null && StringUtils.isEmpty(dw.getError())) {
            List<QuantityBasedRule> updatedRules = new ArrayList<QuantityBasedRule>();
            for (DataDTO dto : dw.getData()) {
                if (dto.getId() != null && !CollectionUtils.isEmpty(criteriaList)) {
                    checkId: {
                        // Updates are comprehensive, even data that was not changed is submitted here
                        // Update existing criteria
                        for (QuantityBasedRule quantityBasedRule : criteriaList) {
                            // Make compatible with the enterprise module
                            Long id = sandBoxHelper.getOriginalId(quantityBasedRule);
                            boolean isMatch = dto.getId().equals(id)
                                    || dto.getId().equals(quantityBasedRule.getId());
                            if (isMatch) {
                                String mvel;
                                // Don't update if the data has not changed
                                if (!quantityBasedRule.getQuantity().equals(dto.getQuantity())) {
                                    dirty = true;
                                }
                                try {
                                    mvel = ruleFieldExtractionUtility.convertDTOToMvelString(translator,
                                            entityKey, dto, fieldService);
                                    if (!quantityBasedRule.getMatchRule().equals(mvel)) {
                                        dirty = true;
                                    }
                                } catch (MVELTranslationException e) {
                                    throw new RuntimeException(e);
                                }
                                if (!dirty && extensionManager != null) {
                                    ExtensionResultHolder<Boolean> resultHolder = new ExtensionResultHolder<Boolean>();
                                    ExtensionResultStatusType result = extensionManager.getProxy()
                                            .establishDirtyState(quantityBasedRule, resultHolder);
                                    if (ExtensionResultStatusType.NOT_HANDLED != result
                                            && resultHolder.getResult() != null) {
                                        dirty = resultHolder.getResult();
                                    }
                                }
                                if (dirty) {
                                    quantityBasedRule.setQuantity(dto.getQuantity());
                                    quantityBasedRule.setMatchRule(mvel);
                                    quantityBasedRule = em.merge(quantityBasedRule);
                                }
                                updatedRules.add(quantityBasedRule);
                                break checkId;
                            }
                        }
                        throw new IllegalArgumentException("Unable to update the rule of type ("
                                + memberType.getName() + ") because an update was requested for id ("
                                + dto.getId() + "), which does not exist.");
                    }
                } else {
                    // Create a new criteria
                    QuantityBasedRule quantityBasedRule;
                    try {
                        quantityBasedRule = (QuantityBasedRule) memberType.newInstance();
                        quantityBasedRule.setQuantity(dto.getQuantity());
                        quantityBasedRule.setMatchRule(ruleFieldExtractionUtility
                                .convertDTOToMvelString(translator, entityKey, dto, fieldService));
                        if (StringUtils.isEmpty(quantityBasedRule.getMatchRule())
                                && !StringUtils.isEmpty(dw.getRawMvel())) {
                            quantityBasedRule.setMatchRule(dw.getRawMvel());
                        }
                        PropertyUtils.setNestedProperty(quantityBasedRule, mappedBy, parent);
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                    em.persist(quantityBasedRule);
                    dto.setId(quantityBasedRule.getId());
                    if (extensionManager != null) {
                        ExtensionResultHolder resultHolder = new ExtensionResultHolder();
                        extensionManager.getProxy().postAdd(quantityBasedRule, resultHolder);
                        if (resultHolder.getResult() != null) {
                            quantityBasedRule = (QuantityBasedRule) resultHolder.getResult();
                        }
                    }
                    updatedRules.add(quantityBasedRule);
                    dirty = true;
                }
            }
            // If an item was not included in the comprehensive submit from the client, we can assume that
            // the listing was deleted, so we remove it here.
            Iterator<QuantityBasedRule> itr = criteriaList.iterator();
            while (itr.hasNext()) {
                checkForRemove: {
                    QuantityBasedRule original = itr.next();
                    for (QuantityBasedRule quantityBasedRule : updatedRules) {
                        Long id = sandBoxHelper.getOriginalId(quantityBasedRule);
                        boolean isMatch = original.getId().equals(id)
                                || original.getId().equals(quantityBasedRule.getId());
                        if (isMatch) {
                            break checkForRemove;
                        }
                    }
                    em.remove(original);
                    itr.remove();
                    dirty = true;
                }
            }
            ObjectMapper mapper = new ObjectMapper();
            String json;
            try {
                json = mapper.writeValueAsString(dw);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
            property.setValue(json);
        }
    }
    return dirty;
}
From source file:com.hiperf.common.ui.server.storage.impl.PersistenceHelper.java
private boolean doPersist(ObjectsToPersist toPersist, String userName, Map<com.hiperf.common.ui.shared.util.Id, INakedObject> res, Map<Object, IdHolder> newIdByOldId, EntityManager em, boolean validateBefore, Locale locale, boolean processExceptions) throws ClassNotFoundException, IntrospectionException, PersistenceException, IllegalAccessException, InvocationTargetException, InstantiationException { try {/*from w ww . ja va2 s . c o m*/ Validator validator = validatorFactory.getValidator(); List<INakedObject> toInsert = toPersist.getInsertedObjects(); if (toInsert != null) { int max = 100 * toInsert.size(); int idx = -1; int k = -1; int s = toInsert.size(); int prevSize = s; while (!toInsert.isEmpty()) { if (s == toInsert.size()) { k++; } else k = 0; if (k == 1) { logger.log(Level.FINE, "Impossible to persist data : one linked object not found in toInsert list"); return false; } if (prevSize == toInsert.size()) { idx++; } else { idx = 0; prevSize = toInsert.size(); } if (idx > max) { logger.log(Level.FINE, "Impossible to persist data : one linked object not found in toInsert list..."); return false; } Iterator<INakedObject> it = toInsert.iterator(); while (it.hasNext()) { INakedObject o = (INakedObject) it.next(); String className = o.getClass().getName(); if (o instanceof IAuditable) { IAuditable aud = (IAuditable) o; aud.setCreateUser(userName); aud.setCreateDate(new Date()); } Set<PropertyDescriptor> ids = idsByClassName.get(className); processLinkedCollectionsBeforePersist(o, collectionsByClassName.get(className)); if (!processLinkedObjectsBeforePersist(newIdByOldId, o, lazysByClassName.get(className), toPersist)) continue; if (!processLinkedObjectsBeforePersist(newIdByOldId, o, eagerObjectsByClassName.get(className), toPersist)) continue; if (generatedIdClasses.contains(className)) { PropertyDescriptor idPd = ids.iterator().next(); Object oldId = idPd.getReadMethod().invoke(o, StorageService.emptyArg); Object[] args = new Object[1]; if (!idPd.getPropertyType().isPrimitive()) args[0] = null; else args[0] = 0L; idPd.getWriteMethod().invoke(o, args); if (validateBefore) { Set<ConstraintViolation<INakedObject>> errors = validator.validate(o); if (errors != null && !errors.isEmpty()) { it.remove(); continue; } try { em.persist(o); } catch (Exception e) { it.remove(); continue; } } else em.persist(o); Object newId = idPd.getReadMethod().invoke(o, StorageService.emptyArg); newIdByOldId.put(oldId, new IdHolder(newId, className)); List<Object> idVals = new ArrayList<Object>(1); idVals.add(oldId); List<String> idFields = new ArrayList<String>(1); idFields.add(idPd.getName()); res.put(new com.hiperf.common.ui.shared.util.Id(idFields, idVals), o); it.remove(); } else { com.hiperf.common.ui.shared.util.Id id = getId(o, ids); int i = 0; boolean toProcess = true; for (Object idVal : id.getFieldValues()) { if ((idVal instanceof Long && ((Long) idVal).longValue() < 0) || (idVal instanceof String && ((String) idVal).startsWith(PersistenceManager.SEQ_PREFIX))) { IdHolder newIds = newIdByOldId.get(idVal); if (newIds != null) { String att = id.getFieldNames().get(i); for (PropertyDescriptor idPd : ids) { if (idPd.getName().equals(att)) { Object[] args = new Object[1]; args[0] = newIds.getId(); idPd.getWriteMethod().invoke(o, args); break; } } } else { toProcess = false; break; } } i++; } if (toProcess) { if (validateBefore) { Set<ConstraintViolation<INakedObject>> errors = validator.validate(o); if (errors != null && !errors.isEmpty()) { it.remove(); continue; } try { refreshManyToOneLinkedWithId(o, id, 
em); em.persist(o); } catch (Exception e) { it.remove(); continue; } } else { refreshManyToOneLinkedWithId(o, id, em); em.persist(o); } id = getId(o, ids); res.put(id, o); it.remove(); } } } } } Map<String, Set<com.hiperf.common.ui.shared.util.Id>> toDelete = toPersist .getRemovedObjectsIdsByClassName(); if (toDelete != null) { for (String className : toDelete.keySet()) { Set<com.hiperf.common.ui.shared.util.Id> ids = toDelete.get(className); Class<?> clazz = Class.forName(className); Map<Field, Field> toRemove = null; if (ids != null && !ids.isEmpty()) { com.hiperf.common.ui.shared.util.Id id = ids.iterator().next(); if (id.getFieldValues().size() > 1) { toRemove = new HashMap<Field, Field>(); Field[] fields = clazz.getDeclaredFields(); for (Field f : fields) { if (f.isAnnotationPresent(ManyToOne.class)) { Field[] ff = f.getType().getDeclaredFields(); for (Field lf : ff) { OneToMany ann = lf.getAnnotation(OneToMany.class); if (ann != null && ann.targetEntity() != null && ann.targetEntity().equals(clazz)) { toRemove.put(f, lf); } } } } // TODO : manage annotations on the getters... } } for (com.hiperf.common.ui.shared.util.Id id : ids) { INakedObject no = getObject(clazz, id, em); if (no != null) { if (toRemove != null && !toRemove.isEmpty()) { for (Entry<Field, Field> e : toRemove.entrySet()) { Field f = e.getKey(); Field ff = e.getValue(); boolean b1 = false; boolean b2 = false; if (!f.isAccessible()) { f.setAccessible(true); b1 = true; } if (!ff.isAccessible()) { ff.setAccessible(true); b2 = true; } ((Collection) ff.get(f.get(no))).remove(no); if (b1) f.setAccessible(false); if (b2) ff.setAccessible(false); } } else { // TODO : manage annotations on the getters... } em.remove(no); } } } } Map<String, Map<com.hiperf.common.ui.shared.util.Id, Map<String, Serializable>>> toUpdate = toPersist .getUpdatedObjects(); if (toUpdate != null) { for (String className : toUpdate.keySet()) { Map<com.hiperf.common.ui.shared.util.Id, Map<String, Serializable>> map = toUpdate .get(className); Class<?> clazz = Class.forName(className); Iterator<Entry<com.hiperf.common.ui.shared.util.Id, Map<String, Serializable>>> iterator = map .entrySet().iterator(); while (iterator.hasNext()) { Entry<com.hiperf.common.ui.shared.util.Id, Map<String, Serializable>> entry = iterator .next(); com.hiperf.common.ui.shared.util.Id id = entry.getKey(); INakedObject original = getObject(clazz, id, em); Map<String, Serializable> updateMap = entry.getValue(); for (String att : updateMap.keySet()) { Object object = updateMap.get(att); if (object != null && object instanceof NakedObjectHandler) { NakedObjectHandler oo = (NakedObjectHandler) object; com.hiperf.common.ui.shared.util.Id objId = oo.getId(); if (generatedIdClasses.contains(oo.getClassName()) && newIdByOldId.containsKey(objId.getFieldValues().get(0))) { IdHolder newIds = newIdByOldId.get(objId.getFieldValues().get(0)); List<Object> idVals = new ArrayList<Object>(1); idVals.add(newIds.getId()); List<String> idFields = new ArrayList<String>(1); idFields.add(idsByClassName.get(oo.getClassName()).iterator().next().getName()); com.hiperf.common.ui.shared.util.Id newObjId = new com.hiperf.common.ui.shared.util.Id( idFields, idVals); object = getObject(Class.forName(oo.getClassName()), newObjId, em); } else { object = getObject(Class.forName(oo.getClassName()), oo.getId(), em); } } updateAttributeValue(className, original, att, object); } if (original instanceof IAuditable) { IAuditable aud = (IAuditable) original; aud.setModifyUser(userName); aud.setModifyDate(new Date()); } 
INakedObject o = null; if (validateBefore) { Set<ConstraintViolation<INakedObject>> errors = validator.validate(original); if (errors != null && !errors.isEmpty()) { iterator.remove(); continue; } try { o = em.merge(original); em.flush(); } catch (Exception e) { iterator.remove(); continue; } } else o = em.merge(original); res.put(id, o); } } } processAddedManyToMany(toPersist, res, newIdByOldId, em); processRemovedManyToMany(toPersist, res, newIdByOldId, em); em.flush(); return true; } catch (Exception e) { logger.log(Level.WARNING, "Exception", e); if (processExceptions) { processDbExceptions(locale, e); return false; } else throw new PersistenceException(e); } }
From source file:org.apache.juddi.api.impl.UDDISubscriptionImpl.java
@SuppressWarnings("unchecked") public SubscriptionResultsList getSubscriptionResults(GetSubscriptionResults body, UddiEntityPublisher publisher) throws DispositionReportFaultMessage { long startTime = System.currentTimeMillis(); EntityManager em = PersistenceManager.getEntityManager(); EntityTransaction tx = em.getTransaction(); try {/*w ww. j a v a 2 s. com*/ tx.begin(); if (publisher == null) { publisher = this.getEntityPublisher(em, body.getAuthInfo()); new ValidateSubscription(publisher).validateGetSubscriptionResults(em, body); } org.apache.juddi.model.Subscription modelSubscription = em .find(org.apache.juddi.model.Subscription.class, body.getSubscriptionKey()); SubscriptionFilter subscriptionFilter = null; try { subscriptionFilter = (SubscriptionFilter) JAXBMarshaller.unmarshallFromString( modelSubscription.getSubscriptionFilter(), JAXBMarshaller.PACKAGE_SUBSCRIPTION); } catch (JAXBException e) { logger.error("JAXB Exception while unmarshalling subscription filter", e); throw new FatalErrorException(new ErrorMessage("errors.Unspecified")); } if (logger.isDebugEnabled()) logger.debug("filter=" + modelSubscription.getSubscriptionFilter()); SubscriptionResultsList result = new SubscriptionResultsList(); result.setChunkToken("0"); //chunkToken: Optional element used to retrieve subsequent groups of data when the first invocation of this API indicates more data is available. This occurs when a chunkToken is returned whose value is not "0" in the validValuesList structure described in the next section. To retrieve the next chunk of data, the chunkToken returned should be used as an argument to the next invocation of this API. result.setCoveragePeriod(body.getCoveragePeriod()); // The subscription structure is required output for the results org.uddi.sub_v3.Subscription apiSubscription = new org.uddi.sub_v3.Subscription(); MappingModelToApi.mapSubscription(modelSubscription, apiSubscription); result.setSubscription(apiSubscription); Date startPointDate = new Date( body.getCoveragePeriod().getStartPoint().toGregorianCalendar().getTimeInMillis()); Date endPointDate = new Date( body.getCoveragePeriod().getEndPoint().toGregorianCalendar().getTimeInMillis()); Integer chunkData = null; if (body.getChunkToken() != null && body.getChunkToken().length() > 0) { SubscriptionChunkToken chunkToken = em.find(SubscriptionChunkToken.class, body.getChunkToken()); if (chunkToken == null) throw new InvalidValueException(new ErrorMessage( "errors.getsubscriptionresult.InvalidChunkToken", body.getChunkToken())); if (!chunkToken.getSubscriptionKey().equals(chunkToken.getSubscriptionKey())) throw new InvalidValueException(new ErrorMessage( "errors.getsubscriptionresult.NonMatchingChunkToken", body.getChunkToken())); if (chunkToken.getStartPoint() != null && chunkToken.getStartPoint().getTime() != startPointDate.getTime()) throw new InvalidValueException(new ErrorMessage( "errors.getsubscriptionresult.NonMatchingChunkToken", body.getChunkToken())); if (chunkToken.getEndPoint() != null && chunkToken.getEndPoint().getTime() != endPointDate.getTime()) throw new InvalidValueException(new ErrorMessage( "errors.getsubscriptionresult.NonMatchingChunkToken", body.getChunkToken())); if (chunkToken.getExpiresAfter().before(new Date())) throw new InvalidValueException(new ErrorMessage( "errors.getsubscriptionresult.ExpiredChunkToken", body.getChunkToken())); chunkData = chunkToken.getData(); // We've got the data from the chunk token, now it is no longer needed (once it's called, it's used up) em.remove(chunkToken); } if 
(subscriptionFilter.getFindBinding() != null) { //Get the current matching keys List<?> currentMatchingKeys = getSubscriptionMatches(subscriptionFilter, em); // See if there's any missing keys by comparing against the previous matches. If so, they missing keys are added to the KeyBag and // then added to the result List<String> missingKeys = getMissingKeys(currentMatchingKeys, modelSubscription.getSubscriptionMatches()); if (missingKeys != null && missingKeys.size() > 0) { KeyBag missingKeyBag = new KeyBag(); missingKeyBag.setDeleted(true); for (String key : missingKeys) missingKeyBag.getBindingKey().add(key); result.getKeyBag().add(missingKeyBag); } // Re-setting the subscription matches to the new matching key collection //modelSubscription.getSubscriptionMatches().clear(); //for (Object key : currentMatchingKeys) { // SubscriptionMatch subMatch = new SubscriptionMatch(modelSubscription, (String)key); // modelSubscription.getSubscriptionMatches().add(subMatch); //} // Now, finding the necessary entities, within the coverage period limits if (modelSubscription.isBrief()) { KeyBag resultsKeyBag = new KeyBag(); for (String key : (List<String>) currentMatchingKeys) resultsKeyBag.getBindingKey().add(key); result.getKeyBag().add(resultsKeyBag); } else { FindBinding fb = subscriptionFilter.getFindBinding(); org.apache.juddi.query.util.FindQualifiers findQualifiers = new org.apache.juddi.query.util.FindQualifiers(); findQualifiers.mapApiFindQualifiers(fb.getFindQualifiers()); // To do subscription "chunking", the listHead and maxRows are nulled which will set them to system default. User settings for // these values don't make sense with the "chunking" feature. fb.setListHead(null); fb.setMaxRows(null); // Setting the start index to the chunkData Holder<Integer> subscriptionStartIndex = new Holder<Integer>(chunkData); BindingDetail bindingDetail = InquiryHelper.getBindingDetailFromKeys(fb, findQualifiers, em, currentMatchingKeys, startPointDate, endPointDate, subscriptionStartIndex, modelSubscription.getMaxEntities()); // Upon exiting above function, if more results are to be had, the subscriptionStartIndex will contain the latest value (or null // if no more results) chunkData = subscriptionStartIndex.value; result.setBindingDetail(bindingDetail); } } if (subscriptionFilter.getFindBusiness() != null) { //Get the current matching keys List<?> currentMatchingKeys = getSubscriptionMatches(subscriptionFilter, em); List<String> missingKeys = getMissingKeys(currentMatchingKeys, modelSubscription.getSubscriptionMatches()); if (missingKeys != null && missingKeys.size() > 0) { KeyBag missingKeyBag = new KeyBag(); missingKeyBag.setDeleted(true); for (String key : missingKeys) missingKeyBag.getBusinessKey().add(key); result.getKeyBag().add(missingKeyBag); } // Re-setting the subscription matches to the new matching key collection //modelSubscription.getSubscriptionMatches().clear(); //for (Object key : currentMatchingKeys) { // SubscriptionMatch subMatch = new SubscriptionMatch(modelSubscription, (String)key); // modelSubscription.getSubscriptionMatches().add(subMatch); //} // Now, finding the necessary entities, within the coverage period limits if (modelSubscription.isBrief()) { KeyBag resultsKeyBag = new KeyBag(); for (String key : (List<String>) currentMatchingKeys) resultsKeyBag.getBusinessKey().add(key); result.getKeyBag().add(resultsKeyBag); } else { FindBusiness fb = subscriptionFilter.getFindBusiness(); org.apache.juddi.query.util.FindQualifiers findQualifiers = new 
org.apache.juddi.query.util.FindQualifiers(); findQualifiers.mapApiFindQualifiers(fb.getFindQualifiers()); // To do subscription "chunking", the listHead and maxRows are nulled which will set them to system default. User settings for // these values don't make sense with the "chunking" feature. fb.setListHead(null); fb.setMaxRows(null); // Setting the start index to the chunkData Holder<Integer> subscriptionStartIndex = new Holder<Integer>(chunkData); BusinessList businessList = InquiryHelper.getBusinessListFromKeys(fb, findQualifiers, em, currentMatchingKeys, startPointDate, endPointDate, subscriptionStartIndex, modelSubscription.getMaxEntities()); // Upon exiting above function, if more results are to be had, the subscriptionStartIndex will contain the latest value (or null // if no more results) chunkData = subscriptionStartIndex.value; result.setBusinessList(businessList); } } if (subscriptionFilter.getFindService() != null) { //Get the current matching keys List<?> currentMatchingKeys = getSubscriptionMatches(subscriptionFilter, em); if (logger.isDebugEnabled()) logger.debug("current matching keys=" + currentMatchingKeys); List<String> missingKeys = getMissingKeys(currentMatchingKeys, modelSubscription.getSubscriptionMatches()); if (missingKeys != null && missingKeys.size() > 0) { KeyBag missingKeyBag = new KeyBag(); missingKeyBag.setDeleted(true); for (String key : missingKeys) missingKeyBag.getServiceKey().add(key); result.getKeyBag().add(missingKeyBag); } // Re-setting the subscription matches to the new matching key collection //modelSubscription.getSubscriptionMatches().clear(); //for (Object key : currentMatchingKeys) { // SubscriptionMatch subMatch = new SubscriptionMatch(modelSubscription, (String)key); // modelSubscription.getSubscriptionMatches().add(subMatch); //} // Now, finding the necessary entities, within the coverage period limits if (modelSubscription.isBrief()) { KeyBag resultsKeyBag = new KeyBag(); for (String key : (List<String>) currentMatchingKeys) resultsKeyBag.getServiceKey().add(key); result.getKeyBag().add(resultsKeyBag); } else { FindService fs = subscriptionFilter.getFindService(); org.apache.juddi.query.util.FindQualifiers findQualifiers = new org.apache.juddi.query.util.FindQualifiers(); findQualifiers.mapApiFindQualifiers(fs.getFindQualifiers()); // To do subscription "chunking", the listHead and maxRows are nulled which will set them to system default. User settings for // these values don't make sense with the "chunking" feature. 
fs.setListHead(null); fs.setMaxRows(null); // Setting the start index to the chunkData Holder<Integer> subscriptionStartIndex = new Holder<Integer>(chunkData); ServiceList serviceList = InquiryHelper.getServiceListFromKeys(fs, findQualifiers, em, currentMatchingKeys, startPointDate, endPointDate, subscriptionStartIndex, modelSubscription.getMaxEntities()); if (serviceList.getServiceInfos() == null || serviceList.getServiceInfos().getServiceInfo().size() == 0) { serviceList = null; } // Upon exiting above function, if more results are to be had, the subscriptionStartIndex will contain the latest value (or null // if no more results) chunkData = subscriptionStartIndex.value; result.setServiceList(serviceList); } } if (subscriptionFilter.getFindTModel() != null) { //Get the current matching keys List<?> currentMatchingKeys = getSubscriptionMatches(subscriptionFilter, em); List<String> missingKeys = getMissingKeys(currentMatchingKeys, modelSubscription.getSubscriptionMatches()); if (missingKeys != null && missingKeys.size() > 0) { KeyBag missingKeyBag = new KeyBag(); missingKeyBag.setDeleted(true); for (String key : missingKeys) missingKeyBag.getTModelKey().add(key); result.getKeyBag().add(missingKeyBag); } // Re-setting the subscription matches to the new matching key collection //modelSubscription.getSubscriptionMatches().clear(); //for (Object key : currentMatchingKeys) { // SubscriptionMatch subMatch = new SubscriptionMatch(modelSubscription, (String)key); // modelSubscription.getSubscriptionMatches().add(subMatch); //} // Now, finding the necessary entities, within the coverage period limits if (modelSubscription.isBrief()) { KeyBag resultsKeyBag = new KeyBag(); for (String key : (List<String>) currentMatchingKeys) resultsKeyBag.getTModelKey().add(key); result.getKeyBag().add(resultsKeyBag); } else { FindTModel ft = subscriptionFilter.getFindTModel(); org.apache.juddi.query.util.FindQualifiers findQualifiers = new org.apache.juddi.query.util.FindQualifiers(); findQualifiers.mapApiFindQualifiers(ft.getFindQualifiers()); // To do subscription "chunking", the listHead and maxRows are nulled which will set them to system default. User settings for // these values don't make sense with the "chunking" feature. ft.setListHead(null); ft.setMaxRows(null); // Setting the start index to the chunkData Holder<Integer> subscriptionStartIndex = new Holder<Integer>(chunkData); // If more results are to be had, chunkData will come out with a value and a new token will be generated below. Otherwise, it will // be null and no token will be generated. 
TModelList tmodelList = InquiryHelper.getTModelListFromKeys(ft, findQualifiers, em, currentMatchingKeys, startPointDate, endPointDate, subscriptionStartIndex, modelSubscription.getMaxEntities()); // Upon exiting above function, if more results are to be had, the subscriptionStartIndex will contain the latest value (or null // if no more results) chunkData = subscriptionStartIndex.value; result.setTModelList(tmodelList); } } if (subscriptionFilter.getFindRelatedBusinesses() != null) { FindRelatedBusinesses findRelatedBusiness = subscriptionFilter.getFindRelatedBusinesses(); RelatedBusinessesList relatedBusinessList = InquiryHelper .getRelatedBusinessesList(findRelatedBusiness, em, startPointDate, endPointDate); result.setRelatedBusinessesList(relatedBusinessList); } if (subscriptionFilter.getGetBindingDetail() != null) { GetBindingDetail getDetail = subscriptionFilter.getGetBindingDetail(); // Running through the key list here to determine the deleted keys and store the existing entities. KeyBag missingKeyBag = new KeyBag(); missingKeyBag.setDeleted(true); List<org.apache.juddi.model.BindingTemplate> existingList = new ArrayList<org.apache.juddi.model.BindingTemplate>( 0); for (String key : getDetail.getBindingKey()) { org.apache.juddi.model.BindingTemplate modelBindingTemplate = em .find(org.apache.juddi.model.BindingTemplate.class, key); if (modelBindingTemplate != null) existingList.add(modelBindingTemplate); else missingKeyBag.getBindingKey().add(key); } // Store deleted keys in the results if (missingKeyBag.getBindingKey() != null && missingKeyBag.getBindingKey().size() > 0) result.getKeyBag().add(missingKeyBag); KeyBag resultsKeyBag = new KeyBag(); BindingDetail bindingDetail = new BindingDetail(); // Set the currentIndex to 0 or the value of the chunkData int currentIndex = 0; if (chunkData != null) currentIndex = chunkData; int returnedRowCount = 0; while (currentIndex < existingList.size()) { org.apache.juddi.model.BindingTemplate modelBindingTemplate = existingList.get(currentIndex); if (startPointDate.after(modelBindingTemplate.getModifiedIncludingChildren())) { currentIndex++; continue; } if (endPointDate.before(modelBindingTemplate.getModifiedIncludingChildren())) { currentIndex++; continue; } if (modelSubscription.isBrief()) { resultsKeyBag.getBindingKey().add(modelBindingTemplate.getEntityKey()); } else { org.uddi.api_v3.BindingTemplate apiBindingTemplate = new org.uddi.api_v3.BindingTemplate(); MappingModelToApi.mapBindingTemplate(modelBindingTemplate, apiBindingTemplate); bindingDetail.getBindingTemplate().add(apiBindingTemplate); returnedRowCount++; } // If the returned rows equals the max allowed, we can end the loop. if (modelSubscription.getMaxEntities() != null) { if (returnedRowCount == modelSubscription.getMaxEntities()) break; } currentIndex++; } // If the loop was broken prematurely (max row count hit) we set the chunk data to the next index to start with. // A non-null value of chunk data will cause a chunk token to be generated. if (currentIndex < (existingList.size() - 1)) chunkData = currentIndex + 1; else chunkData = null; if (modelSubscription.isBrief()) result.getKeyBag().add(resultsKeyBag); else result.setBindingDetail(bindingDetail); } if (subscriptionFilter.getGetBusinessDetail() != null) { GetBusinessDetail getDetail = subscriptionFilter.getGetBusinessDetail(); // Running through the key list here to determine the deleted keys and store the existing entities. 
KeyBag missingKeyBag = new KeyBag(); missingKeyBag.setDeleted(true); List<org.apache.juddi.model.BusinessEntity> existingList = new ArrayList<org.apache.juddi.model.BusinessEntity>( 0); for (String key : getDetail.getBusinessKey()) { org.apache.juddi.model.BusinessEntity modelBusinessEntity = em .find(org.apache.juddi.model.BusinessEntity.class, key); if (modelBusinessEntity != null) existingList.add(modelBusinessEntity); else missingKeyBag.getBusinessKey().add(key); } // Store deleted keys in the results if (missingKeyBag.getBusinessKey() != null && missingKeyBag.getBusinessKey().size() > 0) result.getKeyBag().add(missingKeyBag); KeyBag resultsKeyBag = new KeyBag(); BusinessDetail businessDetail = new BusinessDetail(); // Set the currentIndex to 0 or the value of the chunkData int currentIndex = 0; if (chunkData != null) currentIndex = chunkData; int returnedRowCount = 0; while (currentIndex < existingList.size()) { org.apache.juddi.model.BusinessEntity modelBusinessEntity = existingList.get(currentIndex); if (startPointDate.after(modelBusinessEntity.getModifiedIncludingChildren())) { currentIndex++; continue; } if (endPointDate.before(modelBusinessEntity.getModifiedIncludingChildren())) { currentIndex++; continue; } if (modelSubscription.isBrief()) { resultsKeyBag.getBusinessKey().add(modelBusinessEntity.getEntityKey()); } else { org.uddi.api_v3.BusinessEntity apiBusinessEntity = new org.uddi.api_v3.BusinessEntity(); MappingModelToApi.mapBusinessEntity(modelBusinessEntity, apiBusinessEntity); businessDetail.getBusinessEntity().add(apiBusinessEntity); returnedRowCount++; } // If the returned rows equals the max allowed, we can end the loop. if (modelSubscription.getMaxEntities() != null) { if (returnedRowCount == modelSubscription.getMaxEntities()) break; } currentIndex++; } // If the loop was broken prematurely (max row count hit) we set the chunk data to the next index to start with. // A non-null value of chunk data will cause a chunk token to be generated. if (currentIndex < (existingList.size() - 1)) chunkData = currentIndex + 1; else chunkData = null; if (modelSubscription.isBrief()) result.getKeyBag().add(resultsKeyBag); else result.setBusinessDetail(businessDetail); } if (subscriptionFilter.getGetServiceDetail() != null) { GetServiceDetail getDetail = subscriptionFilter.getGetServiceDetail(); // Running through the key list here to determine the deleted keys and store the existing entities. 
KeyBag missingKeyBag = new KeyBag(); missingKeyBag.setDeleted(true); List<org.apache.juddi.model.BusinessService> existingList = new ArrayList<org.apache.juddi.model.BusinessService>( 0); for (String key : getDetail.getServiceKey()) { org.apache.juddi.model.BusinessService modelBusinessService = em .find(org.apache.juddi.model.BusinessService.class, key); if (modelBusinessService != null) existingList.add(modelBusinessService); else missingKeyBag.getBusinessKey().add(key); } // Store deleted keys in the results if (missingKeyBag.getServiceKey() != null && missingKeyBag.getServiceKey().size() > 0) result.getKeyBag().add(missingKeyBag); KeyBag resultsKeyBag = new KeyBag(); ServiceDetail serviceDetail = new ServiceDetail(); // Set the currentIndex to 0 or the value of the chunkData int currentIndex = 0; if (chunkData != null) currentIndex = chunkData; int returnedRowCount = 0; while (currentIndex < existingList.size()) { org.apache.juddi.model.BusinessService modelBusinessService = existingList.get(currentIndex); if (startPointDate.after(modelBusinessService.getModifiedIncludingChildren())) { currentIndex++; continue; } if (endPointDate.before(modelBusinessService.getModifiedIncludingChildren())) { currentIndex++; continue; } if (modelSubscription.isBrief()) { resultsKeyBag.getServiceKey().add(modelBusinessService.getEntityKey()); } else { org.uddi.api_v3.BusinessService apiBusinessService = new org.uddi.api_v3.BusinessService(); MappingModelToApi.mapBusinessService(modelBusinessService, apiBusinessService); serviceDetail.getBusinessService().add(apiBusinessService); returnedRowCount++; } // If the returned rows equals the max allowed, we can end the loop. if (modelSubscription.getMaxEntities() != null) { if (returnedRowCount == modelSubscription.getMaxEntities()) break; } currentIndex++; } // If the loop was broken prematurely (max row count hit) we set the chunk data to the next index to start with. // A non-null value of chunk data will cause a chunk token to be generated. if (currentIndex < (existingList.size() - 1)) chunkData = currentIndex + 1; else chunkData = null; if (modelSubscription.isBrief()) result.getKeyBag().add(resultsKeyBag); else result.setServiceDetail(serviceDetail); } if (subscriptionFilter.getGetTModelDetail() != null) { GetTModelDetail getDetail = subscriptionFilter.getGetTModelDetail(); // Running through the key list here to determine the deleted keys and store the existing entities. 
KeyBag missingKeyBag = new KeyBag(); missingKeyBag.setDeleted(true); List<org.apache.juddi.model.Tmodel> existingList = new ArrayList<org.apache.juddi.model.Tmodel>(0); for (String key : getDetail.getTModelKey()) { org.apache.juddi.model.Tmodel modelTModel = em.find(org.apache.juddi.model.Tmodel.class, key); if (modelTModel != null) existingList.add(modelTModel); else missingKeyBag.getTModelKey().add(key); } // Store deleted keys in the results if (missingKeyBag.getTModelKey() != null && missingKeyBag.getTModelKey().size() > 0) result.getKeyBag().add(missingKeyBag); KeyBag resultsKeyBag = new KeyBag(); TModelDetail tmodelDetail = new TModelDetail(); // Set the currentIndex to 0 or the value of the chunkData int currentIndex = 0; if (chunkData != null) currentIndex = chunkData; int returnedRowCount = 0; while (currentIndex < existingList.size()) { org.apache.juddi.model.Tmodel modelTModel = existingList.get(currentIndex); if (startPointDate.after(modelTModel.getModifiedIncludingChildren())) { currentIndex++; continue; } if (endPointDate.before(modelTModel.getModifiedIncludingChildren())) { currentIndex++; continue; } if (modelSubscription.isBrief()) { resultsKeyBag.getTModelKey().add(modelTModel.getEntityKey()); } else { org.uddi.api_v3.TModel apiTModel = new org.uddi.api_v3.TModel(); MappingModelToApi.mapTModel(modelTModel, apiTModel); tmodelDetail.getTModel().add(apiTModel); returnedRowCount++; } // If the returned rows equals the max allowed, we can end the loop. if (modelSubscription.getMaxEntities() != null) { if (returnedRowCount == modelSubscription.getMaxEntities()) break; } currentIndex++; } // If the loop was broken prematurely (max row count hit) we set the chunk data to the next index to start with. // A non-null value of chunk data will cause a chunk token to be generated. if (currentIndex < (existingList.size() - 1)) chunkData = currentIndex + 1; else chunkData = null; if (modelSubscription.isBrief()) result.getKeyBag().add(resultsKeyBag); else result.setTModelDetail(tmodelDetail); } if (subscriptionFilter.getGetAssertionStatusReport() != null) { // The coverage period doesn't apply here (basically because publisher assertions don't keep operational info). 
GetAssertionStatusReport getAssertionStatusReport = subscriptionFilter .getGetAssertionStatusReport(); List<AssertionStatusItem> assertionList = PublicationHelper.getAssertionStatusItemList(publisher, getAssertionStatusReport.getCompletionStatus(), em); AssertionStatusReport assertionStatusReport = new AssertionStatusReport(); for (AssertionStatusItem asi : assertionList) assertionStatusReport.getAssertionStatusItem().add(asi); result.setAssertionStatusReport(assertionStatusReport); } // If chunkData contains non-null data, a new token must be created and the token returned in the results if (chunkData != null) { String chunkToken = CHUNK_TOKEN_PREFIX + UUID.randomUUID(); SubscriptionChunkToken newChunkToken = new SubscriptionChunkToken(chunkToken); newChunkToken.setSubscriptionKey(body.getSubscriptionKey()); newChunkToken.setStartPoint(startPointDate); newChunkToken.setEndPoint(endPointDate); newChunkToken.setData(chunkData); int chunkExpirationMinutes = DEFAULT_CHUNKEXPIRATION_MINUTES; try { chunkExpirationMinutes = AppConfig.getConfiguration() .getInt(Property.JUDDI_SUBSCRIPTION_CHUNKEXPIRATION_MINUTES); } catch (ConfigurationException ce) { throw new FatalErrorException(new ErrorMessage("errors.configuration.Retrieval")); } newChunkToken.setExpiresAfter( new Date(System.currentTimeMillis() + ((long) chunkExpirationMinutes * 60L * 1000L))); em.persist(newChunkToken); result.setChunkToken(chunkToken); } tx.commit(); long procTime = System.currentTimeMillis() - startTime; serviceCounter.update(SubscriptionQuery.GET_SUBSCRIPTIONRESULTS, QueryStatus.SUCCESS, procTime); return result; } catch (DispositionReportFaultMessage drfm) { long procTime = System.currentTimeMillis() - startTime; serviceCounter.update(SubscriptionQuery.GET_SUBSCRIPTIONRESULTS, QueryStatus.FAILED, procTime); throw drfm; } finally { if (tx.isActive()) { tx.rollback(); } em.close(); } }