List of usage examples for javax.ejb TransactionAttributeType REQUIRES_NEW
TransactionAttributeType REQUIRES_NEW
To view the source code for javax.ejb TransactionAttributeType.REQUIRES_NEW, click the Source Link below.
REQUIRES_NEW: the annotated method is always invoked with a new transaction context.
From source file:edu.harvard.iq.dvn.core.study.StudyServiceBean.java
/**
 * Exports the given study in every format the exporter factory supports,
 * then stamps the study with the time this export completed.
 *
 * @param study the study whose released content should be exported
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public void exportStudy(Study study) {
    for (String format : studyExporterFactory.getExportFormats()) {
        exportStudyToFormat(study, format);
    }
    // Record when the last (successful) export pass ran.
    study.setLastExportTime(new Date());
}
From source file:edu.harvard.iq.dvn.core.study.StudyServiceBean.java
/**
 * Id-based convenience overload: looks up the study by primary key and
 * delegates to {@link #exportStudy(Study)}.
 *
 * @param studyId primary key of the study to export
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public void exportStudy(Long studyId) {
    Study found = em.find(Study.class, studyId);
    exportStudy(found);
}
From source file:edu.harvard.iq.dvn.core.study.StudyServiceBean.java
/**
 * Id-based convenience overload: looks up the study by primary key and
 * delegates to {@link #exportStudyToFormat(Study, String)}.
 *
 * @param studyId      primary key of the study to export
 * @param exportFormat name of the export format to produce
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public void exportStudyToFormat(Long studyId, String exportFormat) {
    Study found = em.find(Study.class, studyId);
    exportStudyToFormat(found, exportFormat);
}
From source file:edu.harvard.iq.dvn.core.study.StudyServiceBean.java
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public void exportStudyToFormat(Study study, String exportFormat) { // only export released studies // TODO: we should clean up the export logic to handle export of versions if (study.getReleasedVersion() == null) { return;//w w w .j a v a 2s . c om } File studyDir = FileUtil.getStudyFileDir(study); StudyExporter studyExporter = studyExporterFactory.getStudyExporter(exportFormat); String fileName = "export_" + exportFormat; if (studyExporter.isXmlFormat()) { fileName += ".xml"; } File exportFile = new File(studyDir, fileName); OutputStream os = null; try { exportFile.createNewFile(); os = new FileOutputStream(exportFile); studyExporter.exportStudy(study, os); } catch (IOException e) { throw new EJBException(e); } finally { try { if (os != null) { os.close(); } } catch (IOException ex) { } } // study.setLastExportTime(new Date()); }
From source file:org.rhq.enterprise.server.content.ContentSourceManagerBean.java
/**
 * Merges the "updated packages" portion of a package sync report: for every
 * package the remote repository reports as changed, refreshes the stored
 * PackageVersion metadata and the PackageVersionContentSource location.
 * Runs in its own transaction (REQUIRES_NEW).
 *
 * @param contentSource the content source being synced (not read directly here)
 * @param report        sync report whose getUpdatedPackages() entries are merged
 * @param previous      previously-known PVCS entries, keyed by package details key
 * @param syncResults   running results record; re-merged as progress is appended
 * @param progress      human-readable progress log, mirrored into syncResults
 * @return the latest managed copy of the sync results
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public RepoSyncResults _mergePackageSyncReportUPDATE(ContentSource contentSource, PackageSyncReport report,
    Map<ContentProviderPackageDetailsKey, PackageVersionContentSource> previous, RepoSyncResults syncResults,
    StringBuilder progress) {
    progress.append(new Date()).append(": ").append("Updating");
    syncResults.setResults(progress.toString());
    syncResults = repoManager.mergeRepoSyncResults(syncResults);

    int flushCount = 0; // used to know when we should flush the entity manager - for performance purposes
    int updateCount = 0;

    // update all changed packages that are still available on the remote repository but whose information is different
    //
    // for each updated package, we have to find its resource type
    // (which must exist, or we abort that package and move on to the next);
    // then we have to get the current PVCS and merge its updates
    for (ContentProviderPackageDetails updatedDetails : report.getUpdatedPackages()) {
        ContentProviderPackageDetailsKey key = updatedDetails.getContentProviderPackageDetailsKey();

        PackageVersionContentSource previousPvcs = previous.get(key);
        PackageVersionContentSource attachedPvcs; // what we will find in the DB, in jpa session

        attachedPvcs = entityManager.find(PackageVersionContentSource.class,
            previousPvcs.getPackageVersionContentSourcePK());

        if (attachedPvcs == null) {
            log.warn("Content source adapter reported that a non-existing package was updated, adding it [" + key
                + "]");
            // I should probably not rely on persist cascade and use contentmanager.persistOrMergePackageVersionSafely
            // however, this rarely will occur (should never occur really) so I won't worry about it
            entityManager.persist(previousPvcs);
            attachedPvcs = previousPvcs;
        }

        PackageVersion pv = previousPvcs.getPackageVersionContentSourcePK().getPackageVersion();

        // Copy every metadata field the provider reported onto the entity.
        pv.setDisplayName(updatedDetails.getDisplayName());
        pv.setDisplayVersion(updatedDetails.getDisplayVersion());
        pv.setExtraProperties(updatedDetails.getExtraProperties());
        pv.setFileCreatedDate(updatedDetails.getFileCreatedDate());
        pv.setFileName(updatedDetails.getFileName());
        pv.setFileSize(updatedDetails.getFileSize());
        pv.setLicenseName(updatedDetails.getLicenseName());
        pv.setLicenseVersion(updatedDetails.getLicenseVersion());
        pv.setLongDescription(updatedDetails.getLongDescription());
        pv.setMD5(updatedDetails.getMD5());
        pv.setMetadata(updatedDetails.getMetadata());
        pv.setSHA256(updatedDetails.getSHA256());
        pv.setShortDescription(updatedDetails.getShortDescription());

        // we normally would want to do this:
        // pv = entityManager.merge(pv);
        // but we have to watch out for an agent sending us a content report at the same time that
        // would create this PV concurrently. If the below line takes too hard a hit on performance,
        // we can replace it with the merge call mentioned above and hope this concurrency doesn't happen.
        // if it does happen, we will rollback our tx and we'll have to wait for the next sync to fix it.
        pv = contentManager.persistOrMergePackageVersionSafely(pv);

        attachedPvcs.setLocation(updatedDetails.getLocation());

        // Periodically flush+clear the persistence context to bound memory use.
        if ((++flushCount % 200) == 0) {
            entityManager.flush();
            entityManager.clear();
        }

        // Periodically publish a progress checkpoint.
        if ((++updateCount % 200) == 0) {
            progress.append("...").append(updateCount);
            syncResults.setResults(progress.toString());
            syncResults = repoManager.mergeRepoSyncResults(syncResults);
        }
    }

    progress.append("...").append(updateCount).append('\n');
    syncResults.setResults(progress.toString());
    syncResults = repoManager.mergeRepoSyncResults(syncResults);

    return syncResults;
}
From source file:org.rhq.enterprise.server.configuration.ConfigurationManagerBean.java
/**
 * Merges the given configuration update into the persistence context in its
 * own transaction and returns the managed copy.
 *
 * @param configurationUpdate the (possibly detached) update to merge
 * @return the managed instance resulting from the merge
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public AbstractResourceConfigurationUpdate mergeConfigurationUpdate(
    AbstractResourceConfigurationUpdate configurationUpdate) {
    AbstractResourceConfigurationUpdate managed = this.entityManager.merge(configurationUpdate);
    return managed;
}
From source file:org.rhq.enterprise.server.configuration.ConfigurationManagerBean.java
/**
 * Persists the given group configuration update in its own transaction so
 * its generated id is committed and available to the caller immediately.
 *
 * @param update the new group configuration update to store
 * @return the persisted update's generated id
 * @throws SchedulerException declared for callers; not thrown by this body
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public int createGroupConfigurationUpdate(AbstractGroupConfigurationUpdate update) throws SchedulerException {
    entityManager.persist(update);
    return update.getId();
}
From source file:org.rhq.enterprise.server.content.ContentSourceManagerBean.java
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public RepoSyncResults _mergeDistributionSyncReportREMOVE(ContentSource contentSource, DistributionSyncReport report, RepoSyncResults syncResults, StringBuilder progress) { progress.append(new Date()).append(": ").append("Removing"); syncResults.setResults(progress.toString()); syncResults = repoManager.mergeRepoSyncResults(syncResults); DistributionManagerLocal distManager = LookupUtil.getDistributionManagerLocal(); Subject overlord = LookupUtil.getSubjectManager().getOverlord(); // remove all distributions that are no longer available on the remote repository for (DistributionDetails doomedDetails : report.getDeletedDistributions()) { Distribution doomedDist = distManager.getDistributionByLabel(doomedDetails.getLabel()); distManager.deleteDistributionByDistId(overlord, doomedDist.getId()); distManager.deleteDistributionFilesByDistId(overlord, doomedDist.getId()); progress.append("Removed distribution & distribution files for: " + doomedDetails.getLabel()); syncResults.setResults(progress.toString()); syncResults = repoManager.mergeRepoSyncResults(syncResults); }/*w w w . ja va2 s.com*/ progress.append("Finished Distribution removal...").append('\n'); syncResults.setResults(progress.toString()); syncResults = repoManager.mergeRepoSyncResults(syncResults); return syncResults; }
From source file:org.rhq.enterprise.server.resource.ResourceManagerBean.java
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public int clearResourceConfigErrorByType(Subject subject, int resourceId, ResourceErrorType resourceErrorType) { if (!authorizationManager.canViewResource(subject, resourceId)) { throw new PermissionException( "Cannot delete resource errors of type [" + resourceErrorType + "]. User [" + subject.getName() + "] does not have permission to operate on resource ID [" + resourceId + "]."); }//from w w w. j av a 2 s. c o m Query q = entityManager.createQuery( "DELETE FROM ResourceError e WHERE e.resource.id = :resourceId AND e.errorType = :type"); q.setParameter("resourceId", resourceId); q.setParameter("type", resourceErrorType); int updates = q.executeUpdate(); return updates; }
From source file:org.rhq.enterprise.server.content.ContentSourceManagerBean.java
/**
 * Merges the "new distributions" portion of a distribution sync report:
 * creates each reported distribution, maps it to the report's repo, and
 * persists its file entries. Failures on one distribution are logged and
 * recorded in the results but do not stop the remaining ones.
 * Runs in its own transaction (REQUIRES_NEW).
 *
 * @param contentSource the content source being synced (not read directly here)
 * @param report        sync report whose getDistributions() entries are created
 * @param syncResults   running results record; updated only when an add fails
 * @param progress      human-readable progress log, mirrored into syncResults on error
 * @return the latest copy of the sync results
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public RepoSyncResults _mergeDistributionSyncReportADD(ContentSource contentSource, DistributionSyncReport report,
    RepoSyncResults syncResults, StringBuilder progress) {
    DistributionManagerLocal distManager = LookupUtil.getDistributionManagerLocal();
    RepoManagerLocal repoManager = LookupUtil.getRepoManagerLocal();
    Subject overlord = LookupUtil.getSubjectManager().getOverlord();

    List<DistributionDetails> newDetails = report.getDistributions();
    for (DistributionDetails detail : newDetails) {
        try {
            log.debug("Attempting to create new distribution based off of: " + detail);
            DistributionType distType = distManager.getDistributionTypeByName(detail.getDistributionType());
            Distribution newDist = distManager.createDistribution(overlord, detail.getLabel(),
                detail.getDistributionPath(), distType);
            log.debug("Created new distribution: " + newDist);

            // Link the new distribution to the repo named in the report.
            Repo repo = repoManager.getRepo(overlord, report.getRepoId());
            RepoDistribution repoDist = new RepoDistribution(repo, newDist);
            log.debug("Created new mapping of RepoDistribution repoId = " + repo.getId() + ", distId = "
                + newDist.getId());
            entityManager.persist(repoDist);

            // Persist each file belonging to the distribution.
            List<DistributionFileDetails> files = detail.getFiles();
            for (DistributionFileDetails f : files) {
                log.debug("Creating DistributionFile for: " + f);
                DistributionFile df = new DistributionFile(newDist, f.getRelativeFilename(), f.getMd5sum());
                df.setLastModified(f.getLastModified());
                entityManager.persist(df);
                // NOTE(review): flushing per file looks intentional (immediate
                // constraint feedback inside the try) — confirm before changing.
                entityManager.flush();
            }
        } catch (DistributionException e) {
            // Record the failure in the results, then continue with the next distribution.
            progress.append("Caught exception when trying to add: " + detail.getLabel() + "\n");
            progress.append("Error is: " + e.getMessage());
            syncResults.setResults(progress.toString());
            syncResults = repoManager.mergeRepoSyncResults(syncResults);
            log.error(e);
        }
    }
    return syncResults;
}