List of usage examples for the javax.ejb.EJBException constructor
public EJBException(Exception ex)
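This constructor wraps a checked exception inside an unchecked EJBException, so the EJB container treats the failure as a system exception and, for container-managed transactions, marks the current transaction for rollback. Before the project examples below, here is a minimal sketch of the pattern; the bean name ReportExporterBean and the temp-file prefix are hypothetical and used only for illustration.

    // Minimal sketch (hypothetical bean): a checked IOException is rethrown
    // as EJBException so the container sees a system exception and rolls back
    // the container-managed transaction.
    import java.io.File;
    import java.io.IOException;
    import javax.ejb.EJBException;
    import javax.ejb.Stateless;

    @Stateless
    public class ReportExporterBean {

        public File exportReport() {
            try {
                // Any checked exception from the export step is wrapped
                // via EJBException(Exception ex).
                return File.createTempFile("report", ".zip");
            } catch (IOException e) {
                throw new EJBException(e);
            }
        }
    }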
From source file: edu.harvard.iq.dvn.core.web.ExploreDataPage.java

public File getZipFileExport() {
    File zipOutputFile;
    ZipOutputStream zout;
    String exportTimestamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date());
    try {
        zipOutputFile = File.createTempFile("dataDownload", "zip");
        zout = new ZipOutputStream((OutputStream) new FileOutputStream(zipOutputFile));
        if (includeCSV) {
            File csvFile = File.createTempFile("dataDownload_", "csv");
            //writeFile(csvFile, csvString.toString().toCharArray(), csvString.length());
            writeFile(csvFile, csvString, csvString.length());
            addZipEntry(zout, csvFile.getAbsolutePath(), "csvData_" + exportTimestamp + ".txt");
            if (csvFile != null && csvFile.exists()) {
                if (!FileUtil.keepTempFiles("core.web.ExploreDataPage")) {
                    csvFile.delete();
                }
            }
        }
        if (includeImage || includePdf) {
            File imageUrlFile = includeImage ? File.createTempFile("dataDownload", "png") : null;
            File imagePdfFile = includePdf ? File.createTempFile("dataDownload", "pdf") : null;
            writeImageFile(imageUrlFile, imagePdfFile);
            if (includeImage) {
                addZipEntry(zout, imageUrlFile.getAbsolutePath(), "imageGraph_" + exportTimestamp + ".png");
            }
            if (includePdf) {
                addZipEntry(zout, imagePdfFile.getAbsolutePath(), "imagePdf_" + exportTimestamp + ".pdf");
            }
            if (imageUrlFile != null && imageUrlFile.exists()) {
                if (!FileUtil.keepTempFiles("core.web.ExploreDataPage")) {
                    imageUrlFile.delete();
                }
            }
            if (imagePdfFile != null && imagePdfFile.exists()) {
                if (!FileUtil.keepTempFiles("core.web.ExploreDataPage")) {
                    imagePdfFile.delete();
                }
            }
        }
        if (includeExcel) {
            File excelDataFile = File.createTempFile("dataDownload", "xls");
            writeExcelFile(excelDataFile);
            addZipEntry(zout, excelDataFile.getAbsolutePath(), "excelData_" + exportTimestamp + ".xls");
            if (excelDataFile != null && excelDataFile.exists()) {
                if (!FileUtil.keepTempFiles("core.web.ExploreDataPage")) {
                    excelDataFile.delete();
                }
            }
        }
        zout.close();
    } catch (IOException e) {
        throw new EJBException(e);
    } catch (Exception ie) {
        zipOutputFile = null;
    }
    // TODO: make sure the zip file itself gets deleted as well.
    // - L.A.
    return zipOutputFile;
}
From source file: edu.harvard.iq.dvn.core.study.StudyServiceBean.java

private Study doImportStudy(File xmlFile, Long harvestFormatTypeId, Long vdcId, Long userId,
        String harvestIdentifier, List<StudyFileEditBean> filesToUpload) {
    logger.info("Begin doImportStudy");
    Study study = null;
    StudyVersion studyVersion = null;
    boolean newStudy = true;
    VDC vdc = em.find(VDC.class, vdcId);

    // Note on the logic below:
    // It IS possible for the method to be called on a Harvested study, but without the
    // harvestIdentifier supplied! This happens when harvesting from Nesstar sources
    // (see more comments on Nesstar harvesting below).
    // --L.A.
    boolean isHarvest = (harvestIdentifier != null || (vdc.getHarvestingDataverse() != null));
    boolean isNesstarHarvest = ((vdc.getHarvestingDataverse() != null) && vdc.getHarvestingDataverse().isNesstar());

    Map<String, String> globalIdComponents = null; // used if this is an update of a harvested study
    VDCUser creator = em.find(VDCUser.class, userId);

    // Step 1: determine format and transform if necessary
    File ddiFile = xmlFile;
    boolean fileTransformed = false;
    HarvestFormatType hft = em.find(HarvestFormatType.class, harvestFormatTypeId);
    if (hft.getStylesheetFileName() != null) {
        ddiFile = transformToDDI(xmlFile, hft.getStylesheetFileName());
        fileTransformed = true;
    }

    // Step 2a: if harvested, check if exists
    if (isHarvest) {
        if (harvestIdentifier == null) {
            // When harvesting from Nesstar sources, no unique identifiers
            // are provided on the protocol level. Instead, we have to
            // check the actual DDI metadata and see if it provides
            // an ID we could use as the harvestidentifier.
            // -- L.A.
            if (isNesstarHarvest) {
                Study tmpStudy = new Study(vdc, creator, StudyVersion.VersionState.RELEASED);
                StudyVersion tmpStudyVersion = tmpStudy.getLatestVersion();
                String tmpStudyId = null;

                // We are doing an extra run of ddiService.mapDDI in order
                // to achieve this; note that mapDDI is a fairly cheap
                // operation, since it doesn't parse the data portion of the
                // ddi.

                // an experimental hack:
                // it appears that some Nesstar-provided DDIs are failing to
                // import because of the illegal "xml:lang=..." attributes in
                // them; and some fail because of the DTD-style schema headers
                // (<?DOCTYPE codeBook ...) Let's try to just strip them out -
                // and see if that helps:
                BufferedReader rd = null;
                PrintWriter ddiProcessedOut = null;
                File ddiFileProcessed = null;

                try {
                    ddiFileProcessed = File.createTempFile("NesstarDdiProcessed.", ".xml");
                    FileOutputStream ddiProcessedStream = new FileOutputStream(ddiFileProcessed);
                    ddiProcessedOut = new PrintWriter(ddiProcessedStream, true);
                    rd = new BufferedReader(new InputStreamReader(new FileInputStream(ddiFile)));

                    String line = null;
                    while ((line = rd.readLine()) != null) {
                        if (line.matches("^<.DOCTYPE codeBook.*") || line.equals("")) {
                            // skip this line
                        } else {
                            line = line.replaceAll(" xml:lang=\"[^\"]*\"", "");
                            ddiProcessedOut.println(line);
                        }
                    }
                } catch (IOException ex) {
                    throw new EJBException("Failed to process and parse Nesstar ddi.");
                } finally {
                    if (rd != null) {
                        try {
                            rd.close();
                        } catch (Exception ex) {
                        }
                    }
                    ddiFile.delete();
                    if (ddiProcessedOut != null) {
                        ddiProcessedOut.close();
                    }
                }

                ddiFile = ddiFileProcessed;
                ddiService.mapDDI(ddiFile, tmpStudyVersion, true);

                if (tmpStudyVersion.getMetadata().getStudyOtherIds().size() > 0) {
                    tmpStudyId = tmpStudyVersion.getMetadata().getStudyOtherIds().get(0).getOtherId();
                    // We may need to go through the list of "other Ids" (instead of
                    // just grabbing the first one); and apply some logic to get the
                    // best one. -- L.A.
                }
                if (tmpStudyId == null || !isValidStudyIdString(tmpStudyId)) {
                    throw new EJBException("No suitable ID was found in the Nesstar-harvested metadata.");
                }
                harvestIdentifier = tmpStudyId;
            } else {
                throw new EJBException("No Identifier available for a harvested (non-Nesstar) study.");
            }
        }

        study = getStudyByHarvestInfo(vdc, harvestIdentifier);
        if (study != null) {
            // if (!study.isIsHarvested()) {
            //     // This study actually belongs to the local DVN, so don't continue with harvest
            //     // TODO: this check is probably no longer needed, now that we get study by harvestIdentifier
            //     throw new EJBException("This study originated in the local DVN - we don't need to harvest it.");
            // }
            newStudy = false;

            // store old global ID components
            globalIdComponents = new HashMap<String, String>();
            globalIdComponents.put("globalId", study.getGlobalId());
            globalIdComponents.put("protocol", study.getProtocol());
            globalIdComponents.put("authority", study.getAuthority());
            globalIdComponents.put("studyId", study.getStudyId());

            studyVersion = study.getLatestVersion();
            resetStudyForHarvesting(studyVersion);
        }
    }

    // Step 2b: initialize new Study
    if (study == null) {
        VersionState newVersionState = isHarvest ? StudyVersion.VersionState.RELEASED
                : StudyVersion.VersionState.DRAFT;
        study = new Study(vdc, creator, newVersionState);
        studyVersion = study.getLatestVersion();
        // if not a harvest, set initial date of deposit (this may get overridden during map ddi step)
        if (!isHarvest) {
            studyVersion.getMetadata().setDateOfDeposit(new SimpleDateFormat("yyyy-MM-dd").format(new Date()));
        }
    }
    em.persist(study);

    // Step 3: map the ddi
    Map dataFilesMap = null;
    if (isNesstarHarvest) {
        dataFilesMap = ddiService.mapDDI(ddiFile, studyVersion, true);
    } else {
        dataFilesMap = ddiService.mapDDI(ddiFile, studyVersion, false);
    }
    logger.info("doImportStudy: ddi mapped");

    // Step 4: post mapping processing
    if (isHarvest) {
        study.setIsHarvested(true);
        study.setHarvestIdentifier(harvestIdentifier);

        // for Nesstar studies, we are also cooking a link pointing to
        // the location of this study on their web server, to provide as a
        // "holdings"/"original archive" in the DVN Study UI.
        if (isNesstarHarvest) {
            String nServerURL = vdc.getHarvestingDataverse().getServerUrl();
            // chop any trailing slashes in the server URL - or they will result
            // in multiple slashes in the final URL pointing to the study
            // on server of origin; Nesstar doesn't like it, apparently.
            nServerURL = nServerURL.replaceAll("/*$", "");

            String nServerURLencoded = nServerURL;
            nServerURLencoded.replace(":", "%3A");
            nServerURLencoded.replace("/", "%2F");

            String nWebviewLocation = nServerURL + "/webview/?mode=documentation&submode=abstract&studydoc="
                    + nServerURLencoded + "%2Fobj%2FfStudy%2F" + harvestIdentifier + "&top=yes";

            studyVersion.getMetadata().setHarvestHoldings(nWebviewLocation);
        }
    } else {
        // clear fields related to harvesting
        studyVersion.getMetadata().setHarvestHoldings(null);
        studyVersion.getMetadata().setHarvestDVTermsOfUse(null);
        studyVersion.getMetadata().setHarvestDVNTermsOfUse(null);
    }

    //em.flush();
    // This flush statement may in fact produce an error condition,
    // since some of our StudyFiles may not have been persisted yet.
    // We are going to take care of that in the next step; but we
    // shouldn't be trying to flush until we do. So I'm commenting it
    // out. -- L.A.

    // step 5: persist files from ddi (since studyFile is not persisted when the new FileMetadata objects are created - since
    // the studyFile often already exists - we need to manually persist the study files here)
    for (FileMetadata fmd : studyVersion.getFileMetadatas()) {
        em.persist(fmd.getStudyFile());
    }

    if (!isNesstarHarvest) {
        Map variablesMap = ddiService.reMapDDI(ddiFile, studyVersion, dataFilesMap);
        logger.info("doImportStudy: ddi re-mapped");
        logger.info("reading the variables map;");
        if (variablesMap != null) {
            for (Object mapKey : variablesMap.keySet()) {
                List<DataVariable> variablesMapEntry = (List<DataVariable>) variablesMap.get(mapKey);
                Long fileId = (Long) mapKey;
                if (variablesMapEntry != null) {
                    logger.info("found non-empty map entry for datatable id " + fileId);
                    DataVariable dv = variablesMapEntry.get(0);
                    DataTable tmpDt = dv.getDataTable();
                    if (tmpDt != null) {
                        tmpDt.setDataVariables(variablesMapEntry);
                        logger.info("added variables to datatable " + tmpDt.getId());
                    } else {
                        logger.info("first variable on the map for id " + tmpDt.getId()
                                + " is referencing NULL datatable! WTF?");
                    }
                } else {
                    logger.info("found empty map entry for datatable id " + fileId);
                }
            }
        }
    }

    saveStudyVersion(studyVersion, userId);

    if (isHarvest) {
        studyVersion.setReleaseTime(new Date());
    }

    boolean registerHandle = determineId(studyVersion, vdc, globalIdComponents);

    if (newStudy && !studyService.isUniqueStudyId(study.getStudyId(), study.getProtocol(), study.getAuthority())) {
        throw new EJBException(
                "A study with this globalId already exists (likely cause: the study was previously harvested into a different dataverse).");
    }

    // step 5: upload files
    if (filesToUpload != null) {
        studyFileService.addFiles(studyVersion, filesToUpload, creator);
    }

    // step 6: store the original study files
    copyXMLFile(study, ddiFile, "original_imported_study.xml");
    if (fileTransformed) {
        copyXMLFile(study, xmlFile, "original_imported_study_pretransform.xml");
    }

    // step 7: register if necessary
    if (registerHandle && vdcNetworkService.find().isHandleRegistration()) {
        if (study.getProtocol().equals("hdl") || isHarvest) {
            study.setProtocol("hdl");
            String handle = study.getAuthority() + "/" + study.getStudyId();
            gnrsService.createHandle(handle);
        }
        if (study.getProtocol().equals("doi")) {
            doiEZIdServiceLocal.createIdentifier(studyVersion.getStudy());
        }
    }

    logger.info("completed doImportStudy() returning study" + study.getGlobalId());
    return study;
}
From source file: edu.harvard.iq.dvn.core.web.ExploreDataPage.java

private void writeFile(File fileIn, String dataIn, int bufSize) {
    ByteBuffer dataByteBuffer = ByteBuffer.wrap(dataIn.getBytes());
    try {
        FileOutputStream outputFile = null;
        outputFile = new FileOutputStream(fileIn, true);
        WritableByteChannel outChannel = outputFile.getChannel();
        try {
            outChannel.write(dataByteBuffer);
            outputFile.close();
        } catch (IOException e) {
            e.printStackTrace(System.err);
        }
    } catch (IOException e) {
        throw new EJBException(e);
    }
}
From source file: edu.harvard.iq.dvn.core.web.ExploreDataPage.java

private void writeFile(File fileIn, char[] charArrayIn, int bufSize) {
    try {
        FileOutputStream outputFile = null;
        outputFile = new FileOutputStream(fileIn, true);
        FileChannel outChannel = outputFile.getChannel();
        ByteBuffer buf = ByteBuffer.allocate((bufSize * 2) + 1000);
        for (char ch : charArrayIn) {
            buf.putChar(ch);
        }
        buf.flip();
        try {
            outChannel.write(buf);
            outputFile.close();
        } catch (IOException e) {
            e.printStackTrace(System.err);
        }
    } catch (IOException e) {
        throw new EJBException(e);
    }
}
From source file: org.gss_project.gss.server.ejb.ExternalAPIBean.java

@Override
@TransactionAttribute(TransactionAttributeType.NEVER)
public String rebuildSolrIndex() {
    try {
        CommonsHttpSolrServer solr = new CommonsHttpSolrServer(getConfiguration().getString("solr.url"));
        solr.deleteByQuery("*:*");
        solr.commit();
        logger.info("Deleted everything in solr");

        List<Long> fileIds = dao.getAllFileIds();
        logger.info("Total of " + fileIds.size() + " will be indexed");
        int i = 0;
        for (Long id : fileIds) {
            try {
                postFileToSolr(solr, id);
            } catch (ObjectNotFoundException e) {
                logger.error("Indexing of file id " + id + " failed.", e);
            }
            i++;
            if (i % 10 == 0) {
                solr.commit();
                logger.info("Sent commit to solr at file " + i);
            }
        }
        solr.optimize();
        solr.commit();
        logger.info("Finished indexing of " + i + " files");
        return "Finished indexing of " + i + " files";
    } catch (IOException e) {
        throw new EJBException(e);
    } catch (SolrServerException e) {
        throw new EJBException(e);
    }
}
From source file: org.gss_project.gss.server.ejb.ExternalAPIBean.java

@Override
@TransactionAttribute(TransactionAttributeType.NEVER)
public String refreshSolrIndex() {
    try {
        CommonsHttpSolrServer solr = new CommonsHttpSolrServer(getConfiguration().getString("solr.url"));
        List<Long> fileIds = dao.getAllFileIds();
        logger.info("Total of " + fileIds.size() + " will be checked");
        int i = 0;
        for (Long id : fileIds) {
            if (!fileIsInSolr(solr, id)) {
                try {
                    postFileToSolr(solr, id);
                } catch (ObjectNotFoundException e) {
                    logger.error("Indexing of file id " + id + " failed.", e);
                }
            }
            i++;
            if (i % 10 == 0) {
                solr.commit();
                logger.info("Sent commit to solr at file " + i);
            }
        }
        solr.optimize();
        solr.commit();
        logger.info("Finished indexing of " + i + " files");
        return "Finished indexing of " + i + " files";
    } catch (IOException e) {
        throw new EJBException(e);
    } catch (SolrServerException e) {
        throw new EJBException(e);
    }
}
From source file: org.ejbca.core.ejb.ca.caadmin.CAAdminSessionBean.java

@Override
public ResponseMessage processRequest(AuthenticationToken admin, CAInfo cainfo, RequestMessage requestmessage)
        throws CAExistsException, CADoesntExistsException, AuthorizationDeniedException,
        CryptoTokenOfflineException {
    final CA ca;
    Collection<Certificate> certchain = null;
    CertificateResponseMessage returnval = null;
    int caid = cainfo.getCAId();
    // check authorization
    if (!accessSession.isAuthorizedNoLogging(admin, StandardRules.ROLE_ROOT.resource())) {
        String msg = intres.getLocalizedMessage("caadmin.notauthorizedtocertresp", cainfo.getName());
        Map<String, Object> details = new LinkedHashMap<String, Object>();
        details.put("msg", msg);
        auditSession.log(EventTypes.ACCESS_CONTROL, EventStatus.FAILURE, ModuleTypes.CA, ServiceTypes.CORE,
                admin.toString(), String.valueOf(caid), null, null, details);
        throw new AuthorizationDeniedException(msg);
    }

    // Check that CA doesn't already exist
    CAData oldcadata = null;
    if (caid >= 0 && caid <= CAInfo.SPECIALCAIDBORDER) {
        String msg = intres.getLocalizedMessage("caadmin.errorcaexists", cainfo.getName());
        log.info(msg);
        throw new CAExistsException(msg);
    }
    oldcadata = CAData.findById(entityManager, Integer.valueOf(caid));
    // If it did not exist with a certain DN (caid) perhaps a CA with the
    // same CA name exists?
    if (oldcadata == null) {
        oldcadata = CAData.findByName(entityManager, cainfo.getName());
    }

    boolean processinternalca = false;
    if (oldcadata != null) {
        // If we find an already existing CA, there is a good chance that we
        // should throw an exception saying that the CA already exists.
        // However, if we have the same DN, and give the same name, we
        // simply assume that the admin actually wants
        // to treat an internal CA as an external CA, perhaps there are
        // different HSMs connected for root CA and sub CA?
        if (log.isDebugEnabled()) {
            log.debug("Old castatus=" + oldcadata.getStatus() + ", oldcaid=" + oldcadata.getCaId().intValue()
                    + ", caid=" + cainfo.getCAId() + ", oldcaname=" + oldcadata.getName() + ", name="
                    + cainfo.getName());
        }
        if (((oldcadata.getStatus() == CAConstants.CA_WAITING_CERTIFICATE_RESPONSE)
                || (oldcadata.getStatus() == CAConstants.CA_ACTIVE)
                || (oldcadata.getStatus() == CAConstants.CA_EXTERNAL))
                && (oldcadata.getCaId().intValue() == cainfo.getCAId())
                && (oldcadata.getName().equals(cainfo.getName()))) {
            // Yes, we have all the same DN, CAName and the old CA is either
            // waiting for a certificate response or is active
            // (new CA or active CA that we want to renew)
            // or it is an external CA that we want to issue a new
            // certificate to
            processinternalca = true;
            if (oldcadata.getStatus() == CAConstants.CA_EXTERNAL) {
                log.debug("Renewing an external CA.");
            } else {
                log.debug("Processing an internal CA, as an external.");
            }
        } else {
            String msg = intres.getLocalizedMessage("caadmin.errorcaexists", cainfo.getName());
            log.info(msg);
            throw new CAExistsException(msg);
        }
    }

    // get signing CA
    if (cainfo.getSignedBy() > CAInfo.SPECIALCAIDBORDER || cainfo.getSignedBy() < 0) {
        try {
            final CA signca = caSession.getCAForEdit(admin, Integer.valueOf(cainfo.getSignedBy()));
            try {
                // Check that the signer is valid
                assertSignerValidity(admin, signca);

                // Get public key from request
                PublicKey publickey = requestmessage.getRequestPublicKey();

                // Create cacertificate
                Certificate cacertificate = null;
                EndEntityInformation cadata = makeEndEntityInformation(cainfo);
                // We can pass the PKCS10 request message as extra
                // parameters
                if (requestmessage instanceof PKCS10RequestMessage) {
                    ExtendedInformation extInfo = new ExtendedInformation();
                    PKCS10CertificationRequest pkcs10 = ((PKCS10RequestMessage) requestmessage)
                            .getCertificationRequest();
                    extInfo.setCustomData(ExtendedInformationFields.CUSTOM_PKCS10,
                            new String(Base64.encode(pkcs10.getEncoded())));
                    cadata.setExtendedinformation(extInfo);
                }
                CertificateProfile certprofile = certificateProfileSession
                        .getCertificateProfile(cainfo.getCertificateProfileId());
                String sequence = null;
                byte[] ki = requestmessage.getRequestKeyInfo();
                if ((ki != null) && (ki.length > 0)) {
                    sequence = new String(ki);
                }
                final CryptoToken signCryptoToken = cryptoTokenSession
                        .getCryptoToken(signca.getCAToken().getCryptoTokenId());
                cacertificate = signca.generateCertificate(signCryptoToken, cadata, publickey, -1, null,
                        cainfo.getValidity(), certprofile, sequence);
                // X509ResponseMessage works for both X509 CAs and CVC CAs, should really be called CertificateResponsMessage
                returnval = new X509ResponseMessage();
                returnval.setCertificate(cacertificate);

                // Build Certificate Chain
                Collection<Certificate> rootcachain = signca.getCertificateChain();
                certchain = new ArrayList<Certificate>();
                certchain.add(cacertificate);
                certchain.addAll(rootcachain);

                if (!processinternalca) {
                    // If this is an internal CA, we don't create it and set
                    // a NULL token, since the CA is already created
                    if (cainfo instanceof X509CAInfo) {
                        log.info("Creating a X509 CA (process request)");
                        ca = new X509CA((X509CAInfo) cainfo);
                    } else if (cainfo instanceof CVCCAInfo) {
                        // CVC CA is a special type of CA for EAC electronic
                        // passports
                        log.info("Creating a CVC CA (process request)");
                        CVCCAInfo cvccainfo = (CVCCAInfo) cainfo;
                        // Create CVCCA
                        ca = CvcCA.getInstance(cvccainfo);
                    } else {
                        ca = null;
                    }
                    ca.setCertificateChain(certchain);
                    CAToken token = new CAToken(ca.getCAId(), new NullCryptoToken().getProperties());
                    ca.setCAToken(token);

                    // set status to active
                    entityManager.persist(
                            new CAData(cainfo.getSubjectDN(), cainfo.getName(), CAConstants.CA_EXTERNAL, ca));
                    // cadatahome.create(cainfo.getSubjectDN(), cainfo.getName(), SecConst.CA_EXTERNAL, ca);
                } else {
                    if (oldcadata.getStatus() == CAConstants.CA_EXTERNAL) {
                        // If it is an external CA we will not import the
                        // certificate later on here, so we want to
                        // update the CA in this instance with the new
                        // certificate so it is visible
                        ca = caSession.getCAForEdit(admin, oldcadata.getCaId());//getCAFromDatabase(oldcadata.getCaId());
                        ca.setCertificateChain(certchain);
                        if (log.isDebugEnabled()) {
                            log.debug("Storing new certificate chain for external CA " + cainfo.getName()
                                    + ", CA token type: " + ca.getCAToken().getClass().getName());
                        }
                        caSession.editCA(admin, ca, true);
                    } else {
                        // If it is an internal CA so we are "simulating"
                        // signing a real external CA we don't do anything
                        // because that CA is waiting to import a
                        // certificate
                        if (log.isDebugEnabled()) {
                            log.debug("Not storing new certificate chain or updating CA for internal CA, simulating external: "
                                    + cainfo.getName());
                        }
                        ca = null;
                    }
                }
                // Publish CA certificates.
                publishCACertificate(admin, certchain, signca.getCRLPublishers(),
                        ca != null ? ca.getSubjectDN() : null);
                // External CAs will not have any CRLs in this system, so we don't have to try to publish any CRLs
            } catch (CryptoTokenOfflineException e) {
                String msg = intres.getLocalizedMessage("caadmin.errorprocess", cainfo.getName());
                log.error(msg, e);
                throw e;
            }
        } catch (Exception e) {
            String msg = intres.getLocalizedMessage("caadmin.errorprocess", cainfo.getName());
            log.error(msg, e);
            throw new EJBException(e);
        }
    }

    if (certchain != null) {
        String msg = intres.getLocalizedMessage("caadmin.processedca", cainfo.getName());
        Map<String, Object> details = new LinkedHashMap<String, Object>();
        details.put("msg", msg);
        auditSession.log(EventTypes.CA_EDITING, EventStatus.SUCCESS, ModuleTypes.CA, ServiceTypes.CORE,
                admin.toString(), String.valueOf(caid), null, null, details);
    } else {
        String msg = intres.getLocalizedMessage("caadmin.errorprocess", cainfo.getName());
        Map<String, Object> details = new LinkedHashMap<String, Object>();
        details.put("msg", msg);
        auditSession.log(EventTypes.CA_EDITING, EventStatus.FAILURE, ModuleTypes.CA, ServiceTypes.CORE,
                admin.toString(), String.valueOf(caid), null, null, details);
    }
    return returnval;
}
From source file: edu.harvard.iq.dvn.core.study.StudyServiceBean.java

private boolean determineId(StudyVersion sv, VDC vdc, Map<String, String> globalIdComponents) {
    Study study = sv.getStudy();

    VDCNetwork vdcNetwork = vdcNetworkService.find();
    String protocol = vdcNetwork.getProtocol();
    String authority = vdcNetwork.getAuthority();
    String globalId = null;
    if (!StringUtil.isEmpty(study.getStudyId())) {
        globalId = study.getGlobalId();
    }

    if (vdc.getHarvestingDataverse() != null) {
        if (vdc.getHarvestingDataverse().getHandlePrefix() != null) {
            // FOR THIS HARVESTED DATAVERSE, WE TAKE CARE OF HANDLE GENERATION
            if (globalId != null) {
                throw new EJBException("DDI should not specify a handle, but does.");
            }
            if (globalIdComponents != null) {
                study.setProtocol(globalIdComponents.get("protocol"));
                study.setAuthority(globalIdComponents.get("authority"));
                study.setStudyId(globalIdComponents.get("studyId"));
            } else {
                boolean generateRandom = vdc.getHarvestingDataverse().isGenerateRandomIds();
                authority = vdc.getHarvestingDataverse().getHandlePrefix().getPrefix();
                generateHandle(sv.getMetadata(), protocol, authority, generateRandom);
                return true;
            }
        } else {
            if (globalId == null) {
                // FOR THIS HARVESTED DATAVERSE, THE DDI SHOULD SPECIFY THE HANDLE
                throw new EJBException("DDI should specify a handle, but does not.");
            } else if (globalIdComponents != null && !globalId.equals(globalIdComponents.get("globalId"))) {
                throw new EJBException(
                        "DDI specifies a handle that is different from current handle for this study.");
            }
        }
    } else {
        // imported study
        if (globalId == null) {
            generateHandle(sv.getMetadata(), protocol, authority, true);
            return true;
        }
    }
    return false;
}
From source file: org.ejbca.core.ejb.ra.UserAdminSessionBean.java

private void print(Admin admin, EndEntityProfile profile, UserDataVO userdata) {
    try {
        if (profile.getUsePrinting()) {
            String[] pINs = new String[1];
            pINs[0] = userdata.getPassword();
            PrinterManager.print(profile.getPrinterName(), profile.getPrinterSVGFileName(),
                    profile.getPrinterSVGData(), profile.getPrintedCopies(), 0, userdata, pINs, new String[0],
                    "", "", "");
        }
    } catch (PrinterException e) {
        String msg = intres.getLocalizedMessage("ra.errorprint", userdata.getUsername(), e.getMessage());
        log.error(msg, e);
        try {
            logSession.log(admin, userdata.getCAId(), LogConstants.MODULE_RA, new Date(),
                    userdata.getUsername(), null, LogConstants.EVENT_ERROR_NOTIFICATION, msg);
        } catch (Exception f) {
            throw new EJBException(f);
        }
    }
}
From source file: edu.harvard.iq.dvn.core.study.StudyServiceBean.java

private void generateHandle(Metadata metadata, String protocol, String authority, boolean generateRandom) {
    String studyId = null;

    if (generateRandom) {
        do {
            studyId = RandomStringUtils.randomAlphanumeric(5);
        } while (!isUniqueStudyId(studyId, protocol, authority));
    } else {
        if (metadata.getStudyOtherIds().size() > 0) {
            studyId = metadata.getStudyOtherIds().get(0).getOtherId();
            if (!isValidStudyIdString(studyId)) {
                throw new EJBException("The Other ID (from DDI) was invalid.");
            }
        } else {
            throw new EJBException("No Other ID (from DDI) was available for generating a handle.");
        }
    }

    Study study = metadata.getStudyVersion().getStudy();
    study.setProtocol(protocol);
    study.setAuthority(authority);
    study.setStudyId(studyId);
}