List of usage examples for org.dom4j Element add
void add(Namespace namespace);
Adds the Namespace to this element.
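Before the project-specific examples below, a minimal self-contained sketch of what add(Namespace) does (the element name and namespace URI here are illustrative, not taken from any of the quoted sources):

import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.dom4j.Namespace;

public class AddNamespaceExample {
    public static void main(String[] args) {
        Document doc = DocumentHelper.createDocument();
        Element root = doc.addElement("root");

        // add(Namespace) declares the prefix/URI pair on the element;
        // it does not rename the element or any of its children.
        root.add(new Namespace("ex", "http://example.com/ns"));

        // Serializes as: <root xmlns:ex="http://example.com/ns"/>
        System.out.println(root.asXML());
    }
}

Once declared, the namespace can be used to build qualified names for descendants, for example via QName, as several of the examples below do.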
From source file: edu.ucsd.library.dams.api.DAMSAPIServlet.java
private Element createRdfRootElement() {
    // setup document rdf root element
    Document doc = new DocumentFactory().createDocument();
    Element rdf = addElement(doc, "RDF", new Namespace("rdf", rdfNS));
    doc.setRootElement(rdf);
    rdf.add(new Namespace("mads", madsNS));
    rdf.add(new Namespace("rdf", rdfNS));
    rdf.add(new Namespace("dams", prNS));
    return rdf;
}
From source file: edu.ucsd.library.xdre.web.CollectionOperationController.java
public static String handleProcesses(Map<String, String[]> paramsMap, HttpSession session) throws Exception { String message = ""; String returnMessage = ""; DAMSClient damsClient = null; String collectionId = getParameter(paramsMap, "category"); boolean[] operations = new boolean[20]; operations[0] = getParameter(paramsMap, "validateFileCount") != null; operations[1] = getParameter(paramsMap, "validateChecksums") != null; operations[2] = getParameter(paramsMap, "rdfImport") != null; operations[3] = getParameter(paramsMap, "createDerivatives") != null; operations[4] = getParameter(paramsMap, "collectionRelease") != null; operations[5] = getParameter(paramsMap, "externalImport") != null; operations[6] = getParameter(paramsMap, "marcModsImport") != null || getParameter(paramsMap, "excelImport") != null; operations[7] = getParameter(paramsMap, "luceneIndex") != null || getParameter(paramsMap, "solrDump") != null || getParameter(paramsMap, "solrRecordsDump") != null; operations[8] = getParameter(paramsMap, "sendToCDL") != null; operations[9] = getParameter(paramsMap, "dataConvert") != null; operations[10] = getParameter(paramsMap, "ingest") != null; operations[11] = getParameter(paramsMap, "serialize") != null; operations[12] = getParameter(paramsMap, "tsSyn") != null; operations[13] = getParameter(paramsMap, "createJson") != null; operations[14] = getParameter(paramsMap, "cacheJson") != null; operations[15] = getParameter(paramsMap, "fileUpload") != null; operations[16] = getParameter(paramsMap, "jsonDiffUpdate") != null; operations[17] = getParameter(paramsMap, "validateManifest") != null; operations[18] = getParameter(paramsMap, "metadataExport") != null; operations[19] = getParameter(paramsMap, "jhoveReport") != null; int submissionId = (int) System.currentTimeMillis(); String logLink = "https://" + (Constants.CLUSTER_HOST_NAME.indexOf("localhost") >= 0 ? "localhost:8443" : Constants.CLUSTER_HOST_NAME.indexOf("lib-ingest") >= 0 ?
Constants.CLUSTER_HOST_NAME + ".ucsd.edu:8443" : Constants.CLUSTER_HOST_NAME + ".ucsd.edu") + "/damsmanager/downloadLog.do?submissionId=" + submissionId; String dataLink = ""; String ds = getParameter(paramsMap, "ts"); String dsDest = null; if ((ds == null || (ds = ds.trim()).length() == 0) && !(operations[15] || operations[16])) ds = Constants.DEFAULT_TRIPLESTORE; else if (operations[12]) { dsDest = getParameter(paramsMap, "dsDest"); if (dsDest == null) throw new ServletException("No destination triplestore data source provided..."); else if (ds.equals(dsDest) || !dsDest.startsWith("ts/")) throw new ServletException("Can't sync triplestore from " + ds + " to destination " + dsDest + "."); } String fileStore = getParameter(paramsMap, "fs"); damsClient = new DAMSClient(Constants.DAMS_STORAGE_URL); damsClient.setTripleStore(ds); damsClient.setFileStore(fileStore); damsClient.setUser((String) session.getAttribute("user")); String clientVersion = session.getServletContext().getInitParameter("src-version"); String clientTool = "Custom"; if (message.length() == 0) { int userId = -1; String userIdAttr = (String) session.getAttribute("employeeId"); if (userIdAttr != null && userIdAttr.length() > 0) { try { userId = Integer.parseInt(userIdAttr); } catch (NumberFormatException e) { userId = -1; } } CollectionHandler handler = null; OutputStream fileOut = null; try { boolean successful = true; for (int i = 0; i < operations.length; i++) { handler = null; String exeInfo = ""; if (operations[i]) { String opMessage = "Preparing procedure "; RequestOrganizer.setProgressPercentage(session, 0); message = ""; if (i == 0) { session.setAttribute("status", opMessage + "File Count Validation for FileStore " + fileStore + " ..."); boolean ingestFile = getParameter(paramsMap, "ingestFile") != null; boolean dams4FileRename = getParameter(paramsMap, "dams4FileRename") != null; handler = new FileCountValidaionHandler(damsClient, collectionId); ((FileCountValidaionHandler) handler).setDams4FileRename(dams4FileRename); if (ingestFile) { String[] filesPaths = getParameter(paramsMap, "filesLocation").split(";"); List<String> ingestFiles = new ArrayList<String>(); for (int j = 0; j < filesPaths.length; j++) ingestFiles.add(new File(Constants.DAMS_STAGING + "/" + filesPaths[j]) .getAbsolutePath()); ((FileCountValidaionHandler) handler).setIngestFile(ingestFile); ((FileCountValidaionHandler) handler) .setFilesPaths(ingestFiles.toArray(new String[ingestFiles.size()])); } } else if (i == 1) { session.setAttribute("status", opMessage + "Checksum Validation for FileStore " + fileStore + " ..."); handler = new ChecksumsHandler(damsClient, collectionId, null); } else if (i == 2) { session.setAttribute("status", opMessage + "Importing metadata ..."); String dataFormat = getParameter(paramsMap, "dataFormat"); String importMode = getParameter(paramsMap, "importMode"); handler = new MetadataImportHandler(damsClient, collectionId, getParameter(paramsMap, "data"), dataFormat, importMode); } else if (i == 3) { session.setAttribute("status", opMessage + "Derivatives Creation ..."); boolean derReplace = getParameter(paramsMap, "derReplace") == null ? 
false : true; String reqSize = getParameter(paramsMap, "size"); String[] sizes = null; if (reqSize != null && reqSize.length() > 0) sizes = reqSize.split(","); handler = new DerivativeHandler(damsClient, collectionId, sizes, derReplace); } else if (i == 4) { session.setAttribute("status", opMessage + " release collection " + collectionId + " ..."); String releaseState = getParameter(paramsMap, "releaseState"); String releaseOption = getParameter(paramsMap, "releaseOption"); String collectionToMerge = getParameter(paramsMap, "collectionToMerge"); log.info("Collection release: category =>" + collectionId + ", releaseState => " + releaseState + ", releaseOption => " + releaseOption + ", collectionToMerge => " + collectionToMerge); handler = new CollectionReleaseHandler(damsClient, collectionId, releaseState, releaseOption); ((CollectionReleaseHandler) handler).setCollectionToMerge(collectionToMerge); } else if (i == 5) { session.setAttribute("status", opMessage + "Importing objects ..."); String[] dataPaths = getParameter(paramsMap, "dataPath").split(";"); String[] filesPaths = getParameter(paramsMap, "filesPath").split(";"); String importOption = getParameter(paramsMap, "importOption"); boolean replace = getParameter(paramsMap, "externalImportReplace") != null; List<File> dFiles = new ArrayList<File>(); for (int j = 0; j < dataPaths.length; j++) { String dataPath = dataPaths[j]; if (dataPath != null && (dataPath = dataPath.trim()).length() > 0) { File file = new File(Constants.DAMS_STAGING + "/" + dataPath); CollectionHandler.listFiles(dFiles, file); } } List<String> ingestFiles = new ArrayList<String>(); for (int j = 0; j < filesPaths.length; j++) { if ((filesPaths[j] = filesPaths[j].trim()).length() > 0) ingestFiles.add(new File(Constants.DAMS_STAGING + "/" + filesPaths[j]) .getAbsolutePath()); } String[] excelExts = { "xls", "xlsx" }; List<File> excelFiles = FileUtils.filterFiles(dFiles, excelExts); if (excelFiles.size() > 0) { // Remove the Excel source that need conversion from the file list dFiles.removeAll(excelFiles); // Pre-processing boolean preprocessing = importOption.equalsIgnoreCase("pre-processing"); Element rdfPreview = null; StringBuilder errorMessage = new StringBuilder(); StringBuilder duplicatRecords = new StringBuilder(); List<String> ids = new ArrayList<String>(); if (preprocessing) { Document doc = new DocumentFactory().createDocument(); rdfPreview = TabularRecord.createRdfRoot(doc); } handler = new MetadataImportHandler(damsClient, null); handler.setSubmissionId(submissionId); handler.setSession(session); handler.setUserId(userId); // Directory to hold the converted rdf/xml File tmpDir = new File(Constants.TMP_FILE_DIR + File.separatorChar + "converted"); if (!tmpDir.exists()) tmpDir.mkdir(); // Convert Excel source files to DAMS4 rdf/xml int filesCount = 0; for (File f : excelFiles) { filesCount++; RecordSource src = new ExcelSource(f); for (Record rec = null; (rec = src.nextRecord()) != null;) { String id = rec.recordID(); handler.logMessage("Pre-processing record with ID " + id + " ... 
"); if (ids.indexOf(id) < 0) { ids.add(id); } else { duplicatRecords.append(id + ", "); handler.logError("Found duplicated record with ID " + id + "."); } try { Document doc = rec.toRDFXML(); if (duplicatRecords.length() == 0 && errorMessage.length() == 0) { if (preprocessing) { // preview when there are no error reported rdfPreview.add(rec.toRDFXML().selectSingleNode("//dams:Object") .detach()); } else { File convertedFile = new File(tmpDir.getAbsolutePath(), id.replaceAll("[\\//:.*]+", "") + ".rdf.xml"); try { writeXml(convertedFile, doc.asXML()); } finally { convertedFile.deleteOnExit(); if (dFiles.indexOf(convertedFile) < 0) { dFiles.add(convertedFile); handler.logMessage("Added converted RDF/XML file " + convertedFile.getAbsolutePath()); } } } } } catch (Exception e) { log.warn("Excel Input Stream error", e); errorMessage.append("-" + e.getMessage() + "\n"); handler.logMessage(e.getMessage() + "\n"); } } handler.setProgressPercentage(filesCount * 100 / excelFiles.size()); } if (errorMessage.length() == 0 && duplicatRecords.length() == 0) { if (preprocessing) { File destFile = new File(Constants.TMP_FILE_DIR, "preview-" + submissionId + "-rdf.xml"); writeXml(destFile, rdfPreview.getDocument().asXML()); successful = true; message = "\nPre-processing passed. "; message += "\nThe converted RDF/XML is ready for <a href=\"" + logLink + "&file=" + destFile.getName() + "\">download</a>."; //handler.logMessage(message); handler.release(); handler = null; } else { handler.release(); // Initiate the ingest task for Excel AND/OR RDF/XML files handler = new RDFDAMS4ImportTsHandler(damsClient, dFiles.toArray(new File[dFiles.size()]), importOption); ((RDFDAMS4ImportTsHandler) handler) .setFilesPaths(ingestFiles.toArray(new String[ingestFiles.size()])); ((RDFDAMS4ImportTsHandler) handler).setReplace(replace); } } else { successful = false; message = "\nPre-processing issues found:"; if (duplicatRecords.length() > 0) message += "\nDuplicated records: " + duplicatRecords .substring(0, duplicatRecords.length() - 2).toString(); if (errorMessage.length() > 0) message += "\nOther Errors: \n" + errorMessage.toString(); //handler.logMessage(message); handler.release(); handler = null; } } else { // Ingest for RDF/XML files handler = new RDFDAMS4ImportTsHandler(damsClient, dFiles.toArray(new File[dFiles.size()]), importOption); ((RDFDAMS4ImportTsHandler) handler) .setFilesPaths(ingestFiles.toArray(new String[ingestFiles.size()])); ((RDFDAMS4ImportTsHandler) handler).setReplace(replace); } } else if (i == 6) { session.setAttribute("status", opMessage + "Importing from Standard Input Stream source ..."); log.info(opMessage + "Importing from Standard Input Stream source ..."); String unit = getParameter(paramsMap, "unit"); String source = getParameter(paramsMap, "source"); String bibNumber = getParameter(paramsMap, "bibInput"); String modsXml = getParameter(paramsMap, "modsInput"); String copyrightStatus = getParameter(paramsMap, "copyrightStatus"); String copyrightJurisdiction = getParameter(paramsMap, "countryCode"); String copyrightOwner = getParameter(paramsMap, "copyrightOwner"); String program = getParameter(paramsMap, "program"); String access = getParameter(paramsMap, "accessOverride"); String beginDate = getParameter(paramsMap, "licenseBeginDate"); String endDate = getParameter(paramsMap, "licenseEndDate"); String[] dataPaths = getParameter(paramsMap, "dataPath").split(";"); String[] filesPaths = getParameter(paramsMap, "filesPath").split(";"); String importOption = getParameter(paramsMap, 
"importOption"); List<String> ingestFiles = new ArrayList<String>(); for (int j = 0; j < filesPaths.length; j++) { if ((filesPaths[j] = filesPaths[j].trim()).length() > 0) ingestFiles.add(new File(Constants.DAMS_STAGING + "/" + filesPaths[j]) .getAbsolutePath()); } List<File> dataFiles = new ArrayList<File>(); for (int j = 0; j < dataPaths.length; j++) { String dataPath = dataPaths[j]; if (dataPath != null && (dataPath = dataPath.trim()).length() > 0) { File file = new File(Constants.DAMS_STAGING + "/" + dataPath); CollectionHandler.listFiles(dataFiles, file); } } // initiate the source metadata List<Object> sources = new ArrayList<Object>(); if (source != null && source.equalsIgnoreCase("bib")) { String[] bibs = bibNumber.split(","); for (int j = 0; j < bibs.length; j++) { if (bibs[j] != null && (bibs[j] = bibs[j].trim()).length() > 0) sources.add(bibs[j]); } } else { List<String> filters = new ArrayList<>(); if (getParameter(paramsMap, "excelImport") != null) { // Excel Input Stream source = "excel"; filters.add("xls"); filters.add("xlsx"); } else { // MARC/MODS source filters.add("xml"); } dataFiles = FileUtils.filterFiles(dataFiles, filters.toArray(new String[filters.size()])); sources.addAll(dataFiles); dataFiles.clear(); } // Handling pre-processing request Element rdfPreview = null; StringBuilder duplicatRecords = new StringBuilder(); List<String> ids = new ArrayList<String>(); boolean preprocessing = importOption.equalsIgnoreCase("pre-processing"); boolean ingestWithFiles = importOption.equalsIgnoreCase("metadataAndFiles"); if (preprocessing) { Document doc = new DocumentFactory().createDocument(); rdfPreview = TabularRecord.createRdfRoot(doc); } boolean preSuccessful = true; StringBuilder proMessage = new StringBuilder(); if (source != null && (source.equalsIgnoreCase("bib") || source.equalsIgnoreCase("mods") || source.equalsIgnoreCase("excel"))) { // Initiate the logging handler handler = new MetadataImportHandler(damsClient, null); handler.setSubmissionId(submissionId); handler.setSession(session); handler.setUserId(userId); Map<String, String> collections = new HashMap<String, String>(); if (StringUtils.isNotBlank(collectionId)) { String collType = damsClient.getCollectionType(collectionId); collections.put(collectionId, collType); } for (int j = 0; j < sources.size(); j++) { InputStream in = null; String sourceID = null; Object srcRecord = sources.get(j); sourceID = (srcRecord instanceof File ? ((File) srcRecord).getName() : srcRecord.toString()); if (preprocessing) handler.setStatus("Pre-processing record " + sourceID + " ... "); else handler.setStatus("Processing record " + sourceID + " ... "); RecordSource recordSource = null; InputStreamRecord record = null; try { if (source.equalsIgnoreCase("excel")) { clientTool = "Excel"; // Handling Excel Input Stream records recordSource = new ExcelSource((File) srcRecord); // Report for Excel column name validation List<String> invalidColumns = ((ExcelSource) recordSource) .getInvalidColumns(); if (invalidColumns != null && invalidColumns.size() > 0) { successful = false; preSuccessful = false; proMessage.append("Excel source " + sourceID + " - failed - " + CollectionHandler.damsDateFormat.format(new Date()) + ": \n"); if (invalidColumns != null && invalidColumns.size() > 0) { // Report invalid columns proMessage.append("* Found the following invalid column name" + (invalidColumns.size() > 1 ? 
"s" : "") + ": "); for (int k = 0; k < invalidColumns.size(); k++) { proMessage.append(invalidColumns.get(k)); if (k == invalidColumns.size() - 1) proMessage.append("\n"); else proMessage.append("; "); } } } } else { // Handling AT/Roger records try { if (source.equalsIgnoreCase("bib")) { clientTool = "MARC"; String url = Constants.DAMS_STORAGE_URL.substring(0, Constants.DAMS_STORAGE_URL.indexOf("/dams/")) + "/jollyroger/get?type=bib&mods=true&ns=true&value=" + sourceID; log.info("Getting MARC XML for Roger record " + sourceID + " from URL: " + url); HttpGet req = new HttpGet(url); Document doc = damsClient.getXMLResult(req); modsXml = doc.asXML(); in = new ByteArrayInputStream(modsXml.getBytes("UTF-8")); } else { // METS/MODS XML from staging area clientTool = "AT"; File srcFile = (File) sources.get(j); in = new FileInputStream(srcFile); } File xsl = new File(session.getServletContext() .getRealPath("files/mets2dams.xsl")); recordSource = new XsltSource(xsl, sourceID.replaceAll("\\..*", ""), in); } finally { CollectionHandler.close(in); in = null; } } } catch (Exception e) { e.printStackTrace(); successful = false; preSuccessful = false; String error = e.getMessage() != null ? e.getMessage() : e.getCause() != null ? e.getCause().getMessage() : e.getClass().getName(); handler.setStatus(error); log.error("Error metadata source " + sourceID + ": " + error); proMessage.append(sourceID + " - failed - " + CollectionHandler.damsDateFormat.format(new Date()) + " - " + error); } String id = ""; String info = ""; if (recordSource != null && preSuccessful) { for (Record rec = null; (rec = recordSource.nextRecord()) != null;) { String objTitle = ""; id = rec.recordID(); StringBuilder errorMessage = new StringBuilder(); try { record = new InputStreamRecord(rec, collections, unit, copyrightStatus, copyrightJurisdiction, copyrightOwner, program, access, beginDate, endDate); objTitle = getTitle(record.toRDFXML()); info = "Pre-processing record with ID " + id + " ... "; handler.setStatus(info); log.info(info); if (ids.indexOf(id) < 0) { ids.add(id); } else { duplicatRecords.append(rec + ", "); String error = "Duplicated record with ID " + id; handler.setStatus(error); log.error(info); errorMessage.append("\n* " + error); } // Add master file(s) for the bib/Roger record: a PDF or a TIFF, or a PDF + ZIP List<File> filesToIngest = null; if (source.equalsIgnoreCase("bib") && ingestWithFiles) { filesToIngest = getRogerFiles((String) srcRecord, ingestFiles); // Processing the master file(s) with error report. if (filesToIngest.size() == 0) { errorMessage.append("\n* Roger record " + srcRecord + " has no master file(s) for \"Ingest metadata and files\" option."); } else if (filesToIngest.size() > 2 || (filesToIngest.size() == 2 && !filesToIngest.get(1) .getName().endsWith(".zip"))) { errorMessage .append("\n* Unexpected file(s) for Roger record " + srcRecord + ": "); for (File file : filesToIngest) { errorMessage.append( (filesToIngest.indexOf(file) > 0 ? 
", " : "") + file.getName()); } } else { // Handle the use property for the file(s) Map<String, String> fileUseMap = getFileUse(filesToIngest); record.addFiles(0, filesToIngest, fileUseMap); } } else if (source.equalsIgnoreCase("excel")) { // Report for invalid Excel control values validation List<Map<String, String>> invalidValues = ((ExcelSource) recordSource) .getInvalidValues(); if (invalidValues != null && invalidValues.size() > 0) { // process to retrieve control values errors for the record since it will parse the row for the next record StringBuilder cvErrors = new StringBuilder(); for (int k = 0; k < invalidValues.size(); k++) { Map<String, String> m = invalidValues.get(k); if (m.containsKey(TabularRecord.OBJECT_ID) && m.get(TabularRecord.OBJECT_ID) .equals(String.valueOf(id))) { cvErrors.append( "* Row index " + m.get("row") + " ["); // don't count for the row number and the record id m.remove("row"); m.remove(TabularRecord.OBJECT_ID); int l = 0; for (String key : m.keySet()) { if (l++ > 0) cvErrors.append(" | "); cvErrors.append(key + " => " + m.get(key)); } cvErrors.append("]\n"); } } if (cvErrors.length() > 0) { errorMessage.append("Invalid control value(s)" + " - \n" + cvErrors.toString()); } } } } catch (Exception e) { e.printStackTrace(); info = "Error: " + e.getMessage(); handler.setStatus(info); log.warn(info); errorMessage.append("\n* " + e.getMessage()); } objTitle = StringUtils.isEmpty(objTitle) ? "[Object]" : objTitle; if (errorMessage.length() == 0) { info = objTitle + " - " + id + " - " + " successful - " + CollectionHandler.damsDateFormat.format(new Date()); proMessage.append("\n\n" + info); log.info(info); if (preprocessing) { // Pre-processing with rdf preview rdfPreview.add(record.toRDFXML() .selectSingleNode("//dams:Object").detach()); } else { // Write the converted rdf/xml to file system File tmpDir = new File(Constants.TMP_FILE_DIR + File.separatorChar + "converted"); if (!tmpDir.exists()) tmpDir.mkdir(); File convertedFile = new File(tmpDir.getAbsolutePath(), id.replaceAll("[\\//:.*]+", "") + ".rdf.xml"); try { writeXml(convertedFile, record.toRDFXML().asXML()); } finally { convertedFile.deleteOnExit(); dataFiles.add(convertedFile); } } } else { preSuccessful = false; info = objTitle + " - " + id + " - " + " failed - " + CollectionHandler.damsDateFormat.format(new Date()) + " - " + errorMessage.toString(); proMessage.append("\n\n" + info); log.error(info); } handler.setProgressPercentage(j * 100 / sources.size()); } } } // Logging the result for pre-processing if (preprocessing || !preSuccessful) { message = "\nPre-processing " + (preSuccessful ? "successful" : "failed") + ": \n" + (proMessage.length() == 0 ? 
"" : "\n " + proMessage.toString()); handler.logMessage(message); } handler.release(); handler = null; if (preSuccessful) { // Write the converted RDF/xml for preview if (preprocessing) { File destFile = new File(Constants.TMP_FILE_DIR, "preview-" + submissionId + "-rdf.xml"); writeXml(destFile, rdfPreview.getDocument().asXML()); dataLink = "\nThe converted RDF/XML is ready for <a href=\"" + logLink + "&file=" + destFile.getName() + "\">download</a>.\n"; } else { // Ingest the converted RDF/XML files handler = new RDFDAMS4ImportTsHandler(damsClient, dataFiles.toArray(new File[dataFiles.size()]), importOption); ((RDFDAMS4ImportTsHandler) handler) .setFilesPaths(ingestFiles.toArray(new String[ingestFiles.size()])); ((RDFDAMS4ImportTsHandler) handler).setReplace(true); } } else { successful = false; } } else { successful = false; message += "\nUnknown source type: " + source; } } else if (i == 7) { session.setAttribute("status", opMessage + "SOLR Index ..."); boolean update = getParameter(paramsMap, "indexReplace") != null; if (getParameter(paramsMap, "solrRecordsDump") != null) { // Handle single records submission List<String> items = new ArrayList<String>(); String txtInput = getParameter(paramsMap, "textInput"); String fileInputValue = getParameter(paramsMap, "data"); if (txtInput != null && (txtInput = txtInput.trim()).length() > 0) { String[] subjects = txtInput.split(","); for (String subject : subjects) { subject = subject.trim(); if (subject.length() > 0) { items.add(subject); } } } // Handle records submitted in file with csv format, in lines or mixed together if (fileInputValue != null && (fileInputValue = fileInputValue.trim()).length() > 0) { // Handle record with line input String[] lines = fileInputValue.split("\n"); for (String line : lines) { // Handle CSV encoding records and records delimited by comma, whitespace etc. 
if (line != null && (line = line.trim().replace("\"", "")).length() > 0) { String[] tokens = line.split(","); for (String token : tokens) { String[] records = token.split(" "); for (String record : records) { record = record.trim(); if (record.length() > 0) { items.add(record); } } } } } } // Initiate SOLRIndexHandler to index the records handler = new SOLRIndexHandler(damsClient, null, update); handler.setItems(items); handler.setCollectionTitle("SOLR Records"); } else { // Handle solr update for collections if (collectionId.indexOf(",") > 0) { String collIDs = collectionId; String[] collArr = collectionId.split(","); List<String> items = new ArrayList<String>(); String collNames = ""; for (int j = 0; j < collArr.length; j++) { if (collArr[j] != null && (collArr[j] = collArr[j].trim()).length() > 0) { collectionId = collArr[j]; if (collectionId.equalsIgnoreCase("all")) { items.addAll(damsClient.listAllRecords()); collNames += "All Records (" + items.size() + "), "; } else { try { handler = new SOLRIndexHandler(damsClient, collectionId); items.addAll(handler.getItems()); collNames += handler.getCollectionTitle() + "(" + handler.getFilesCount() + "), "; if (j > 0 && j % 5 == 0) collNames += "\n"; } finally { if (handler != null) { handler.release(); handler = null; } } } } } handler = new SOLRIndexHandler(damsClient, null, update); handler.setItems(items); handler.setCollectionTitle(collNames.substring(0, collNames.lastIndexOf(","))); handler.setCollectionId(collIDs); } else { if (collectionId.equalsIgnoreCase("all")) { handler = new SOLRIndexHandler(damsClient, null, update); handler.setItems(damsClient.listAllRecords()); } else handler = new SOLRIndexHandler(damsClient, collectionId, update); } } } /*else if (i == 8){ //session.setAttribute("status", opMessage + "CDL Sending ..."); int operationType = 0; boolean resend = getParameter(paramsMap, "cdlResend") != null; if(resend){ operationType = 1; }else{ resend = getParameter(paramsMap, "cdlResendMets") != null; if(resend) operationType = 2; } //handler = new CdlIngestHandler(tsUtils, collectionId, userId, operationType); String feeder = getParameter(paramsMap, "feeder"); session.setAttribute("status", opMessage + "CDL " + feeder.toUpperCase() + " METS feeding ..."); boolean includeEmbargoed = (getParameter(paramsMap, "includeEmbargoed")!=null); if(feeder.equals("merritt")){ String account = getParameter(paramsMap, "account"); String password = getParameter(paramsMap, "password"); //String accessGroupId = getParameter(paramsMap, "accessGroup"); handler = new CdlIngestHandler(damsClient, collectionId, userId, operationType, feeder, account, password); }else handler = new CdlIngestHandler(damsClient, collectionId, userId, operationType); if(!includeEmbargoed) handler.excludeEmbargoedObjects(); }else if (i == 9){ session.setAttribute("status", opMessage + "Metadata Converting and populating ..."); String tsOperation = getParameter(paramsMap, "sipOption"); if(tsOperation == null || tsOperation.length() == 0) tsOperation = "tsNew"; int operationType = MetadataImportController.getOperationId(tsOperation); String srcFile = (String) session.getAttribute("source"); String srcFormat = (String) session.getAttribute("format"); String pathMap = (String) session.getAttribute("pathMap"); int sheetNo = 0; if(session.getAttribute("sheetNo") != null) sheetNo = ((Integer)session.getAttribute("sheetNo")).intValue(); String rdfFileToWrite = Constants.TMP_FILE_DIR + "tmpRdf_" + session.getId() + ".xml"; if("excel".equalsIgnoreCase(srcFormat)){ handler = new 
ExcelConverter(damsClient, collectionId, srcFile, sheetNo, pathMap, operationType); ExcelConverter converter = (ExcelConverter)handler; converter.setUseArk(true); converter.setRdfFileToWrite(rdfFileToWrite); }else throw new ServletException("Unsupported data format: " + srcFormat); }*/else if (i == 10) { session.setAttribute("status", opMessage + "Stage Ingesting ..."); String unit = getParameter(paramsMap, "unit"); String arkSetting = getParameter(paramsMap, "arkSetting").trim(); String filePath = getParameter(paramsMap, "filePath").trim(); String fileFilter = getParameter(paramsMap, "fileFilter").trim(); String preferedOrder = getParameter(paramsMap, "preferedOrder"); String fileSuffixes = getParameter(paramsMap, "fileSuffixes"); String fileUse = getParameter(paramsMap, "fileUse"); if (fileSuffixes != null && fileSuffixes.length() > 0) fileSuffixes = fileSuffixes.trim(); String coDelimiter = "p"; if (arkSetting.equals("1")) { if (preferedOrder == null || preferedOrder.equalsIgnoreCase("cofDelimiter")) { coDelimiter = getParameter(paramsMap, "cofDelimiter").trim(); } else if (preferedOrder.equals("suffix")) coDelimiter = getParameter(paramsMap, "coDelimiter").trim(); else coDelimiter = null; } else { if (arkSetting.equals("5")) { coDelimiter = getParameter(paramsMap, "coDelimiter").trim(); } } String[] fileOrderSuffixes = null; if (fileSuffixes != null && fileSuffixes.length() > 0) fileOrderSuffixes = fileSuffixes.split(","); String[] fileUses = null; if (fileUse != null && (fileUse = fileUse.trim()).length() > 0) { fileUses = fileUse.split(","); for (int j = 0; j < fileUses.length; j++) { if (fileUses[j] != null) fileUses[j] = fileUses[j].trim(); } } session.setAttribute("category", collectionId); session.setAttribute("unit", unit); session.setAttribute("arkSetting", arkSetting); session.setAttribute("filePath", filePath); session.setAttribute("fileFilter", fileFilter); session.setAttribute("preferedOrder", preferedOrder); session.setAttribute("fileSuffixes", fileSuffixes); session.setAttribute("fileUse", fileUse); String[] dirArr = filePath.split(";"); List<String> fileList = new ArrayList<String>(); String dir = null; for (int j = 0; j < dirArr.length; j++) { dir = dirArr[j]; if (dir != null && (dir = dir.trim()).length() > 0) { if ((dir.startsWith("/") || dir.startsWith("\\")) && (Constants.DAMS_STAGING.endsWith("/") || Constants.DAMS_STAGING.endsWith("\\"))) dir = dir.substring(1); fileList.add(Constants.DAMS_STAGING + dir); } } handler = new FileIngestionHandler(damsClient, fileList, Integer.parseInt(arkSetting), collectionId, fileFilter, coDelimiter); ((FileIngestionHandler) handler).setFileOrderSuffixes(fileOrderSuffixes); ((FileIngestionHandler) handler).setPreferedOrder(preferedOrder); ((FileIngestionHandler) handler).setUnit(unit); ((FileIngestionHandler) handler).setFileUses(fileUses); } else if (i == 11) { session.setAttribute("status", opMessage + "Serialize records as RDF/XML to filestore ..."); if (collectionId.indexOf(",") > 0) { String collIDs = collectionId; String[] collArr = collectionId.split(","); List<String> items = new ArrayList<String>(); String collNames = ""; for (int j = 0; j < collArr.length; j++) { if (collArr[j] != null && (collArr[j] = collArr[j].trim()).length() > 0) { collectionId = collArr[j]; if (collectionId.equalsIgnoreCase("all")) { items.addAll(damsClient.listAllRecords()); collNames += "All Records (" + items.size() + "), "; } else { try { handler = new SOLRIndexHandler(damsClient, collectionId); items.addAll(handler.getItems()); collNames += 
handler.getCollectionTitle() + "(" + handler.getFilesCount() + "), "; if (j > 0 && j % 5 == 0) collNames += "\n"; } finally { if (handler != null) { handler.release(); handler = null; } } } } } handler = new FilestoreSerializationHandler(damsClient, null); handler.setItems(items); handler.setCollectionTitle(collNames.substring(0, collNames.lastIndexOf(","))); handler.setCollectionId(collIDs); } else { if (collectionId.equalsIgnoreCase("all")) { handler = new FilestoreSerializationHandler(damsClient, null); handler.setItems(damsClient.listAllRecords()); } else handler = new FilestoreSerializationHandler(damsClient, collectionId); } } else if (i == 15) { session.setAttribute("status", opMessage + "Uploading files from dams-staging to " + damsClient.getFileStore() + " ..."); Map<String, String> filesMap = new TreeMap<String, String>(); for (Iterator<String> it = paramsMap.keySet().iterator(); it.hasNext();) { String key = it.next(); if (key.startsWith("f-")) { String file = paramsMap.get(key)[0]; String fileURI = paramsMap.get(key.replaceFirst("f-", "fid-"))[0]; if (fileURI != null && fileURI.startsWith(Constants.DAMS_ARK_URL_BASE)) filesMap.put(file, fileURI.trim()); else message += "Invalid fileURL for file " + file + " (" + fileURI + "). \n"; } } handler = new FileUploadHandler(damsClient, filesMap); handler.setItems(Arrays.asList(filesMap.keySet().toArray(new String[filesMap.size()]))); } else if (i == 18) { boolean components = getParameter(paramsMap, "exComponents") == null; String exFormat = getParameter(paramsMap, "exportFormat"); String xslSource = getParameter(paramsMap, "xsl"); if (xslSource == null || (xslSource = xslSource.trim()).length() == 0) { xslSource = "/pub/data1/import/apps/glossary/xsl/dams/convertToCSV.xsl"; if (!new File(xslSource).exists()) xslSource = Constants.CLUSTER_HOST_NAME + "glossary/xsl/dams/convertToCSV.xsl"; } session.setAttribute("status", opMessage + (exFormat.equalsIgnoreCase("csv") ? "CSV" : exFormat.equalsIgnoreCase("N-TRIPLE") ? "N-TRIPLE" : "RDF XML ") + " Metadata Export ..."); File outputFile = new File(Constants.TMP_FILE_DIR, "export-" + DAMSClient.stripID(collectionId) + "-" + System.currentTimeMillis() + "-rdf.xml"); String nsInput = getParameter(paramsMap, "nsInput"); List<String> nsInputs = new ArrayList<String>(); boolean componentsIncluded = true; if (nsInput != null && (nsInput = nsInput.trim()).length() > 0) { String[] nsInputArr = nsInput.split(","); for (int j = 0; j < nsInputArr.length; j++) { if (nsInputArr[j] != null && (nsInputArr[j] = nsInputArr[j].trim()).length() > 0) nsInputs.add(nsInputArr[j]); } } fileOut = new FileOutputStream(outputFile); handler = new MetadataExportHandler(damsClient, collectionId, nsInputs, componentsIncluded, exFormat, fileOut); ((MetadataExportHandler) handler).setFileUri(logLink + "&file=" + outputFile.getName()); ((MetadataExportHandler) handler).setComponents(components); } else if (i == 19) { session.setAttribute("status", opMessage + "Jhove report ..."); boolean bytestreamFilesOnly = getParameter(paramsMap, "bsJhoveReport") != null; boolean update = getParameter(paramsMap, "bsJhoveUpdate") != null; handler = new JhoveReportHandler(damsClient, collectionId, bytestreamFilesOnly); if (update) ((JhoveReportHandler) handler) .setJhoveUpdate(getParameter(paramsMap, "jhoveUpdate")); } else throw new ServletException("Unhandle operation index: " + i); if (handler != null) { try { damsClient.setClientInfo(clientTool + (StringUtils.isNotBlank(clientVersion) ? 
" " + clientVersion : "")); handler.setSubmissionId(submissionId); handler.setDamsClient(damsClient); handler.setSession(session); handler.setUserId(userId); if (handler.getCollectionId() == null && (collectionId != null && collectionId.length() > 0)) handler.setCollectionId(collectionId); successful = handler.execute(); } catch (InterruptedException e) { successful = false; exeInfo += e.getMessage(); e.printStackTrace(); } catch (Exception e) { successful = false; exeInfo += "\n" + e.getMessage(); e.printStackTrace(); } finally { String collectionName = handler.getCollectionId(); if (collectionName != null && collectionName.length() > 0 && logLink.indexOf("&category=") < 0) logLink += "&category=" + collectionName.replace(" ", ""); handler.setExeResult(successful); exeInfo += handler.getExeInfo(); handler.release(); if (fileOut != null) { CollectionHandler.close(fileOut); fileOut = null; } } } } else continue; message += exeInfo; if (!successful) { String errors = "Execution failed:\n" + message + "\n"; returnMessage += errors; break; } else { returnMessage += "\n" + message; } } } catch (Exception e) { e.printStackTrace(); returnMessage += e.getMessage(); } finally { if (damsClient != null) damsClient.close(); if (fileOut != null) { CollectionHandler.close(fileOut); fileOut = null; } } } else returnMessage = message; String logMessage = "For details, please download " + "<a href=\"" + logLink + "\">log</a>" + "."; if (returnMessage.length() > 1000) { returnMessage = returnMessage.substring(0, 1000); int idx = returnMessage.lastIndexOf("\n"); if (idx > 0) returnMessage = returnMessage.substring(0, idx); else { idx = returnMessage.lastIndexOf("</a>"); if (idx < returnMessage.lastIndexOf("<a ")) returnMessage = returnMessage.substring(0, idx); } returnMessage = "\n" + returnMessage + "\n ... "; } returnMessage += "\n" + dataLink + "\n" + logMessage; RequestOrganizer.addResultMessage(session, returnMessage.replace("\n", "<br />") + "<br />"); return returnMessage; }
From source file: edu.vt.middleware.ldap.dsml.AbstractDsml.java
License: Open Source License
/**
 * This will take an LDAP search result and convert it to a DSML entry
 * element.
 *
 * @param entryName <code>QName</code> name of element to create
 * @param ldapEntry <code>LdapEntry</code> to convert
 * @param ns <code>Namespace</code> of DSML
 *
 * @return <code>Element</code>
 */
protected Element createDsmlEntry(final QName entryName, final LdapEntry ldapEntry, final Namespace ns) {
    // create Element to hold result content
    final Element entryElement = DocumentHelper.createElement(entryName);

    if (ldapEntry != null) {
        final String dn = ldapEntry.getDn();
        if (dn != null) {
            entryElement.addAttribute("dn", dn);
        }
        for (Element e : createDsmlAttributes(ldapEntry.getLdapAttributes(), ns)) {
            entryElement.add(e);
        }
    }

    return entryElement;
}
From source file: edu.vt.middleware.ldap.dsml.Dsmlv1.java
License: Open Source License
/**
 * This will take the results of a prior LDAP query and convert it to a DSML
 * <code>Document</code>.
 *
 * @param result <code>LdapResult</code>
 *
 * @return <code>Document</code>
 */
public Document createDsml(final LdapResult result) {
    final Namespace ns = new Namespace("dsml", "http://www.dsml.org/DSML");
    final Document doc = DocumentHelper.createDocument();
    final Element dsmlElement = doc.addElement(new QName("dsml", ns));
    final Element entriesElement = dsmlElement.addElement(new QName("directory-entries", ns));

    // build document object from result
    if (result != null) {
        for (LdapEntry le : result.getEntries()) {
            final Element entryElement = this.createDsmlEntry(new QName("entry", ns), le, ns);
            entriesElement.add(entryElement);
        }
    }

    return doc;
}
From source file: edu.vt.middleware.ldap.dsml.Dsmlv2.java
License: Open Source License
/**
 * This will take the results of a prior LDAP query and convert it to a DSML
 * <code>Document</code>.
 *
 * @param result <code>LdapResult</code>
 *
 * @return <code>Document</code>
 */
public Document createDsml(final LdapResult result) {
    final Namespace ns = new Namespace("", "urn:oasis:names:tc:DSML:2:0:core");
    final Document doc = DocumentHelper.createDocument();
    final Element dsmlElement = doc.addElement(new QName("batchResponse", ns));
    final Element entriesElement = dsmlElement.addElement(new QName("searchResponse", ns));

    // build document object from results
    if (result != null) {
        for (LdapEntry le : result.getEntries()) {
            final Element entryElement = this.createDsmlEntry(new QName("searchResultEntry", ns), le, ns);
            entriesElement.add(entryElement);
        }
    }

    final Element doneElement = entriesElement.addElement(new QName("searchResultDone", ns));
    final Element codeElement = doneElement.addElement(new QName("resultCode", ns));
    codeElement.addAttribute("code", "0");

    return doc;
}
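Note how Dsmlv1 and Dsmlv2 above differ: Dsmlv1 constructs a prefixed Namespace ("dsml"), while Dsmlv2 passes an empty prefix, which dom4j treats as a default-namespace declaration. A minimal sketch of the resulting serialization (class name is illustrative):

import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Namespace;
import org.dom4j.QName;

public class NamespacePrefixDemo {
    public static void main(String[] args) {
        // Prefixed namespace: the element name carries the prefix.
        Document d1 = DocumentHelper.createDocument();
        d1.addElement(new QName("dsml", new Namespace("dsml", "http://www.dsml.org/DSML")));
        // Prints: <dsml:dsml xmlns:dsml="http://www.dsml.org/DSML"/>
        System.out.println(d1.getRootElement().asXML());

        // Empty prefix: the URI becomes the default namespace of the element.
        Document d2 = DocumentHelper.createDocument();
        d2.addElement(new QName("batchResponse", new Namespace("", "urn:oasis:names:tc:DSML:2:0:core")));
        // Prints: <batchResponse xmlns="urn:oasis:names:tc:DSML:2:0:core"/>
        System.out.println(d2.getRootElement().asXML());
    }
}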
From source file: eu.planets_project.pp.plato.services.characterisation.xcl.Comparator.java
License: Open Source License
/**
 * Generates a PCR for all <param>mappedProperties</param> and their metrics.
 * Note: All possible metrics are included.
 *
 * @param mappedProperties
 * @return
 */
private String generateConfig(Set<XCLObjectProperty> mappedProperties) {
    Document doc = DocumentHelper.createDocument();

    Element root = doc.addElement("pcRequest");
    Namespace xsi = new Namespace("xsi", "http://www.w3.org/2001/XMLSchema-instance");
    root.add(xsi);
    root.addAttribute(xsi.getPrefix() + ":schemaLocation",
            "http://www.planets-project.eu/xcl/schemas/xcl data/pp5/schemas/pcr/pcr.xsd");
    Namespace xcl = new Namespace("", "http://www.planets-project.eu/xcl/schemas/xcl");
    root.add(xcl);

    Element compSet = root.addElement("compSet", xcl.getURI());
    compSet.addElement("source").addAttribute("name", "samplerecord");
    compSet.addElement("target").addAttribute("name", "experimentresult");

    /*
    Element root = doc.addElement("plans");
    Namespace xsi = new Namespace("xsi", "http://www.w3.org/2001/XMLSchema-instance");
    root.add(xsi);
    root.addAttribute(xsi.getPrefix()+":noNamespaceSchemaLocation", "http://www.ifs.tuwien.ac.at/dp/plato/schemas/plato-1.9.xsd");
    */

    for (XCLObjectProperty objectProperty : mappedProperties) {
        Element prop = compSet.addElement("property")
                .addAttribute("id", objectProperty.getPropertyId())
                .addAttribute("name", objectProperty.getName());
        for (Metric metric : objectProperty.getMetrics()) {
            prop.addElement("metric")
                    .addAttribute("id", metric.getMetricId())
                    .addAttribute("name", metric.getName());
        }
    }

    return doc.asXML();
}
From source file: eu.planets_project.pp.plato.xml.LibraryExport.java
License: Open Source License
public Document exportToDocument(LibraryTree lib) {
    Document doc = DocumentHelper.createDocument();
    Element root = doc.addElement("library");
    root.add(xsi);
    root.add(platoLibNS);
    // root.addAttribute(xsi.getPrefix()+":schemaLocation", "http://www.planets-project.eu/plato plato-2.1.xsd");
    // root.addAttribute(xsi.getPrefix()+":noNamespaceSchemaLocation", "http://www.ifs.tuwien.ac.at/dp/plato/schemas/plato-2.1.xsd");
    root.addAttribute("name", lib.getName());
    Element rootReq = root.addElement(new QName("requirement", platoLibNS));
    addRequirementProperties(rootReq, lib.getRoot());
    return doc;
}
From source file: eu.planets_project.pp.plato.xml.ProjectExporter.java
License: Open Source License
public Document createProjectDoc() {
    Document doc = DocumentHelper.createDocument();
    Element root = doc.addElement("plans");
    root.add(xsi);
    root.add(platoNS);
    root.addAttribute(xsi.getPrefix() + ":schemaLocation",
            "http://www.planets-project.eu/plato plato-3.0.xsd");
    root.add(excutablePlanNS);
    root.add(new Namespace("fits", "http://hul.harvard.edu/ois/xml/ns/fits/fits_output"));
    // set version of corresponding schema
    root.addAttribute("version", "3.0.0");
    return doc;
}
From source file: eu.planets_project.pp.plato.xml.ProjectExporter.java
License: Open Source License
public Document createTemplateDoc() {
    Document doc = DocumentHelper.createDocument();
    Element root = doc.addElement("templates");
    root.add(xsi);
    return doc;
}
From source file: eu.planets_project.pp.plato.xml.ProjectExporter.java
License: Open Source License
/** * Adds the XML-representation of the given project to the parent <code>projectNode</code> * * @param p * @param projectNode */ public void addProject(Plan p, Document projectsDoc, List<Integer> uploadIDs, List<Integer> recordIDs) { Element projectNode = projectsDoc.getRootElement().addElement(new QName("plan", platoNS)); // Base64 encoder for binary data BASE64Encoder encoder = new BASE64Encoder(); addChangeLog(p.getChangeLog(), projectNode); // Plan state projectNode.addElement("state").addAttribute("value", Integer.toString(p.getState().getValue())); Element properties = projectNode.addElement("properties"); addUpload(p.getPlanProperties().getReportUpload(), properties, "report", encoder, uploadIDs); // Plan properties properties.addAttribute("author", p.getPlanProperties().getAuthor()) .addAttribute("organization", p.getPlanProperties().getOrganization()) .addAttribute("name", p.getPlanProperties().getName()) .addAttribute("privateProject", Boolean.toString(p.getPlanProperties().isPrivateProject())) .addAttribute("reportPublic", Boolean.toString(p.getPlanProperties().isReportPublic())); addStringElement(properties, "description", p.getPlanProperties().getDescription()); addStringElement(properties, "owner", p.getPlanProperties().getOwner()); addChangeLog(p.getPlanProperties().getChangeLog(), properties); // Plan Basis Element basis = projectNode.addElement("basis"); basis.addAttribute("identificationCode", p.getProjectBasis().getIdentificationCode()); addStringElement(basis, "documentTypes", p.getProjectBasis().getDocumentTypes()); addStringElement(basis, "applyingPolicies", p.getProjectBasis().getApplyingPolicies()); addStringElement(basis, "designatedCommunity", p.getProjectBasis().getDesignatedCommunity()); addStringElement(basis, "mandate", p.getProjectBasis().getMandate()); addStringElement(basis, "organisationalProcedures", p.getProjectBasis().getOrganisationalProcedures()); addStringElement(basis, "planningPurpose", p.getProjectBasis().getPlanningPurpose()); addStringElement(basis, "planRelations", p.getProjectBasis().getPlanRelations()); addStringElement(basis, "preservationRights", p.getProjectBasis().getPreservationRights()); addStringElement(basis, "referenceToAgreements", p.getProjectBasis().getReferenceToAgreements()); Element triggers = basis.addElement("triggers"); if (p.getProjectBasis().getTriggers() != null) { addTrigger(triggers, p.getProjectBasis().getTriggers().getNewCollection()); addTrigger(triggers, p.getProjectBasis().getTriggers().getPeriodicReview()); addTrigger(triggers, p.getProjectBasis().getTriggers().getChangedEnvironment()); addTrigger(triggers, p.getProjectBasis().getTriggers().getChangedObjective()); addTrigger(triggers, p.getProjectBasis().getTriggers().getChangedCollectionProfile()); } Element policyTree = basis.addElement("policyTree"); addSubPolicyTree(p.getProjectBasis().getPolicyTree().getRoot(), policyTree); addChangeLog(p.getProjectBasis().getChangeLog(), basis); // Sample Records Element samplerecords = projectNode.addElement("sampleRecords"); addStringElement(samplerecords, "samplesDescription", p.getSampleRecordsDefinition().getSamplesDescription()); Element collectionProfile = samplerecords.addElement("collectionProfile"); if (p.getSampleRecordsDefinition().getCollectionProfile() != null) { addStringElement(collectionProfile, "collectionID", p.getSampleRecordsDefinition().getCollectionProfile().getCollectionID()); addStringElement(collectionProfile, "description",
p.getSampleRecordsDefinition().getCollectionProfile().getDescription()); addStringElement(collectionProfile, "expectedGrowthRate", p.getSampleRecordsDefinition().getCollectionProfile().getExpectedGrowthRate()); addStringElement(collectionProfile, "numberOfObjects", p.getSampleRecordsDefinition().getCollectionProfile().getNumberOfObjects()); addStringElement(collectionProfile, "typeOfObjects", p.getSampleRecordsDefinition().getCollectionProfile().getTypeOfObjects()); addStringElement(collectionProfile, "retentionPeriod", p.getSampleRecordsDefinition().getCollectionProfile().getRetentionPeriod()); for (SampleObject rec : p.getSampleRecordsDefinition().getRecords()) { Element sampleRecord = samplerecords.addElement("record") .addAttribute("shortName", rec.getShortName()).addAttribute("fullname", rec.getFullname()) .addAttribute("contentType", rec.getContentType()); Element data = sampleRecord.addElement("data"); if (rec.isDataExistent()) { data.addAttribute("hasData", "true"); data.addAttribute("encoding", "base64"); if (recordIDs != null) { data.setText("" + rec.getId()); recordIDs.add(rec.getId()); } else { data.setText(encoder.encode(rec.getData().getData())); } addUpload(rec.getXcdlDescription(), sampleRecord, "xcdlDescription", encoder, uploadIDs); addJhoveString(rec, encoder, sampleRecord); addFitsInfo(rec, encoder, sampleRecord); } else { data.addAttribute("hasData", "false"); } Element formatInfo = sampleRecord.addElement("formatInfo") .addAttribute("puid", rec.getFormatInfo().getPuid()) .addAttribute("name", rec.getFormatInfo().getName()) .addAttribute("version", rec.getFormatInfo().getVersion()) .addAttribute("mimeType", rec.getFormatInfo().getMimeType()) .addAttribute("defaultExtension", rec.getFormatInfo().getDefaultExtension()); addChangeLog(rec.getFormatInfo().getChangeLog(), formatInfo); addChangeLog(rec.getChangeLog(), sampleRecord); addStringElement(sampleRecord, "description", rec.getDescription()); addStringElement(sampleRecord, "originalTechnicalEnvironment", rec.getOriginalTechnicalEnvironment()); } addChangeLog(p.getSampleRecordsDefinition().getChangeLog(), samplerecords); // Requirementsdefinition Element rdef = projectNode.addElement("requirementsDefinition"); addStringElement(rdef, "description", p.getRequirementsDefinition().getDescription()); Element uploads = rdef.addElement("uploads"); for (DigitalObject upload : p.getRequirementsDefinition().getUploads()) { addUpload(upload, uploads, "upload", encoder, uploadIDs); } addChangeLog(p.getRequirementsDefinition().getChangeLog(), rdef); // Alternatives Element alternatives = projectNode.addElement("alternatives"); addStringElement(alternatives, "description", p.getAlternativesDefinition().getDescription()); for (Alternative a : p.getAlternativesDefinition().getAlternatives()) { /* * Export all alternatives (also discarded ones) * Indices of the result-set reference only the considered alternatives! 
*/ Element alt = alternatives.addElement("alternative") .addAttribute("discarded", Boolean.toString(a.isDiscarded())) .addAttribute("name", a.getName()); addStringElement(alt, "description", a.getDescription()); if (a.getAction() != null) { Element action = alt.addElement("action"); action.addAttribute("shortname", a.getAction().getShortname()) .addAttribute("url", a.getAction().getUrl()) .addAttribute("actionIdentifier", a.getAction().getActionIdentifier()) .addAttribute("info", a.getAction().getInfo()) .addAttribute("targetFormat", a.getAction().getTargetFormat()) .addAttribute("executable", String.valueOf(a.getAction().isExecutable())); addStringElement(action, "descriptor", a.getAction().getDescriptor()); addStringElement(action, "parameterInfo", a.getAction().getParameterInfo()); Element params = action.addElement("params"); if (a.getAction().getParams() != null) { for (Parameter param : a.getAction().getParams()) { params.addElement("param").addAttribute("name", param.getName()).addAttribute("value", param.getValue()); } } addChangeLog(a.getAction().getChangeLog(), action); } Element resourceDescr = alt.addElement("resourceDescription"); addStringElement(resourceDescr, "necessaryResources", a.getResourceDescription().getNecessaryResources()); addStringElement(resourceDescr, "configSettings", a.getResourceDescription().getConfigSettings()); addStringElement(resourceDescr, "reasonForConsidering", a.getResourceDescription().getReasonForConsidering()); addChangeLog(a.getResourceDescription().getChangeLog(), resourceDescr); Element experiment = alt.addElement("experiment"); Experiment exp = a.getExperiment(); addStringElement(experiment, "description", exp.getDescription()); //addStringElement(experiment, "runDescription", exp.getRunDescription()); addStringElement(experiment, "settings", exp.getSettings()); uploads = experiment.addElement("results"); for (SampleObject record : exp.getResults().keySet()) { DigitalObject up = exp.getResults().get(record); if (up != null) { // only existing uploads are exported Element upload = addUpload(up, uploads, "result", encoder, uploadIDs); if (upload != null) { upload.addAttribute("key", record.getShortName()); } } } // // */experiment/xcdlDescriptions/xcdlDescription // Element xcdls = experiment.addElement("xcdlDescriptions"); // for (SampleObject record : exp.getResults().keySet()) { // DigitalObject result = exp.getResults().get(record); // if (result != null) { // XcdlDescription x = result.getXcdlDescription(); // if (x != null) { // // only existing xcdls are exported // Element upload = addUpload(x, xcdls, "xcdlDescription", encoder, uploadIDs); // if (upload != null) { // upload.addAttribute("key", record.getShortName()); // } // } // } // } // export detailed experiment info's Element detailedInfos = experiment.addElement("detailedInfos"); for (SampleObject record : exp.getDetailedInfo().keySet()) { DetailedExperimentInfo dinfo = exp.getDetailedInfo().get(record); Element detailedInfo = detailedInfos.addElement("detailedInfo") .addAttribute("key", record.getShortName()) .addAttribute("successful", "" + dinfo.getSuccessful()); addStringElement(detailedInfo, "programOutput", dinfo.getProgramOutput()); addStringElement(detailedInfo, "cpr", dinfo.getCpr()); Element measurements = detailedInfo.addElement("measurements"); for (Measurement m : dinfo.getMeasurements().values()) { Element measurement = measurements.addElement("measurement"); // measurement.value: String typename = deriveElementname(m.getValue().getClass()); Element valueElem = 
measurement.addElement(typename); //.addAttribute("value", m.getValue().toString()); addStringElement(valueElem, "value", m.getValue().toString()); addChangeLog(m.getValue().getChangeLog(), valueElem); // measurement.property: Element property = measurement.addElement("property").addAttribute("name", m.getProperty().getName()); addScale(m.getProperty().getScale(), property); } } addChangeLog(a.getExperiment().getChangeLog(), experiment); addChangeLog(a.getChangeLog(), alt); } addChangeLog(p.getAlternativesDefinition().getChangeLog(), alternatives); // go-nogo - is created in the go-nogo step and need not exist if (p.getDecision() != null) { Element decision = projectNode.addElement("decision"); addStringElement(decision, "reason", p.getDecision().getReason()); addStringElement(decision, "actionNeeded", p.getDecision().getActionNeeded()); decision.addElement("goDecision").addAttribute("value", p.getDecision().getDecision().name()); addChangeLog(p.getDecision().getChangeLog(), decision); } // Evaluation Element evaluation = projectNode.addElement("evaluation"); addStringElement(evaluation, "comment", p.getEvaluation().getComment()); addChangeLog(p.getEvaluation().getChangeLog(), evaluation); // importance weighting Element importanceWeighting = projectNode.addElement("importanceWeighting"); addStringElement(importanceWeighting, "comment", p.getImportanceWeighting().getComment()); addChangeLog(p.getImportanceWeighting().getChangeLog(), importanceWeighting); // Recommendation Element recommendation = projectNode.addElement("recommendation"); if (p.getRecommendation().getAlternative() != null) { recommendation.addAttribute("alternativeName", p.getRecommendation().getAlternative().getName()); } addStringElement(recommendation, "reasoning", p.getRecommendation().getReasoning()); addStringElement(recommendation, "effects", p.getRecommendation().getEffects()); addChangeLog(p.getRecommendation().getChangeLog(), recommendation); // transformation Element trafo = projectNode.addElement("transformation"); addStringElement(trafo, "comment", p.getTransformation().getComment()); addChangeLog(p.getTransformation().getChangeLog(), trafo); // Objectivetree (including weights, evaluation values and transformers) Element tree = projectNode.addElement("tree"); tree.addAttribute("weightsInitialized", "" + p.getTree().isWeightsInitialized()); if (p.getTree().getRoot() != null) addSubTree(p.getTree().getRoot(), tree); } Element executablePlan = projectNode.addElement("executablePlan"); try { if (p.getExecutablePlanDefinition().getExecutablePlan() != null) { Document execPlan = DocumentHelper.parseText(p.getExecutablePlanDefinition().getExecutablePlan()); Element execPlanRoot = execPlan.getRootElement(); if (execPlanRoot.hasContent()) { Element planWorkflow = executablePlan.addElement("planWorkflow"); planWorkflow.add(execPlanRoot); } } if (p.getExecutablePlanDefinition().getEprintsExecutablePlan() != null) { Document execPlan = DocumentHelper .parseText(p.getExecutablePlanDefinition().getEprintsExecutablePlan()); Element execPlanRoot = execPlan.getRootElement(); if (execPlanRoot.hasContent()) { //Element planWorkflow = executablePlan.addElement("eprintsPlan"); executablePlan.add(execPlanRoot); } } } catch (DocumentException e) { // if the stored exec. plan is invalid for some reason, we leave the plan out. // TODO: HK this should no happen as we write the xml ourselves, but still, // we need a mechanism here to prevent the export if the xml is invalid. 
PlatoLogger.getLogger(this.getClass()).error(e.getMessage(), e); } // TODO HK how does this here relate to the upper try-catch block and the exception?? // Smells like a hack! ExecutablePlanDefinition plan = p.getExecutablePlanDefinition(); addStringElement(executablePlan, "objectPath", plan.getObjectPath()); addStringElement(executablePlan, "toolParameters", plan.getToolParameters()); addStringElement(executablePlan, "triggersConditions", plan.getTriggersConditions()); addStringElement(executablePlan, "validateQA", plan.getValidateQA()); addChangeLog(plan.getChangeLog(), executablePlan); Element planDef = projectNode.addElement("planDefinition"); PlanDefinition pdef = p.getPlanDefinition(); planDef.addAttribute("currency", pdef.getCurrency()); addStringElement(planDef, "costsIG", pdef.getCostsIG()); addStringElement(planDef, "costsPA", pdef.getCostsPA()); addStringElement(planDef, "costsPE", pdef.getCostsPE()); addStringElement(planDef, "costsQA", pdef.getCostsQA()); addStringElement(planDef, "costsREI", pdef.getCostsREI()); addStringElement(planDef, "costsRemarks", pdef.getCostsRemarks()); addStringElement(planDef, "costsRM", pdef.getCostsRM()); addStringElement(planDef, "costsTCO", pdef.getCostsTCO()); addStringElement(planDef, "responsibleExecution", pdef.getResponsibleExecution()); addStringElement(planDef, "responsibleMonitoring", pdef.getResponsibleMonitoring()); triggers = planDef.addElement("triggers"); if (pdef.getTriggers() != null) { addTrigger(triggers, pdef.getTriggers().getNewCollection()); addTrigger(triggers, pdef.getTriggers().getPeriodicReview()); addTrigger(triggers, pdef.getTriggers().getChangedEnvironment()); addTrigger(triggers, pdef.getTriggers().getChangedObjective()); addTrigger(triggers, pdef.getTriggers().getChangedCollectionProfile()); } addChangeLog(pdef.getChangeLog(), planDef); }