Example usage for org.dom4j DocumentFactory DocumentFactory

Introduction

This page shows example usage of the org.dom4j DocumentFactory constructor, DocumentFactory().

Prototype

public DocumentFactory() 
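
A minimal, self-contained sketch of how the no-argument constructor is typically used: create a factory, build an empty document, and add elements to it. The class and element names below are illustrative only, not taken from the examples that follow.

import org.dom4j.Document;
import org.dom4j.DocumentFactory;
import org.dom4j.Element;

public class DocumentFactoryDemo {
    public static void main(String[] args) {
        // Create a factory and use it to build an empty document.
        DocumentFactory factory = new DocumentFactory();
        Document doc = factory.createDocument();

        // addElement() on the empty document creates the root element.
        Element root = doc.addElement("catalog");
        root.addElement("book").addAttribute("id", "1").setText("dom4j in action");

        // Serialize the document to an XML string.
        System.out.println(doc.asXML());
    }
}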

Usage

From source file: com.ztesoft.inf.extend.xstream.io.xml.Dom4JWriter.java

License: Open Source License

/**
 * @since 1.2.1
 */
public Dom4JWriter() {
    this(new DocumentFactory(), new XmlFriendlyReplacer());
}

From source file: edu.ucsd.library.dams.api.DAMSAPIServlet.java

private Element createRdfRootElement() {
    // setup document rdf root element
    Document doc = new DocumentFactory().createDocument();
    Element rdf = addElement(doc, "RDF", new Namespace("rdf", rdfNS));
    doc.setRootElement(rdf);
    rdf.add(new Namespace("mads", madsNS));
    rdf.add(new Namespace("rdf", rdfNS));
    rdf.add(new Namespace("dams", prNS));
    return rdf;
}
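
The addElement(doc, "RDF", ...) call above is a helper defined elsewhere in DAMSAPIServlet and is not shown here. A rough equivalent using only the dom4j API might look like the sketch below; the MADS and DAMS namespace URIs are placeholders, since the servlet's namespace constants (rdfNS, madsNS, prNS) are defined elsewhere in the class.

import org.dom4j.Document;
import org.dom4j.DocumentFactory;
import org.dom4j.Element;
import org.dom4j.Namespace;
import org.dom4j.QName;

public class RdfRootSketch {
    public static Element createRdfRootElement() {
        // The RDF namespace URI is standard; the other two URIs are placeholders.
        Namespace rdfNs = new Namespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#");
        Namespace madsNs = new Namespace("mads", "http://example.org/mads#"); // placeholder for madsNS
        Namespace damsNs = new Namespace("dams", "http://example.org/dams#"); // placeholder for prNS

        // Build an empty document and create a namespaced root element on it.
        Document doc = new DocumentFactory().createDocument();
        Element rdf = doc.addElement(new QName("RDF", rdfNs));

        // Declare the additional namespaces on the root so child elements can use them.
        rdf.add(madsNs);
        rdf.add(damsNs);
        return rdf;
    }
}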

From source file: edu.ucsd.library.xdre.web.CollectionOperationController.java

public static String handleProcesses(Map<String, String[]> paramsMap, HttpSession session) throws Exception {

    String message = "";
    String returnMessage = "";
    DAMSClient damsClient = null;
    String collectionId = getParameter(paramsMap, "category");

    boolean[] operations = new boolean[20];
    operations[0] = getParameter(paramsMap, "validateFileCount") != null;
    operations[1] = getParameter(paramsMap, "validateChecksums") != null;
    operations[2] = getParameter(paramsMap, "rdfImport") != null;
    operations[3] = getParameter(paramsMap, "createDerivatives") != null;
    operations[4] = getParameter(paramsMap, "collectionRelease") != null;
    operations[5] = getParameter(paramsMap, "externalImport") != null;
    operations[6] = getParameter(paramsMap, "marcModsImport") != null
            || getParameter(paramsMap, "excelImport") != null;
    operations[7] = getParameter(paramsMap, "luceneIndex") != null
            || getParameter(paramsMap, "solrDump") != null
            || getParameter(paramsMap, "solrRecordsDump") != null;
    operations[8] = getParameter(paramsMap, "sendToCDL") != null;
    operations[9] = getParameter(paramsMap, "dataConvert") != null;
    operations[10] = getParameter(paramsMap, "ingest") != null;
    operations[11] = getParameter(paramsMap, "serialize") != null;
    operations[12] = getParameter(paramsMap, "tsSyn") != null;
    operations[13] = getParameter(paramsMap, "createJson") != null;
    operations[14] = getParameter(paramsMap, "cacheJson") != null;
    operations[15] = getParameter(paramsMap, "fileUpload") != null;
    operations[16] = getParameter(paramsMap, "jsonDiffUpdate") != null;
    operations[17] = getParameter(paramsMap, "validateManifest") != null;
    operations[18] = getParameter(paramsMap, "metadataExport") != null;
    operations[19] = getParameter(paramsMap, "jhoveReport") != null;

    int submissionId = (int) System.currentTimeMillis();
    String logLink = "https://"
            + (Constants.CLUSTER_HOST_NAME.indexOf("localhost") >= 0 ? "localhost:8443"
                    : Constants.CLUSTER_HOST_NAME.indexOf("lib-ingest") >= 0
                            ? Constants.CLUSTER_HOST_NAME + ".ucsd.edu:8443"
                            : Constants.CLUSTER_HOST_NAME + ".ucsd.edu")
            + "/damsmanager/downloadLog.do?submissionId=" + submissionId;
    String dataLink = "";

    String ds = getParameter(paramsMap, "ts");
    String dsDest = null;
    if ((ds == null || (ds = ds.trim()).length() == 0) && !(operations[15] || operations[16]))
        ds = Constants.DEFAULT_TRIPLESTORE;
    else if (operations[12]) {
        dsDest = getParameter(paramsMap, "dsDest");
        if (dsDest == null)
            throw new ServletException("No destination triplestore data source provided...");
        else if (ds.equals(dsDest) || !dsDest.startsWith("ts/"))
            throw new ServletException("Can't sync triplestore from " + ds + " to destination " + dsDest + ".");
    }

    String fileStore = getParameter(paramsMap, "fs");
    damsClient = new DAMSClient(Constants.DAMS_STORAGE_URL);
    damsClient.setTripleStore(ds);
    damsClient.setFileStore(fileStore);
    damsClient.setUser((String) session.getAttribute("user"));

    String clientVersion = session.getServletContext().getInitParameter("src-version");
    String clientTool = "Custom";

    if (message.length() == 0) {
        int userId = -1;
        String userIdAttr = (String) session.getAttribute("employeeId");
        if (userIdAttr != null && userIdAttr.length() > 0) {
            try {
                userId = Integer.parseInt(userIdAttr);
            } catch (NumberFormatException e) {
                userId = -1;
            }
        }

        CollectionHandler handler = null;
        OutputStream fileOut = null;

        try {

            boolean successful = true;
            for (int i = 0; i < operations.length; i++) {
                handler = null;
                String exeInfo = "";

                if (operations[i]) {
                    String opMessage = "Preparing procedure ";
                    RequestOrganizer.setProgressPercentage(session, 0);
                    message = "";

                    if (i == 0) {
                        session.setAttribute("status",
                                opMessage + "File Count Validation for FileStore " + fileStore + " ...");
                        boolean ingestFile = getParameter(paramsMap, "ingestFile") != null;
                        boolean dams4FileRename = getParameter(paramsMap, "dams4FileRename") != null;
                        handler = new FileCountValidaionHandler(damsClient, collectionId);
                        ((FileCountValidaionHandler) handler).setDams4FileRename(dams4FileRename);
                        if (ingestFile) {
                            String[] filesPaths = getParameter(paramsMap, "filesLocation").split(";");
                            List<String> ingestFiles = new ArrayList<String>();
                            for (int j = 0; j < filesPaths.length; j++)
                                ingestFiles.add(new File(Constants.DAMS_STAGING + "/" + filesPaths[j])
                                        .getAbsolutePath());
                            ((FileCountValidaionHandler) handler).setIngestFile(ingestFile);
                            ((FileCountValidaionHandler) handler)
                                    .setFilesPaths(ingestFiles.toArray(new String[ingestFiles.size()]));
                        }
                    } else if (i == 1) {
                        session.setAttribute("status",
                                opMessage + "Checksum Validation for FileStore " + fileStore + " ...");
                        handler = new ChecksumsHandler(damsClient, collectionId, null);
                    } else if (i == 2) {
                        session.setAttribute("status", opMessage + "Importing metadata ...");
                        String dataFormat = getParameter(paramsMap, "dataFormat");
                        String importMode = getParameter(paramsMap, "importMode");
                        handler = new MetadataImportHandler(damsClient, collectionId,
                                getParameter(paramsMap, "data"), dataFormat, importMode);
                    } else if (i == 3) {
                        session.setAttribute("status", opMessage + "Derivatives Creation ...");
                        boolean derReplace = getParameter(paramsMap, "derReplace") == null ? false : true;

                        String reqSize = getParameter(paramsMap, "size");
                        String[] sizes = null;
                        if (reqSize != null && reqSize.length() > 0)
                            sizes = reqSize.split(",");
                        handler = new DerivativeHandler(damsClient, collectionId, sizes, derReplace);

                    } else if (i == 4) {
                        session.setAttribute("status",
                                opMessage + " release collection " + collectionId + " ...");
                        String releaseState = getParameter(paramsMap, "releaseState");
                        String releaseOption = getParameter(paramsMap, "releaseOption");
                        String collectionToMerge = getParameter(paramsMap, "collectionToMerge");

                        log.info("Collection release:  category =>" + collectionId + ", releaseState => "
                                + releaseState + ", releaseOption => " + releaseOption
                                + ", collectionToMerge => " + collectionToMerge);

                        handler = new CollectionReleaseHandler(damsClient, collectionId, releaseState,
                                releaseOption);
                        ((CollectionReleaseHandler) handler).setCollectionToMerge(collectionToMerge);
                    } else if (i == 5) {
                        session.setAttribute("status", opMessage + "Importing objects ...");
                        String[] dataPaths = getParameter(paramsMap, "dataPath").split(";");
                        String[] filesPaths = getParameter(paramsMap, "filesPath").split(";");
                        String importOption = getParameter(paramsMap, "importOption");
                        boolean replace = getParameter(paramsMap, "externalImportReplace") != null;
                        List<File> dFiles = new ArrayList<File>();
                        for (int j = 0; j < dataPaths.length; j++) {
                            String dataPath = dataPaths[j];
                            if (dataPath != null && (dataPath = dataPath.trim()).length() > 0) {
                                File file = new File(Constants.DAMS_STAGING + "/" + dataPath);
                                CollectionHandler.listFiles(dFiles, file);
                            }
                        }

                        List<String> ingestFiles = new ArrayList<String>();
                        for (int j = 0; j < filesPaths.length; j++) {
                            if ((filesPaths[j] = filesPaths[j].trim()).length() > 0)
                                ingestFiles.add(new File(Constants.DAMS_STAGING + "/" + filesPaths[j])
                                        .getAbsolutePath());
                        }

                        String[] excelExts = { "xls", "xlsx" };
                        List<File> excelFiles = FileUtils.filterFiles(dFiles, excelExts);

                        if (excelFiles.size() > 0) {
                            // Remove the Excel sources that need conversion from the file list
                            dFiles.removeAll(excelFiles);

                            // Pre-processing
                            boolean preprocessing = importOption.equalsIgnoreCase("pre-processing");
                            Element rdfPreview = null;
                            StringBuilder errorMessage = new StringBuilder();
                            StringBuilder duplicatRecords = new StringBuilder();
                            List<String> ids = new ArrayList<String>();
                            if (preprocessing) {
                                Document doc = new DocumentFactory().createDocument();
                                rdfPreview = TabularRecord.createRdfRoot(doc);
                            }
                            handler = new MetadataImportHandler(damsClient, null);
                            handler.setSubmissionId(submissionId);
                            handler.setSession(session);
                            handler.setUserId(userId);

                            // Directory to hold the converted rdf/xml
                            File tmpDir = new File(Constants.TMP_FILE_DIR + File.separatorChar + "converted");
                            if (!tmpDir.exists())
                                tmpDir.mkdir();

                            // Convert Excel source files to DAMS4 rdf/xml
                            int filesCount = 0;
                            for (File f : excelFiles) {
                                filesCount++;
                                RecordSource src = new ExcelSource(f);

                                for (Record rec = null; (rec = src.nextRecord()) != null;) {
                                    String id = rec.recordID();
                                    handler.logMessage("Pre-processing record with ID " + id + " ... ");

                                    if (ids.indexOf(id) < 0) {
                                        ids.add(id);
                                    } else {
                                        duplicatRecords.append(id + ", ");
                                        handler.logError("Found duplicated record with ID " + id + ".");
                                    }

                                    try {

                                        Document doc = rec.toRDFXML();
                                        if (duplicatRecords.length() == 0 && errorMessage.length() == 0) {
                                            if (preprocessing) {
                                                // preview when no errors are reported
                                                rdfPreview.add(rec.toRDFXML().selectSingleNode("//dams:Object")
                                                        .detach());
                                            } else {
                                                File convertedFile = new File(tmpDir.getAbsolutePath(),
                                                        id.replaceAll("[\\//:.*]+", "") + ".rdf.xml");
                                                try {
                                                    writeXml(convertedFile, doc.asXML());
                                                } finally {
                                                    convertedFile.deleteOnExit();
                                                    if (dFiles.indexOf(convertedFile) < 0) {
                                                        dFiles.add(convertedFile);
                                                        handler.logMessage("Added converted RDF/XML file "
                                                                + convertedFile.getAbsolutePath());
                                                    }
                                                }
                                            }
                                        }
                                    } catch (Exception e) {
                                        log.warn("Excel Input Stream error", e);
                                        errorMessage.append("-" + e.getMessage() + "\n");
                                        handler.logMessage(e.getMessage() + "\n");
                                    }
                                }
                                handler.setProgressPercentage(filesCount * 100 / excelFiles.size());
                            }

                            if (errorMessage.length() == 0 && duplicatRecords.length() == 0) {

                                if (preprocessing) {
                                    File destFile = new File(Constants.TMP_FILE_DIR,
                                            "preview-" + submissionId + "-rdf.xml");
                                    writeXml(destFile, rdfPreview.getDocument().asXML());

                                    successful = true;
                                    message = "\nPre-processing passed. ";
                                    message += "\nThe converted RDF/XML is ready for <a href=\"" + logLink
                                            + "&file=" + destFile.getName() + "\">download</a>.";
                                    //handler.logMessage(message);
                                    handler.release();
                                    handler = null;
                                } else {
                                    handler.release();
                                    // Initiate the ingest task for Excel AND/OR RDF/XML files
                                    handler = new RDFDAMS4ImportTsHandler(damsClient,
                                            dFiles.toArray(new File[dFiles.size()]), importOption);
                                    ((RDFDAMS4ImportTsHandler) handler)
                                            .setFilesPaths(ingestFiles.toArray(new String[ingestFiles.size()]));
                                    ((RDFDAMS4ImportTsHandler) handler).setReplace(replace);
                                }
                            } else {
                                successful = false;
                                message = "\nPre-processing issues found:";
                                if (duplicatRecords.length() > 0)
                                    message += "\nDuplicated records: " + duplicatRecords
                                            .substring(0, duplicatRecords.length() - 2).toString();
                                if (errorMessage.length() > 0)
                                    message += "\nOther Errors: \n" + errorMessage.toString();
                                //handler.logMessage(message);
                                handler.release();
                                handler = null;
                            }
                        } else {
                            // Ingest for RDF/XML files
                            handler = new RDFDAMS4ImportTsHandler(damsClient,
                                    dFiles.toArray(new File[dFiles.size()]), importOption);
                            ((RDFDAMS4ImportTsHandler) handler)
                                    .setFilesPaths(ingestFiles.toArray(new String[ingestFiles.size()]));
                            ((RDFDAMS4ImportTsHandler) handler).setReplace(replace);
                        }
                    } else if (i == 6) {
                        session.setAttribute("status",
                                opMessage + "Importing from Standard Input Stream source ...");
                        log.info(opMessage + "Importing from Standard Input Stream source ...");

                        String unit = getParameter(paramsMap, "unit");
                        String source = getParameter(paramsMap, "source");
                        String bibNumber = getParameter(paramsMap, "bibInput");
                        String modsXml = getParameter(paramsMap, "modsInput");
                        String copyrightStatus = getParameter(paramsMap, "copyrightStatus");
                        String copyrightJurisdiction = getParameter(paramsMap, "countryCode");
                        String copyrightOwner = getParameter(paramsMap, "copyrightOwner");
                        String program = getParameter(paramsMap, "program");
                        String access = getParameter(paramsMap, "accessOverride");
                        String beginDate = getParameter(paramsMap, "licenseBeginDate");
                        String endDate = getParameter(paramsMap, "licenseEndDate");
                        String[] dataPaths = getParameter(paramsMap, "dataPath").split(";");
                        String[] filesPaths = getParameter(paramsMap, "filesPath").split(";");
                        String importOption = getParameter(paramsMap, "importOption");
                        List<String> ingestFiles = new ArrayList<String>();
                        for (int j = 0; j < filesPaths.length; j++) {
                            if ((filesPaths[j] = filesPaths[j].trim()).length() > 0)
                                ingestFiles.add(new File(Constants.DAMS_STAGING + "/" + filesPaths[j])
                                        .getAbsolutePath());
                        }

                        List<File> dataFiles = new ArrayList<File>();
                        for (int j = 0; j < dataPaths.length; j++) {
                            String dataPath = dataPaths[j];
                            if (dataPath != null && (dataPath = dataPath.trim()).length() > 0) {
                                File file = new File(Constants.DAMS_STAGING + "/" + dataPath);
                                CollectionHandler.listFiles(dataFiles, file);
                            }
                        }

                        // initiate the source metadata
                        List<Object> sources = new ArrayList<Object>();
                        if (source != null && source.equalsIgnoreCase("bib")) {
                            String[] bibs = bibNumber.split(",");
                            for (int j = 0; j < bibs.length; j++) {
                                if (bibs[j] != null && (bibs[j] = bibs[j].trim()).length() > 0)
                                    sources.add(bibs[j]);
                            }
                        } else {
                            List<String> filters = new ArrayList<>();
                            if (getParameter(paramsMap, "excelImport") != null) {
                                // Excel Input Stream
                                source = "excel";
                                filters.add("xls");
                                filters.add("xlsx");
                            } else {
                                // MARC/MODS source
                                filters.add("xml");
                            }

                            dataFiles = FileUtils.filterFiles(dataFiles,
                                    filters.toArray(new String[filters.size()]));
                            sources.addAll(dataFiles);
                            dataFiles.clear();
                        }

                        // Handling pre-processing request
                        Element rdfPreview = null;
                        StringBuilder duplicatRecords = new StringBuilder();
                        List<String> ids = new ArrayList<String>();
                        boolean preprocessing = importOption.equalsIgnoreCase("pre-processing");
                        boolean ingestWithFiles = importOption.equalsIgnoreCase("metadataAndFiles");

                        if (preprocessing) {
                            Document doc = new DocumentFactory().createDocument();
                            rdfPreview = TabularRecord.createRdfRoot(doc);
                        }

                        boolean preSuccessful = true;
                        StringBuilder proMessage = new StringBuilder();
                        if (source != null && (source.equalsIgnoreCase("bib") || source.equalsIgnoreCase("mods")
                                || source.equalsIgnoreCase("excel"))) {
                            // Initiate the logging handler 
                            handler = new MetadataImportHandler(damsClient, null);
                            handler.setSubmissionId(submissionId);
                            handler.setSession(session);
                            handler.setUserId(userId);

                            Map<String, String> collections = new HashMap<String, String>();
                            if (StringUtils.isNotBlank(collectionId)) {
                                String collType = damsClient.getCollectionType(collectionId);
                                collections.put(collectionId, collType);
                            }

                            for (int j = 0; j < sources.size(); j++) {
                                InputStream in = null;
                                String sourceID = null;

                                Object srcRecord = sources.get(j);
                                sourceID = (srcRecord instanceof File ? ((File) srcRecord).getName()
                                        : srcRecord.toString());
                                if (preprocessing)
                                    handler.setStatus("Pre-processing record " + sourceID + " ... ");
                                else
                                    handler.setStatus("Processing record " + sourceID + " ... ");

                                RecordSource recordSource = null;
                                InputStreamRecord record = null;

                                try {
                                    if (source.equalsIgnoreCase("excel")) {
                                        clientTool = "Excel";
                                        // Handling Excel Input Stream records
                                        recordSource = new ExcelSource((File) srcRecord);

                                        // Report for Excel column name validation
                                        List<String> invalidColumns = ((ExcelSource) recordSource)
                                                .getInvalidColumns();

                                        if (invalidColumns != null && invalidColumns.size() > 0) {
                                            successful = false;
                                            preSuccessful = false;

                                            proMessage.append("Excel source " + sourceID + " - failed - "
                                                    + CollectionHandler.damsDateFormat.format(new Date())
                                                    + ": \n");

                                            if (invalidColumns != null && invalidColumns.size() > 0) {
                                                // Report invalid columns
                                                proMessage.append("* Found the following invalid column name"
                                                        + (invalidColumns.size() > 1 ? "s" : "") + ": ");
                                                for (int k = 0; k < invalidColumns.size(); k++) {
                                                    proMessage.append(invalidColumns.get(k));
                                                    if (k == invalidColumns.size() - 1)
                                                        proMessage.append("\n");
                                                    else
                                                        proMessage.append("; ");
                                                }
                                            }
                                        }
                                    } else {
                                        // Handling AT/Roger records
                                        try {
                                            if (source.equalsIgnoreCase("bib")) {

                                                clientTool = "MARC";
                                                String url = Constants.DAMS_STORAGE_URL.substring(0,
                                                        Constants.DAMS_STORAGE_URL.indexOf("/dams/"))
                                                        + "/jollyroger/get?type=bib&mods=true&ns=true&value="
                                                        + sourceID;

                                                log.info("Getting MARC XML for Roger record " + sourceID
                                                        + " from URL: " + url);
                                                HttpGet req = new HttpGet(url);
                                                Document doc = damsClient.getXMLResult(req);
                                                modsXml = doc.asXML();
                                                in = new ByteArrayInputStream(modsXml.getBytes("UTF-8"));
                                            } else {
                                                // METS/MODS XML from staging area
                                                clientTool = "AT";
                                                File srcFile = (File) sources.get(j);
                                                in = new FileInputStream(srcFile);
                                            }

                                            File xsl = new File(session.getServletContext()
                                                    .getRealPath("files/mets2dams.xsl"));
                                            recordSource = new XsltSource(xsl, sourceID.replaceAll("\\..*", ""),
                                                    in);
                                        } finally {
                                            CollectionHandler.close(in);
                                            in = null;
                                        }
                                    }
                                } catch (Exception e) {
                                    e.printStackTrace();
                                    successful = false;
                                    preSuccessful = false;
                                    String error = e.getMessage() != null ? e.getMessage()
                                            : e.getCause() != null ? e.getCause().getMessage()
                                                    : e.getClass().getName();
                                    handler.setStatus(error);
                                    log.error("Error metadata source " + sourceID + ": " + error);
                                    proMessage.append(sourceID + " - failed - "
                                            + CollectionHandler.damsDateFormat.format(new Date()) + " - "
                                            + error);
                                }

                                String id = "";
                                String info = "";
                                if (recordSource != null && preSuccessful) {
                                    for (Record rec = null; (rec = recordSource.nextRecord()) != null;) {

                                        String objTitle = "";
                                        id = rec.recordID();
                                        StringBuilder errorMessage = new StringBuilder();
                                        try {

                                            record = new InputStreamRecord(rec, collections, unit,
                                                    copyrightStatus, copyrightJurisdiction, copyrightOwner,
                                                    program, access, beginDate, endDate);

                                            objTitle = getTitle(record.toRDFXML());
                                            info = "Pre-processing record with ID " + id + " ... ";
                                            handler.setStatus(info);
                                            log.info(info);

                                            if (ids.indexOf(id) < 0) {
                                                ids.add(id);
                                            } else {
                                                duplicatRecords.append(rec + ", ");
                                                String error = "Duplicated record with ID " + id;
                                                handler.setStatus(error);
                                                log.error(info);
                                                errorMessage.append("\n* " + error);
                                            }

                                            // Add master file(s) for the bib/Roger record: a PDF or a TIFF, or a PDF + ZIP
                                            List<File> filesToIngest = null;
                                            if (source.equalsIgnoreCase("bib") && ingestWithFiles) {
                                                filesToIngest = getRogerFiles((String) srcRecord, ingestFiles);
                                                // Processing the master file(s) with error report. 
                                                if (filesToIngest.size() == 0) {
                                                    errorMessage.append("\n* Roger record " + srcRecord
                                                            + " has no master file(s) for \"Ingest metadata and files\" option.");
                                                } else if (filesToIngest.size() > 2
                                                        || (filesToIngest.size() == 2 && !filesToIngest.get(1)
                                                                .getName().endsWith(".zip"))) {
                                                    errorMessage
                                                            .append("\n* Unexpected file(s) for Roger record "
                                                                    + srcRecord + ": ");
                                                    for (File file : filesToIngest) {
                                                        errorMessage.append(
                                                                (filesToIngest.indexOf(file) > 0 ? ", " : "")
                                                                        + file.getName());
                                                    }
                                                } else {
                                                    // Handle the use property for the file(s)
                                                    Map<String, String> fileUseMap = getFileUse(filesToIngest);

                                                    record.addFiles(0, filesToIngest, fileUseMap);
                                                }
                                            } else if (source.equalsIgnoreCase("excel")) {
                                                // Report for invalid Excel control values validation
                                                List<Map<String, String>> invalidValues = ((ExcelSource) recordSource)
                                                        .getInvalidValues();
                                                if (invalidValues != null && invalidValues.size() > 0) {

                                                    // process to retrieve control values errors for the record since it will parse the row for the next record
                                                    StringBuilder cvErrors = new StringBuilder();
                                                    for (int k = 0; k < invalidValues.size(); k++) {
                                                        Map<String, String> m = invalidValues.get(k);
                                                        if (m.containsKey(TabularRecord.OBJECT_ID)
                                                                && m.get(TabularRecord.OBJECT_ID)
                                                                        .equals(String.valueOf(id))) {
                                                            cvErrors.append(
                                                                    "* Row index " + m.get("row") + " [");

                                                            // don't count for the row number and the record id
                                                            m.remove("row");
                                                            m.remove(TabularRecord.OBJECT_ID);
                                                            int l = 0;
                                                            for (String key : m.keySet()) {
                                                                if (l++ > 0)
                                                                    cvErrors.append(" | ");
                                                                cvErrors.append(key + " => " + m.get(key));
                                                            }
                                                            cvErrors.append("]\n");
                                                        }
                                                    }

                                                    if (cvErrors.length() > 0) {
                                                        errorMessage.append("Invalid control value(s)" + " - \n"
                                                                + cvErrors.toString());
                                                    }
                                                }
                                            }
                                        } catch (Exception e) {
                                            e.printStackTrace();
                                            info = "Error: " + e.getMessage();
                                            handler.setStatus(info);
                                            log.warn(info);
                                            errorMessage.append("\n* " + e.getMessage());
                                        }

                                        objTitle = StringUtils.isEmpty(objTitle) ? "[Object]" : objTitle;
                                        if (errorMessage.length() == 0) {

                                            info = objTitle + " - " + id + " - " + " successful - "
                                                    + CollectionHandler.damsDateFormat.format(new Date());
                                            proMessage.append("\n\n" + info);
                                            log.info(info);

                                            if (preprocessing) {
                                                // Pre-processing with rdf preview
                                                rdfPreview.add(record.toRDFXML()
                                                        .selectSingleNode("//dams:Object").detach());
                                            } else {
                                                // Write the converted rdf/xml to file system
                                                File tmpDir = new File(Constants.TMP_FILE_DIR
                                                        + File.separatorChar + "converted");
                                                if (!tmpDir.exists())
                                                    tmpDir.mkdir();
                                                File convertedFile = new File(tmpDir.getAbsolutePath(),
                                                        id.replaceAll("[\\//:.*]+", "") + ".rdf.xml");
                                                try {
                                                    writeXml(convertedFile, record.toRDFXML().asXML());
                                                } finally {
                                                    convertedFile.deleteOnExit();
                                                    dataFiles.add(convertedFile);
                                                }
                                            }
                                        } else {
                                            preSuccessful = false;

                                            info = objTitle + " - " + id + " - " + " failed - "
                                                    + CollectionHandler.damsDateFormat.format(new Date())
                                                    + " - " + errorMessage.toString();
                                            proMessage.append("\n\n" + info);
                                            log.error(info);
                                        }

                                        handler.setProgressPercentage(j * 100 / sources.size());
                                    }
                                }
                            }

                            // Logging the result for pre-processing
                            if (preprocessing || !preSuccessful) {
                                message = "\nPre-processing " + (preSuccessful ? "successful" : "failed")
                                        + ": \n"
                                        + (proMessage.length() == 0 ? "" : "\n " + proMessage.toString());
                                handler.logMessage(message);
                            }
                            handler.release();
                            handler = null;

                            if (preSuccessful) {
                                // Write the converted RDF/xml for preview
                                if (preprocessing) {
                                    File destFile = new File(Constants.TMP_FILE_DIR,
                                            "preview-" + submissionId + "-rdf.xml");
                                    writeXml(destFile, rdfPreview.getDocument().asXML());

                                    dataLink = "\nThe converted RDF/XML is ready for <a href=\"" + logLink
                                            + "&file=" + destFile.getName() + "\">download</a>.\n";

                                } else {
                                    // Ingest the converted RDF/XML files
                                    handler = new RDFDAMS4ImportTsHandler(damsClient,
                                            dataFiles.toArray(new File[dataFiles.size()]), importOption);
                                    ((RDFDAMS4ImportTsHandler) handler)
                                            .setFilesPaths(ingestFiles.toArray(new String[ingestFiles.size()]));
                                    ((RDFDAMS4ImportTsHandler) handler).setReplace(true);
                                }
                            } else {
                                successful = false;
                            }
                        } else {
                            successful = false;
                            message += "\nUnknown source type: " + source;
                        }
                    } else if (i == 7) {
                        session.setAttribute("status", opMessage + "SOLR Index ...");
                        boolean update = getParameter(paramsMap, "indexReplace") != null;
                        if (getParameter(paramsMap, "solrRecordsDump") != null) {
                            // Handle single records submission
                            List<String> items = new ArrayList<String>();
                            String txtInput = getParameter(paramsMap, "textInput");
                            String fileInputValue = getParameter(paramsMap, "data");
                            if (txtInput != null && (txtInput = txtInput.trim()).length() > 0) {
                                String[] subjects = txtInput.split(",");
                                for (String subject : subjects) {
                                    subject = subject.trim();
                                    if (subject.length() > 0) {
                                        items.add(subject);
                                    }
                                }
                            }

                            // Handle records submitted in file with csv format, in lines or mixed together
                            if (fileInputValue != null
                                    && (fileInputValue = fileInputValue.trim()).length() > 0) {
                                // Handle record with line input
                                String[] lines = fileInputValue.split("\n");
                                for (String line : lines) {
                                    // Handle CSV encoding records and records delimited by comma, whitespace etc.
                                    if (line != null && (line = line.trim().replace("\"", "")).length() > 0) {
                                        String[] tokens = line.split(",");
                                        for (String token : tokens) {
                                            String[] records = token.split(" ");
                                            for (String record : records) {
                                                record = record.trim();
                                                if (record.length() > 0) {
                                                    items.add(record);
                                                }
                                            }
                                        }
                                    }
                                }
                            }

                            // Initiate SOLRIndexHandler to index the records
                            handler = new SOLRIndexHandler(damsClient, null, update);
                            handler.setItems(items);
                            handler.setCollectionTitle("SOLR Records");
                        } else {
                            // Handle solr update for collections
                            if (collectionId.indexOf(",") > 0) {
                                String collIDs = collectionId;
                                String[] collArr = collectionId.split(",");
                                List<String> items = new ArrayList<String>();
                                String collNames = "";
                                for (int j = 0; j < collArr.length; j++) {
                                    if (collArr[j] != null && (collArr[j] = collArr[j].trim()).length() > 0) {
                                        collectionId = collArr[j];
                                        if (collectionId.equalsIgnoreCase("all")) {
                                            items.addAll(damsClient.listAllRecords());
                                            collNames += "All Records (" + items.size() + "), ";
                                        } else {
                                            try {
                                                handler = new SOLRIndexHandler(damsClient, collectionId);
                                                items.addAll(handler.getItems());
                                                collNames += handler.getCollectionTitle() + "("
                                                        + handler.getFilesCount() + "), ";
                                                if (j > 0 && j % 5 == 0)
                                                    collNames += "\n";
                                            } finally {
                                                if (handler != null) {
                                                    handler.release();
                                                    handler = null;
                                                }
                                            }
                                        }
                                    }
                                }
                                handler = new SOLRIndexHandler(damsClient, null, update);
                                handler.setItems(items);
                                handler.setCollectionTitle(collNames.substring(0, collNames.lastIndexOf(",")));
                                handler.setCollectionId(collIDs);
                            } else {
                                if (collectionId.equalsIgnoreCase("all")) {
                                    handler = new SOLRIndexHandler(damsClient, null, update);
                                    handler.setItems(damsClient.listAllRecords());
                                } else
                                    handler = new SOLRIndexHandler(damsClient, collectionId, update);
                            }
                        }
                    } /*else if (i == 8){   
                           //session.setAttribute("status", opMessage + "CDL Sending ...");
                           int operationType = 0;
                              boolean resend = getParameter(paramsMap, "cdlResend") != null;
                              if(resend){
                                 operationType = 1;
                              }else{
                                 resend = getParameter(paramsMap, "cdlResendMets") != null;
                                 if(resend)
                                    operationType = 2;
                              }
                             //handler = new CdlIngestHandler(tsUtils, collectionId, userId, operationType);
                              
                           String feeder = getParameter(paramsMap, "feeder");
                           session.setAttribute("status", opMessage + "CDL " + feeder.toUpperCase() + " METS feeding ...");
                           boolean includeEmbargoed = (getParameter(paramsMap, "includeEmbargoed")!=null);
                           if(feeder.equals("merritt")){
                              String account = getParameter(paramsMap, "account");
                              String password = getParameter(paramsMap, "password");
                              //String accessGroupId = getParameter(paramsMap, "accessGroup");
                              handler = new CdlIngestHandler(damsClient, collectionId, userId, operationType, feeder, account, password);
                           }else
                              handler = new CdlIngestHandler(damsClient, collectionId, userId, operationType);
                           if(!includeEmbargoed)
                              handler.excludeEmbargoedObjects();
                      }else if (i == 9){   
                           session.setAttribute("status", opMessage + "Metadata Converting and populating ...");
                           String tsOperation = getParameter(paramsMap, "sipOption");
                                   
                           if(tsOperation == null || tsOperation.length() == 0)
                              tsOperation = "tsNew";
                                   
                           int operationType = MetadataImportController.getOperationId(tsOperation);
                           String srcFile = (String) session.getAttribute("source");
                           String srcFormat = (String) session.getAttribute("format");
                           String pathMap = (String) session.getAttribute("pathMap");
                           int sheetNo = 0;
                           if(session.getAttribute("sheetNo") != null)
                              sheetNo = ((Integer)session.getAttribute("sheetNo")).intValue();
                                   
                           String rdfFileToWrite = Constants.TMP_FILE_DIR + "tmpRdf_" + session.getId() + ".xml";
                           if("excel".equalsIgnoreCase(srcFormat)){
                              handler = new ExcelConverter(damsClient, collectionId, srcFile, sheetNo, pathMap, operationType);
                             ExcelConverter converter = (ExcelConverter)handler;
                             converter.setUseArk(true);
                             converter.setRdfFileToWrite(rdfFileToWrite);
                           }else
                              throw new ServletException("Unsupported data format: " + srcFormat);
                              
                      }*/else if (i == 10) {
                        session.setAttribute("status", opMessage + "Stage Ingesting ...");

                        String unit = getParameter(paramsMap, "unit");
                        String arkSetting = getParameter(paramsMap, "arkSetting").trim();
                        String filePath = getParameter(paramsMap, "filePath").trim();
                        String fileFilter = getParameter(paramsMap, "fileFilter").trim();
                        String preferedOrder = getParameter(paramsMap, "preferedOrder");
                        String fileSuffixes = getParameter(paramsMap, "fileSuffixes");
                        String fileUse = getParameter(paramsMap, "fileUse");
                        if (fileSuffixes != null && fileSuffixes.length() > 0)
                            fileSuffixes = fileSuffixes.trim();

                        String coDelimiter = "p";
                        if (arkSetting.equals("1")) {
                            if (preferedOrder == null || preferedOrder.equalsIgnoreCase("cofDelimiter")) {
                                coDelimiter = getParameter(paramsMap, "cofDelimiter").trim();
                            } else if (preferedOrder.equals("suffix"))
                                coDelimiter = getParameter(paramsMap, "coDelimiter").trim();
                            else
                                coDelimiter = null;
                        } else {
                            if (arkSetting.equals("5")) {
                                coDelimiter = getParameter(paramsMap, "coDelimiter").trim();
                            }
                        }

                        String[] fileOrderSuffixes = null;
                        if (fileSuffixes != null && fileSuffixes.length() > 0)
                            fileOrderSuffixes = fileSuffixes.split(",");

                        String[] fileUses = null;
                        if (fileUse != null && (fileUse = fileUse.trim()).length() > 0) {
                            fileUses = fileUse.split(",");
                            for (int j = 0; j < fileUses.length; j++) {
                                if (fileUses[j] != null)
                                    fileUses[j] = fileUses[j].trim();
                            }
                        }

                        session.setAttribute("category", collectionId);
                        session.setAttribute("unit", unit);
                        session.setAttribute("arkSetting", arkSetting);
                        session.setAttribute("filePath", filePath);
                        session.setAttribute("fileFilter", fileFilter);
                        session.setAttribute("preferedOrder", preferedOrder);
                        session.setAttribute("fileSuffixes", fileSuffixes);
                        session.setAttribute("fileUse", fileUse);

                        String[] dirArr = filePath.split(";");
                        List<String> fileList = new ArrayList<String>();
                        String dir = null;
                        for (int j = 0; j < dirArr.length; j++) {
                            dir = dirArr[j];
                            if (dir != null && (dir = dir.trim()).length() > 0) {
                                if ((dir.startsWith("/") || dir.startsWith("\\"))
                                        && (Constants.DAMS_STAGING.endsWith("/")
                                                || Constants.DAMS_STAGING.endsWith("\\")))
                                    dir = dir.substring(1);
                                fileList.add(Constants.DAMS_STAGING + dir);
                            }
                        }

                        handler = new FileIngestionHandler(damsClient, fileList, Integer.parseInt(arkSetting),
                                collectionId, fileFilter, coDelimiter);
                        ((FileIngestionHandler) handler).setFileOrderSuffixes(fileOrderSuffixes);
                        ((FileIngestionHandler) handler).setPreferedOrder(preferedOrder);
                        ((FileIngestionHandler) handler).setUnit(unit);
                        ((FileIngestionHandler) handler).setFileUses(fileUses);

                    } else if (i == 11) {
                        session.setAttribute("status",
                                opMessage + "Serialize records as RDF/XML to filestore ...");
                        if (collectionId.indexOf(",") > 0) {
                            String collIDs = collectionId;
                            String[] collArr = collectionId.split(",");
                            List<String> items = new ArrayList<String>();
                            String collNames = "";
                            for (int j = 0; j < collArr.length; j++) {
                                if (collArr[j] != null && (collArr[j] = collArr[j].trim()).length() > 0) {
                                    collectionId = collArr[j];
                                    if (collectionId.equalsIgnoreCase("all")) {
                                        items.addAll(damsClient.listAllRecords());
                                        collNames += "All Records (" + items.size() + "), ";
                                    } else {
                                        try {
                                            handler = new SOLRIndexHandler(damsClient, collectionId);
                                            items.addAll(handler.getItems());
                                            collNames += handler.getCollectionTitle() + "("
                                                    + handler.getFilesCount() + "), ";
                                            if (j > 0 && j % 5 == 0)
                                                collNames += "\n";
                                        } finally {
                                            if (handler != null) {
                                                handler.release();
                                                handler = null;
                                            }
                                        }
                                    }
                                }
                            }
                            handler = new FilestoreSerializationHandler(damsClient, null);
                            handler.setItems(items);
                            handler.setCollectionTitle(collNames.substring(0, collNames.lastIndexOf(",")));
                            handler.setCollectionId(collIDs);
                        } else {
                            if (collectionId.equalsIgnoreCase("all")) {
                                handler = new FilestoreSerializationHandler(damsClient, null);
                                handler.setItems(damsClient.listAllRecords());
                            } else
                                handler = new FilestoreSerializationHandler(damsClient, collectionId);
                        }
                    } else if (i == 15) {
                        session.setAttribute("status", opMessage + "Uploading files from dams-staging to "
                                + damsClient.getFileStore() + " ...");
                        Map<String, String> filesMap = new TreeMap<String, String>();
                        for (Iterator<String> it = paramsMap.keySet().iterator(); it.hasNext();) {
                            String key = it.next();
                            if (key.startsWith("f-")) {
                                String file = paramsMap.get(key)[0];
                                String fileURI = paramsMap.get(key.replaceFirst("f-", "fid-"))[0];

                                if (fileURI != null && fileURI.startsWith(Constants.DAMS_ARK_URL_BASE))
                                    filesMap.put(file, fileURI.trim());
                                else
                                    message += "Invalid fileURL for file " + file + " (" + fileURI + "). \n";
                            }
                        }
                        handler = new FileUploadHandler(damsClient, filesMap);
                        handler.setItems(Arrays.asList(filesMap.keySet().toArray(new String[filesMap.size()])));
                    } else if (i == 18) {
                        boolean components = getParameter(paramsMap, "exComponents") == null;
                        String exFormat = getParameter(paramsMap, "exportFormat");
                        String xslSource = getParameter(paramsMap, "xsl");
                        if (xslSource == null || (xslSource = xslSource.trim()).length() == 0) {
                            xslSource = "/pub/data1/import/apps/glossary/xsl/dams/convertToCSV.xsl";
                            if (!new File(xslSource).exists())
                                xslSource = Constants.CLUSTER_HOST_NAME + "glossary/xsl/dams/convertToCSV.xsl";
                        }
                        session.setAttribute("status",
                                opMessage + (exFormat.equalsIgnoreCase("csv") ? "CSV"
                                        : exFormat.equalsIgnoreCase("N-TRIPLE") ? "N-TRIPLE" : "RDF XML ")
                                        + " Metadata Export ...");
                        File outputFile = new File(Constants.TMP_FILE_DIR,
                                "export-" + DAMSClient.stripID(collectionId) + "-" + System.currentTimeMillis()
                                        + "-rdf.xml");
                        String nsInput = getParameter(paramsMap, "nsInput");
                        List<String> nsInputs = new ArrayList<String>();
                        boolean componentsIncluded = true;
                        if (nsInput != null && (nsInput = nsInput.trim()).length() > 0) {
                            String[] nsInputArr = nsInput.split(",");
                            for (int j = 0; j < nsInputArr.length; j++) {
                                if (nsInputArr[j] != null
                                        && (nsInputArr[j] = nsInputArr[j].trim()).length() > 0)
                                    nsInputs.add(nsInputArr[j]);
                            }
                        }
                        fileOut = new FileOutputStream(outputFile);
                        handler = new MetadataExportHandler(damsClient, collectionId, nsInputs,
                                componentsIncluded, exFormat, fileOut);
                        ((MetadataExportHandler) handler).setFileUri(logLink + "&file=" + outputFile.getName());
                        ((MetadataExportHandler) handler).setComponents(components);

                    } else if (i == 19) {
                        session.setAttribute("status", opMessage + "Jhove report ...");
                        boolean bytestreamFilesOnly = getParameter(paramsMap, "bsJhoveReport") != null;
                        boolean update = getParameter(paramsMap, "bsJhoveUpdate") != null;
                        handler = new JhoveReportHandler(damsClient, collectionId, bytestreamFilesOnly);
                        if (update)
                            ((JhoveReportHandler) handler)
                                    .setJhoveUpdate(getParameter(paramsMap, "jhoveUpdate"));

                    } else
                        throw new ServletException("Unhandle operation index: " + i);

                    if (handler != null) {
                        try {
                            damsClient.setClientInfo(clientTool
                                    + (StringUtils.isNotBlank(clientVersion) ? " " + clientVersion : ""));
                            handler.setSubmissionId(submissionId);
                            handler.setDamsClient(damsClient);
                            handler.setSession(session);
                            handler.setUserId(userId);
                            if (handler.getCollectionId() == null
                                    && (collectionId != null && collectionId.length() > 0))
                                handler.setCollectionId(collectionId);

                            successful = handler.execute();
                        } catch (InterruptedException e) {
                            successful = false;
                            exeInfo += e.getMessage();
                            e.printStackTrace();
                        } catch (Exception e) {
                            successful = false;
                            exeInfo += "\n" + e.getMessage();
                            e.printStackTrace();
                        } finally {
                            String collectionName = handler.getCollectionId();
                            if (collectionName != null && collectionName.length() > 0
                                    && logLink.indexOf("&category=") < 0)
                                logLink += "&category=" + collectionName.replace(" ", "");
                            handler.setExeResult(successful);
                            exeInfo += handler.getExeInfo();
                            handler.release();
                            if (fileOut != null) {
                                CollectionHandler.close(fileOut);
                                fileOut = null;
                            }
                        }
                    }
                } else
                    continue;

                message += exeInfo;
                if (!successful) {
                    String errors = "Execution failed:\n" + message + "\n";
                    returnMessage += errors;
                    break;
                } else {
                    returnMessage += "\n" + message;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            returnMessage += e.getMessage();
        } finally {
            if (damsClient != null)
                damsClient.close();
            if (fileOut != null) {
                CollectionHandler.close(fileOut);
                fileOut = null;
            }
        }
    } else
        returnMessage = message;

    String logMessage = "For details, please download " + "<a href=\"" + logLink + "\">log</a>" + ".";
    if (returnMessage.length() > 1000) {
        returnMessage = returnMessage.substring(0, 1000);
        int idx = returnMessage.lastIndexOf("\n");
        if (idx > 0)
            returnMessage = returnMessage.substring(0, idx);
        else {
            idx = returnMessage.lastIndexOf("</a>");
            if (idx < returnMessage.lastIndexOf("<a "))
                returnMessage = returnMessage.substring(0, idx);
        }
        returnMessage = "\n" + returnMessage + "\n    ...     ";
    }
    returnMessage += "\n" + dataLink + "\n" + logMessage;
    RequestOrganizer.addResultMessage(session, returnMessage.replace("\n", "<br />") + "<br />");
    return returnMessage;
}

From source file:edu.umd.cs.findbugs.SortedBugCollection.java

License:Open Source License

/**
 * Convert the BugCollection into a dom4j Document object.
 *
 * @return the Document representing the BugCollection as a dom4j tree
 */
@Override
public Document toDocument() {
    // if (project == null) throw new NullPointerException("No project");
    assert project != null;

    DocumentFactory docFactory = new DocumentFactory();
    Document document = docFactory.createDocument();
    Dom4JXMLOutput treeBuilder = new Dom4JXMLOutput(document);

    try {
        writeXML(treeBuilder);
    } catch (IOException e) {
        // Can't happen
    }
    return document;
}
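
The Document returned above is an ordinary dom4j tree, so it can be serialized with dom4j's own XMLWriter. Below is a minimal sketch of that step, assuming FindBugs' SortedBugCollection offers a no-arg constructor and that an empty collection is acceptable input; it is illustrative only and not part of the FindBugs source.

import java.io.FileOutputStream;

import org.dom4j.Document;
import org.dom4j.io.OutputFormat;
import org.dom4j.io.XMLWriter;

import edu.umd.cs.findbugs.SortedBugCollection;

public class WriteBugReport {
    public static void main(String[] args) throws Exception {
        // Assumption: SortedBugCollection can be created empty; normally it is
        // populated by a FindBugs analysis run before being exported.
        SortedBugCollection bugs = new SortedBugCollection();

        // toDocument() builds the dom4j tree shown above.
        Document doc = bugs.toDocument();

        // Pretty-print the tree to a file with dom4j's XMLWriter.
        XMLWriter writer = new XMLWriter(new FileOutputStream("bugs.xml"),
                OutputFormat.createPrettyPrint());
        try {
            writer.write(doc);
        } finally {
            writer.close();
        }
    }
}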

From source file:eu.sisob.uma.crawler.ResearchersCrawlers.Workers.ExportDocumentsInFolder.java

License:Open Source License

/**
 */
@Override
protected void beginActions() {
    docOut = new DocumentFactory().createDocument();
    rootOut = docOut.addElement("root");

    hitsTable = new int[2][5];

    lTimerAux = java.lang.System.currentTimeMillis();
}
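
beginActions() only sets up an empty tree: a fresh DocumentFactory, a new Document, and a single root element that later steps of the worker append results to. A standalone sketch of that create-and-populate pattern follows; the child element and attribute names are illustrative and not taken from the crawler.

import org.dom4j.Document;
import org.dom4j.DocumentFactory;
import org.dom4j.Element;

public class BuildSimpleDocument {
    public static void main(String[] args) {
        // Same setup as beginActions(): new factory, new document, one root element.
        Document doc = new DocumentFactory().createDocument();
        Element root = doc.addElement("root");

        // Illustrative children; the real worker appends crawl results under root.
        Element item = root.addElement("document");
        item.addAttribute("url", "http://example.org/page.html");
        item.setText("example content");

        System.out.println(doc.asXML());
    }
}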

From source file:eu.sisob.uma.crawler.ResearchersCrawlers.Workers.ExportDocumentsOnXMLFileForTextMiningCreatorWithFilter.java

License:Open Source License

/**
 */
@Override
protected void beginActions() {
    docOut = new DocumentFactory().createDocument();
    rootOut = docOut.addElement("root");

    hitsTable = new int[2][5];

    lTimerAux = java.lang.System.currentTimeMillis();

    sContentForView = ""; //For collect results
}

From source file:no.met.jtimeseries.service.ServiceDescriptionGenerator.java

License:Open Source License

/**
 * Create a description of the web service offered by a class in XML. The
 * documentation is created by looking at the web service annotations.
 *
 * @param c
 *            The class to generate documentation for.
 * @return An XML document object.
 */
public static Document getXMLServiceDescription(Class<? extends Object> c) {

    List<Method> serviceMethods = getServiceMethods(c);

    DocumentFactory df = new DocumentFactory();
    Document xmlDoc = df.createDocument();

    Element rootElement = df.createElement("services");
    xmlDoc.add(rootElement);
    for (Method m : serviceMethods) {

        Element service = rootElement.addElement("service");
        MethodInfo mi = getMethodInfo(m);
        service.addAttribute("path", mi.path);

        if (mi.produces != null) {
            service.addAttribute("returmMimeType", StringUtils.join(mi.produces, ','));
        }

        if (mi.description != null) {
            service.addAttribute("description", mi.description);
        }

        List<ParameterInfo> params = getParameters(m);
        for (ParameterInfo pi : params) {
            Element param = service.addElement("parameter");
            param.addAttribute("name", pi.name);
            if (pi.defaultValue != null) {
                param.addAttribute("defaultValue", pi.defaultValue);
            }
        }
    }

    return xmlDoc;
}
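
The Document returned here is typically serialized before being handed to a client. A hedged sketch of that last step follows; the nested WeatherService resource class is a placeholder invented for the example (it is not part of jtimeseries), and the JAX-RS annotations on it are an assumption about what getServiceMethods() looks for.

import java.io.StringWriter;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;

import org.dom4j.Document;
import org.dom4j.io.OutputFormat;
import org.dom4j.io.XMLWriter;

import no.met.jtimeseries.service.ServiceDescriptionGenerator;

public class DescribeServices {

    // Placeholder resource class so the sketch is self-contained; real callers
    // would pass one of their own annotated service classes instead.
    @Path("/temperature")
    public static class WeatherService {
        @GET
        @Path("current")
        @Produces("application/xml")
        public String currentTemperature() {
            return "<temperature/>";
        }
    }

    public static void main(String[] args) throws Exception {
        Document description =
                ServiceDescriptionGenerator.getXMLServiceDescription(WeatherService.class);

        // Pretty-print the description, e.g. for an HTTP response body.
        StringWriter out = new StringWriter();
        XMLWriter writer = new XMLWriter(out, OutputFormat.createPrettyPrint());
        writer.write(description);
        writer.close();
        System.out.println(out);
    }
}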

From source file:org.danann.cernunnos.AbstractContainerTask.java

License:Apache License

private static List<Element> createSuppressEmptySubtasksWarningsList() {
    DocumentFactory fac = new DocumentFactory();
    List<Element> list = new LinkedList<Element>();
    list.add(fac.createElement("org.danann.cernunnos.NoOpTask"));
    return Collections.unmodifiableList(list);
}
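
Note that a DocumentFactory can also mint detached elements, as above: createElement() returns a standalone Element with no parent and no owning Document until something attaches it to a tree. A minimal demonstration:

import org.dom4j.DocumentFactory;
import org.dom4j.Element;

public class DetachedElementDemo {
    public static void main(String[] args) {
        DocumentFactory fac = new DocumentFactory();

        // createElement() builds a free-standing element, exactly as in the
        // suppress-warnings list above.
        Element task = fac.createElement("org.danann.cernunnos.NoOpTask");

        System.out.println(task.getParent());   // null - not attached to any element
        System.out.println(task.getDocument()); // null - not attached to any document
        System.out.println(task.asXML());       // <org.danann.cernunnos.NoOpTask/>
    }
}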

From source file:org.danann.cernunnos.xml.NewDocumentPhrase.java

License:Apache License

public Object evaluate(TaskRequest req, TaskResponse res) {

    Branch rslt = new DocumentFactory().createDocument();
    if (name != null) {
        rslt = rslt.addElement((String) name.evaluate(req, res));
    }
    return rslt;

}
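
The declared Branch type works because dom4j's Branch interface is implemented by both Document and Element: with no name configured the phrase yields the empty Document itself, otherwise the freshly added root Element. A small standalone illustration of that distinction (not Cernunnos-specific):

import org.dom4j.Branch;
import org.dom4j.Document;
import org.dom4j.DocumentFactory;
import org.dom4j.Element;

public class BranchDemo {
    public static void main(String[] args) {
        // No name configured: the Branch is the (empty) Document itself.
        Branch unnamed = new DocumentFactory().createDocument();
        System.out.println(unnamed instanceof Document); // true

        // Name configured: addElement() hands back the new root Element.
        Branch named = new DocumentFactory().createDocument().addElement("report");
        System.out.println(named instanceof Element);    // true
    }
}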

From source file:org.danann.cernunnos.xml.XslTransformTask.java

License:Apache License

public void perform(TaskRequest req, TaskResponse res) {
    final String contextLocation = (String) context.evaluate(req, res);
    final String stylesheetLocation = (String) stylesheet.evaluate(req, res);
    final Tuple<String, String> transformerKey = new Tuple<String, String>(contextLocation, stylesheetLocation);
    final Templates templates = this.transformerCache.getCachedObject(req, res, transformerKey,
            this.transformerFactory);

    Element srcElement = null;
    Node nodeReagentEvaluated = node != null ? (Node) node.evaluate(req, res) : null;
    if (nodeReagentEvaluated != null) {
        // Reading from the NODE reagent is preferred...
        srcElement = (Element) nodeReagentEvaluated;
    } else {
        // But read from LOCATION if NODE isn't set...
        final String locationStr = (String) location.evaluate(req, res);
        final URL loc;
        try {
            final URL ctx;
            try {
                ctx = new URL(contextLocation);
            } catch (MalformedURLException mue) {
                throw new RuntimeException("Failed to parse context '" + contextLocation + "' into URL", mue);
            }

            loc = new URL(ctx, locationStr);
        } catch (MalformedURLException mue) {
            throw new RuntimeException("Failed to parse location '" + locationStr + "' with context '"
                    + contextLocation + "' into URL", mue);
        }

        // Use an EntityResolver if provided...
        SAXReader rdr = new SAXReader();
        EntityResolver resolver = (EntityResolver) entityResolver.evaluate(req, res);
        if (resolver != null) {
            rdr.setEntityResolver(resolver);
        }

        final Document document;
        try {
            document = rdr.read(loc);
        } catch (DocumentException de) {
            throw new RuntimeException("Failed to read XML Document for XSLT from " + loc.toExternalForm(), de);
        }
        srcElement = document.getRootElement();
    }

    DocumentFactory dfac = new DocumentFactory();
    Document ddoc = dfac.createDocument((Element) srcElement.clone());
    DOMWriter dwriter = new DOMWriter();

    DocumentResult rslt = new DocumentResult();

    final Transformer trans;
    try {
        trans = templates.newTransformer();
    } catch (TransformerConfigurationException tce) {
        throw new RuntimeException("Failed to retrieve Transformer for XSLT", tce);
    }

    try {
        trans.transform(new DOMSource(dwriter.write(ddoc)), rslt);
    } catch (TransformerException te) {
        throw new RuntimeException("Failed to perform XSL transformation", te);
    } catch (DocumentException de) {
        throw new RuntimeException("Failed to translate JDOM Document to W3C Document", de);
    }

    final Element rootElement = rslt.getDocument().getRootElement();

    if (to_file != null) {
        File f = new File((String) to_file.evaluate(req, res));
        if (f.getParentFile() != null) {
            // Make sure the necessary directories are in place...
            f.getParentFile().mkdirs();
        }

        final XMLWriter writer;
        try {
            writer = new XMLWriter(new FileOutputStream(f), new OutputFormat("  ", true));
        } catch (UnsupportedEncodingException uee) {
            throw new RuntimeException("Failed to create XML writer", uee);
        } catch (FileNotFoundException fnfe) {
            throw new RuntimeException("Could not create file for XML output: " + f, fnfe);
        }

        try {
            writer.write(rootElement);
        } catch (IOException ioe) {
            throw new RuntimeException("Failed to write transformed XML document to: " + f, ioe);
        }
    } else {
        // default behavior...
        res.setAttribute(Attributes.NODE, rootElement);
    }

    super.performSubtasks(req, res);

}
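
The heart of this task is the dom4j/JAXP bridge: DOMWriter converts the dom4j tree into a W3C DOM for DOMSource, the JAXP Transformer applies the stylesheet, and DocumentResult hands the output back as a dom4j Document. A stripped-down sketch of just that pipeline, assuming an input document at input.xml and a stylesheet at style.xsl (both placeholder paths):

import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamSource;

import org.dom4j.Document;
import org.dom4j.io.DOMWriter;
import org.dom4j.io.DocumentResult;
import org.dom4j.io.SAXReader;

public class SimpleXslTransform {
    public static void main(String[] args) throws Exception {
        // Read the source XML with dom4j (placeholder path).
        Document source = new SAXReader().read("input.xml");

        // Bridge dom4j -> W3C DOM so it can feed a JAXP Transformer.
        DOMSource domSource = new DOMSource(new DOMWriter().write(source));

        Transformer transformer = TransformerFactory.newInstance()
                .newTransformer(new StreamSource("style.xsl"));

        // DocumentResult captures the output as a dom4j Document again.
        DocumentResult result = new DocumentResult();
        transformer.transform(domSource, result);

        System.out.println(result.getDocument().asXML());
    }
}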