List of usage examples for javax.xml.parsers DocumentBuilder getDOMImplementation
public abstract DOMImplementation getDOMImplementation();
From source file:org.opencastproject.metadata.dublincore.DublinCoreCatalogList.java
/**
 * Serializes this catalog list to an XML string.
 *
 * @return the list rendered as a {@code <dublincorelist>} XML document string
 * @throws IOException
 *           if the document cannot be built or serialized
 */
public String getResultsAsXML() throws IOException {
    try {
        // Build an empty DOM document and attach the list root element.
        DocumentBuilder documentBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        Document document = documentBuilder.getDOMImplementation().createDocument(null, null, null);
        Element listElement = document.createElement("dublincorelist");
        listElement.setAttribute("totalCount", String.valueOf(totalCatalogCount));
        document.appendChild(listElement);

        // Deep-import each catalog's root element into this document.
        for (DublinCoreCatalog catalog : catalogList) {
            listElement.appendChild(document.importNode(catalog.toXml().getDocumentElement(), true));
        }

        // Serialize through the identity transform.
        StringWriter writer = new StringWriter();
        TransformerFactory.newInstance().newTransformer()
                .transform(new DOMSource(document), new StreamResult(writer));
        return writer.toString();
    } catch (Exception e) {
        throw new IOException(e);
    }
}
From source file:org.opendedup.sdfs.replication.ArchiveImporter.java
/**
 * Imports a replication archive (a bundle containing a "files" tree and a "ddb"
 * dedup-database tree) into this volume: stages the archive contents under a
 * uniquely named destination, runs a metadata import against the remote server,
 * and either commits the staged data or rolls it back on failure.
 *
 * @param srcArchive path of the archive file to import
 * @param dest       destination path, relative to the volume root
 * @param server     remote server to import metadata/blocks from
 * @param password   password for the remote server
 * @param port       remote server port
 * @param maxSz      batch size handed to the metadata import
 * @param evt        parent event used for progress reporting
 * @param useSSL     whether to use SSL when talking to the remote server
 * @param useLz4     whether the archive is lz4-compressed rather than gzip
 * @return a detached copy of the XML "replication-import" summary element
 * @throws Exception if staging, import, or commit fails; the staged data is
 *                   rolled back before the exception is rethrown
 */
public Element importArchive(String srcArchive, String dest, String server, String password, int port,
        int maxSz, SDFSEvent evt, boolean useSSL, boolean useLz4) throws Exception {
    ievt = SDFSEvent.archiveImportEvent(
            "Importing " + srcArchive + " from " + server + ":" + port + " to " + dest, evt);
    // Hold the GC read lock for the whole import so garbage collection cannot
    // run concurrently with the staging/commit.
    ReadLock l = GCMain.gclock.readLock();
    l.lock();
    runningJobs.put(evt.uid, this);
    // Stage into "<dest>.<guid>" so a failed import never clobbers an existing dest.
    String sdest = dest + "." + RandomGUID.getGuid();
    File f = new File(srcArchive);
    File fDstFiles = new File(Main.volume.getPath() + File.separator + sdest);
    try {
        SDFSLogger.getLog().info("setting up staging at " + sdest);
        try {
            SDFSLogger.getLog()
                    .info("Importing " + srcArchive + " from " + server + ":" + port + " to " + dest);
            if (!f.exists())
                throw new IOException("File does not exist " + srcArchive);
            if (OSValidator.isWindows()) {
                // Windows branch: read the archive through TFile virtual files
                // instead of shelling out to tar/cp.
                TFile srcRoot = new TFile(new File(srcArchive + "/"));
                ievt.maxCt = FileCounts.getSize(srcRoot);
                SDFSLogger.getLog().info("Tar file size is " + ievt.maxCt);
                TFile srcFilesRoot = new TFile(new File(srcArchive + "/files/"));
                TFile srcFiles = null;
                try {
                    // A valid archive has exactly one entry under files/; a missing
                    // or empty listing means the archive is truncated/corrupt.
                    srcFiles = srcFilesRoot.listFiles()[0];
                } catch (Exception e) {
                    SDFSLogger.getLog().error("Replication archive is corrupt " + srcArchive + " size of "
                            + new File(srcArchive).length(), e);
                    throw e;
                }
                TFile tfDstFiles = new TFile(Main.volume.getPath() + File.separator + sdest);
                this.export(srcFiles, tfDstFiles);
                // Copy the dedup database tree alongside the existing store.
                srcFiles = new TFile(new File(srcArchive + "/ddb/"));
                File ddb = new File(Main.dedupDBStore + File.separator);
                if (!ddb.exists())
                    ddb.mkdirs();
                TFile mDstFiles = new TFile(Main.dedupDBStore + File.separator);
                this.export(srcFiles, mDstFiles);
                // Unmount the virtual archive views so file handles are released.
                TVFS.umount(srcFiles);
                TVFS.umount(mDstFiles);
                TVFS.umount(srcRoot.getInnerArchive());
            } else {
                // Unix branch: expand with tar (optionally lz4) into a temp staging
                // dir next to the archive, then cp the trees into place.
                ievt.maxCt = 3;
                File stg = null;
                try {
                    stg = new File(new File(srcArchive).getParentFile().getPath() + File.separator
                            + RandomGUID.getGuid());
                    stg.mkdirs();
                    String expFile = "tar -xzpf " + srcArchive + " -C " + stg.getPath();
                    if (useLz4)
                        // NOTE(review): the lz4 pipeline does not use -C, so it extracts
                        // into the process working directory, not stg — confirm intended.
                        expFile = "lz4 -dc " + srcArchive + " | tar -xpf -";
                    int xt = ProcessWorker.runProcess(expFile);
                    if (xt != 0)
                        throw new IOException("expand failed in " + expFile + " exit value was " + xt);
                    ievt.curCt++;
                    SDFSLogger.getLog().info("executed " + expFile + " exit code was " + xt);
                    File srcFilesRoot = new File(stg.getPath() + File.separator + "files");
                    File srcFiles = null;
                    try {
                        // Same single-entry expectation as the Windows branch.
                        srcFiles = srcFilesRoot.listFiles()[0];
                    } catch (Exception e) {
                        SDFSLogger.getLog().error("Replication archive is corrupt " + srcArchive + " size of "
                                + new File(srcArchive).length(), e);
                        throw e;
                    }
                    SDFSLogger.getLog().info("setting up staging at " + fDstFiles.getPath());
                    fDstFiles.getParentFile().mkdirs();
                    String cpCmd = "cp -rfap " + srcFiles + " " + fDstFiles;
                    xt = ProcessWorker.runProcess(cpCmd);
                    if (xt != 0)
                        throw new IOException("copy failed in " + cpCmd + " exit value was " + xt);
                    SDFSLogger.getLog().info("executed " + cpCmd + " exit code was " + xt);
                    ievt.curCt++;
                    // The ddb tree is optional; copy it only when present.
                    srcFiles = new File(stg.getPath() + File.separator + "ddb");
                    File ddb = new File(Main.dedupDBStore + File.separator);
                    if (!ddb.exists())
                        ddb.mkdirs();
                    if (srcFiles.exists()) {
                        cpCmd = "cp -rfap " + srcFiles + File.separator + " " + ddb.getParentFile().getPath();
                        xt = ProcessWorker.runProcess(cpCmd);
                        if (xt != 0)
                            throw new IOException("copy failed in " + cpCmd + " exit value was " + xt);
                    }
                    SDFSLogger.getLog().info("executed " + cpCmd + " exit code was " + xt);
                    ievt.endEvent("Staging completed successfully");
                } catch (Exception e) {
                    ievt.endEvent(e.getMessage(), SDFSEvent.ERROR);
                    throw e;
                } finally {
                    // FileUtils.deleteDirectory(stg);
                    // NOTE(review): shelling out to "rm -rf" with a concatenated path
                    // breaks on paths containing spaces — verify staging paths cannot
                    // contain them, or use a recursive-delete helper instead.
                    Process p = Runtime.getRuntime().exec("rm -rf " + stg);
                    p.waitFor();
                    // The source archive itself is consumed (deleted) after extraction.
                    f.delete();
                }
            }
            // Staging complete — import metadata, pulling anything missing from the server.
            imp = new MetaFileImport(Main.volume.getPath() + File.separator + sdest, server, password, port,
                    maxSz, evt, useSSL);
            imp.runImport();
            if (imp.isCorrupt()) {
                // evt.endEvent("Import failed for " + srcArchive +
                // " because not all the data could be imported from " +
                // server,SDFSEvent.WARN);
                SDFSLogger.getLog().warn("Import failed for " + srcArchive
                        + " because not all the data could be imported from " + server);
                SDFSLogger.getLog().warn("rolling back import");
                rollBackImport(Main.volume.getPath() + File.separator + sdest);
                SDFSLogger.getLog().warn("Import rolled back");
                throw new IOException("uable to import files: There are files that are missing blocks");
            } else {
                if (!Main.chunkStoreLocal)
                    new ClusterRedundancyCheck(ievt,
                            new File(Main.volume.getPath() + File.separator + sdest), true);
                // Promote the staged tree to the real destination.
                commitImport(Main.volume.getPath() + File.separator + dest,
                        Main.volume.getPath() + File.separator + sdest);
                // Build the XML summary returned to the caller.
                DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                DocumentBuilder builder;
                builder = factory.newDocumentBuilder();
                DOMImplementation impl = builder.getDOMImplementation();
                // Document.
                Document doc = impl.createDocument(null, "replication-import", null);
                // Root element.
                Element root = doc.getDocumentElement();
                root.setAttribute("src", srcArchive);
                root.setAttribute("dest", dest);
                root.setAttribute("srcserver", server);
                root.setAttribute("srcserverport", Integer.toString(port));
                root.setAttribute("batchsize", Integer.toString(maxSz));
                root.setAttribute("filesimported", Long.toString(imp.getFilesProcessed()));
                root.setAttribute("bytesimported", Long.toString(imp.getBytesTransmitted()));
                root.setAttribute("entriesimported", Long.toString(imp.getEntries()));
                root.setAttribute("virtualbytesimported", Long.toString(imp.getVirtualBytesTransmitted()));
                root.setAttribute("starttime", Long.toString(imp.getStartTime()));
                root.setAttribute("endtime", Long.toString(imp.getEndTime()));
                root.setAttribute("volume", Main.volume.getName());
                root.setAttribute("volumeconfig", Main.volume.getConfigPath());
                evt.endEvent(
                        srcArchive + " from " + server + ":" + port + " to " + dest + " imported successfully");
                // Return a clone detached from this document's lifecycle.
                return (Element) root.cloneNode(true);
            }
        } catch (Exception e) {
            // Any failure after staging began: roll the staged tree back, close the
            // event (if still open), and rethrow to the caller.
            SDFSLogger.getLog().warn("rolling back import ", e);
            rollBackImport(Main.volume.getPath() + File.separator + sdest);
            SDFSLogger.getLog().warn("Import rolled back");
            if (!evt.isDone())
                evt.endEvent("Import failed and was rolled back ", SDFSEvent.ERROR, e);
            throw e;
        }
    } finally {
        // NOTE(review): empty try block — looks like leftover cleanup code; the
        // catch below can never fire.
        try {
        } catch (Exception e) {
            if (SDFSLogger.isDebug())
                SDFSLogger.getLog().debug("error", e);
        }
        runningJobs.remove(evt.uid);
        l.unlock();
    }
}
From source file:org.openmrs.module.drawing.obs.handler.DrawingHandler.java
public void saveAnnotation(Obs obs, ImageAnnotation annotation, boolean delete) { try {//from w ww. j av a 2 s. c om log.info("drawing: Saving annotation for obs " + obs.getObsId()); File metadataFile = getComplexMetadataFile(obs); log.info("drawing: Using file " + metadataFile.getCanonicalPath()); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); Document xmldoc; Element annotationsParent; int newId = 0; if (metadataFile.exists()) { xmldoc = builder.parse(metadataFile); annotationsParent = (Element) xmldoc.getElementsByTagName("Annotations").item(0); NodeList annotationNodeList = xmldoc.getElementsByTagName("Annotation"); for (int i = 0; i < annotationNodeList.getLength(); i++) { NamedNodeMap attributes = annotationNodeList.item(i).getAttributes(); String idString = attributes.getNamedItem("id").getNodeValue(); int existingId = Integer.parseInt(idString); if (existingId == annotation.getId() && !(annotation.getStatus() == Status.UNCHANGED)) { annotationsParent.removeChild(annotationNodeList.item(i)); break; } if (existingId >= newId) newId = existingId + 1; } } else { metadataFile.createNewFile(); DOMImplementation domImpl = builder.getDOMImplementation(); xmldoc = domImpl.createDocument(null, "ImageMetadata", null); Element root = xmldoc.getDocumentElement(); annotationsParent = xmldoc.createElementNS(null, "Annotations"); root.appendChild(annotationsParent); } if (!delete && annotation.getStatus() != Status.UNCHANGED) { if (annotation.getId() >= 0) newId = annotation.getId(); Element e = xmldoc.createElementNS(null, "Annotation"); Node n = xmldoc.createTextNode(annotation.getText()); e.setAttributeNS(null, "id", newId + ""); e.setAttributeNS(null, "xcoordinate", annotation.getLocation().getX() + ""); e.setAttributeNS(null, "ycoordinate", annotation.getLocation().getY() + ""); e.setAttributeNS(null, "userid", annotation.getUser().getUserId() + ""); e.setAttributeNS(null, "date", 
annotation.getDate().getTime() + ""); e.appendChild(n); annotationsParent.appendChild(e); } Transformer transformer = TransformerFactory.newInstance().newTransformer(); transformer.setOutputProperty(OutputKeys.ENCODING, "UTF8"); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); transformer.transform(new DOMSource(xmldoc), new StreamResult(metadataFile)); log.info("drawing: Saving annotation complete"); } catch (Exception e) { log.error("drawing: Error saving image metadata: " + e.getClass() + " " + e.getMessage()); } }
From source file:org.sakaiproject.util.StorageUtils.java
/** * Write a DOM Document to an xml file.//from www . java 2 s . com * * @param doc * The DOM Document to write. * @param fileName * The complete file name path. */ public static void writeDocument(Document doc, String fileName) { OutputStream out = null; try { out = new FileOutputStream(fileName); // get an instance of the DOMImplementation registry DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); DOMImplementation impl = builder.getDOMImplementation(); DOMImplementationLS feature = (DOMImplementationLS) impl.getFeature("LS", "3.0"); LSSerializer serializer = feature.createLSSerializer(); LSOutput output = feature.createLSOutput(); output.setByteStream(out); output.setEncoding("UTF-8"); serializer.write(doc, output); out.close(); } catch (Exception any) { M_log.warn("writeDocument: " + any.toString()); } finally { if (out != null) { try { out.close(); } catch (IOException e) { } } } }
From source file:org.sakaiproject.util.StorageUtils.java
/**
 * Serialize a DOM Document to a UTF-8 XML string using the DOM Level 3
 * Load &amp; Save serializer.
 * (Javadoc fixed: it previously described writing to an output stream and
 * documented a nonexistent {@code out} parameter.)
 *
 * @param doc
 *        The DOM Document to serialize.
 * @return the serialized XML, or null if serialization fails (the failure is logged).
 */
public static String writeDocumentToString(Document doc) {
    try {
        StringWriter sw = new StringWriter();
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        DocumentBuilder builder = factory.newDocumentBuilder();
        DOMImplementation impl = builder.getDOMImplementation();
        // Obtain the Load & Save (LS) feature for serialization.
        DOMImplementationLS feature = (DOMImplementationLS) impl.getFeature("LS", "3.0");
        LSSerializer serializer = feature.createLSSerializer();
        LSOutput output = feature.createLSOutput();
        output.setCharacterStream(sw);
        output.setEncoding("UTF-8");
        serializer.write(doc, output);
        sw.flush();
        return sw.toString();
    } catch (Exception any) {
        M_log.warn("writeDocumentToString: " + any.toString());
        return null;
    }
}
From source file:org.servalproject.maps.indexgenerator.IndexWriter.java
/**
 * Writes the map index as an XML document to the given file.
 *
 * @param outputFile  destination file for the XML index
 * @param mapInfoList the map metadata entries to include
 * @return true on success, false if the document could not be built or written
 *         (the error is reported on stderr)
 */
public static boolean writeXmlIndex(File outputFile, ArrayList<MapInfo> mapInfoList) {
    // create the xml document builder factory object
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    // create the xml document builder object and get the DOMImplementation object
    DocumentBuilder builder = null;
    try {
        builder = factory.newDocumentBuilder();
    } catch (javax.xml.parsers.ParserConfigurationException e) {
        System.err.println("ERROR: unable to build the XML data. " + e.getMessage());
        return false;
    }
    DOMImplementation domImpl = builder.getDOMImplementation();
    // start to build the document
    Document document = domImpl.createDocument(null, "maps", null);
    Element rootElement = document.getDocumentElement();
    // add the basic metadata
    appendTextElement(document, rootElement, "version", VERSION);
    appendTextElement(document, rootElement, "generated", Long.toString(System.currentTimeMillis()));
    appendTextElement(document, rootElement, "author", AUTHOR);
    appendTextElement(document, rootElement, "data_source", DATA_SOURCE);
    appendTextElement(document, rootElement, "data_format", DATA_FORMAT);
    appendTextElement(document, rootElement, "data_format_info", DATA_FORMAT_INFO);
    appendTextElement(document, rootElement, "data_format_version", DATA_FORMAT_VERSION);
    appendTextElement(document, rootElement, "more_info", MORE_INFO);
    // add the map file information
    Element mapInfoElement = document.createElement("map-info");
    rootElement.appendChild(mapInfoElement);
    for (MapInfo info : mapInfoList) {
        mapInfoElement.appendChild(info.toXml(document.createElement("map")));
    }
    // output the xml
    try {
        // create a transformer
        TransformerFactory transFactory = TransformerFactory.newInstance();
        Transformer transformer = transFactory.newTransformer();
        // set some options on the transformer
        transformer.setOutputProperty(OutputKeys.ENCODING, "utf-8");
        transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no");
        transformer.setOutputProperty(OutputKeys.INDENT, "yes");
        transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
        // try-with-resources closes (and therefore flushes) the writer; the original
        // never closed it, so buffered output could be lost. Note PrintWriter still
        // swallows I/O errors internally (see PrintWriter.checkError()).
        try (PrintWriter writer = new PrintWriter(outputFile)) {
            transformer.transform(new DOMSource(document), new StreamResult(writer));
        }
    } catch (javax.xml.transform.TransformerException e) {
        System.err.println("ERROR: unable to write the XML data. " + e.getMessage());
        return false;
    } catch (FileNotFoundException e) {
        System.err.println("ERROR: unable to write the XML data. " + e.getMessage());
        return false;
    }
    return true;
}

/** Creates a child element with the given tag name and text content and appends it to parent. */
private static void appendTextElement(Document document, Element parent, String name, String value) {
    Element element = document.createElement(name);
    element.setTextContent(value);
    parent.appendChild(element);
}
From source file:org.smartfrog.projects.alpine.xmlutils.ParserHelper.java
/** * use the JAXP APIs to locate and bind to a parser * * @return a new instance (somehow)/*from w ww . ja v a 2 s. c om*/ * @throws javax.xml.parsers.ParserConfigurationException if it refuses to be * */ public static DOMImplementation loadDomImplementation() throws ParserConfigurationException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); DOMImplementation impl = builder.getDOMImplementation(); return impl; }
From source file:org.wyona.yanel.core.util.ConfigurationUtil.java
/** * Create a DOM Document from a custom config element modelled with avalon * * @param repoConfigElement/*from www.j a va 2s . c o m*/ * The configuration element * @param rootName * The root element name * @param rootNamespace * The target namespace * @return Custom config as DOM */ public static Document getCustomConfiguration(Configuration repoConfigElement, String rootName, String rootNamespace) { try { if (repoConfigElement == null || repoConfigElement.getChildren() == null || repoConfigElement.getChildren().length == 0) { if (repoConfigElement.getValue(null) == null) { log.warn("Did not find any child elements nor text within " + repoConfigElement); return null; } } if (log.isDebugEnabled()) log.debug("Creating custom config - rootName=" + rootName + ", rootNamespace=" + rootNamespace); Document doc = null; DocumentBuilderFactory dbf = javax.xml.parsers.DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder(); org.w3c.dom.DOMImplementation impl = parser.getDOMImplementation(); org.w3c.dom.DocumentType doctype = null; doc = impl.createDocument(rootNamespace, rootName, doctype); // Copy the very first text node in order to stay backwards compatible Element rootElement = doc.getDocumentElement(); if (repoConfigElement.getValue(null) != null) { if (log.isDebugEnabled()) log.debug("Very first text node: " + repoConfigElement.getValue()); rootElement.appendChild(doc.createTextNode(repoConfigElement.getValue())); } // Copy elements Configuration[] children = repoConfigElement.getChildren(); if (children.length > 0) { log.debug("root element " + rootElement); for (int i = 0; i < children.length; i++) { rootElement.appendChild(ConfigurationUtil.createElement(children[i], doc)); } } return doc; } catch (Exception e) { log.error(e.getMessage(), e); return null; } // original comment by michi: // avalon ConfigurationUtil doesn't seem to work properly /* org.w3c.dom.Element element = 
ConfigurationUtil.toElement(customConfig); log.debug("Element name: " + element.getLocalName()); org.w3c.dom.Document doc = element.getOwnerDocument(); org.w3c.dom.Element rootElement = doc.getDocumentElement(); rootElement.appendChild(element); return doc; */ }
From source file:petascope.util.XMLUtil.java
/**
 * Converts a XOM Document to an equivalent DOM Document.
 *
 * @param n the XOM document to convert
 * @return the equivalent DOM document, or null if no parser could be configured
 *         (the error is logged)
 */
public static org.w3c.dom.Document convert(Document n) {
    DocumentBuilder db = null;
    try {
        db = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        return DOMConverter.convert(n, db.getDOMImplementation());
    } catch (ParserConfigurationException exc) {
        // Consistent with the List<Node> overload: report through the class
        // logger (with stack trace) instead of System.err.
        log.error("Error converting", exc);
        return null;
    }
}
From source file:petascope.util.XMLUtil.java
/**
 * Converts a list of XOM nodes to an equivalent DOM NodeList.
 *
 * The nodes are wrapped under a temporary "DummyRoot" element so the whole list
 * can be converted as one document; the returned NodeList is a live view over
 * that dummy root's children.
 *
 * @param l the list of XOM nodes
 * @return a DOM NodeList equivalent to l, or null if no parser could be
 *         configured (the error is logged)
 */
/* (cl) */
public static org.w3c.dom.NodeList convert(List<Node> l) {
    Element dummyRoot = new Element("DummyRoot");
    for (Node n : l) {
        dummyRoot.appendChild(n);
    }
    Document dummyDoc = new Document(dummyRoot);
    DocumentBuilder db = null;
    try {
        db = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        final org.w3c.dom.Document doc = DOMConverter.convert(dummyDoc, db.getDOMImplementation());
        /*
         * TODO test this again once we use a newer version of Saxon. For some strange
         * reason, this does not work if convert is called by a Saxon XSLT extension
         * function. If we return this as an instance of something that's more powerful
         * than NodeList, Saxon 8.9 recognizes that and starts output at the DummyRoot
         * element — hence the deliberately minimal anonymous NodeList below.
         */
        // return doc.getDocumentElement().chn();
        return new org.w3c.dom.NodeList() {

            public int getLength() {
                return doc.getDocumentElement().getChildNodes().getLength();
            }

            public org.w3c.dom.Node item(int index) {
                return doc.getDocumentElement().getChildNodes().item(index);
            }
        };
    } catch (ParserConfigurationException exc) {
        log.error("Error converting", exc);
        return null;
    }
}