List of usage examples for org.w3c.dom Document createProcessingInstruction
public ProcessingInstruction createProcessingInstruction(String target, String data) throws DOMException;
Creates a ProcessingInstruction node given the specified target and data strings.
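Before the per-project examples, a minimal, self-contained sketch of the call itself (the target and data values here are illustrative): the returned node is detached, so it must still be inserted into the tree, typically before the document element.

import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.ProcessingInstruction;

public class CreatePIExample {
    public static void main(String[] args) throws Exception {
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
        doc.appendChild(doc.createElement("root"));
        // Illustrative target/data; the PI is detached until inserted.
        ProcessingInstruction pi = doc.createProcessingInstruction("xml-stylesheet",
                "type=\"text/css\" href=\"style.css\"");
        doc.insertBefore(pi, doc.getDocumentElement());
    }
}

From source file:com.photon.phresco.plugins.xcode.Instrumentation.java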
private void preparePlistResult() throws MojoExecutionException {
    try {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        Document xmldoc = dbf.newDocumentBuilder()
                .parse(new File(project.getBasedir().getAbsolutePath() + File.separator + plistResult));
        Element root = xmldoc.getDocumentElement();
        Node pi = xmldoc.createProcessingInstruction("DOCTYPE plist SYSTEM \\",
                "file://localhost/System/Library/DTDs/PropertyList.dtd>");
        xmldoc.insertBefore(pi, root);
        StreamResult out = new StreamResult(
                project.getBasedir().getAbsolutePath() + File.separator + plistResult);
        DOMSource domSource = new DOMSource(xmldoc);
        TransformerFactory tf = TransformerFactory.newInstance();
        Transformer transformer = tf.newTransformer();
        transformer.transform(domSource, out);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
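Note that the target passed above, "DOCTYPE plist SYSTEM \\", is not a legal XML name, so a conformant DOM implementation will reject it with INVALID_CHARACTER_ERR; a processing instruction cannot stand in for a DOCTYPE declaration. A sketch of the conventional alternative, assuming the transformer set up above and reusing the system identifier from the original data string:

// Emit the plist DOCTYPE through serializer output properties instead of a PI.
transformer.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM,
        "file://localhost/System/Library/DTDs/PropertyList.dtd");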
From source file:Main.java
public static void copyInto(Node src, Node dest) throws DOMException {
    Document factory = dest.getOwnerDocument();
    Node parent = null;
    Node place = src;
    // traverse source tree
    while (place != null) {
        // copy this node
        Node node = null;
        int type = place.getNodeType();
        switch (type) {
        case Node.CDATA_SECTION_NODE: {
            node = factory.createCDATASection(place.getNodeValue());
            break;
        }
        case Node.COMMENT_NODE: {
            node = factory.createComment(place.getNodeValue());
            break;
        }
        case Node.ELEMENT_NODE: {
            Element element = factory.createElement(place.getNodeName());
            node = element;
            NamedNodeMap attrs = place.getAttributes();
            int attrCount = attrs.getLength();
            for (int i = 0; i < attrCount; i++) {
                Attr attr = (Attr) attrs.item(i);
                String attrName = attr.getNodeName();
                String attrValue = attr.getNodeValue();
                element.setAttribute(attrName, attrValue);
            }
            break;
        }
        case Node.ENTITY_REFERENCE_NODE: {
            node = factory.createEntityReference(place.getNodeName());
            break;
        }
        case Node.PROCESSING_INSTRUCTION_NODE: {
            node = factory.createProcessingInstruction(place.getNodeName(), place.getNodeValue());
            break;
        }
        case Node.TEXT_NODE: {
            node = factory.createTextNode(place.getNodeValue());
            break;
        }
        default: {
            throw new IllegalArgumentException(
                    "can't copy node type, " + type + " (" + place.getNodeName() + ')');
        }
        }
        dest.appendChild(node);
        // iterate over children
        if (place.hasChildNodes()) {
            parent = place;
            place = place.getFirstChild();
            dest = node;
        } else if (parent == null) {
            place = null;
        } else {
            // advance
            place = place.getNextSibling();
            while (place == null && parent != null) {
                place = parent.getNextSibling();
                parent = parent.getParentNode();
                dest = dest.getParentNode();
            }
        }
    }
}
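A minimal way to exercise this helper (assuming the usual javax.xml.parsers setup; the input file name is illustrative). Any processing instructions inside the copied subtree are recreated in the destination document through createProcessingInstruction:

DocumentBuilder db = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Document src = db.parse(new File("in.xml")); // illustrative input
Document dst = db.newDocument();
dst.appendChild(dst.createElement("copy"));
copyInto(src.getDocumentElement(), dst.getDocumentElement());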
From source file:jp.co.opentone.bsol.framework.core.generator.excel.strategy.XmlWorkbookGeneratorStrategy.java
/**
 * Creates the SpreadsheetML document.
 * @return DOM Document
 * @throws ParserConfigurationException if the DocumentBuilder cannot be created
 */
private Document createDocument() throws ParserConfigurationException {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    factory.setNamespaceAware(true);
    Document doc = factory.newDocumentBuilder().newDocument();
    // The mso-application instruction associates the XML file with Excel.
    ProcessingInstruction pi = doc.createProcessingInstruction("mso-application", "progid=\"Excel.Sheet\"");
    doc.appendChild(pi);
    return doc;
}
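For reference, serializing such a document puts the instruction into the prolog, which is what tells the Windows shell to open the file with Excel. A minimal sketch, assuming doc comes from createDocument() above and using an illustrative root element:

doc.appendChild(doc.createElement("Workbook")); // illustrative; real code builds the SpreadsheetML tree here
Transformer t = TransformerFactory.newInstance().newTransformer();
t.transform(new DOMSource(doc), new StreamResult(System.out));
// Output begins: <?xml version="1.0" ...?><?mso-application progid="Excel.Sheet"?><Workbook/>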
From source file:Main.java
@SuppressWarnings("null")
public static void copyInto(Node src, Node dest) throws DOMException {
    Document factory = dest.getOwnerDocument();
    Node parent = null;
    Node place = src;
    // traverse source tree
    while (place != null) {
        // copy this node
        Node node = null;
        int type = place.getNodeType();
        switch (type) {
        case Node.CDATA_SECTION_NODE: {
            node = factory.createCDATASection(place.getNodeValue());
            break;
        }
        case Node.COMMENT_NODE: {
            node = factory.createComment(place.getNodeValue());
            break;
        }
        case Node.ELEMENT_NODE: {
            Element element = factory.createElement(place.getNodeName());
            node = element;
            NamedNodeMap attrs = place.getAttributes();
            int attrCount = attrs.getLength();
            for (int i = 0; i < attrCount; i++) {
                Attr attr = (Attr) attrs.item(i);
                String attrName = attr.getNodeName();
                String attrValue = attr.getNodeValue();
                element.setAttribute(attrName, attrValue);
                /*
                if (domimpl && !attr.getSpecified()) {
                    ((Attr) element.getAttributeNode(attrName)).setSpecified(false);
                }
                */
            }
            break;
        }
        case Node.ENTITY_REFERENCE_NODE: {
            node = factory.createEntityReference(place.getNodeName());
            break;
        }
        case Node.PROCESSING_INSTRUCTION_NODE: {
            node = factory.createProcessingInstruction(place.getNodeName(), place.getNodeValue());
            break;
        }
        case Node.TEXT_NODE: {
            node = factory.createTextNode(place.getNodeValue());
            break;
        }
        default: {
            // report the source node's name; node is still null in this branch
            throw new IllegalArgumentException(
                    "can't copy node type, " + type + " (" + place.getNodeName() + ')');
        }
        }
        dest.appendChild(node);
        // iterate over children
        if (place.hasChildNodes()) {
            parent = place;
            place = place.getFirstChild();
            dest = node;
        } else if (parent == null) {
            place = null;
        } else {
            // advance
            place = place.getNextSibling();
            while (place == null && parent != null && dest != null) {
                place = parent.getNextSibling();
                parent = parent.getParentNode();
                dest = dest.getParentNode();
            }
        }
    }
}
From source file:Main.java
/**
 * Clone given Node into target Document. If target is null, the same Document will be used.
 * If deep is specified, all children below will also be cloned.
 */
public static Node cloneNode(Node node, Document target, boolean deep) throws DOMException {
    if (target == null || node.getOwnerDocument() == target) {
        // same Document
        return node.cloneNode(deep);
    } else {
        // DOM level 2 provides this in Document, so once xalan switches to that,
        // we can take out all the below and just call target.importNode(node, deep);
        // For now, we implement based on the javadocs for importNode
        Node newNode;
        int nodeType = node.getNodeType();
        switch (nodeType) {
        case Node.ATTRIBUTE_NODE:
            newNode = target.createAttribute(node.getNodeName());
            break;
        case Node.DOCUMENT_FRAGMENT_NODE:
            newNode = target.createDocumentFragment();
            break;
        case Node.ELEMENT_NODE:
            Element newElement = target.createElement(node.getNodeName());
            NamedNodeMap nodeAttr = node.getAttributes();
            if (nodeAttr != null) {
                for (int i = 0; i < nodeAttr.getLength(); i++) {
                    Attr attr = (Attr) nodeAttr.item(i);
                    if (attr.getSpecified()) {
                        Attr newAttr = (Attr) cloneNode(attr, target, true);
                        newElement.setAttributeNode(newAttr);
                    }
                }
            }
            newNode = newElement;
            break;
        case Node.ENTITY_REFERENCE_NODE:
            newNode = target.createEntityReference(node.getNodeName());
            break;
        case Node.PROCESSING_INSTRUCTION_NODE:
            newNode = target.createProcessingInstruction(node.getNodeName(), node.getNodeValue());
            break;
        case Node.TEXT_NODE:
            newNode = target.createTextNode(node.getNodeValue());
            break;
        case Node.CDATA_SECTION_NODE:
            newNode = target.createCDATASection(node.getNodeValue());
            break;
        case Node.COMMENT_NODE:
            newNode = target.createComment(node.getNodeValue());
            break;
        case Node.NOTATION_NODE:
        case Node.ENTITY_NODE:
        case Node.DOCUMENT_TYPE_NODE:
        case Node.DOCUMENT_NODE:
        default:
            throw new IllegalArgumentException("Importing of " + node + " not supported yet");
        }
        if (deep) {
            for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) {
                newNode.appendChild(cloneNode(child, target, true));
            }
        }
        return newNode;
    }
}
From source file:Main.java
/**
 * Clone given Node into target Document. If target is null, the same Document will be used.
 * If deep is specified, all children below will also be cloned.
 */
public final static Node cloneNode(Node node, Document target, boolean deep) throws DOMException {
    if ((target == null) || (node.getOwnerDocument() == target)) {
        // same Document
        return node.cloneNode(deep);
    } else {
        // DOM level 2 provides this in Document, so once xalan switches to that,
        // we can take out all the below and just call target.importNode(node, deep);
        // For now, we implement based on the javadocs for importNode
        Node newNode;
        int nodeType = node.getNodeType();
        switch (nodeType) {
        case Node.ATTRIBUTE_NODE:
            newNode = target.createAttribute(node.getNodeName());
            break;
        case Node.DOCUMENT_FRAGMENT_NODE:
            newNode = target.createDocumentFragment();
            break;
        case Node.ELEMENT_NODE:
            Element newElement = target.createElement(node.getNodeName());
            NamedNodeMap nodeAttr = node.getAttributes();
            if (nodeAttr != null) {
                for (int i = 0; i < nodeAttr.getLength(); i++) {
                    Attr attr = (Attr) nodeAttr.item(i);
                    if (attr.getSpecified()) {
                        Attr newAttr = (Attr) cloneNode(attr, target, true);
                        newElement.setAttributeNode(newAttr);
                    }
                }
            }
            newNode = newElement;
            break;
        case Node.ENTITY_REFERENCE_NODE:
            newNode = target.createEntityReference(node.getNodeName());
            break;
        case Node.PROCESSING_INSTRUCTION_NODE:
            newNode = target.createProcessingInstruction(node.getNodeName(), node.getNodeValue());
            break;
        case Node.TEXT_NODE:
            newNode = target.createTextNode(node.getNodeValue());
            break;
        case Node.CDATA_SECTION_NODE:
            newNode = target.createCDATASection(node.getNodeValue());
            break;
        case Node.COMMENT_NODE:
            newNode = target.createComment(node.getNodeValue());
            break;
        case Node.NOTATION_NODE:
        case Node.ENTITY_NODE:
        case Node.DOCUMENT_TYPE_NODE:
        case Node.DOCUMENT_NODE:
        default:
            throw new IllegalArgumentException("Importing of " + node + " not supported yet");
        }
        if (deep) {
            for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) {
                newNode.appendChild(cloneNode(child, target, true));
            }
        }
        return newNode;
    }
}
From source file:Main.java
/**
 * Copies the source tree into the specified place in a destination
 * tree. The source node and its children are appended as children
 * of the destination node.
 * <p>
 * <em>Note:</em> This is an iterative implementation.
 */
public static void copyInto(Node src, Node dest) throws DOMException {
    // get node factory
    Document factory = dest.getOwnerDocument();
    // DocumentImpl and AttrImpl are the Xerces implementation classes
    // (org.apache.xerces.dom); they allow the attribute "specified" flag to be preserved.
    boolean domimpl = factory instanceof DocumentImpl;

    // placement variables
    Node start = src;
    Node parent = src;
    Node place = src;

    // traverse source tree
    while (place != null) {
        // copy this node
        Node node = null;
        int type = place.getNodeType();
        switch (type) {
        case Node.CDATA_SECTION_NODE: {
            node = factory.createCDATASection(place.getNodeValue());
            break;
        }
        case Node.COMMENT_NODE: {
            node = factory.createComment(place.getNodeValue());
            break;
        }
        case Node.ELEMENT_NODE: {
            Element element = factory.createElement(place.getNodeName());
            node = element;
            NamedNodeMap attrs = place.getAttributes();
            int attrCount = attrs.getLength();
            for (int i = 0; i < attrCount; i++) {
                Attr attr = (Attr) attrs.item(i);
                String attrName = attr.getNodeName();
                String attrValue = attr.getNodeValue();
                element.setAttribute(attrName, attrValue);
                if (domimpl && !attr.getSpecified()) {
                    ((AttrImpl) element.getAttributeNode(attrName)).setSpecified(false);
                }
            }
            break;
        }
        case Node.ENTITY_REFERENCE_NODE: {
            node = factory.createEntityReference(place.getNodeName());
            break;
        }
        case Node.PROCESSING_INSTRUCTION_NODE: {
            node = factory.createProcessingInstruction(place.getNodeName(), place.getNodeValue());
            break;
        }
        case Node.TEXT_NODE: {
            node = factory.createTextNode(place.getNodeValue());
            break;
        }
        default: {
            // report the source node's name; node is still null in this branch
            throw new IllegalArgumentException(
                    "can't copy node type, " + type + " (" + place.getNodeName() + ')');
        }
        }
        dest.appendChild(node);

        // iterate over children
        if (place.hasChildNodes()) {
            parent = place;
            place = place.getFirstChild();
            dest = node;
        }
        // advance
        else {
            place = place.getNextSibling();
            while (place == null && parent != start) {
                place = parent.getNextSibling();
                parent = parent.getParentNode();
                dest = dest.getParentNode();
            }
        }
    }
}
From source file:be.docarch.odt2braille.PEF.java
/**
 * Converts the flat .odt file to a .pef file according to the braille settings.
 *
 * This function
 * <ul>
 * <li>uses {@link ODT} to convert the .odt file to multiple DAISY-like xml files,</li>
 * <li>uses {@link LiblouisXML} to translate these files into braille, and</li>
 * <li>recombines these braille files into one single .pef file.</li>
 * </ul>
 *
 * First, the document <i>body</i> is processed and split in volumes, then the <i>page ranges</i> are calculated
 * and finally the <i>preliminary pages</i> of each volume are processed and inserted at the right places.
 * The checker checks the DAISY-like files and the volume lengths.
 */
public boolean makePEF() throws IOException, ParserConfigurationException, TransformerException,
        InterruptedException, SAXException, ConversionException, LiblouisXMLException, Exception {

    logger.entering("PEF", "makePEF");

    Configuration settings = odt.getConfiguration();

    Element[] volumeElements;
    Element sectionElement;
    File bodyFile = null;
    File brailleFile = null;
    File preliminaryFile = null;

    List<Volume> volumes = manager.getVolumes();

    String volumeInfo = capitalizeFirstLetter(
            ResourceBundle.getBundle(L10N, settings.mainLocale).getString("in")) + " " + volumes.size() + " "
            + ResourceBundle.getBundle(L10N, settings.mainLocale)
                    .getString((volumes.size() > 1) ? "volumes" : "volume")
            + "\n@title\n@pages";

    volumeElements = new Element[volumes.size()];

    DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
    docFactory.setValidating(false);
    docFactory.setNamespaceAware(true);
    DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
    DOMImplementation impl = docBuilder.getDOMImplementation();

    Document document = impl.createDocument(pefNS, "pef", null);
    Element root = document.getDocumentElement();
    root.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns", pefNS);
    root.setAttributeNS(null, "version", "2008-1");

    Element headElement = document.createElementNS(pefNS, "head");
    Element metaElement = document.createElementNS(pefNS, "meta");
    metaElement.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns:dc", "http://purl.org/dc/elements/1.1/");
    Element dcElement = document.createElementNS("http://purl.org/dc/elements/1.1/", "dc:identifier");
    dcElement.appendChild(document.createTextNode(Integer.toHexString((int) (Math.random() * 1000000)) + " "
            + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(new Date())));
    metaElement.appendChild(dcElement);
    dcElement = document.createElementNS("http://purl.org/dc/elements/1.1/", "dc:format");
    dcElement.appendChild(document.createTextNode("application/x-pef+xml"));
    metaElement.appendChild(dcElement);
    headElement.appendChild(metaElement);
    root.appendChild(headElement);

    int columns = pefSettings.getColumns();
    int rows = pefSettings.getRows();
    boolean duplex = pefSettings.getDuplex();
    int rowgap = pefSettings.getEightDots() ? 1 : 0;
    int beginPage = settings.getBeginningBraillePageNumber();

    if (statusIndicator != null) {
        statusIndicator.start();
        statusIndicator.setSteps(volumes.size());
        statusIndicator.setStatus(ResourceBundle.getBundle(L10N, statusIndicator.getPreferredLocale())
                .getString("statusIndicatorStep"));
    }

    for (int volumeCount = 0; volumeCount < volumes.size(); volumeCount++) {

        volumeElements[volumeCount] = document.createElementNS(pefNS, "volume");
        volumeElements[volumeCount].setAttributeNS(null, "cols", String.valueOf(columns));
        volumeElements[volumeCount].setAttributeNS(null, "rows",
                String.valueOf(rows + (int) Math.ceil(((rows - 1) * rowgap) / 4d)));
        volumeElements[volumeCount].setAttributeNS(null, "rowgap", String.valueOf(rowgap));
        volumeElements[volumeCount].setAttributeNS(null, "duplex", duplex ? "true" : "false");

        Volume volume = volumes.get(volumeCount);

        // Body section
        logger.info("Processing volume " + (volumeCount + 1) + " : " + volume.getTitle());

        if (!(volume instanceof PreliminaryVolume)) {

            bodyFile = File.createTempFile(TMP_NAME, ".daisy.body." + (volumeCount + 1) + ".xml", TMP_DIR);
            bodyFile.deleteOnExit();
            brailleFile = File.createTempFile(TMP_NAME, ".txt", TMP_DIR);
            brailleFile.deleteOnExit();

            odt.getBodyMatter(bodyFile, volume);
            liblouisXML.configure(bodyFile, brailleFile, false, beginPage);
            liblouisXML.run();

            // Read pages
            sectionElement = document.createElementNS(pefNS, "section");
            int pageCount = addPagesToSection(document, sectionElement, brailleFile, rows, columns, -1);
            volumeElements[volumeCount].appendChild(sectionElement);

            // Checker
            if (checker != null) {
                checker.checkDaisyFile(bodyFile);
            }

            // Braille page range
            volume.setBraillePagesStart(beginPage);
            volume.setNumberOfBraillePages(pageCount);
            beginPage += pageCount;

            // Print page range
            if (volume.getFrontMatter() && settings.getVolumeInfoEnabled()) {
                extractPrintPageRange(bodyFile, volume, settings);
            }
        }

        // Special symbols list
        if (volume.getSpecialSymbolListEnabled()) {
            extractSpecialSymbols(bodyFile, volume, volumeCount, settings);
        }

        // Preliminary section
        if (volume.getFrontMatter() || volume.getTableOfContent() || volume.getTranscribersNotesPageEnabled()
                || volume.getSpecialSymbolListEnabled()) {

            preliminaryFile = File.createTempFile(TMP_NAME, ".daisy.front." + (volumeCount + 1) + ".xml",
                    TMP_DIR);
            preliminaryFile.deleteOnExit();
            brailleFile = File.createTempFile(TMP_NAME, ".txt", TMP_DIR);
            brailleFile.deleteOnExit();

            odt.getFrontMatter(preliminaryFile, volume, volumeInfo);
            liblouisXML.configure(preliminaryFile, brailleFile, true,
                    volume.getTableOfContent() ? volume.getFirstBraillePage() : 1);
            liblouisXML.run();

            // Page range
            int pageCount = countPages(brailleFile, volume);
            volume.setNumberOfPreliminaryPages(pageCount);

            // Translate again with updated volume info and without volume separator marks
            brailleFile = File.createTempFile(TMP_NAME, ".txt", TMP_DIR);
            brailleFile.deleteOnExit();
            odt.getFrontMatter(preliminaryFile, volume, volumeInfo);
            liblouisXML.configure(preliminaryFile, brailleFile, false,
                    volume.getTableOfContent() ? volume.getFirstBraillePage() : 1);
            liblouisXML.run();

            // Read pages
            sectionElement = document.createElementNS(pefNS, "section");
            addPagesToSection(document, sectionElement, brailleFile, rows, columns, pageCount);
            volumeElements[volumeCount].insertBefore(sectionElement,
                    volumeElements[volumeCount].getFirstChild());

            // Checker
            if (checker != null) {
                checker.checkDaisyFile(preliminaryFile);
            }
        }

        if (statusIndicator != null) {
            statusIndicator.increment();
        }
    }

    if (checker != null) {
        checker.checkVolumes(volumes);
    }

    Element bodyElement = document.createElementNS(pefNS, "body");
    for (int volumeCount = 0; volumeCount < volumes.size(); volumeCount++) {
        bodyElement.appendChild(volumeElements[volumeCount]);
    }
    root.appendChild(bodyElement);

    document.insertBefore(document.createProcessingInstruction("xml-stylesheet",
            "type='text/css' href='pef.css'"), document.getFirstChild());

    OdtUtils.saveDOM(document, pefFile);

    logger.exiting("PEF", "makePEF");

    if (!validatePEF(pefFile)) {
        return false;
    }

    return true;
}
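Note how the xml-stylesheet processing instruction is inserted before the document element (document.getFirstChild()), which places it in the prolog where stylesheet-association instructions must appear.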
From source file:org.apache.airavata.gfac.hadoop.handler.HadoopDeploymentHandler.java
private void clusterPropertiesToHadoopSiteXml(Properties props, File hadoopSiteXml)
        throws ParserConfigurationException, TransformerException {
    DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder documentBuilder = domFactory.newDocumentBuilder();

    Document hadoopSiteXmlDoc = documentBuilder.newDocument();
    hadoopSiteXmlDoc.setXmlVersion("1.0");
    hadoopSiteXmlDoc.setXmlStandalone(true);
    // createProcessingInstruction only creates a detached node; it must be
    // appended to the document for the instruction to appear in the output.
    ProcessingInstruction pi = hadoopSiteXmlDoc.createProcessingInstruction("xml-stylesheet",
            "type=\"text/xsl\" href=\"configuration.xsl\"");
    hadoopSiteXmlDoc.appendChild(pi);

    Element configEle = hadoopSiteXmlDoc.createElement("configuration");
    hadoopSiteXmlDoc.appendChild(configEle);

    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        addPropertyToConfiguration(entry, configEle, hadoopSiteXmlDoc);
    }

    saveDomToFile(hadoopSiteXmlDoc, hadoopSiteXml);
}
From source file:org.apache.ode.utils.DOMUtils.java
private static void parse(XMLStreamReader reader, Document doc, Node parent) throws XMLStreamException {
    int event = reader.getEventType();

    while (reader.hasNext()) {
        switch (event) {
        case XMLStreamConstants.START_ELEMENT:
            // create element
            Element e = doc.createElementNS(reader.getNamespaceURI(), reader.getLocalName());
            if (reader.getPrefix() != null && !reader.getPrefix().isEmpty()) {
                e.setPrefix(reader.getPrefix());
            }
            parent.appendChild(e);

            // copy namespaces
            for (int ns = 0; ns < reader.getNamespaceCount(); ns++) {
                String uri = reader.getNamespaceURI(ns);
                String prefix = reader.getNamespacePrefix(ns);
                declare(e, uri, prefix);
            }

            // copy attributes
            for (int att = 0; att < reader.getAttributeCount(); att++) {
                String name = reader.getAttributeLocalName(att);
                String prefix = reader.getAttributePrefix(att);
                if (prefix != null && prefix.length() > 0) {
                    name = prefix + ":" + name;
                }
                Attr attr = doc.createAttributeNS(reader.getAttributeNamespace(att), name);
                attr.setValue(reader.getAttributeValue(att));
                e.setAttributeNode(attr);
            }

            // sub-nodes
            if (reader.hasNext()) {
                reader.next();
                parse(reader, doc, e);
            }
            if (parent instanceof Document) {
                while (reader.hasNext())
                    reader.next();
                return;
            }
            break;
        case XMLStreamConstants.END_ELEMENT:
            return;
        case XMLStreamConstants.CHARACTERS:
            if (parent != null) {
                parent.appendChild(doc.createTextNode(reader.getText()));
            }
            break;
        case XMLStreamConstants.COMMENT:
            if (parent != null) {
                parent.appendChild(doc.createComment(reader.getText()));
            }
            break;
        case XMLStreamConstants.CDATA:
            parent.appendChild(doc.createCDATASection(reader.getText()));
            break;
        case XMLStreamConstants.PROCESSING_INSTRUCTION:
            parent.appendChild(doc.createProcessingInstruction(reader.getPITarget(), reader.getPIData()));
            break;
        case XMLStreamConstants.ENTITY_REFERENCE:
            // recreate the entity reference by name; getPITarget()/getPIData()
            // are only valid for processing-instruction events
            parent.appendChild(doc.createEntityReference(reader.getLocalName()));
            break;
        case XMLStreamConstants.NAMESPACE:
        case XMLStreamConstants.ATTRIBUTE:
            break;
        default:
            break;
        }

        if (reader.hasNext()) {
            event = reader.next();
        }
    }
}
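A hypothetical driver for the private parse(...) method above (the wrapper name and signature are illustrative, not part of the actual DOMUtils API):

public static Document toDocument(java.io.InputStream in) throws Exception {
    XMLStreamReader reader = XMLInputFactory.newInstance().createXMLStreamReader(in);
    Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
    parse(reader, doc, doc); // PIs in the stream become ProcessingInstruction nodes
    return doc;
}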