List of usage examples for org.w3c.dom Node setTextContent
public void setTextContent(String textContent) throws DOMException;
From source file:net.sourceforge.pmd.build.RuleSetToDocs.java
/**
 * Escapes the XML special characters '&amp;' and '&lt;' in the node's text content
 * so the text survives later serialization as literal characters rather than markup.
 *
 * The visible original replaced "&" with "&" and "<" with "<" — no-ops that defeat
 * the method's purpose; the replacement strings were clearly stripped of their
 * entity form. Note the order matters: '&' must be escaped first, otherwise the
 * '&' introduced by "&lt;" would itself be re-escaped.
 *
 * @param node the DOM node whose text content is escaped in place
 */
private static void escapeTextContent(Node node) {
    String content = node.getTextContent();
    content = content.replaceAll("&", "&amp;");
    content = content.replaceAll("<", "&lt;");
    node.setTextContent(content);
}
From source file:nl.b3p.imagetool.CombineArcServerUrl.java
/**
 * Creates a clone of this CombineArcServerUrl whose XML request body is rewritten
 * with the bounding box and image size taken from the given {@link ImageBbox}.
 * In this implementation the body (not the URL query string) is changed.
 *
 * @param imbbox carries the target width, height and bbox
 * @return a single-element list containing the clone with the updated body
 * @see CombineImageUrl#calculateNewUrl(java.lang.Integer, java.lang.Integer, nl.b3p.viewer.image.Bbox)
 */
@Override
public List<CombineImageUrl> calculateNewUrl(ImageBbox imbbox) {
    Integer width = imbbox.getWidth();
    Integer height = imbbox.getHeight();
    Bbox bbox = imbbox.getBbox();
    // Clone first; the clone is returned even if rewriting the body fails below.
    CombineArcServerUrl ciu = new CombineArcServerUrl(this);
    try {
        Document doc = bodyAsDocument();
        Node root = doc.getFirstChild();
        // Change the bbox: locate the Extent element and overwrite its
        // XMin/YMin/XMax/YMax children with the new bbox coordinates.
        Node extent = (Node) xPathExtent.evaluate(root, XPathConstants.NODE);
        NodeList nl = extent.getChildNodes();
        for (int i = 0; i < nl.getLength(); i++) {
            Node child = nl.item(i);
            if ("XMin".equals(child.getLocalName())) {
                child.setTextContent("" + bbox.getMinx());
            } else if ("YMin".equals(child.getLocalName())) {
                child.setTextContent("" + bbox.getMiny());
            } else if ("XMax".equals(child.getLocalName())) {
                child.setTextContent("" + bbox.getMaxx());
            } else if ("YMax".equals(child.getLocalName())) {
                child.setTextContent("" + bbox.getMaxy());
            }
        }
        // Image size: overwrite ImageHeight/ImageWidth under the ImageDisplay element.
        Node imageSize = (Node) xPathImageDisplay.evaluate(root, XPathConstants.NODE);
        nl = imageSize.getChildNodes();
        for (int i = 0; i < nl.getLength(); i++) {
            Node child = nl.item(i);
            if ("ImageHeight".equals(child.getLocalName())) {
                child.setTextContent(height.toString());
            } else if ("ImageWidth".equals(child.getLocalName())) {
                child.setTextContent(width.toString());
            }
        }
        ciu.setBody(doc);
    } catch (Exception e) {
        // Best effort: on failure the clone keeps the original body.
        log.warn("Error while changing body fragment", e);
    }
    List<CombineImageUrl> list = new ArrayList<CombineImageUrl>();
    list.add(ciu);
    return list;
}
From source file:nl.b3p.viewer.stripes.SldActionBean.java
/**
 * Adds the CQL filter (field {@code filter}) as a FeatureTypeConstraint to every
 * NamedLayer of the SLD document held in {@code sldXml}, then serializes the
 * modified document back into {@code sldXml}. Only runs when {@code newSld} is null.
 *
 * @throws Exception on CQL parse, XML parse or transform failures (propagated to caller)
 */
private void addFilterToExistingSld() throws Exception {
    Filter f = CQL.toFilter(filter);
    // Rewrite the filter to be case-insensitive on match.
    f = (Filter) f.accept(new ChangeMatchCase(false), null);
    if (featureTypeName == null) {
        featureTypeName = layer;
    }
    FeatureTypeConstraint ftc = sldFactory.createFeatureTypeConstraint(featureTypeName, f, new Extent[] {});
    if (newSld == null) {
        // Serialize the FeatureTypeConstraint to XML via GeoTools, then re-parse it
        // so it can be grafted into the user's SLD DOM.
        SLDTransformer sldTransformer = new SLDTransformer();
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        sldTransformer.transform(ftc, bos);
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        dbf.setNamespaceAware(true);
        DocumentBuilder db = dbf.newDocumentBuilder();
        Document sldXmlDoc = db.parse(new ByteArrayInputStream(sldXml));
        Document ftcDoc = db.parse(new ByteArrayInputStream(bos.toByteArray()));
        String sldVersion = sldXmlDoc.getDocumentElement().getAttribute("version");
        if ("1.1.0".equals(sldVersion)) {
            // Replace the sld:FeatureTypeName element generated by GeoTools
            // by se:FeatureTypeName, as required by SLD 1.1.0.
            NodeList sldFTNs = ftcDoc.getElementsByTagNameNS(NS_SLD, "FeatureTypeName");
            if (sldFTNs.getLength() == 1) {
                Node sldFTN = sldFTNs.item(0);
                Node seFTN = ftcDoc.createElementNS(NS_SE, "FeatureTypeName");
                seFTN.setTextContent(sldFTN.getTextContent());
                sldFTN.getParentNode().replaceChild(seFTN, sldFTN);
            }
        }
        // Ignore namespaces to tackle both SLD 1.0.0 and SLD 1.1.0.
        // Add the constraint to all NamedLayers, not only to the layer specified
        // in the layers parameter.
        NodeList namedLayers = sldXmlDoc.getElementsByTagNameNS(NS_SLD, "NamedLayer");
        for (int i = 0; i < namedLayers.getLength(); i++) {
            Node namedLayer = namedLayers.item(i);
            // Search where to insert the FeatureTypeConstraint from our ftcDoc.
            // LayerFeatureConstraints must come after sld:Name, se:Name or
            // se:Description and before sld:NamedStyle or sld:UserStyle, so we
            // search backwards from the last child. If an existing
            // LayerFeatureConstraints is found, reuse it.
            NodeList childs = namedLayer.getChildNodes();
            Node insertBefore = null;
            Node layerFeatureConstraints = null;
            int j = childs.getLength() - 1;
            do {
                Node child = childs.item(j);
                if ("LayerFeatureConstraints".equals(child.getLocalName())) {
                    layerFeatureConstraints = child;
                    break;
                }
                if ("Description".equals(child.getLocalName()) || "Name".equals(child.getLocalName())) {
                    // Passed the name/description header: insert right after it.
                    break;
                }
                insertBefore = child;
                j--;
            } while (j >= 0);
            // Clone per NamedLayer so each layer gets its own copy of the constraint.
            Node featureTypeConstraint = sldXmlDoc.adoptNode(ftcDoc.getDocumentElement().cloneNode(true));
            if (layerFeatureConstraints == null) {
                layerFeatureConstraints = sldXmlDoc.createElementNS(NS_SLD, "LayerFeatureConstraints");
                layerFeatureConstraints.appendChild(featureTypeConstraint);
                namedLayer.insertBefore(layerFeatureConstraints, insertBefore);
            } else {
                layerFeatureConstraints.appendChild(featureTypeConstraint);
            }
        }
        // Serialize the modified DOM back into the sldXml byte array.
        TransformerFactory tf = TransformerFactory.newInstance();
        Transformer t = tf.newTransformer();
        DOMSource source = new DOMSource(sldXmlDoc);
        bos = new ByteArrayOutputStream();
        StreamResult result = new StreamResult(bos);
        t.transform(source, result);
        sldXml = bos.toByteArray();
    }
}
From source file:org.apache.accumulo.start.Test.java
/**
 * Integration test: rewrites the Accumulo site config so the dynamic-classpath
 * property points at a freshly created random directory, then re-runs
 * {@code setUp()} and {@code testReloadingClassLoader()} to verify the class
 * loader picks up the new location. The original config is backed up to
 * "*.bkp" and restored in the finally block.
 *
 * NOTE(review): requires ACCUMULO_HOME to be set; if it is not, paths resolve
 * under "null/..." — verify against the test environment.
 */
public void testChangingDirectory() throws Exception {
    String configFile = System.getProperty("org.apache.accumulo.config.file", "accumulo-site.xml");
    String CONF_DIR = System.getenv("ACCUMULO_HOME") + "/conf/";
    String SITE_CONF = CONF_DIR + configFile;
    File oldConf = new File(SITE_CONF);
    boolean exists = oldConf.exists();
    String siteBkp = SITE_CONF + ".bkp";
    if (exists) {
        // Move the live config out of the way; it is restored in the finally block.
        if (oldConf.exists()) {
            oldConf.renameTo(new File(siteBkp));
        }
        oldConf = new File(siteBkp);
    }
    // Random directory name so repeated runs do not collide.
    String randomFolder = System.getenv("ACCUMULO_HOME") + "/lib/notExt" + new Random().nextInt();
    File rf = new File(randomFolder);
    rf.mkdirs();
    try {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        Document d;
        // Parse the backed-up config, or fall back to the shipped example config.
        if (exists)
            d = db.parse(siteBkp);
        else
            d = db.parse(new File(CONF_DIR + "examples/512MB/standalone/" + configFile));
        NodeList pnodes = d.getElementsByTagName("property");
        for (int i = pnodes.getLength() - 1; i >= 0; i--) {
            Element current_property = (Element) pnodes.item(i);
            Node cname = current_property.getElementsByTagName("name").item(0);
            if (cname != null && cname.getTextContent()
                    .compareTo(AccumuloClassLoader.DYNAMIC_CLASSPATH_PROPERTY_NAME) == 0) {
                // Point the dynamic classpath at the random directory, creating
                // the <value> element if the property had none.
                Node cvalue = current_property.getElementsByTagName("value").item(0);
                if (cvalue != null) {
                    cvalue.setTextContent(randomFolder + "/.*");
                } else {
                    cvalue = d.createElement("value");
                    cvalue.setTextContent(randomFolder + "/.*");
                    current_property.appendChild(cvalue);
                }
                break;
            }
        }
        // Write the modified config back to the live site-config location.
        TransformerFactory cybertron = TransformerFactory.newInstance();
        Transformer optimusPrime = cybertron.newTransformer();
        Result result = new StreamResult(new File(SITE_CONF));
        optimusPrime.transform(new DOMSource(d), result);
        setUp();
        testReloadingClassLoader();
    } finally {
        // Remove the rewritten config and restore the backup.
        new File(SITE_CONF).delete();
        if (exists)
            oldConf.renameTo(new File(SITE_CONF));
        // NOTE(review): listFiles() returns null if rf was deleted or is not a
        // directory, which would NPE here — confirm this cannot happen mid-test.
        for (File deleteMe : rf.listFiles())
            deleteMe.delete();
        rf.delete();
    }
}
From source file:org.apache.openaz.xacml.pdp.policy.dom.DOMPolicyDefaults.java
public static boolean repair(Node nodePolicyDefaults) throws DOMStructureException { Element elementPolicyDefaults = DOMUtil.getElement(nodePolicyDefaults); boolean result = false; NodeList children = elementPolicyDefaults.getChildNodes(); int numChildren; if (children != null && (numChildren = children.getLength()) > 0) { for (int i = 0; i < numChildren; i++) { Node child = children.item(i); if (DOMUtil.isElement(child)) { if (DOMUtil.isInNamespace(child, XACML3.XMLNS) && XACML3.ELEMENT_XPATHVERSION.equals(child.getLocalName())) { try { DOMUtil.getURIContent(child); } catch (DOMStructureException ex) { logger.warn("Setting invalid " + XACML3.ELEMENT_XPATHVERSION + " attribute " + child.getTextContent() + " to " + XACML.XPATHVERSION_2_0); child.setTextContent(XACML.XPATHVERSION_2_0); result = true;//from ww w . j a v a 2 s . c om } } else { logger.warn("Unexpected element " + child.getNodeName()); elementPolicyDefaults.removeChild(child); result = true; } } } } return result; }
From source file:org.apache.shindig.gadgets.rewrite.ConcatVisitor.java
/** * For css:/* ww w. j av a 2 s . c o m*/ * Link tags are first split into buckets separated by tags with mediaType == "all" * / title attribute different from their previous link tag / nodes that are * not 'link' tags. * This ensures that the buckets can be processed separately without losing title / * "all" mediaType information. * * Link tags with same mediaType are concatenated within each bucket. * This exercise ensures that css information is loaded in the same relative order * as that of the original html page, and that the css information within * mediaType=="all" is retained and applies to all media types. * * Look at the areLinkNodesBucketable method for details on mediaType=="all" and * title attribute * * Example: Assume we have the following node list. (all have same parent, * nodes between Node6 and Node12 are non link nodes, and hence did not come * to revisit() call) * <link href="1.css" rel="stylesheet" type="text/css" media="screen"> -- Node1 * <link href="2.css" rel="stylesheet" type="text/css" media="print"> -- Node2 * <link href="3.css" rel="stylesheet" type="text/css" media="screen"> -- Node3 * <link href="4.css" rel="stylesheet" type="text/css" media="all"> -- Node4 * <link href="5.css" rel="stylesheet" type="text/css" media="all"> -- Node5 * <link href="6.css" rel="stylesheet" type="text/css" media="screen"> -- Node6 * <link href="12.css" rel="stylesheet" type="text/css" media="screen"> -- Node12 * <link href="13.css" rel="stylesheet" type="text/css" media="screen"> -- Node13 * * First we split to buckets bassed on the adjacency and other conditions. * buckets - [ [ Node1, Node2, Node3 ], [ Node4, Node 5 ], [ Node6 ], [ Node12, Node13 ] * Within each bucket we group them based on media type. * batches - [ Node1, Node2, Node3 ] --> [ [Node1, Node3], [Node2] ] * - [ Node4, Node 5 ] --> [ [ Node4, Node 5 ] ] * - [ Node6 ] --> [ [ Node6 ] ] * - [ Node12, Node13 ] --> [ [ Node12, Node13 ] ] * * Refer Tests for more examples. 
*/
public boolean revisit(Gadget gadget, List<Node> nodes) throws RewritingException {
    // Collate Elements into buckets. A bucket is broken off when the next node is
    // not the immediate sibling of the current one (unless split-mode allows gaps),
    // or — for CSS — when the two link tags cannot share a bucket (mediaType
    // "all" / differing title; see areLinkNodesBucketable and the javadoc above).
    List<List<Element>> concatBuckets = Lists.newLinkedList();
    List<Element> curBucket = Lists.newLinkedList();
    Iterator<Node> nodeIter = nodes.iterator();
    Element cur = (Element) nodeIter.next();
    curBucket.add(cur);
    while (nodeIter.hasNext()) {
        Element next = (Element) nodeIter.next();
        if ((!split && cur != getSibling(next, true))
                || (type == ConcatUriManager.Type.CSS && !areLinkNodesBucketable(cur, next))) {
            // Break off current bucket and add to list of all.
            concatBuckets.add(curBucket);
            curBucket = Lists.newLinkedList();
        }
        curBucket.add(next);
        cur = next;
    }
    // Add leftovers.
    concatBuckets.add(curBucket);
    // Split the existing buckets based on media types into concat batches.
    List<List<Element>> concatBatches = Lists.newLinkedList();
    Iterator<List<Element>> batchesIter = concatBuckets.iterator();
    while (batchesIter.hasNext()) {
        splitBatchOnMedia(batchesIter.next(), concatBatches);
    }
    // Prepare batches of Uris to send to generate concat Uris; drop batches that
    // are empty or whose Uris cannot be extracted.
    List<List<Uri>> uriBatches = Lists.newLinkedList();
    batchesIter = concatBatches.iterator();
    while (batchesIter.hasNext()) {
        List<Element> batch = batchesIter.next();
        List<Uri> uris = Lists.newLinkedList();
        if (batch.isEmpty() || !getUris(type, batch, uris)) {
            batchesIter.remove();
            continue;
        }
        uriBatches.add(uris);
    }
    if (uriBatches.isEmpty()) {
        return false;
    }
    // Generate the ConcatUris, then correlate with original elements.
    // concatBatches and uriBatches were filtered in lockstep above, so the three
    // iterations below stay aligned.
    List<ConcatUriManager.ConcatData> concatUris = uriManager
            .make(ConcatUriManager.ConcatUri.fromList(gadget, uriBatches, type), !split);
    Iterator<List<Element>> elemBatchIt = concatBatches.iterator();
    Iterator<List<Uri>> uriBatchIt = uriBatches.iterator();
    for (ConcatUriManager.ConcatData concatUri : concatUris) {
        List<Element> sourceBatch = elemBatchIt.next();
        List<Uri> sourceUris = uriBatchIt.next();
        // Regardless what happens, inject a copy of the first node,
        // with new (concat) URI, immediately ahead of the first elem.
        Element firstElem = sourceBatch.get(0);
        Element elemConcat = (Element) firstElem.cloneNode(true);
        elemConcat.setAttribute(type.getSrcAttrib(), concatUri.getUri().toString());
        firstElem.getParentNode().insertBefore(elemConcat, firstElem);
        // Now for all Elements, either A) remove them or B) replace each
        // with a <script> node with snippet of code configuring/evaluating
        // the resultant inserted code. This is useful for split-JS in particular,
        // and might also be used in spriting later.
        Iterator<Uri> uriIt = sourceUris.iterator();
        for (Element elem : sourceBatch) {
            Uri elemOrigUri = uriIt.next();
            String snippet = concatUri.getSnippet(elemOrigUri);
            if (!StringUtils.isEmpty(snippet)) {
                Node scriptNode = elem.getOwnerDocument().createElement("script");
                scriptNode.setTextContent(snippet);
                elem.getParentNode().insertBefore(scriptNode, elem);
            }
            elem.getParentNode().removeChild(elem);
        }
    }
    return true;
}
From source file:org.apache.stratos.load.balancer.conf.configurator.SynapseConfigurator.java
/** * Configure main sequence send mediator endpoint. * * @param configuration Load balancer configuration. * @param inputFilePath Input file path. * @param outputFilePath Output file path. *///ww w .j ava 2 s . c o m public static void configureMainSequence(LoadBalancerConfiguration configuration, String inputFilePath, String outputFilePath) { try { if (log.isInfoEnabled()) { log.info("Configuring synapse main sequence..."); } if (log.isDebugEnabled()) { log.debug(String.format("Reading synapse main sequence: %s", inputFilePath)); } File inputFile = new File(inputFilePath); if (!inputFile.exists()) { throw new RuntimeException(String.format("File not found: %s", inputFilePath)); } FileInputStream file = new FileInputStream(inputFile); DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = builderFactory.newDocumentBuilder(); Document xmlDocument = builder.parse(file); XPath xPath = XPathFactory.newInstance().newXPath(); String expression = "/sequence/in/send/endpoint/class/parameter"; if (log.isDebugEnabled()) { log.debug(String.format("xpath expression = %s", expression)); } boolean updated = false; NodeList nodeList = (NodeList) xPath.compile(expression).evaluate(xmlDocument, XPathConstants.NODESET); for (int i = 0; i < nodeList.getLength(); i++) { Node node = nodeList.item(i); Node parameter = node.getAttributes().getNamedItem("name"); if (parameter.getNodeValue().equals("algorithmClassName")) { String defaultAlgorithmName = configuration.getDefaultAlgorithmName(); if (StringUtils.isBlank(defaultAlgorithmName)) { throw new RuntimeException( "Default algorithm name not found in load balancer configuration"); } Algorithm defaultAlgorithm = configuration.getAlgorithm(defaultAlgorithmName); if (defaultAlgorithm == null) { throw new RuntimeException("Default algorithm not found in load balancer configuration"); } String algorithmClassName = defaultAlgorithm.getClassName(); if (log.isDebugEnabled()) { 
log.debug(String.format("Setting algorithm-class-name = %s", algorithmClassName)); } node.setTextContent(algorithmClassName); updated = true; } else if (parameter.getNodeValue().equals("failover")) { String value = String.valueOf(configuration.isFailOverEnabled()); if (log.isDebugEnabled()) { log.debug(String.format("Setting failover = %s", value)); } node.setTextContent(value); updated = true; } else if (parameter.getNodeValue().equals("sessionAffinity")) { String value = String.valueOf(configuration.isSessionAffinityEnabled()); if (log.isDebugEnabled()) { log.debug(String.format("Setting session-affinity = %s", value)); } node.setTextContent(value); updated = true; } else if (parameter.getNodeValue().equals("sessionTimeout")) { String value = String.valueOf(configuration.getSessionTimeout()); if (log.isDebugEnabled()) { log.debug(String.format("Setting session-timeout = %s", value)); } node.setTextContent(value); updated = true; } } if (updated) { if (log.isDebugEnabled()) { log.debug(String.format("Updating synapse main sequence: %s", outputFilePath)); } write(xmlDocument, outputFilePath); if (log.isInfoEnabled()) { log.info("Synapse main sequence configured successfully"); } } else { throw new RuntimeException( String.format("Send mediator endpoint configuration not found: %s", inputFilePath)); } } catch (Exception e) { throw new RuntimeException("Could not configure synapse settings", e); } }
From source file:org.asqatasun.crawler.util.HeritrixAttributeValueModifier.java
@Override public Document modifyDocument(Document document, String value) { if (value == null || value.isEmpty()) { LOGGER.debug(" value is empty " + this.getClass()); return document; }//from ww w. ja v a2s .c o m try { Node parentNode = getNodeFromXpath(document); NamedNodeMap attr = parentNode.getAttributes(); Node nodeAttr = attr.getNamedItem(DEFAULT_ATTRIBUTE_NAME); if (StringUtils.isNotEmpty(value)) { nodeAttr.setTextContent(value); } else { parentNode.getParentNode().removeChild(parentNode); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Update " + getAttributeValue() + " attribute of bean " + getIdBeanParent() + " with value " + value); } } catch (XPathExpressionException ex) { LOGGER.warn(ex); } return document; }
From source file:org.asqatasun.crawler.util.HeritrixAttributeValueModifierAndEraser.java
@Override public Document modifyDocument(Document document, String value) { try {//from w w w . j ava 2 s. co m Node parentNode = getNodeFromXpath(document); NamedNodeMap attr = parentNode.getAttributes(); Node nodeAttr = attr.getNamedItem(DEFAULT_ATTRIBUTE_NAME); if (StringUtils.isNotEmpty(value)) { nodeAttr.setTextContent(value); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Update " + getAttributeValue() + " attribute of bean " + getIdBeanParent() + " with value " + value); } } else { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Delete " + getAttributeValue() + " attribute of bean " + getIdBeanParent() + " because of null or empty value "); } parentNode.getParentNode().removeChild(parentNode); } } catch (XPathExpressionException ex) { Logger.getLogger(HeritrixParameterValueModifier.class.getName()).warn(ex); } return document; }
From source file:org.asqatasun.crawler.util.HeritrixInverseBooleanAttributeValueModifier.java
@Override public Document modifyDocument(Document document, String value) { if (StringUtils.isBlank(value) || (!value.equalsIgnoreCase(Boolean.FALSE.toString()) && !value.equalsIgnoreCase(Boolean.TRUE.toString()))) { return document; }//from ww w . j a v a 2 s . c o m try { Boolean valueToSet = !Boolean.valueOf(value); Node parentNode = getNodeFromXpath(document); NamedNodeMap attr = parentNode.getAttributes(); Node nodeAttr = attr.getNamedItem(DEFAULT_ATTRIBUTE_NAME); nodeAttr.setTextContent(valueToSet.toString()); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Update " + getAttributeValue() + " attribute of bean " + getIdBeanParent() + " with value " + valueToSet.toString()); } } catch (XPathExpressionException ex) { LOGGER.warn(ex); } return document; }