List of usage examples for javax.xml.parsers DocumentBuilderFactory setExpandEntityReferences
public void setExpandEntityReferences(boolean expandEntityRef)
From source file:com.jaxio.celerio.output.XmlCodeFormatter.java
private Document parseXmlFile(String in) { try {/*from w w w . ja va 2 s .c o m*/ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setValidating(false); dbf.setExpandEntityReferences(false); // prevent dtd download... // http://stackoverflow.com/questions/155101/make-documentbuilder-parse-ignore-dtd-references dbf.setFeature("http://xml.org/sax/features/namespaces", false); dbf.setFeature("http://xml.org/sax/features/validation", false); dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false); dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false); DocumentBuilder db = dbf.newDocumentBuilder(); // // prevent dtd download... // db.setEntityResolver(new EntityResolver() { // @Override // public InputSource resolveEntity(String publicId, String systemId) // throws SAXException, IOException { // return new InputSource(new StringReader("")); // } // }); // InputSource is = new InputSource(new StringReader(in)); return db.parse(is); } catch (ParserConfigurationException e) { throw new RuntimeException(e); } catch (SAXException e) { throw new RuntimeException(e); } catch (IOException e) { throw new RuntimeException(e); } }
From source file:importer.handler.post.stages.Splitter.java
/** * Split a TEI-XML file into versions of XML * @param tei the TEI file containing versions * @return a map of version names to XML files as strings * @throws ImportException if something went wrong *//*from ww w . j a v a 2s . com*/ public Map<String, String> split(String tei) throws ImporterException { try { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); if (dbf.isExpandEntityReferences()) dbf.setExpandEntityReferences(false); DocumentBuilder db = dbf.newDocumentBuilder(); StringReader sr = new StringReader(tei); InputSource is = new InputSource(sr); Document doc = db.parse(is); root = doc.getDocumentElement(); root.setAttribute(VERSIONS, BASE); prepare(root, new Cluster(discriminator)); percolateDown(root); //verifyRule1( root ); return XMLPrinter.splitAll(doc, discriminator.drops, discriminator.removals); } catch (Exception e) { throw new ImporterException(e); } }
From source file:com.amalto.core.storage.hibernate.DefaultStorageClassLoader.java
@Override public InputStream generateEhCacheConfig() { try {//from ww w. j av a 2 s.com DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); factory.setExpandEntityReferences(false); DocumentBuilder documentBuilder = factory.newDocumentBuilder(); Document document = documentBuilder.parse(this.getClass().getResourceAsStream(EHCACHE_XML_CONFIG)); // <diskStore path="java.io.tmpdir"/> XPathExpression compile = pathFactory.compile("ehcache/diskStore"); //$NON-NLS-1$ Node node = (Node) compile.evaluate(document, XPathConstants.NODE); node.getAttributes().getNamedItem("path") //$NON-NLS-1$ .setNodeValue(dataSource.getCacheDirectory() + '/' + dataSource.getName()); return toInputStream(document); } catch (Exception e) { throw new RuntimeException(e); } }
From source file:com.amalto.core.storage.hibernate.DefaultStorageClassLoader.java
@Override public InputStream generateHibernateMapping() { if (resolver == null) { throw new IllegalStateException("Expected table resolver to be set before this method is called."); }//from ww w . jav a 2s . c o m try { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); factory.setExpandEntityReferences(false); DocumentBuilder documentBuilder = factory.newDocumentBuilder(); documentBuilder.setEntityResolver(HibernateStorage.ENTITY_RESOLVER); Document document = documentBuilder .parse(this.getClass().getResourceAsStream(HIBERNATE_MAPPING_TEMPLATE)); MappingGenerator mappingGenerator = getMappingGenerator(document, resolver); for (Map.Entry<String, Class<? extends Wrapper>> classNameToClass : registeredClasses.entrySet()) { ComplexTypeMetadata typeMetadata = knownTypes.get(classNameToClass.getKey()); if (typeMetadata != null && typeMetadata.getSuperTypes().isEmpty()) { Element classElement = typeMetadata.accept(mappingGenerator); if (classElement != null) { // Class element might be null if mapping is not applicable for this type document.getDocumentElement().appendChild(classElement); } } } return toInputStream(document); } catch (Exception e) { throw new RuntimeException(e); } }
From source file:com.amalto.core.storage.hibernate.DefaultStorageClassLoader.java
public Document generateHibernateConfiguration(RDBMSDataSource rdbmsDataSource) throws ParserConfigurationException, SAXException, IOException, XPathExpressionException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true);/*w w w. j a v a 2s . co m*/ factory.setExpandEntityReferences(false); DocumentBuilder documentBuilder = factory.newDocumentBuilder(); documentBuilder.setEntityResolver(HibernateStorage.ENTITY_RESOLVER); Document document = documentBuilder .parse(DefaultStorageClassLoader.class.getResourceAsStream(HIBERNATE_CONFIG_TEMPLATE)); String connectionUrl = rdbmsDataSource.getConnectionURL(); String userName = rdbmsDataSource.getUserName(); String driverClass = rdbmsDataSource.getDriverClassName(); RDBMSDataSource.DataSourceDialect dialectType = rdbmsDataSource.getDialectName(); String dialect = getDialect(dialectType); String password = rdbmsDataSource.getPassword(); String indexBase = rdbmsDataSource.getIndexDirectory(); int connectionPoolMinSize = rdbmsDataSource.getConnectionPoolMinSize(); int connectionPoolMaxSize = rdbmsDataSource.getConnectionPoolMaxSize(); if (connectionPoolMaxSize == 0) { LOGGER.info("No value provided for property connectionPoolMaxSize of datasource " //$NON-NLS-1$ + rdbmsDataSource.getName() + ". 
Using default value: " //$NON-NLS-1$ + RDBMSDataSourceBuilder.CONNECTION_POOL_MAX_SIZE_DEFAULT); connectionPoolMaxSize = RDBMSDataSourceBuilder.CONNECTION_POOL_MAX_SIZE_DEFAULT; } setPropertyValue(document, "hibernate.connection.url", connectionUrl); //$NON-NLS-1$ setPropertyValue(document, "hibernate.connection.username", userName); //$NON-NLS-1$ setPropertyValue(document, "hibernate.connection.driver_class", driverClass); //$NON-NLS-1$ setPropertyValue(document, "hibernate.dialect", dialect); //$NON-NLS-1$ setPropertyValue(document, "hibernate.connection.password", password); //$NON-NLS-1$ // Sets up DBCP pool features setPropertyValue(document, "hibernate.dbcp.initialSize", String.valueOf(connectionPoolMinSize)); //$NON-NLS-1$ setPropertyValue(document, "hibernate.dbcp.maxActive", String.valueOf(connectionPoolMaxSize)); //$NON-NLS-1$ setPropertyValue(document, "hibernate.dbcp.maxIdle", String.valueOf(10)); //$NON-NLS-1$ setPropertyValue(document, "hibernate.dbcp.maxTotal", String.valueOf(connectionPoolMaxSize)); //$NON-NLS-1$ setPropertyValue(document, "hibernate.dbcp.maxWaitMillis", "60000"); //$NON-NLS-1$ //$NON-NLS-2$ Node sessionFactoryElement = document.getElementsByTagName("session-factory").item(0); //$NON-NLS-1$ if (rdbmsDataSource.supportFullText()) { /* <property name="hibernate.search.default.directory_provider" value="filesystem"/> <property name="hibernate.search.default.indexBase" value="/var/lucene/indexes"/> */ addProperty(document, sessionFactoryElement, "hibernate.search.default.directory_provider", //$NON-NLS-1$ "filesystem"); //$NON-NLS-1$ addProperty(document, sessionFactoryElement, "hibernate.search.default.indexBase", //$NON-NLS-1$ indexBase + '/' + storageName); addProperty(document, sessionFactoryElement, "hibernate.search.default.sourceBase", //$NON-NLS-1$ indexBase + '/' + storageName); addProperty(document, sessionFactoryElement, "hibernate.search.default.source", ""); //$NON-NLS-1$ //$NON-NLS-2$ addProperty(document, 
sessionFactoryElement, "hibernate.search.default.exclusive_index_use", "false"); //$NON-NLS-1$ //$NON-NLS-2$ addProperty(document, sessionFactoryElement, "hibernate.search.lucene_version", "LUCENE_CURRENT"); //$NON-NLS-1$ //$NON-NLS-2$ } else { addProperty(document, sessionFactoryElement, "hibernate.search.autoregister_listeners", "false"); //$NON-NLS-1$ //$NON-NLS-2$ } if (dataSource.getCacheDirectory() != null && !dataSource.getCacheDirectory().isEmpty()) { /* <!-- Second level cache --> <property name="hibernate.cache.use_second_level_cache">true</property> <property name="hibernate.cache.provider_class">net.sf.ehcache.hibernate.EhCacheProvider</property> <property name="hibernate.cache.use_query_cache">true</property> <property name="net.sf.ehcache.configurationResourceName">ehcache.xml</property> */ addProperty(document, sessionFactoryElement, "hibernate.cache.use_second_level_cache", "true"); //$NON-NLS-1$ //$NON-NLS-2$ addProperty(document, sessionFactoryElement, "hibernate.cache.provider_class", //$NON-NLS-1$ "net.sf.ehcache.hibernate.EhCacheProvider"); //$NON-NLS-1$ addProperty(document, sessionFactoryElement, "hibernate.cache.use_query_cache", "true"); //$NON-NLS-1$ //$NON-NLS-2$ addProperty(document, sessionFactoryElement, "net.sf.ehcache.configurationResourceName", "ehcache.xml"); //$NON-NLS-1$ //$NON-NLS-2$ } else { if (LOGGER.isDebugEnabled()) { LOGGER.debug( "Hibernate configuration does not define second level cache extensions due to datasource configuration."); //$NON-NLS-1$ } addProperty(document, sessionFactoryElement, "hibernate.cache.use_second_level_cache", "false"); //$NON-NLS-1$ //$NON-NLS-2$ } // Override default configuration with values from configuration Map<String, String> advancedProperties = rdbmsDataSource.getAdvancedProperties(); for (Map.Entry<String, String> currentAdvancedProperty : advancedProperties.entrySet()) { setPropertyValue(document, currentAdvancedProperty.getKey(), currentAdvancedProperty.getValue()); } // Order of 
elements highly matters and mapping shall be declared after <property/> and before <event/>. Element mapping = document.createElement("mapping"); //$NON-NLS-1$ Attr resource = document.createAttribute("resource"); //$NON-NLS-1$ resource.setValue(HIBERNATE_MAPPING); mapping.getAttributes().setNamedItem(resource); sessionFactoryElement.appendChild(mapping); if (rdbmsDataSource.supportFullText()) { addEvent(document, sessionFactoryElement, "post-update", //$NON-NLS-1$ "org.hibernate.search.event.FullTextIndexEventListener"); //$NON-NLS-1$ addEvent(document, sessionFactoryElement, "post-insert", //$NON-NLS-1$ "org.hibernate.search.event.FullTextIndexEventListener"); //$NON-NLS-1$ addEvent(document, sessionFactoryElement, "post-delete", //$NON-NLS-1$ "org.hibernate.search.event.FullTextIndexEventListener"); //$NON-NLS-1$ } else if (LOGGER.isDebugEnabled()) { LOGGER.debug( "Hibernate configuration does not define full text extensions due to datasource configuration."); //$NON-NLS-1$ } return document; }
From source file:com.ecyrd.jspwiki.auth.authorize.XMLGroupDatabase.java
private void buildDOM() throws WikiSecurityException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setValidating(false);/*w w w .j a va 2 s . c o m*/ factory.setExpandEntityReferences(false); factory.setIgnoringComments(true); factory.setNamespaceAware(false); try { m_dom = factory.newDocumentBuilder().parse(m_file); log.debug("Database successfully initialized"); m_lastModified = m_file.lastModified(); m_lastCheck = System.currentTimeMillis(); } catch (ParserConfigurationException e) { log.error("Configuration error: " + e.getMessage()); } catch (SAXException e) { log.error("SAX error: " + e.getMessage()); } catch (FileNotFoundException e) { log.info("Group database not found; creating from scratch..."); } catch (IOException e) { log.error("IO error: " + e.getMessage()); } if (m_dom == null) { try { // // Create the DOM from scratch // m_dom = factory.newDocumentBuilder().newDocument(); m_dom.appendChild(m_dom.createElement("groups")); } catch (ParserConfigurationException e) { log.fatal("Could not create in-memory DOM"); } } // Ok, now go and read this sucker in if (m_dom != null) { NodeList groupNodes = m_dom.getElementsByTagName(GROUP_TAG); for (int i = 0; i < groupNodes.getLength(); i++) { Element groupNode = (Element) groupNodes.item(i); String groupName = groupNode.getAttribute(GROUP_NAME); if (groupName == null) { log.warn("Detected null group name in XMLGroupDataBase. Check your group database."); } else { Group group = buildGroup(groupNode, groupName); m_groups.put(groupName, group); } } } }
From source file:com.snaplogic.snaps.checkfree.SoapExecuteTest.java
/**
 * Builds the {@link DocumentBuilderFactory} used by this test: namespace
 * aware, coalescing, and with entity-reference expansion enabled.
 *
 * @return the configured factory
 */
private DocumentBuilderFactory getDocumentBuilderFactory() {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    factory.setNamespaceAware(true);
    factory.setCoalescing(true);
    factory.setExpandEntityReferences(true);
    return factory;
}
From source file:hydrograph.ui.engine.ui.util.UiConverterUtil.java
/**
 * Unmarshals the given XML file (after parameter substitution) into a
 * {@link Graph} and, when successful, registers its components in the
 * component repository. Doctype declarations are disallowed (XXE hardening).
 *
 * @param inputFile the engine XML file to unmarshal
 * @return the unmarshalled graph, or null if unmarshalling produced none
 * @throws JAXBException if JAXB unmarshalling fails
 * @throws ParserConfigurationException if the parser cannot be configured
 * @throws SAXException on malformed XML
 * @throws IOException on read failure
 */
private Graph unMarshall(File inputFile)
        throws JAXBException, ParserConfigurationException, SAXException, IOException {
    LOGGER.debug("Un-Marshaling generated object into target XML");
    parseXML(inputFile);
    String inputFileAsString = replaceParametersWithDefaultValues(inputFile);
    DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
    builderFactory.setExpandEntityReferences(false);
    builderFactory.setNamespaceAware(true);
    builderFactory.setFeature(Constants.DISALLOW_DOCTYPE_DECLARATION, true);
    DocumentBuilder documentBuilder = builderFactory.newDocumentBuilder();
    Graph graph;
    // FIX: getBytes() previously used the platform default charset; the XML text
    // is now encoded explicitly as UTF-8. try-with-resources also guarantees the
    // stream is closed on failure paths (it was previously closed only on success).
    try (ByteArrayInputStream byteStream = new ByteArrayInputStream(
            inputFileAsString.getBytes(java.nio.charset.StandardCharsets.UTF_8))) {
        InputSource inputSource = new InputSource(byteStream);
        Document document = documentBuilder.parse(inputSource);
        JAXBContext jaxbContext = JAXBContext.newInstance(Graph.class);
        Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
        graph = (Graph) jaxbUnmarshaller.unmarshal(document);
    }
    if (graph != null) {
        componentRepo.genrateComponentRepo(graph);
    }
    return graph;
}
From source file:com.esri.geoportal.harvester.migration.MigrationDataBuilder.java
private Document strToDom(String strXml) throws ParserConfigurationException, SAXException, IOException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); factory.setFeature("http://xml.org/sax/features/external-general-entities", false); factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); factory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false); factory.setXIncludeAware(false);//from www . j a v a 2 s . com factory.setExpandEntityReferences(false); factory.setNamespaceAware(true); DocumentBuilder builder = factory.newDocumentBuilder(); return builder.parse(new InputSource(new StringReader(strXml))); }
From source file:hydrograph.ui.propertywindow.widgets.customwidgets.schema.GridRowLoader.java
/**
 * For importing engine-XML: imports schema rows from the schema file into
 * the schema grid. The concrete {@code GridRow} subtype created for each
 * field is selected by {@code gridRowType}.
 *
 * @return the imported rows; empty if the file name is blank or if
 *         validation/parsing/unmarshalling fails before any row is built
 */
public List<GridRow> importGridRowsFromXML() {
    List<GridRow> schemaGridRowListToImport = new ArrayList<GridRow>();
    if (StringUtils.isNotBlank(schemaFile.getPath())) {
        // Both streams are closed automatically by try-with-resources.
        try (InputStream xml = new FileInputStream(schemaFile);
                InputStream xsd = new FileInputStream(SCHEMA_CONFIG_XSD_PATH)) {
            if (validateXML(xml, xsd)) {
                // Hardened parser: no entity expansion, doctype declarations rejected.
                DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
                builderFactory.setExpandEntityReferences(false);
                builderFactory.setNamespaceAware(true);
                builderFactory.setFeature(Constants.DISALLOW_DOCTYPE_DECLARATION, true);
                DocumentBuilder documentBuilder = builderFactory.newDocumentBuilder();
                Document document = documentBuilder.parse(schemaFile);
                JAXBContext jaxbContext = JAXBContext.newInstance(Schema.class);
                Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
                Schema schema = (Schema) jaxbUnmarshaller.unmarshal(document);
                fields = schema.getFields();
                List<Field> fieldsList = fields.getField();
                GridRow gridRow = null;
                schemaGridRowListToImport = new ArrayList<GridRow>();
                // Dispatch on the grid row type: each branch converts every
                // field with the matching row factory.
                if (Messages.GENERIC_GRID_ROW.equals(gridRowType)) {
                    for (Field field : fieldsList) {
                        gridRow = getBasicSchemaGridRow(field);
                        schemaGridRowListToImport.add(gridRow);
                    }
                } else if (Messages.FIXEDWIDTH_GRID_ROW.equals(gridRowType)) {
                    for (Field field : fieldsList) {
                        schemaGridRowListToImport.add(getFixedWidthGridRow(field));
                    }
                } else if (Messages.GENERATE_RECORD_GRID_ROW.equals(gridRowType)) {
                    for (Field field : fieldsList) {
                        gridRow = getGenerateRecordGridRow(field);
                        schemaGridRowListToImport.add(gridRow);
                    }
                } else if (Messages.MIXEDSCHEME_GRID_ROW.equals(gridRowType)) {
                    for (Field field : fieldsList) {
                        gridRow = getMixedSchemeGridRow(field);
                        schemaGridRowListToImport.add(gridRow);
                    }
                } else if (Messages.XPATH_GRID_ROW.equals(gridRowType)) {
                    for (Field field : fieldsList) {
                        // XPath rows additionally carry the field's XPath expression.
                        gridRow = new XPathGridRow();
                        populateCommonFields(gridRow, field);
                        ((XPathGridRow) gridRow).setXPath(field.getAbsoluteOrRelativeXpath());
                        schemaGridRowListToImport.add(gridRow);
                    }
                }
            }
        } catch (JAXBException e1) {
            logger.warn(Messages.IMPORT_XML_FORMAT_ERROR);
            return schemaGridRowListToImport;
        } catch (IOException ioException) {
            logger.warn(Messages.IMPORT_XML_ERROR);
            return schemaGridRowListToImport;
        } catch (ParserConfigurationException | SAXException exception) {
            logger.warn("Doctype is not allowed in schema files", exception);
            return schemaGridRowListToImport;
        }
    } else {
        logger.warn(Messages.EXPORT_XML_EMPTY_FILENAME);
    }
    return schemaGridRowListToImport;
}