Example usage for javax.xml.xpath XPathFactory newXPath

Introduction

On this page you can find example usage for javax.xml.xpath XPathFactory newXPath.

Prototype

public abstract XPath newXPath();

Document

Return a new XPath using the underlying object model determined when the XPathFactory was instantiated.
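
Before the full examples, here is a minimal, self-contained sketch of the typical pattern; the XML string, class name, and element names are illustrative only. It parses a small DOM document, obtains an XPath from the shared XPathFactory, and evaluates an expression against the document. Per the JDK documentation, the returned XPath object is not thread-safe and not reentrant.

import java.io.StringReader;

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;

import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

public class NewXPathExample {
    public static void main(String[] args) throws Exception {
        // Illustrative XML; any well-formed document works here.
        String xml = "<books><book><title>A</title></book><book><title>B</title></book></books>";
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                .parse(new InputSource(new StringReader(xml)));

        // newXPath() returns a fresh XPath bound to the object model (DOM by default)
        // chosen when the factory was created; each thread should use its own instance.
        XPath xpath = XPathFactory.newInstance().newXPath();
        NodeList titles = (NodeList) xpath.evaluate("//book/title", doc, XPathConstants.NODESET);
        for (int i = 0; i < titles.getLength(); i++) {
            System.out.println(titles.item(i).getTextContent());
        }
    }
}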

Usage

From source file:org.apache.zeppelin.sap.universe.UniverseClient.java

public List<UniverseQueryPrompt> getParameters(String token, String queryId) throws UniverseException {
    HttpGet httpGet = new HttpGet(String.format("%s%s%s%s", apiUrl, "/sl/v1/queries/", queryId, "/parameters"));
    setHeaders(httpGet, token);
    HttpResponse response = null;
    try {
        response = httpClient.execute(httpGet);
        if (response.getStatusLine().getStatusCode() != 200) {
            throw new UniverseException(String.format(errorMessageTemplate,
                    "UniverseClient " + "(get parameters): Request failed\n",
                    EntityUtils.toString(response.getEntity())));
        }
    } catch (IOException e) {
        throw new UniverseException(String.format(errorMessageTemplate,
                "UniverseClient " + "(get parameters): Request failed", ExceptionUtils.getStackTrace(e)));
    }

    try (InputStream xmlStream = response.getEntity().getContent()) {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document doc = builder.parse(xmlStream);
        XPathFactory xPathfactory = XPathFactory.newInstance();
        XPath xpath = xPathfactory.newXPath();
        XPathExpression expr = xpath.compile("//parameters/parameter");
        NodeList parametersNodes = (NodeList) expr.evaluate(doc, XPathConstants.NODESET);
        if (parametersNodes != null) {
            return parseParameters(parametersNodes);
        } else {
            throw new UniverseException(String.format(errorMessageTemplate,
                    "UniverseClient " + "(get parameters): Response processing failed"));
        }
    } catch (IOException e) {
        throw new UniverseException(String.format(errorMessageTemplate,
                "UniverseClient " + "(get parameters): Response processing failed",
                ExceptionUtils.getStackTrace(e)));
    } catch (ParserConfigurationException | SAXException | XPathExpressionException e) {
        throw new UniverseException(String.format(errorMessageTemplate,
                "UniverseClient " + "(get parameters): Response processing failed",
                ExceptionUtils.getStackTrace(e)));
    }
}

From source file:com.connexta.arbitro.ctx.xacml3.XACML3EvaluationCtx.java

private Set<String> getChildXPaths(Node root, String xPath) {

    Set<String> xPaths = new HashSet<String>();
    NamespaceContext namespaceContext = null;

    XPathFactory factory = XPathFactory.newInstance();
    XPath xpath = factory.newXPath();

    if (namespaceContext == null) {

        //see if the request root is in a namespace
        String namespace = null;
        if (root != null) {
            namespace = root.getNamespaceURI();
        }
        // namespaces are used, so we need to look up the correct
        // prefix to use in the search string
        NamedNodeMap namedNodeMap = root.getAttributes();

        Map<String, String> nsMap = new HashMap<String, String>();
        if (namedNodeMap != null) {
            for (int i = 0; i < namedNodeMap.getLength(); i++) {
                Node n = namedNodeMap.item(i);
                // record each attribute (typically an xmlns declaration)
                // as a prefix -> namespace URI mapping
                String prefix = DOMHelper.getLocalName(n);
                String nodeValue = n.getNodeValue();
                nsMap.put(prefix, nodeValue);
            }
        }

        // if the root element is in one of the standard XACML request namespaces,
        // bind it to the default 'xacml' prefix
        if (XACMLConstants.REQUEST_CONTEXT_3_0_IDENTIFIER.equals(namespace)
                || XACMLConstants.REQUEST_CONTEXT_2_0_IDENTIFIER.equals(namespace)
                || XACMLConstants.REQUEST_CONTEXT_1_0_IDENTIFIER.equals(namespace)) {
            nsMap.put("xacml", namespace);
        }

        namespaceContext = new DefaultNamespaceContext(nsMap);
    }

    xpath.setNamespaceContext(namespaceContext);

    try {
        XPathExpression expression = xpath.compile(xPath);
        NodeList matches = (NodeList) expression.evaluate(root, XPathConstants.NODESET);
        if (matches != null && matches.getLength() > 0) {

            for (int i = 0; i < matches.getLength(); i++) {
                String text = null;
                Node node = matches.item(i);
                short nodeType = node.getNodeType();

                // see if this is straight text, or a node with data under
                // it and then get the values accordingly
                if ((nodeType == Node.CDATA_SECTION_NODE) || (nodeType == Node.COMMENT_NODE)
                        || (nodeType == Node.TEXT_NODE) || (nodeType == Node.ATTRIBUTE_NODE)) {
                    // there is no child to this node
                    text = node.getNodeValue();
                } else {

                    // the data is in a child node
                    text = "/" + DOMHelper.getLocalName(node);
                }
                String newXPath = '(' + xPath + ")[" + (i + 1) + ']';
                xPaths.add(newXPath);
            }
        }
    } catch (Exception e) {
        // TODO
    }

    return xPaths;
}

From source file:fmiquerytest.Coordinates.java

static List<routeStep> getSteps(String gQuery) {
    List<routeStep> list = new ArrayList<>();
    try {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document doc = builder.parse(gQuery);

        XPathFactory xPathfactory = XPathFactory.newInstance();
        XPath xpath = xPathfactory.newXPath();
        String expr = "/DirectionsResponse/route[" + (FmiQueryTest.gRouteOption + 1)
                + "]/leg/step/start_location/lat/text()";
        // System.out.println(expr);
        List<String> start_lats = Parser.getValueList(doc, xpath, expr);
        expr = "/DirectionsResponse/route[" + (FmiQueryTest.gRouteOption + 1)
                + "]/leg/step/start_location/lng/text()";
        // System.out.println(expr);
        List<String> start_lngs = Parser.getValueList(doc, xpath, expr);
        expr = "/DirectionsResponse/route[" + (FmiQueryTest.gRouteOption + 1)
                + "]/leg/step/end_location/lat/text()";
        List<String> end_lats = Parser.getValueList(doc, xpath, expr);
        expr = "/DirectionsResponse/route[" + (FmiQueryTest.gRouteOption + 1)
                + "]/leg/step/end_location/lng/text()";
        List<String> end_lngs = Parser.getValueList(doc, xpath, expr);
        expr = "/DirectionsResponse/route[" + (FmiQueryTest.gRouteOption + 1)
                + "]/leg/step/distance/value/text()";
        List<String> dists = Parser.getValueList(doc, xpath, expr);
        expr = "/DirectionsResponse/route[" + (FmiQueryTest.gRouteOption + 1)
                + "]/leg/step/duration/value/text()";
        List<String> durs = Parser.getValueList(doc, xpath, expr);
        expr = "/DirectionsResponse/route[" + (FmiQueryTest.gRouteOption + 1)
                + "]/leg/step/polyline/points/text()";
        List<String> polys = Parser.getValueList(doc, xpath, expr);
        expr = "/DirectionsResponse/route[" + (FmiQueryTest.gRouteOption + 1)
                + "]/leg/step/html_instructions/text()";
        List<String> insts = Parser.getValueList(doc, xpath, expr);

        for (int i = 0; i < start_lats.size(); i++) {
            double slat = Double.parseDouble(start_lats.get(i));
            double slon = Double.parseDouble(start_lngs.get(i));
            Coordinates start = new Coordinates(slat, slon);
            double elat = Double.parseDouble(end_lats.get(i));
            double elon = Double.parseDouble(end_lngs.get(i));
            Coordinates end = new Coordinates(elat, elon);
            int dist = parseInt(dists.get(i));
            int dur = parseInt(durs.get(i));
            String poly = polys.get(i);
            String inst = insts.get(i);

            list.add(new routeStep(start, end, dist, dur, poly, inst));
        }
    } catch (ParserConfigurationException | SAXException | IOException ex) {
        Logger.getLogger(FmiQueryTest.class.getName()).log(Level.SEVERE, null, ex);
    }
    return list;
}

From source file:org.apache.zeppelin.sap.universe.UniverseClient.java

public Map<String, UniverseNodeInfo> getUniverseNodesInfo(String token, String universeName)
        throws UniverseException {
    UniverseInfo universeInfo = universesMap.get(universeName);
    if (universeInfo != null && StringUtils.isNotBlank(universeInfo.getId())) {
        Map<String, UniverseNodeInfo> universeNodeInfoMap = universeInfosMap.get(universeName);
        if (universeNodeInfoMap != null && universesInfoUpdatedMap.containsKey(universeName)
                && !isExpired(universesInfoUpdatedMap.get(universeName))) {
            return universeNodeInfoMap;
        } else {
            universeNodeInfoMap = new HashMap<>();
        }
        try {
            HttpGet httpGet = new HttpGet(
                    String.format("%s%s%s", apiUrl, "/sl/v1/universes/", universeInfo.getId()));
            setHeaders(httpGet, token);
            HttpResponse response = httpClient.execute(httpGet);

            if (response.getStatusLine().getStatusCode() == 200) {
                try (InputStream xmlStream = response.getEntity().getContent()) {
                    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                    DocumentBuilder builder = factory.newDocumentBuilder();
                    Document doc = builder.parse(xmlStream);
                    XPathFactory xPathfactory = XPathFactory.newInstance();
                    XPath xpath = xPathfactory.newXPath();
                    XPathExpression expr = xpath.compile("//outline/folder");
                    XPathExpression exprRootItems = xpath.compile("//outline/item");
                    NodeList universeInfoNodes = (NodeList) expr.evaluate(doc, XPathConstants.NODESET);
                    NodeList universeRootInfoNodes = (NodeList) exprRootItems.evaluate(doc,
                            XPathConstants.NODESET);
                    if (universeInfoNodes != null) {
                        parseUniverseInfo(universeInfoNodes, universeNodeInfoMap);
                    }
                    if (universeRootInfoNodes != null) {
                        parseUniverseInfo(universeRootInfoNodes, universeNodeInfoMap);
                    }
                } catch (Exception e) {
                    throw new UniverseException(String.format(errorMessageTemplate,
                            "UniverseClient " + "(get universe nodes info): Response processing failed",
                            ExceptionUtils.getStackTrace(e)));
                }
            }
        } catch (IOException e) {
            throw new UniverseException(String.format(errorMessageTemplate,
                    "UniverseClient " + "(get universe nodes info): Request failed",
                    ExceptionUtils.getStackTrace(e)));
        }
        universeInfosMap.put(universeName, universeNodeInfoMap);
        universesInfoUpdatedMap.put(universeName, System.currentTimeMillis());

        return universeNodeInfoMap;
    }
    return Collections.emptyMap();

}

From source file:org.kuali.mobility.maps.service.LocationServiceImpl.java

private List<MapsGroup> buildMapsGroupsFromXml(String xml) {
    List<MapsGroup> mapsGroups = new ArrayList<MapsGroup>();
    XPathFactory factory = XPathFactory.newInstance();
    XPath xPath = factory.newXPath();
    DocumentBuilderFactory dbf;
    try {
        dbf = DocumentBuilderFactory.newInstance();
        Document doc = dbf.newDocumentBuilder().parse(new InputSource(new StringReader(xml)));
        //Get all markers from XML
        NodeList nodes = (NodeList) xPath.evaluate("/root/group", doc, XPathConstants.NODESET);
        /*
         * Iterate over the nodes; there should be only one group, but if there are several they simply become additional root nodes.
         */
        Map<String, MapsGroup> map = new HashMap<String, MapsGroup>();
        mapsGroups = this.iterateMapsGroupNodes(nodes, map, null);
    } catch (Exception e) {
        //         LOG.info("Error loading data: ");
    }
    return mapsGroups;
}

From source file:org.apache.zeppelin.sap.universe.UniverseClient.java

private void loadUniverses(String token, int offset, Map<String, UniverseInfo> universesMap)
        throws UniverseException {
    int limit = 50;
    HttpGet httpGet = new HttpGet(
            String.format("%s%s?offset=%s&limit=%s", apiUrl, "/sl/v1/universes", offset, limit));
    setHeaders(httpGet, token);
    HttpResponse response = null;
    try {
        response = httpClient.execute(httpGet);
    } catch (Exception e) {
        throw new UniverseException(String.format(errorMessageTemplate,
                "UniverseClient " + "(get universes): Request failed", ExceptionUtils.getStackTrace(e)));
    }
    if (response != null && response.getStatusLine().getStatusCode() == 200) {
        try (InputStream xmlStream = response.getEntity().getContent()) {
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            DocumentBuilder builder = factory.newDocumentBuilder();
            Document doc = builder.parse(xmlStream);
            XPathFactory xPathfactory = XPathFactory.newInstance();
            XPath xpath = xPathfactory.newXPath();
            XPathExpression expr = xpath.compile("//universe");
            NodeList universesNodes = (NodeList) expr.evaluate(doc, XPathConstants.NODESET);
            if (universesNodes != null) {
                int count = universesNodes.getLength();
                for (int i = 0; i < count; i++) {
                    Node universe = universesNodes.item(i);
                    if (universe.hasChildNodes()) {
                        NodeList universeParameters = universe.getChildNodes();
                        int parametersCount = universeParameters.getLength();
                        String id = null;
                        String name = null;
                        String type = null;
                        for (int j = 0; j < parametersCount; j++) {
                            Node parameterNode = universeParameters.item(j);
                            if (parameterNode.getNodeType() == Node.ELEMENT_NODE) {
                                if (parameterNode.getNodeName().equalsIgnoreCase("id")) {
                                    id = parameterNode.getTextContent();
                                    continue;
                                }
                                if (parameterNode.getNodeName().equalsIgnoreCase("name")) {
                                    name = parameterNode.getTextContent();
                                    continue;
                                }
                                if (parameterNode.getNodeName().equalsIgnoreCase("type")) {
                                    type = parameterNode.getTextContent();
                                    continue;
                                }
                            }
                        }
                        if (StringUtils.isNotBlank(type)) {
                            name = name.replaceAll(String.format("\\.%s$", type), StringUtils.EMPTY);
                        }
                        universesMap.put(name, new UniverseInfo(id, name, type));
                    }
                }
                if (count == limit) {
                    offset += limit;
                    loadUniverses(token, offset, universesMap);
                }
            }
        } catch (IOException e) {
            throw new UniverseException(String.format(errorMessageTemplate,
                    "UniverseClient " + "(get universes): Response processing failed",
                    ExceptionUtils.getStackTrace(e)));
        } catch (ParserConfigurationException | SAXException | XPathExpressionException e) {
            throw new UniverseException(String.format(errorMessageTemplate,
                    "UniverseClient " + "(get universes): Response processing failed",
                    ExceptionUtils.getStackTrace(e)));
        }
    }
}

From source file:de.bps.course.nodes.vc.provider.adobe.AdobeConnectProvider.java

private Object evaluate(Document responseDoc, String expression, QName type) {
    if (responseDoc == null)
        return null;
    XPathFactory factory = XPathFactory.newInstance();
    XPath xpath = factory.newXPath();
    XPathExpression expr;
    Object result;
    try {
        expr = xpath.compile(expression);
        result = expr.evaluate(responseDoc, type);
    } catch (XPathExpressionException e) {
        result = null;
    }

    return result;
}

From source file:dk.netarkivet.harvester.harvesting.WARCWriterProcessor.java

/**
 * Return relevant values as header-like fields (here ANVLRecord, but spec-defined "application/warc-fields" type
 * when written). Field names come from DCMI Terms and the WARC/0.17 specification.
 *
 * @see org.archive.crawler.framework.WriterPoolProcessor#getFirstrecordBody(java.io.File)
 */
@Override
protected String getFirstrecordBody(File orderFile) {
    ANVLRecord record = new ANVLRecord(7);
    record.addLabelValue("software", "Heritrix/" + Heritrix.getVersion() + " http://crawler.archive.org");

    try {
        InetAddress host = InetAddress.getLocalHost();
        record.addLabelValue("ip", host.getHostAddress());
        record.addLabelValue("hostname", host.getCanonicalHostName());
    } catch (UnknownHostException e) {
        logger.log(Level.WARNING, "unable top obtain local crawl engine host", e);
    }

    // conforms to ISO 28500:2009 as of May 2009
    // as described at http://bibnum.bnf.fr/WARC/
    // latest draft as of November 2008
    record.addLabelValue("format", "WARC File Format 1.0");
    record.addLabelValue("conformsTo", "http://bibnum.bnf.fr/WARC/WARC_ISO_28500_version1_latestdraft.pdf");

    // Get other values from order.xml
    try {
        Document doc = XmlUtils.getDocument(orderFile);
        addIfNotBlank(record, "operator", XmlUtils.xpathOrNull(doc, "//meta/operator"));
        addIfNotBlank(record, "publisher", XmlUtils.xpathOrNull(doc, "//meta/organization"));
        addIfNotBlank(record, "audience", XmlUtils.xpathOrNull(doc, "//meta/audience"));
        addIfNotBlank(record, "isPartOf", XmlUtils.xpathOrNull(doc, "//meta/name"));

        // disabling "created" field per HER-1634
        // though it's theoretically useful as a means of distinguishing
        // one crawl from another, the current usage/specification is too
        // vague... in particular a 'created' field in the 'warcinfo' is
        // reasonable to interpret as applying to the WARC-unit, rather
        // than the crawl-job-unit so we remove it and see if anyone
        // complains or makes a case for restoring it in a less-ambiguous
        // manner
        // String rawDate = XmlUtils.xpathOrNull(doc,"//meta/date");
        // if(StringUtils.isNotBlank(rawDate)) {
        // Date date;
        // try {
        // date = ArchiveUtils.parse14DigitDate(rawDate);
        // addIfNotBlank(record,"created",ArchiveUtils.getLog14Date(date));
        // } catch (ParseException e) {
        // logger.log(Level.WARNING,"obtaining warc created date",e);
        // }
        // }

        addIfNotBlank(record, "description", XmlUtils.xpathOrNull(doc, "//meta/description"));
        addIfNotBlank(record, "robots",
                XmlUtils.xpathOrNull(doc, "//newObject[@name='robots-honoring-policy']/string[@name='type']"));
        addIfNotBlank(record, "http-header-user-agent",
                XmlUtils.xpathOrNull(doc, "//map[@name='http-headers']/string[@name='user-agent']"));
        addIfNotBlank(record, "http-header-from",
                XmlUtils.xpathOrNull(doc, "//map[@name='http-headers']/string[@name='from']"));
        if (metadataMap == null) {
            //metadataMap = getMetadataItems();
            XPathFactory factory = XPathFactory.newInstance();
            XPath xpath = factory.newXPath();
            XPathExpression expr = xpath.compile(H1HeritrixTemplate.METADATA_ITEMS_XPATH);
            Node node = (Node) expr.evaluate(doc, XPathConstants.NODE);
            //NodeList nodeList = (NodeList) expr.evaluate(doc, XPathConstants.NODESET);
            //Node node = nodeList.item(0);
            if (node != null) {
                NodeList nodeList = node.getChildNodes();
                if (nodeList != null) {
                    metadataMap = new HashMap();
                    for (int i = 0; i < nodeList.getLength(); ++i) {
                        node = nodeList.item(i);
                        if (node.getNodeType() == Node.ELEMENT_NODE) {
                            String typeName = node.getNodeName();
                            if ("string".equals(typeName)) {
                                Node attribute = node.getAttributes().getNamedItem("name");
                                if (attribute != null && attribute.getNodeType() == Node.ATTRIBUTE_NODE) {
                                    String key = attribute.getNodeValue();
                                    if (key != null && key.length() > 0) {
                                        String value = node.getTextContent();
                                        metadataMap.put(key, value);
                                        // debug
                                        //System.out.println(key + "=" + value);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    } catch (IOException e) {
        logger.log(Level.WARNING, "Error obtaining warcinfo", e);
    } catch (XPathExpressionException e) {
        logger.log(Level.WARNING, "Error obtaining metadata items", e);
    }

    // add fields from harvestInfo.xml version 0.4
    /*
     * <harvestInfo> <version>0.4</version> <jobId>1</jobId> <priority>HIGHPRIORITY</priority>
     * <harvestNum>0</harvestNum> <origHarvestDefinitionID>1</origHarvestDefinitionID>
     * <maxBytesPerDomain>500000000</maxBytesPerDomain> <maxObjectsPerDomain>2000</maxObjectsPerDomain>
     * <orderXMLName>default_orderxml</orderXMLName>
     * <origHarvestDefinitionName>netarkivet</origHarvestDefinitionName> <scheduleName>Once_a_week</scheduleName>
     * <harvestFilenamePrefix>1-1</harvestFilenamePrefix> <jobSubmitDate>Some date</jobSubmitDate>
     * <performer>undefined</performer> </harvestInfo>
     */
    String netarchiveSuiteComment = "#added by NetarchiveSuite "
            + dk.netarkivet.common.Constants.getVersionString();
    ANVLRecord recordNAS = new ANVLRecord(7);

    if (metadataMap != null) {
        // Add the data from the metadataMap to the WarcInfoRecord.
        recordNAS.addLabelValue(HARVESTINFO_VERSION, (String) metadataMap.get(HARVESTINFO_VERSION));
        recordNAS.addLabelValue(HARVESTINFO_JOBID, (String) metadataMap.get(HARVESTINFO_JOBID));
        recordNAS.addLabelValue(HARVESTINFO_CHANNEL, (String) metadataMap.get(HARVESTINFO_CHANNEL));
        recordNAS.addLabelValue(HARVESTINFO_HARVESTNUM, (String) metadataMap.get(HARVESTINFO_HARVESTNUM));
        recordNAS.addLabelValue(HARVESTINFO_ORIGHARVESTDEFINITIONID,
                (String) metadataMap.get(HARVESTINFO_ORIGHARVESTDEFINITIONID));
        recordNAS.addLabelValue(HARVESTINFO_MAXBYTESPERDOMAIN,
                (String) metadataMap.get(HARVESTINFO_MAXBYTESPERDOMAIN));

        recordNAS.addLabelValue(HARVESTINFO_MAXOBJECTSPERDOMAIN,
                (String) metadataMap.get(HARVESTINFO_MAXOBJECTSPERDOMAIN));
        recordNAS.addLabelValue(HARVESTINFO_ORDERXMLNAME, (String) metadataMap.get(HARVESTINFO_ORDERXMLNAME));
        recordNAS.addLabelValue(HARVESTINFO_ORIGHARVESTDEFINITIONNAME,
                (String) metadataMap.get(HARVESTINFO_ORIGHARVESTDEFINITIONNAME));

        if (metadataMap.containsKey(HARVESTINFO_SCHEDULENAME)) {
            recordNAS.addLabelValue(HARVESTINFO_SCHEDULENAME,
                    (String) metadataMap.get(HARVESTINFO_SCHEDULENAME));
        }
        recordNAS.addLabelValue(HARVESTINFO_HARVESTFILENAMEPREFIX,
                (String) metadataMap.get(HARVESTINFO_HARVESTFILENAMEPREFIX));

        recordNAS.addLabelValue(HARVESTINFO_JOBSUBMITDATE, (String) metadataMap.get(HARVESTINFO_JOBSUBMITDATE));

        if (metadataMap.containsKey(HARVESTINFO_PERFORMER)) {
            recordNAS.addLabelValue(HARVESTINFO_PERFORMER, (String) metadataMap.get(HARVESTINFO_PERFORMER));
        }

        if (metadataMap.containsKey(HARVESTINFO_AUDIENCE)) {
            recordNAS.addLabelValue(HARVESTINFO_AUDIENCE, (String) metadataMap.get(HARVESTINFO_AUDIENCE));
        }
    } else {
        logger.log(Level.SEVERE, "Error missing metadata");
    }

    // really ugly to return as string, when it may just be merged with
    // a couple other fields at write time, but changing would require
    // larger refactoring
    return record.toString() + netarchiveSuiteComment + "\n" + recordNAS.toString();
}

From source file:ddf.catalog.source.opensearch.CddaOpenSearchSite.java

/**
 * Creates an OpenSearch Site instance. Sets an initial default endpointUrl that can be
 * overwritten using the setter methods.
 *
 * @param connection
 *            TODO
 * 
 * @throws UnsupportedQueryException
 */
public CddaOpenSearchSite(SecureRemoteConnection connection) throws UnsupportedQueryException {
    this.version = "1.0";
    this.connection = connection;
    endpointUrl = "https://example.com?q={searchTerms}&src={fs:routeTo?}&mr={fs:maxResults?}&count={count?}&mt={fs:maxTimeout?}&dn={idn:userDN?}&lat={geo:lat?}&lon={geo:lon?}&radius={geo:radius?}&bbox={geo:box?}&polygon={geo:polygon?}&dtstart={time:start?}&dtend={time:end?}&dateName={cat:dateName?}&filter={fsa:filter?}&sort={fsa:sort?}";
    lastAvailableDate = null;
    InputStream xsltStream = getClass().getResourceAsStream("/" + NORMALIZE_XSLT);
    try {
        normalizeXslt = OpenSearchSiteUtil.convertStreamToDocument(xsltStream);
    } catch (ConversionException ce) {
        throw new UnsupportedQueryException("Could not parse setup files, cannot talk to federated site.", ce);
    } finally {
        IOUtils.closeQuietly(xsltStream);
    }
    XPathFactory xpFactory = XPathFactory.newInstance();
    xpath = xpFactory.newXPath();

}