Example usage for org.apache.solr.client.solrj.util ClientUtils escapeQueryChars

Introduction

On this page you can find example usage for org.apache.solr.client.solrj.util ClientUtils escapeQueryChars.

Prototype


public static String escapeQueryChars(String s) 

Document

See: Lucene query parser syntax for more information on escaping special characters.
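
As a quick illustration, the sketch below (class name, field name, and input value are hypothetical) shows how a raw value is typically escaped before being embedded in a query string:

import org.apache.solr.client.solrj.util.ClientUtils;

public class EscapeQueryCharsDemo {
    public static void main(String[] args) {
        // Hypothetical raw input containing Lucene special characters and whitespace
        String rawValue = "foo (bar)";

        // escapeQueryChars backslash-escapes special query characters and whitespace
        String escaped = ClientUtils.escapeQueryChars(rawValue);

        // Prints: name:foo\ \(bar\)
        System.out.println("name:" + escaped);
    }
}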

Usage

From source file: au.org.ala.biocache.dao.SearchDAOImpl.java

License: Open Source License

/**
 * Returns a list of species that are endemic to the supplied region. Values are cached
 * due to the "expensive" operation.
 */
@Cacheable(cacheName = "endemicCache")
public List<FieldResultDTO> getEndemicSpecies(SpatialSearchRequestParams requestParams) throws Exception {
    ExecutorService nextExecutor = getEndemicThreadPoolExecutor();
    // 1)get a list of species that are in the WKT
    if (logger.isDebugEnabled()) {
        logger.debug("Starting to get Endemic Species...");
    }
    List<FieldResultDTO> list1 = getValuesForFacet(requestParams);//new ArrayList(Arrays.asList(getValuesForFacets(requestParams)));
    if (logger.isDebugEnabled()) {
        logger.debug("Retrieved species within area...(" + list1.size() + ")");
    }
    // 2)get a list of species that occur in the inverse WKT

    String reverseQuery = SpatialUtils.getWKTQuery(spatialField, requestParams.getWkt(), true);//"-geohash:\"Intersects(" +wkt + ")\"";

    if (logger.isDebugEnabled()) {
        logger.debug("The reverse query:" + reverseQuery);
    }

    requestParams.setWkt(null);

    int i = 0, localterms = 0;

    String facet = requestParams.getFacets()[0];
    String[] originalFqs = requestParams.getFq();
    //add the negated WKT query to the fq
    originalFqs = (String[]) ArrayUtils.add(originalFqs, reverseQuery);
    List<Future<List<FieldResultDTO>>> threads = new ArrayList<Future<List<FieldResultDTO>>>();
    //batch up the rest of the world query so that we have fqs based on species we want to test for. This should improve the performance of the endemic services.
    while (i < list1.size()) {
        StringBuffer sb = new StringBuffer();
        while ((localterms == 0 || localterms % termQueryLimit != 0) && i < list1.size()) {
            if (localterms > 0) {
                sb.append(" OR ");
            }
            sb.append(facet).append(":").append(ClientUtils.escapeQueryChars(list1.get(i).getFieldValue()));
            i++;
            localterms++;
        }
        String newfq = sb.toString();
        if (localterms == 1)
            newfq = newfq + " OR " + newfq; //cater for the situation where there is only one term.  We don't want the term to be escaped again
        localterms = 0;
        //System.out.println("FQ = " + newfq);
        SpatialSearchRequestParams srp = new SpatialSearchRequestParams();
        BeanUtils.copyProperties(requestParams, srp);
        srp.setFq((String[]) ArrayUtils.add(originalFqs, newfq));
        int batch = i / termQueryLimit;
        EndemicCallable callable = new EndemicCallable(srp, batch, this);
        threads.add(nextExecutor.submit(callable));
    }
    for (Future<List<FieldResultDTO>> future : threads) {
        List<FieldResultDTO> list = future.get();
        if (list != null) {
            list1.removeAll(list);
        }
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Determined final endemic list (" + list1.size() + ")...");
    }
    return list1;
}
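
The snippet above interleaves the escaping with batching and thread handling. As a condensed, hypothetical sketch of just the escaping pattern (the facet field and values are made up), the filter query is built by OR-joining escaped facet values:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.solr.client.solrj.util.ClientUtils;

public class OrJoinEscapedTerms {
    public static void main(String[] args) {
        String facet = "taxon_name"; // hypothetical facet field
        List<String> values = Arrays.asList("Acacia dealbata", "Eucalyptus (hybrid)");

        // Escape each value so whitespace, parentheses, quotes, etc. cannot break the query
        String fq = values.stream()
                .map(v -> facet + ":" + ClientUtils.escapeQueryChars(v))
                .collect(Collectors.joining(" OR "));

        // Prints: taxon_name:Acacia\ dealbata OR taxon_name:Eucalyptus\ \(hybrid\)
        System.out.println(fq);
    }
}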

From source file: au.org.ala.biocache.dao.SearchDAOImpl.java

License: Open Source License

/**
 * (Endemic)
 * <p>
 * Returns a list of species that are only within a subQuery.
 * <p>
 * The subQuery is a subset of parentQuery.
 */
public List<FieldResultDTO> getSubquerySpeciesOnly(SpatialSearchRequestParams subQuery,
        SpatialSearchRequestParams parentQuery) throws Exception {
    ExecutorService nextExecutor = getEndemicThreadPoolExecutor();
    // 1)get a list of species that are in the WKT
    if (logger.isDebugEnabled()) {
        logger.debug("Starting to get Endemic Species...");
    }
    subQuery.setFacet(true);
    subQuery.setFacets(parentQuery.getFacets());
    List<FieldResultDTO> list1 = getValuesForFacet(subQuery);
    if (logger.isDebugEnabled()) {
        logger.debug("Retrieved species within area...(" + list1.size() + ")");
    }

    int i = 0, localterms = 0;

    String facet = parentQuery.getFacets()[0];
    String[] originalFqs = parentQuery.getFq();
    List<Future<List<FieldResultDTO>>> futures = new ArrayList<Future<List<FieldResultDTO>>>();
    //batch up the rest of the world query so that we have fqs based on species we want to test for.
    // This should improve the performance of the endemic services.
    while (i < list1.size()) {
        StringBuffer sb = new StringBuffer();
        while ((localterms == 0 || localterms % termQueryLimit != 0) && i < list1.size()) {
            if (localterms > 0) {
                sb.append(" OR ");
            }
            String value = list1.get(i).getFieldValue();
            if (facet.equals(NAMES_AND_LSID)) {
                if (value.startsWith("\"") && value.endsWith("\"")) {
                    value = value.substring(1, value.length() - 1);
                }
                value = "\"" + ClientUtils.escapeQueryChars(value) + "\"";
            } else {
                value = ClientUtils.escapeQueryChars(value);
            }
            sb.append(facet).append(":").append(value);
            i++;
            localterms++;
        }
        String newfq = sb.toString();
        if (localterms == 1)
            newfq = newfq + " OR " + newfq; //cater for the situation where there is only one term.  We don't want the term to be escaped again
        localterms = 0;
        SpatialSearchRequestParams srp = new SpatialSearchRequestParams();
        BeanUtils.copyProperties(parentQuery, srp);
        srp.setFq((String[]) ArrayUtils.add(originalFqs, newfq));
        int batch = i / termQueryLimit;
        EndemicCallable callable = new EndemicCallable(srp, batch, this);
        futures.add(nextExecutor.submit(callable));
    }

    Collections.sort(list1);
    for (Future<List<FieldResultDTO>> future : futures) {
        List<FieldResultDTO> list = future.get();
        if (list != null) {
            for (FieldResultDTO find : list) {
                int idx = Collections.binarySearch(list1, find);
                //remove if sub query count < parent query count
                if (idx >= 0 && list1.get(idx).getCount() < find.getCount()) {
                    list1.remove(idx);
                }
            }
        }
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Determined final endemic list (" + list1.size() + ")...");
    }
    return list1;
}

From source file: au.org.ala.biocache.util.ALANameSearcherExt.java

License: Open Source License

private String concatName(String name) {
    String patternA = "[^a-zA-Z]";
    /* replace multiple whitespaces between words with single blank */
    String patternB = "\\b\\s{2,}\\b";

    String cleanQuery = "";
    if (name != null) {
        cleanQuery = ClientUtils.escapeQueryChars(name);//.toLowerCase();
        cleanQuery = cleanQuery.toLowerCase();
        cleanQuery = cleanQuery.replaceAll(patternA, "");
        cleanQuery = cleanQuery.replaceAll(patternB, "");
        cleanQuery = cleanQuery.trim();
    }
    return cleanQuery;
}

From source file: bamboo.trove.rule.RuleChangeUpdateManager.java

License: Apache License

private String urlSearch(String url) {
    if (url.startsWith("*.")) {
        if (url.contains("/")) {
            throw new IllegalArgumentException("can't use a domain wildcard with a path");
        }
        url = url.substring(2);
    } else {
        if (url.endsWith("*")) {
            // remove the *
            url = url.substring(0, url.length() - 1);
        }
    }
    url = CdxAccessControl.getSearchUrl(url);
    url = ClientUtils.escapeQueryChars(url);
    return SolrEnum.URL_TOKENIZED + ":" + url;
}

From source file: cn.edu.pku.lib.dataverse.SolrSearchServiceBean.java

public SolrSearchResult searchDataverseByName(String query, Locale locale) throws SearchException {
    boolean isZh = locale.getLanguage().equals("zh");
    SolrQuery solrQuery = new SolrQuery();
    StringBuilder queryStr = new StringBuilder();
    query = ClientUtils.escapeQueryChars(query);
    queryStr.append("(");
    queryStr.append(SearchFields.NAME);
    queryStr.append(":");
    queryStr.append(query);
    queryStr.append(" OR ");
    queryStr.append(SearchFields.NAME_ZH);
    queryStr.append(":");
    queryStr.append(query);
    queryStr.append(")");
    solrQuery.setQuery(queryStr.toString());
    solrQuery.setFilterQueries(SearchFields.TYPE + ":dataverses",
            SearchFields.PUBLICATION_STATUS + ":Published");
    if (isZh) {
        solrQuery.setFields(SearchFields.ENTITY_ID, SearchFields.NAME_ZH, SearchFields.DATAVERSE_AFFILIATION_ZH,
                SearchFields.DESCRIPTION_ZH);
    } else {
        solrQuery.setFields(SearchFields.ENTITY_ID, SearchFields.NAME, SearchFields.DATAVERSE_AFFILIATION,
                SearchFields.DESCRIPTION);
    }
    logger.log(Level.INFO, "Solr query:{0}", solrQuery);
    solrQuery.setRows(100);
    QueryResponse queryResponse = null;
    SolrSearchResult result = new SolrSearchResult();
    try {
        queryResponse = solrServer.query(solrQuery);
    } catch (HttpSolrServer.RemoteSolrException ex) {
        logger.log(Level.INFO, null, ex);
        result.setTotal(0);
        result.setResults(Collections.EMPTY_LIST);
        return result;
    } catch (SolrServerException ex) {
        throw new SearchException("Internal Dataverse Search Engine Error", ex);
    }
    SolrDocumentList docs = queryResponse.getResults();
    docs.getNumFound();
    Iterator<SolrDocument> iter = docs.iterator();
    List<Dataverse> dataverses = new ArrayList<>();
    while (iter.hasNext()) {
        SolrDocument solrDocument = iter.next();
        Dataverse dataverse = new Dataverse();
        if (isZh) {
            dataverse.setId((Long) solrDocument.getFieldValue(SearchFields.ENTITY_ID));
            dataverse.setNameZh((String) solrDocument.getFieldValue(SearchFields.NAME_ZH));
            dataverse.setAffiliationZh(
                    (String) solrDocument.getFieldValue(SearchFields.DATAVERSE_AFFILIATION_ZH));
            dataverse.setDescriptionZh((String) solrDocument.getFieldValue(SearchFields.DESCRIPTION_ZH));
        } else {
            dataverse.setId((Long) solrDocument.getFieldValue(SearchFields.ENTITY_ID));
            dataverse.setName((String) solrDocument.getFieldValue(SearchFields.NAME));
            dataverse.setAffiliation((String) solrDocument.getFieldValue(SearchFields.DATAVERSE_AFFILIATION));
            dataverse.setDescription((String) solrDocument.getFieldValue(SearchFields.DESCRIPTION));
        }
        dataverses.add(dataverse);
    }
    result.setResults(dataverses);
    result.setTotal((int) docs.getNumFound());
    return result;
}

From source file: com.doculibre.constellio.services.SolrServicesImpl.java

License: Open Source License

@Override
public SolrDocument get(String docId, RecordCollection collection) {
    SolrDocument doc;
    SolrServer solrServer = getSolrServer(collection);
    SolrQuery query = new SolrQuery();
    String escapedDocId = ClientUtils.escapeQueryChars(docId);
    query.setQuery(IndexField.UNIQUE_KEY_FIELD + ":" + escapedDocId + "");
    try {
        QueryResponse queryResponse = solrServer.query(query);
        SolrDocumentList solrDocumentList = queryResponse.getResults();
        if (!solrDocumentList.isEmpty()) {
            doc = solrDocumentList.get(0);
        } else {
            doc = null;
        }
    } catch (SolrServerException e) {
        throw new RuntimeException(e);
    }
    return doc;
}

From source file: com.lyncode.oai.proxy.data.ProxyItemRepository.java

License: Apache License

@Override
public AbstractItem getItem(String identifier) throws IdDoesNotExistException {
    try {
        SolrQuery query = new SolrQuery(
                ProxyItem.IDENTIFIER_FIELD + ":" + ClientUtils.escapeQueryChars(identifier));
        SolrDocumentList list = SolrServerManager.getServer().query(query).getResults();
        if (list.getNumFound() > 0) {
            return new ProxyItem(list.get(0));
        } else
            throw new IdDoesNotExistException();
    } catch (SolrServerException e) {
        throw new IdDoesNotExistException(e);
    }
}

From source file: com.lyncode.oai.proxy.filters.DateFromFilter.java

License: Apache License

@Override
public String query() {
    return "item.lastmodified:[" + ClientUtils.escapeQueryChars(DateUtils.formatToSolr(from)) + " TO *]";
}

From source file: com.lyncode.oai.proxy.filters.DateUntilFilter.java

License: Apache License

@Override
public String query() {
    return "item.lastmodified:[* TO " + ClientUtils.escapeQueryChars(DateUtils.formatToSolr(until)) + "]";
}

From source file: com.nominanuda.solr.SolrHelper.java

License: Apache License

public String esc(String s) {
    return ClientUtils.escapeQueryChars(s);
}