Example usage for org.apache.solr.common SolrDocumentList getNumFound

Introduction

This page lists example usages of org.apache.solr.common.SolrDocumentList.getNumFound(), collected from open-source projects.

Prototype

public long getNumFound() 
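
Before the collected examples, here is a minimal self-contained sketch of the method in use. It assumes a SolrJ HttpSolrClient and a hypothetical core at http://localhost:8983/solr/mycore; the point to note is that getNumFound() reports the total number of documents matching the query, which can exceed the number of documents actually returned in the list (bounded by the rows parameter).

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;

public class NumFoundSketch {
    public static void main(String[] args) throws Exception {
        // hypothetical endpoint; replace with a real core/collection URL
        HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycore").build();
        SolrQuery query = new SolrQuery("*:*");
        query.setRows(10); // at most 10 documents come back in the list...
        QueryResponse response = client.query(query);
        SolrDocumentList results = response.getResults();
        // ...but numFound is the total hit count across the whole index
        long total = results.getNumFound();
        System.out.println("returned " + results.size() + " of " + total + " matching documents");
        client.close();
    }
}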

Usage

From source file:citation.java

License:Open Source License

public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header,
        final serverObjects post, final serverSwitch env) {
    // return variable that accumulates replacements
    final Switchboard sb = (Switchboard) env;
    final serverObjects prop = new serverObjects();
    final Segment segment = sb.index;
    final SolrConnector connector = segment.fulltext().getDefaultConnector();

    // avoid UNRESOLVED PATTERN
    prop.put("url", "");
    prop.put("citations", 0);
    prop.put("sentences", 0);

    DigestURL uri = null;
    String url = "";
    String hash = "";
    int ch = 10;
    boolean filter = false; // show cited sentences only
    if (post != null) {
        if (post.containsKey("url")) {
            url = post.get("url");
            if (!url.startsWith("http://") && !url.startsWith("https://") && !url.startsWith("ftp://")
                    && !url.startsWith("smb://") && !url.startsWith("file://")) {
                url = "http://" + url;
            }
        }
        if (post.containsKey("hash")) {
            hash = post.get("hash");
        }
        if (post.containsKey("ch")) {
            ch = post.getInt("ch", ch);
        }
        filter = post.getBoolean("filter");
    }
    prop.put("filter", filter);
    if (url.length() > 0) {
        try {
            uri = new DigestURL(url, null);
            hash = ASCII.String(uri.hash());
        } catch (final MalformedURLException e) {
            // ignore: a malformed url leaves uri == null, which is handled below
        }
    }
    if (uri == null && hash.length() > 0) {
        try {
            uri = sb.getURL(ASCII.getBytes(hash));
            if (uri == null) {
                connector.commit(true); // try again, that url can be fresh
                uri = sb.getURL(ASCII.getBytes(hash));
            }
        } catch (IOException e) {
            ConcurrentLog.logException(e);
        }
    }
    if (uri == null)
        return prop; // no proper url addressed
    url = uri.toNormalform(true);
    prop.put("url", url);

    // get the document from the index
    SolrDocument doc;
    try {
        doc = segment.fulltext().getDefaultConnector().getDocumentById(hash,
                CollectionSchema.title.getSolrFieldName(), CollectionSchema.text_t.getSolrFieldName());
    } catch (final IOException e1) {
        return prop;
    }
    if (doc == null)
        return prop; // unknown document hash: nothing to cite
    @SuppressWarnings("unchecked")
    ArrayList<String> title = (ArrayList<String>) doc.getFieldValue(CollectionSchema.title.getSolrFieldName());
    String text = (String) doc.getFieldValue(CollectionSchema.text_t.getSolrFieldName());

    ArrayList<String> sentences = new ArrayList<String>();
    if (title != null)
        for (String s : title)
            if (s.length() > 0)
                sentences.add(s);
    SentenceReader sr = new SentenceReader(text);
    StringBuilder line;
    while (sr.hasNext()) {
        line = sr.next();
        if (line.length() > 0)
            sentences.add(line.toString());
    }

    // for each sentence, count the number of occurrences in other documents
    OrderedScoreMap<String> scores = new OrderedScoreMap<String>(null); // accumulates scores for citing urls
    LinkedHashMap<String, Set<DigestURL>> sentenceOcc = new LinkedHashMap<String, Set<DigestURL>>();
    for (String sentence : sentences) {
        if (sentence == null || sentence.length() < 40) {
            // do not count the very short sentences
            sentenceOcc.put(sentence, null);
            continue;
        }
        try {
            sentence = sentence.replace('"', '\'');
            SolrDocumentList doclist = connector.getDocumentListByQuery("text_t:\"" + sentence + "\"",
                    CollectionSchema.url_chars_i.getSolrFieldName() + " asc", 0, 100,
                    CollectionSchema.sku.getSolrFieldName());
            int count = (int) doclist.getNumFound();
            if (count > 0) {
                Set<DigestURL> list = new TreeSet<DigestURL>();
                for (SolrDocument d : doclist) {
                    String u = (String) d.getFieldValue(CollectionSchema.sku.getSolrFieldName());
                    if (u == null || u.equals(url))
                        continue;
                    scores.inc(u);
                    try {
                        list.add(new DigestURL(u, null));
                    } catch (final MalformedURLException e) {
                        // skip urls that cannot be parsed
                    }
                }
                sentenceOcc.put(sentence, list);
            }
        } catch (final Throwable ee) {
            // ignore failures for this sentence and continue with the next one
        }
    }
    sentences.clear(); // we do not need this again

    // iterate the sentences
    int i = 0;
    int sentenceNr = 0;
    for (Map.Entry<String, Set<DigestURL>> se : sentenceOcc.entrySet()) {
        Set<DigestURL> app = se.getValue();
        if (filter) { // prepare list, only include sentence with citation
            if (app != null && app.size() > 0) {
                StringBuilder dd = new StringBuilder(se.getKey());
                prop.put("sentences_" + i + "_dt", sentenceNr);
                dd.append("<br/>appears in:");
                for (DigestURL u : app) {
                    if (u != null) {
                        dd.append(" <a href=\"").append(u.toNormalform(false)).append("\">").append(u.getHost())
                                .append("</a>");
                    }
                }
                prop.put("sentences_" + i + "_dd", dd.toString());
                i++;
            }
        } else { // prepare list, include all sentences
            StringBuilder dd = new StringBuilder(se.getKey());
            prop.put("sentences_" + i + "_dt", sentenceNr);
            if (app != null && app.size() > 0) {
                dd.append("<br/>appears in:");
                for (DigestURL u : app) {
                    if (u != null) {
                        dd.append(" <a href=\"").append(u.toNormalform(false)).append("\">").append(u.getHost())
                                .append("</a>");
                    }
                }
            }
            prop.put("sentences_" + i + "_dd", dd.toString());
            i++;
        }
        sentenceNr++;
    }
    prop.put("sentences", i);

    // iterate the citations in order of number of citations
    i = 0;
    for (String u : scores.keyList(false)) {
        try {
            DigestURL uu = new DigestURL(u, null);
            prop.put("citations_" + i + "_dt", "<a href=\"" + u + "\">" + u + "</a>");
            StringBuilder dd = new StringBuilder();
            dd.append("makes ").append(Integer.toString(scores.get(u))).append(" citations: of ").append(url);
            for (Map.Entry<String, Set<DigestURL>> se : sentenceOcc.entrySet()) {
                Set<DigestURL> occurls = se.getValue();
                if (occurls != null && occurls.contains(uu))
                    dd.append("<br/><a href=\"/solr/select?q=text_t:%22").append(se.getKey().replace('"', '\''))
                            .append("%22&rows=100&grep=&wt=grephtml\">").append(se.getKey()).append("</a>");
            }
            prop.put("citations_" + i + "_dd", dd.toString());
            i++;
        } catch (final MalformedURLException e) {
        }
    }
    prop.put("citations", i);

    // find similar documents from different hosts
    i = 0;
    for (String u : scores.keyList(false)) {
        if (scores.get(u) < ch)
            continue;
        try {
            DigestURL uu = new DigestURL(u, null);
            if (uu.getOrganization().equals(uri.getOrganization()))
                continue;
            prop.put("similar_links_" + i + "_url", u);
            i++;
        } catch (final MalformedURLException e) {
        }
    }
    prop.put("similar_links", i);
    prop.put("similar", i > 0 ? 1 : 0);

    // return rewrite properties
    return prop;
}

From source file:at.pagu.soldockr.core.SolrTemplateTest.java

License:Apache License

@Test
public void testCount() throws SolrServerException {
    ArgumentCaptor<SolrQuery> captor = ArgumentCaptor.forClass(SolrQuery.class);
    QueryResponse responseMock = Mockito.mock(QueryResponse.class);
    SolrDocumentList resultList = new SolrDocumentList();
    resultList.setNumFound(10);
    Mockito.when(responseMock.getResults()).thenReturn(resultList);
    Mockito.when(solrServerMock.query(Mockito.any(SolrQuery.class))).thenReturn(responseMock);

    long result = solrTemplate.executeCount(new SimpleQuery(new Criteria("field_1").is("value1")));
    Assert.assertEquals(resultList.getNumFound(), result);

    Mockito.verify(solrServerMock, Mockito.times(1)).query(captor.capture());

    Assert.assertEquals(Integer.valueOf(0), captor.getValue().getStart());
    Assert.assertEquals(Integer.valueOf(0), captor.getValue().getRows());
}

From source file:at.pagu.soldockr.core.SolrTemplateTest.java

License:Apache License

@Test
public void testCountWhenPagingSet() throws SolrServerException {
    ArgumentCaptor<SolrQuery> captor = ArgumentCaptor.forClass(SolrQuery.class);
    QueryResponse responseMock = Mockito.mock(QueryResponse.class);
    SolrDocumentList resultList = new SolrDocumentList();
    resultList.setNumFound(10);
    Mockito.when(responseMock.getResults()).thenReturn(resultList);
    Mockito.when(solrServerMock.query(Mockito.any(SolrQuery.class))).thenReturn(responseMock);

    Query query = new SimpleQuery(new Criteria("field_1").is("value1"));
    query.setPageRequest(new PageRequest(0, 5));
    long result = solrTemplate.executeCount(query);
    Assert.assertEquals(resultList.getNumFound(), result);

    Mockito.verify(solrServerMock, Mockito.times(1)).query(captor.capture());

    Assert.assertEquals(Integer.valueOf(0), captor.getValue().getStart());
    Assert.assertEquals(Integer.valueOf(0), captor.getValue().getRows());
}

From source file:au.org.ala.biocache.dao.SearchDAOImpl.java

License:Open Source License

/**
 * Process the {@link org.apache.solr.client.solrj.response.QueryResponse} from a SOLR search and return
 * a {@link au.org.ala.biocache.dto.SearchResultDTO}.
 *
 * @param params the original search request parameters
 * @param qr the query response to process
 * @param solrQuery the query that was executed
 * @param resultClass the bean class to map result documents to
 * @return the populated search result
 */
private SearchResultDTO processSolrResponse(SearchRequestParams params, QueryResponse qr, SolrQuery solrQuery,
        Class resultClass) {
    SearchResultDTO searchResult = new SearchResultDTO();
    SolrDocumentList sdl = qr.getResults();
    // Iterator it = qr.getResults().iterator() // Use for download
    List<FacetField> facets = qr.getFacetFields();
    List<FacetField> facetDates = qr.getFacetDates();
    Map<String, Integer> facetQueries = qr.getFacetQuery();
    if (facetDates != null) {
        if (logger.isDebugEnabled()) {
            logger.debug("Facet dates size: " + facetDates.size());
        }
        facets.addAll(facetDates);
    }

    List<OccurrenceIndex> results = qr.getBeans(resultClass);

    //facet results
    searchResult.setTotalRecords(sdl.getNumFound());
    searchResult.setStartIndex(sdl.getStart());
    searchResult.setPageSize(solrQuery.getRows()); //pageSize
    searchResult.setStatus("OK");
    String[] solrSort = StringUtils.split(solrQuery.getSortField(), " "); // e.g. "taxon_name asc"
    if (logger.isDebugEnabled()) {
        logger.debug("sortField post-split: " + StringUtils.join(solrSort, "|"));
    }
    searchResult.setSort(solrSort[0]); // sortField
    searchResult.setDir(solrSort[1]); // sortDirection
    searchResult.setQuery(params.getUrlParams()); //this needs to be the original URL>>>>
    searchResult.setOccurrences(results);

    List<FacetResultDTO> facetResults = buildFacetResults(facets);

    //all belong to uncertainty range for now
    if (facetQueries != null && !facetQueries.isEmpty()) {
        Map<String, String> rangeMap = rangeBasedFacets.getRangeMap("uncertainty");
        List<FieldResultDTO> fqr = new ArrayList<FieldResultDTO>();
        for (String value : facetQueries.keySet()) {
            if (facetQueries.get(value) > 0)
                fqr.add(new FieldResultDTO(rangeMap.get(value), facetQueries.get(value), value));
        }
        facetResults.add(new FacetResultDTO("uncertainty", fqr));
    }

    //handle all the range based facets
    if (qr.getFacetRanges() != null) {
        for (RangeFacet rfacet : qr.getFacetRanges()) {
            List<FieldResultDTO> fqr = new ArrayList<FieldResultDTO>();
            if (rfacet instanceof Numeric) {
                Numeric nrfacet = (Numeric) rfacet;
                List<RangeFacet.Count> counts = nrfacet.getCounts();
                //handle the before
                if (nrfacet.getBefore().intValue() > 0) {
                    fqr.add(new FieldResultDTO("[* TO "
                            + getUpperRange(nrfacet.getStart().toString(), nrfacet.getGap(), false) + "]",
                            nrfacet.getBefore().intValue()));
                }
                for (RangeFacet.Count count : counts) {
                    String title = getRangeValue(count.getValue(), nrfacet.getGap());
                    fqr.add(new FieldResultDTO(title, count.getCount()));
                }
                //handle the after
                if (nrfacet.getAfter().intValue() > 0) {
                    fqr.add(new FieldResultDTO("[" + nrfacet.getEnd().toString() + " TO *]",
                            nrfacet.getAfter().intValue()));
                }
                facetResults.add(new FacetResultDTO(nrfacet.getName(), fqr));
            }
        }
    }

    //update image URLs
    for (OccurrenceIndex oi : results) {
        updateImageUrls(oi);
    }

    searchResult.setFacetResults(facetResults);
    // The query result is stored in its original format so that all the information
    // returned is available later on if needed
    searchResult.setQr(qr);
    return searchResult;
}

From source file:au.org.ala.biocache.web.DownloadController.java

License:Open Source License

private Object download(DownloadRequestParams requestParams, String ip, String apiKey,
        HttpServletResponse response, HttpServletRequest request, DownloadType downloadType) throws Exception {

    boolean sensitive = false;
    if (apiKey != null) {
        if (shouldPerformOperation(apiKey, response, false)) {
            sensitive = true;
        }
    } else if (StringUtils.isEmpty(requestParams.getEmail())) {
        response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED,
                "Unable to perform an offline download without an email address");
    }

    //get the fq that includes only the sensitive data that the userId ROLES permits
    String sensitiveFq = null;
    if (!sensitive) {
        sensitiveFq = getSensitiveFq(request);
    }

    ip = ip == null ? request.getRemoteAddr() : ip;

    //create a new task
    DownloadDetailsDTO dd = new DownloadDetailsDTO(requestParams, ip, downloadType);
    dd.setIncludeSensitive(sensitive);
    dd.setSensitiveFq(sensitiveFq);

    //get query (max) count for queue priority
    requestParams.setPageSize(0);
    requestParams.setFacet(false);
    SolrDocumentList result = searchDAO.findByFulltext(requestParams);
    dd.setTotalRecords(result.getNumFound());

    Map<String, Object> status = new LinkedHashMap<>();
    DownloadDetailsDTO d = persistentQueueDAO.isInQueue(dd);

    if (d != null) {
        status.put("message", "Already in queue.");
        status.put("status", "inQueue");
        status.put("queueSize", persistentQueueDAO.getTotalDownloads());
        status.put("statusUrl",
                downloadService.webservicesRoot + "/occurrences/offline/status/" + dd.getUniqueId());
    } else if (dd.getTotalRecords() > downloadService.dowloadOfflineMaxSize) {
        //identify this download as too large
        File file = new File(downloadService.biocacheDownloadDir + File.separator
                + UUID.nameUUIDFromBytes(dd.getEmail().getBytes()) + File.separator + dd.getStartTime()
                + File.separator + "tooLarge");
        FileUtils.forceMkdir(file.getParentFile());
        FileUtils.writeStringToFile(file, "", "UTF-8");
        status.put("downloadUrl", downloadService.biocacheDownloadUrl);
        status.put("status", "skipped");
        status.put("message", downloadService.downloadOfflineMsg);
        status.put("error", "Requested to many records (" + dd.getTotalRecords() + "). The maximum is ("
                + downloadService.dowloadOfflineMaxSize + ")");
    } else {
        persistentQueueDAO.addDownloadToQueue(dd);
        status.put("status", "inQueue");
        status.put("queueSize", persistentQueueDAO.getTotalDownloads());
        status.put("statusUrl",
                downloadService.webservicesRoot + "/occurrences/offline/status/" + dd.getUniqueId());
    }

    status.put("searchUrl", downloadService.biocacheUiUrl + "/occurrences/search"
            + ((SpatialSearchRequestParams) dd.getRequestParams()).getUrlParams());

    writeStatusFile(dd.getUniqueId(), status);

    return status;
}

From source file:au.org.ala.biocache.web.ValidationRuleController.java

License:Open Source License

/**
 * Example expected payload:
 *
 * {
 *    "status": "new",
 *    "ignored": false,
 *    "apiKey": "XXXXXXXXXX",
 *    "user": {
 *    "isAdmin": true,
 *    "email": "xxxxxxxx@gmail.com",
 *    "authority": 1000
 *    },
 *    "classification": "invalid",
 *    "area": "MULTIPOLYGON(((137.5 -26,137.5 -25.5,138 -25.5,138 -26,137.5 -26)),((134.5 -29.5,134.5 -29,135 -29,135 -29.5,134.5 -29.5)))",
 *    "lastModified": "2013-01-01T09:05:19",
 *    "id": 5090,
 *    "comment": "",
 *    "species": "Trichoglossus haematodus"
 * }
 *
 * @param request
 * @param response
 * @throws Exception
 */
@RequestMapping(value = { "/assertions/query/add", "/validation/rule/add" }, method = RequestMethod.POST)
public void addValidationRule(HttpServletRequest request, HttpServletResponse response) throws Exception {

    try {
        String rawValue = org.apache.commons.io.IOUtils.toString(request.getInputStream(), "UTF-8");
        logger.debug("The raw value :" + rawValue);

        try {
            ObjectMapper om = new ObjectMapper();
            ValidationRuleDTO validationRuleDTO = om.readValue(rawValue, ValidationRuleDTO.class);

            //we know that it is a JCU assertion
            if (shouldPerformOperation(validationRuleDTO.getApiKey(), response)) {
                //delete
                if (validationRuleDTO.getStatus().equals("deleted")) {
                    Store.deleteValidationRule(validationRuleDTO.getApiKey() + "|" + validationRuleDTO.getId(),
                            validationRuleDTO.getLastModified());
                } else {
                    //new or update
                    //does the species exist
                    String guid = speciesLookupService.getGuidForName(validationRuleDTO.getSpecies());
                    if ((guid != null || validationRuleDTO.getQuery() != null)
                            && validationRuleDTO.getId() != null) {
                        //check to see if the area is well formed.
                        SpatialSearchRequestParams ssr = new SpatialSearchRequestParams();
                        String query = guid != null ? "lsid:" + guid : validationRuleDTO.getQuery();
                        ssr.setQ(query);
                        ssr.setWkt(validationRuleDTO.getArea());
                        ssr.setFacet(false);
                        try {
                            SolrDocumentList list = searchDAO.findByFulltext(ssr);
                            Long recordCount = list.getNumFound();
                            logger.debug("Validation rule should apply to records: " + recordCount);
                            //now create the query assertion
                            ValidationRule validationRule = new ValidationRule();
                            //NQ: need the id to be populated to construct the correct validation rowkey to allow for updates
                            validationRule.setId(validationRuleDTO.getId().toString());
                            //copy from DTO -> model object for storage
                            validationRule.setApiKey(validationRuleDTO.apiKey);
                            validationRule.setRawAssertion(rawValue);
                            validationRule.setWkt(validationRuleDTO.area);
                            validationRule.setComment(validationRuleDTO.getComment());

                            //auth details
                            String userId = authService.getMapOfEmailToId()
                                    .get(validationRuleDTO.user.getEmail());
                            validationRule.setUserId(userId);
                            validationRule.setUserEmail(validationRuleDTO.user.getEmail());
                            validationRule.setAuthority(validationRuleDTO.user.getAuthority().toString());

                            validationRule.setRawQuery(getRawQuery(validationRuleDTO.getQuery(), guid,
                                    validationRuleDTO.getArea()));
                            if (validationRuleDTO.getStatus().equals("new")) {
                                validationRule.setCreatedDate(validationRuleDTO.getLastModified());
                            }

                            Store.addValidationRule(validationRule);
                        } catch (Exception e) {
                            response.sendError(HttpServletResponse.SC_BAD_REQUEST,
                                    "Unable to construct a valid validation rule from the provided information. "
                                            + validationRuleDTO.getId());
                            logger.error("Error constructing query or adding to datastore", e);
                        }
                    } else {
                        response.sendError(HttpServletResponse.SC_BAD_REQUEST,
                                "Unable to locate species " + validationRuleDTO.getSpecies()
                                        + " for validation rule " + validationRuleDTO.getId());
                    }
                }
            }
        } catch (Exception e) {
            response.sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
            logger.error("Unable to resolve message to known type", e);
        }
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        response.sendError(HttpURLConnection.HTTP_BAD_REQUEST);
    }
}

From source file:au.org.ala.biocache.web.WMSController.java

License:Open Source License

@RequestMapping(value = { "/ogc/getFeatureInfo" }, method = RequestMethod.GET)
public String getFeatureInfo(
        @RequestParam(value = "CQL_FILTER", required = false, defaultValue = "") String cql_filter,
        @RequestParam(value = "ENV", required = false, defaultValue = "") String env,
        @RequestParam(value = "BBOX", required = true, defaultValue = "0,-90,180,0") String bboxString,
        @RequestParam(value = "WIDTH", required = true, defaultValue = "256") Integer width,
        @RequestParam(value = "HEIGHT", required = true, defaultValue = "256") Integer height,
        @RequestParam(value = "STYLES", required = false, defaultValue = "") String styles,
        @RequestParam(value = "SRS", required = false, defaultValue = "") String srs,
        @RequestParam(value = "QUERY_LAYERS", required = false, defaultValue = "") String queryLayers,
        @RequestParam(value = "X", required = true, defaultValue = "0") Double x,
        @RequestParam(value = "Y", required = true, defaultValue = "0") Double y, HttpServletRequest request,
        HttpServletResponse response, Model model) throws Exception {

    logger.debug("WMS - GetFeatureInfo requested for: " + queryLayers);

    if ("EPSG:4326".equals(srs))
        bboxString = convertBBox4326To900913(bboxString); // to work around a UDIG bug

    WmsEnv vars = new WmsEnv(env, styles);
    double[] mbbox = new double[4];
    double[] bbox = new double[4];
    double[] pbbox = new double[4];
    double[] tilebbox = new double[4];
    int size = vars.size + (vars.highlight != null ? HIGHLIGHT_RADIUS * 2 + (int) (vars.size * 0.2) : 0) + 5; //bounding box buffer

    //what is the size of the dot in degrees
    double resolution = getBBoxes(bboxString, width, height, size, vars.uncertainty, mbbox, bbox, pbbox,
            tilebbox);

    //resolution should be a value < 1
    PointType pointType = getPointTypeForDegreesPerPixel(resolution);

    double longitude = bbox[0] + (((bbox[2] - bbox[0]) / width) * x);
    double latitude = bbox[3] - (((bbox[3] - bbox[1]) / height) * y);

    //round to the correct point size
    double roundedLongitude = pointType.roundToPointType(longitude);
    double roundedLatitude = pointType.roundToPointType(latitude);

    //get the pixel size of the circles
    double minLng = pointType.roundDownToPointType(roundedLongitude - (pointType.getValue() * 2 * (size + 3)));
    double maxLng = pointType.roundUpToPointType(roundedLongitude + (pointType.getValue() * 2 * (size + 3)));
    double minLat = pointType.roundDownToPointType(roundedLatitude - (pointType.getValue() * 2 * (size + 3)));
    double maxLat = pointType.roundUpToPointType(roundedLatitude + (pointType.getValue() * 2 * (size + 3)));

    //do the SOLR query
    SpatialSearchRequestParams requestParams = new SpatialSearchRequestParams();
    String q = convertLayersParamToQ(queryLayers); //the query is derived from the layer name
    requestParams.setQ(q);
    logger.debug("WMS GetFeatureInfo for " + queryLayers + ", longitude:[" + minLng + " TO " + maxLng
            + "],  latitude:[" + minLat + " TO " + maxLat + "]");

    String[] fqs = new String[] { "longitude:[" + minLng + " TO " + maxLng + "]",
            "latitude:[" + minLat + " TO " + maxLat + "]" };
    requestParams.setFq(fqs);
    //requestParams.setFq(new String[]{"point-"+pointType.getValue()+":"+roundedLatitude+","+roundedLongitude});
    requestParams.setFacet(false);

    //TODO: paging
    SolrDocumentList sdl = searchDAO.findByFulltext(requestParams);
    //send back the results.
    String body = "";
    if (sdl != null && sdl.size() > 0) {
        SolrDocument doc = sdl.get(0);
        model.addAttribute("record", doc.getFieldValueMap());
        model.addAttribute("totalRecords", sdl.getNumFound());
    }

    model.addAttribute("uriUrl",
            "http://biocache.ala.org.au/occurrences/search?q=" + URLEncoder.encode(q, "UTF-8") + "&fq="
                    + URLEncoder.encode(fqs[0], "UTF-8") + "&fq=" + URLEncoder.encode(fqs[1], "UTF-8"));

    model.addAttribute("pointType", pointType.name());
    model.addAttribute("minLng", minLng);
    model.addAttribute("maxLng", maxLng);
    model.addAttribute("minLat", minLat);
    model.addAttribute("maxLat", maxLat);
    model.addAttribute("latitudeClicked", latitude);
    model.addAttribute("longitudeClicked", longitude);

    return "metadata/getFeatureInfo";
}

From source file:au.org.ala.biocache.web.WMSController.java

License:Open Source License

private Integer getCachedCount(boolean docCount, SpatialSearchRequestParams requestParams, String q,
        PointType pointType, boolean[] useBbox) throws Exception {

    Integer count = null;

    String tag = docCount ? "" : pointType.getLabel();

    synchronized (countLock) {
        count = (Integer) countsCache.get(q + tag);
    }
    if (count == null) {
        requestParams.setPageSize(0);
        requestParams.setFacet(true);
        requestParams.setFlimit(0);
        requestParams.setFacets(new String[] { pointType.getLabel() });
        requestParams.setFormattedQuery(null);
        if (docCount) {
            SolrDocumentList result = searchDAO.findByFulltext(requestParams);
            if (result != null) {
                synchronized (countLock) {
                    count = (int) result.getNumFound();
                    countsCache.put(q + tag, count);
                }
            }
        } else {
            List<GroupFacetResultDTO> result = searchDAO.searchGroupedFacets(requestParams);
            if (result != null && result.size() > 0) {
                synchronized (countLock) {
                    count = result.get(0).getCount();
                    countsCache.put(q + tag, count);
                }
            }
        }
    } else {
        queryFormatUtils.formatSearchQuery(requestParams, false);
    }

    return count;
}

From source file:au.org.intersect.dms.catalogue.db.SolrIndexFacade.java

License:Open Source License

/**
 * Returns all datasets owned by the user matching the specified full text search query.
 *
 * @param username
 *            username of the owner in DMS system
 * @param projects
 *            list of project codes from booking system this username belongs to
 * @param query
 *            full text search query
 * @param startIndex
 *            index of the first dataset to display on the current page
 * @param pageSize
 *            max number of datasets to display on each page
 * @return datasets of this user (one page)
 */
public DatasetSearchResult findDatasets(String username, List<Long> projects, String query, int startIndex,
        int pageSize) {
    SolrQuery solrQuery = new SolrQuery();
    solrQuery.setFields(ID);
    solrQuery.setStart(startIndex);
    solrQuery.setRows(pageSize);
    solrQuery.setSortField(ID, ORDER.asc);
    StringBuilder queryString = new StringBuilder();
    if (query == null || "".equals(query)) {
        queryString.append("dataset.metadata_t:*");
    } else {
        queryString.append(query);
    }
    queryString.append(" AND (dataset.owner_s:").append(username);

    String projectQuery = buildProjectCriteria(projects);
    if (!projectQuery.isEmpty()) {
        queryString.append(" OR ").append(projectQuery);
    }
    queryString.append(")");

    solrQuery.setQuery(queryString.toString());
    QueryResponse solrResponse = DbDataset.search(solrQuery);
    SolrDocumentList docs = solrResponse.getResults();

    List<Dataset> datasets = convertSolrDocuments2Datasets(docs);

    DatasetSearchResult result = new DatasetSearchResult();
    result.setDatasets(datasets);
    result.setTotalSize(docs != null ? docs.getNumFound() : 0);
    return result;
}

From source file:bamboo.trove.rule.RuleChangeUpdateManager.java

License:Apache License

private void processQuery(SolrQuery query, WorkLog workLog) throws SolrServerException, IOException {
    log.debug("Query for rule : {}", query.toString());
    Timer.Context context = getTimer(getName() + ".processQuery").time();
    // need to commit here so that we can ignore documents just processed
    client.commit();

    boolean more = true;
    String cursor = CursorMarkParams.CURSOR_MARK_START;
    while (more) {
        query.set(CursorMarkParams.CURSOR_MARK_PARAM, cursor);
        Timer.Context contextQuery = getTimer(getName() + ".query").time();

        QueryResponse response = client.query(query);
        workLog.ranSearch();
        SolrDocumentList results = response.getResults();
        log.debug("Found {} (of {} docs) in QT = {} ms", results.size(), results.getNumFound(),
                response.getQTime());
        String nextCursor = response.getNextCursorMark();
        if (nextCursor == null || cursor.equals(nextCursor)) {
            more = false;
        }
        distributeResponse(results, workLog);
        cursor = nextCursor;
        contextQuery.stop();
    }

    // We do this at a higher level too, so this would seem redundant. There is a trade-off. Allowing parallelism
    // between rules means rules can sometimes be re-processed redundantly. The higher level waitUntilCaughtUp() will
    // ensure we never process rules at the same time rules are being changed.
    // By doing a wait here as well however, we can collect accurate statistics about how much actual write activity we
    // are really generating by passing the workLog into the work pool.
    // When we have a better awareness of the typical work patterns it might be worth disabling this method call and
    // then stop collecting the metrics to improve throughput.
    waitUntilCaughtUp();
    context.stop();
}
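
The last example pages through a large result set with Solr cursor marks, reading getNumFound() on each page for logging. A minimal standalone sketch of that pattern (assuming a SolrJ HttpSolrClient, a hypothetical core URL, and a uniqueKey field named "id"):

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CursorMarkParams;

public class CursorPagingSketch {
    public static void main(String[] args) throws Exception {
        // hypothetical endpoint; replace with a real core/collection URL
        HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycore").build();
        SolrQuery query = new SolrQuery("*:*");
        query.setRows(500);
        // cursor paging requires a deterministic sort that includes the uniqueKey field
        query.setSort("id", SolrQuery.ORDER.asc);
        String cursor = CursorMarkParams.CURSOR_MARK_START;
        long seen = 0;
        while (true) {
            query.set(CursorMarkParams.CURSOR_MARK_PARAM, cursor);
            QueryResponse response = client.query(query);
            SolrDocumentList page = response.getResults();
            seen += page.size();
            // getNumFound() is the total hit count, constant across all pages
            System.out.println("read " + seen + " of " + page.getNumFound() + " documents");
            String next = response.getNextCursorMark();
            if (next.equals(cursor)) {
                break; // cursor did not advance: the full result set has been consumed
            }
            cursor = next;
        }
        client.close();
    }
}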