List of usage examples for org.apache.solr.common.SolrDocumentList.getNumFound()
public long getNumFound()
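Before the project-specific examples, here is a minimal, self-contained sketch of the typical call pattern: run a query, read the SolrDocumentList from the QueryResponse, and compare getNumFound() (the total hit count on the server) with the number of documents actually returned in the page. It is only a sketch: it assumes a SolrJ 6+ HttpSolrClient and a placeholder core URL, while the examples below use the client types of their respective projects (SolrServer, SolrClient).

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;

public class NumFoundExample {
    public static void main(String[] args) throws Exception {
        // Placeholder URL and core name; adjust for your environment.
        SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycore").build();
        try {
            SolrQuery query = new SolrQuery("*:*");
            query.setRows(10); // rows caps the documents returned, not the total count
            QueryResponse response = client.query(query);
            SolrDocumentList results = response.getResults();
            // getNumFound() is the total number of matching documents,
            // which can exceed results.size() when rows is smaller.
            System.out.println("numFound=" + results.getNumFound()
                    + ", returned=" + results.size());
        } finally {
            client.close();
        }
    }
}

Because getNumFound() reflects the full result set rather than the current page, it is the value to use for pagination and "N results found" displays, as the examples below do.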
From source file:org.zaizi.sensefy.api.service.SearchService.java
License:Open Source License
public SearchResponse getSearchResponse(String query, String fields, String filters, int start, Integer rows,
        String order, boolean facet, boolean spellcheck, boolean clustering, String clusterSort,
        boolean security, Principal user) {
    SearchResponse response = new SearchResponse();
    response.setQuery(query);
    SearchResults responseContent = new SearchResults();
    responseContent.setStart(start);
    Long startTime = System.currentTimeMillis();
    try {
        FacetConfigurationList facetConfiguration = null;
        if (facet)
            facetConfiguration = facetConfigurers.getFacetConfiguration();
        SolrQuery documentsQuery = QueryBuilder.getSolrQuery(query, fields, facet, facetConfiguration, filters,
                start, rows, order, security, spellcheck, clustering, user);
        QueryResponse primaryIndexResponse;
        primaryIndexResponse = this.getPrimaryIndex().query(documentsQuery);
        Map<String, Map<String, List<String>>> highlightingSnippets = primaryIndexResponse.getHighlighting();
        SpellCheckResponse spellCheckResponse = primaryIndexResponse.getSpellCheckResponse();
        String collationQuery;
        if (spellCheckResponse != null) {
            collationQuery = spellCheckResponse.getCollatedResult();
            responseContent.setCollationQuery(collationQuery);
        }
        SolrDocumentList primaryIndexResults = primaryIndexResponse.getResults();
        responseContent.setNumFound(primaryIndexResults.getNumFound());
        responseContent.setDocuments(primaryIndexResults);
        responseContent.setHighlight(highlightingSnippets);
        response.setSearchResults(responseContent);
        if (clustering) {
            if (clusterSort != null && clusterSort.equals("size"))
                ClusterParser.parseClusters(primaryIndexResponse, response, true);
            else
                ClusterParser.parseClusters(primaryIndexResponse, response, false);
        }
        if (facet)
            FacetParser.parseFacets(primaryIndexResponse, response, facetConfiguration);
    } catch (SolrServerException e) {
        processAPIException(response, e, "[Keyword Based Search] Error - stacktrace follows", 500,
                ComponentCode.SOLR);
    }
    Long elapsedTime = System.currentTimeMillis() - startTime;
    response.setTime(elapsedTime);
    return response;
}
From source file:org.zaizi.sensefy.api.service.SemanticSearchService.java
License:Open Source License
/**
 * Returns a set of relevant documents to an entity or an entity group in input<br>
 * Is possible to search for a specific entity instance, for an entity type
 * and possibly an entity type with some specific attribute<br>
 * This service is called with the information retrieved from the Smart
 * Autocomplete phases.
 *
 * @param entityId
 *            The unique id for the entity of interest
 * @param entityType
 *            The unique id for the entity type of interest
 * @param entityAttribute
 *            An attribute for the entity type in input eg. nationality for
 *            the entity type: person
 * @param entityAttributeValue
 *            A value of interest for the entity attribute in input eg.
 *            italian for the attribute nationality
 * @param fields
 *            The list of fields to return in the output documents
 * @param filters
 *            A filter query to obtain a subset of the documents relevant to
 *            the main query
 * @param start
 *            The first document to return in the list of relevant documents
 * @param rows
 *            The number of documents to return
 * @param order
 *            The sorting order for the results : <field> <direction> eg:
 *            title_sort desc
 * @param facet
 *            If enabled the relevant results will contain the facet
 *            countings
 * @param security
 *            If enabled the relevant results will be filtered based on user
 *            permissions
 * @param sensefyToken
 *            The Sensefy Token that contains relevant information for the
 *            user running the query
 * @return A json representation of the list of relevant documents for the
 *         input query
 */
public SearchResponse entityDrivenSearch(String entityId, String entityType, String entityAttribute,
        String entityAttributeValue, String fields, String filters, int start, Integer rows, String order,
        boolean facet, boolean security, Principal user, boolean clustering) {
    String stringQuery;
    QueryResponse queryResponse;
    SearchResponse response = new SearchResponse();
    SearchResults responseContent = new SearchResults();
    Long startTime = System.currentTimeMillis();
    try {
        if ((entityType == null || entityType.isEmpty()) && (entityId == null || entityId.isEmpty())) {
            throw new SensefyException(400, "<entityType> or <entityId> param required");
        }
        stringQuery = QueryBuilder.getQueryString(entityId, entityType, entityAttribute, entityAttributeValue,
                response);
        SolrQuery solrQuery = QueryBuilder.getSolrQuery(stringQuery, fields, facet,
                facetConfigurer.getFacetConfiguration(), filters, start, rows, order, security, false,
                clustering, user);
        // retrieve the entity
        SolrDocument extractedEntity = getEntity(entityId);
        responseContent.setEntity(extractedEntity);
        // // retrieve the entity type
        // EntityType extractedEntityType = getEntityType(entityType);
        // responseContent.setEntityType(extractedEntityType);
        String labelVal = (String) extractedEntity.getFieldValue(ENTITY_DRIVEN_HIGHLIGH_FIELD);
        logger.debug("Entity label for the entity driven search : " + labelVal);
        solrQuery.set(ENTITY_DRIVEN_HIGHLIGH_QUERY_PARAM, labelVal);
        queryResponse = this.getPrimaryIndex().query(solrQuery);
        // get the highlights
        Map<String, Map<String, List<String>>> highlightingSnippets = queryResponse.getHighlighting();
        SolrDocumentList docsRetrievedFromEntity = queryResponse.getResults();
        FacetParser.parseFacets(queryResponse, response, facetConfigurer.getFacetConfiguration());
        responseContent.setStart(start);
        responseContent.setNumFound(docsRetrievedFromEntity.getNumFound());
        responseContent.setDocuments(docsRetrievedFromEntity);
        responseContent.setHighlight(highlightingSnippets);
        response.setSearchResults(responseContent);
    } catch (SensefyException e) {
        processAPIException(response, e, "[Entity Driven Search] Error - stacktrace follows", 400,
                ComponentCode.QUERY);
    } catch (SolrServerException e) {
        processAPIException(response, e, "[Entity Driven Search] Error - stacktrace follows", 500,
                ComponentCode.SOLR);
    }
    Long elapsedTime = System.currentTimeMillis() - startTime;
    response.setTime(elapsedTime);
    return response;
}
From source file:org.zaizi.sensefy.api.service.SemanticSearchService.java
License:Open Source License
/**
 * Returns all the entities occurring in a document in input.<br>
 * If a selection over the entities was placed with a filter, the entities
 * returned will be filtered accordingly to that.
 *
 * @param docId
 *            The unique id for the document in input
 * @param fields
 *            The list of fields to return in the output documents
 * @param filters
 *            A filter query to obtain a subset of the documents relevant to
 *            the main query
 * @param start
 *            The first document to return in the list of relevant documents
 * @param rows
 *            The number of documents to return
 * @param sort
 *            The sorting order for the results : <field> <direction> eg:
 *            title_sort desc
 * @param security
 *            If enabled the relevant results will be filtered based on user
 *            permissions
 * @param sensefyToken
 *            The Sensefy Token that contains relevant information for the
 *            user running the query
 * @return A json representation of the list of relevant entities for the
 *         input document id
 */
public SearchResponse showEntitiesByDocId(String docId, String fields, String filters, int start, Integer rows,
        String sort, boolean security, Principal user) {
    SearchResponse response = new SearchResponse();
    SearchResults responseContent = new SearchResults();
    Long startTime = System.currentTimeMillis();
    try {
        docId = escapeDocId(docId);
        if (docId == null || docId.isEmpty()) {
            throw new SensefyException(400, "<query> param required - The query is missing");
        }
        SolrQuery solrQuery = QueryBuilder.getSolrQuery(DOC_ID_FIELD + ":\"" + docId + "\"", fields, false,
                null, null, start, rows, sort, security, false, false, user);
        if (filters != null && !filters.equals("")) {
            solrQuery.setFilterQueries(filters);
        }
        QueryResponse queryResponse;
        queryResponse = this.getEntityCore().query(solrQuery);
        SolrDocumentList entitiesRetrieved = queryResponse.getResults();
        responseContent.setDocuments(entitiesRetrieved);
        responseContent.setNumFound(entitiesRetrieved.getNumFound());
        response.setSearchResults(responseContent);
    } catch (SensefyException e) {
        processAPIException(response, e, "[Show Entity By Doc Id] Error - stacktrace follows", 400,
                ComponentCode.QUERY);
    } catch (MalformedURLException e) {
        processAPIException(response, e, "[Show Entity By Doc Id] Error - stacktrace follows", 500,
                ComponentCode.QUERY);
    } catch (SolrServerException e) {
        processAPIException(response, e, "[Show Entity By Doc Id] Error - stacktrace follows", 500,
                ComponentCode.SOLR);
    }
    Long elapsedTime = System.currentTimeMillis() - startTime;
    response.setTime(elapsedTime);
    return response;
}
From source file:org.zaizi.sensefy.api.service.SolrSmartAutoCompleteService.java
License:Open Source License
/**
 * This part is retrieving Infix Title suggestions. It's using a specific
 * request handler.
 *
 * @param numberOfSuggestions
 * @param termToComplete
 * @param solrCore
 * @throws SolrServerException
 */
private List<SolrDocument> getTitleSuggestions(int numberOfSuggestions, String termToComplete,
        SolrServer solrCore, Principal user, boolean security)
        throws SolrServerException, SolrException, IOException {
    SensefyUser sensefyUser = SensefyUserMapper.getSensefyUserFromPrincipal(user);
    SolrDocumentList titleSuggestions = new SolrDocumentList();
    SolrQuery titleSuggestionsQuery = new SolrQuery("\"" + termToComplete + "\"");
    titleSuggestionsQuery.setRequestHandler("/suggest");
    titleSuggestionsQuery.setRows(numberOfSuggestions);
    if (security) {
        String filterQueryACLs = SecurityQueryBuilder.getSecurityFilterQuery(sensefyUser);
        titleSuggestionsQuery.setFilterQueries(filterQueryACLs);
    }
    QueryResponse titleSuggestionResponse;
    titleSuggestionResponse = solrCore.query(titleSuggestionsQuery);
    titleSuggestions = titleSuggestionResponse.getResults();
    if (titleSuggestions.getNumFound() == 0) {
        this.buildSpellcheckQuery(titleSuggestionsQuery, termToComplete);
        titleSuggestionResponse = solrCore.query(titleSuggestionsQuery);
        titleSuggestions = titleSuggestionResponse.getResults();
    }
    this.filterDocumentSuggestions(titleSuggestions, titleSuggestionResponse.getHighlighting());
    return titleSuggestions;
}
From source file:ru.org.linux.search.SearchController.java
License:Apache License
@RequestMapping(value = "/search.jsp", method = { RequestMethod.GET, RequestMethod.HEAD }) public String search(HttpServletRequest request, Model model, @ModelAttribute("query") SearchRequest query, BindingResult bindingResult) throws Exception { Map<String, Object> params = model.asMap(); boolean initial = query.isInitial(); if (!initial && !bindingResult.hasErrors()) { if (!query.getQ().equals(query.getOldQ())) { query.setSection(null);/*www . j a va 2 s . c o m*/ query.setGroup(0); } query.setOldQ(query.getQ()); if (query.getQ().trim().isEmpty()) { return "redirect:/search.jsp"; } SearchViewer sv = new SearchViewer(query); if (query.getGroup() != 0) { Group group = groupDao.getGroup(query.getGroup()); if ("wiki".equals(query.getSection()) || group.getSectionId() != Integer.valueOf(query.getSection())) { query.setGroup(0); } } QueryResponse response = sv.performSearch(solrServer); long current = System.currentTimeMillis(); SolrDocumentList list = response.getResults(); Collection<SearchItem> res = new ArrayList<>(list.size()); for (SolrDocument doc : list) { res.add(new SearchItem(doc, userDao, msgbaseDao, lorCodeService, request.isSecure())); } FacetField sectionFacet = response.getFacetField("section"); if (sectionFacet != null && sectionFacet.getValueCount() > 1) { params.put("sectionFacet", buildSectionFacet(sectionFacet)); } else if (sectionFacet != null && sectionFacet.getValueCount() == 1) { Count first = sectionFacet.getValues().get(0); query.setSection(first.getName()); } FacetField groupFacet = response.getFacetField("group_id"); if (groupFacet != null && groupFacet.getValueCount() > 1) { params.put("groupFacet", buildGroupFacet(query.getSection(), groupFacet)); } long time = System.currentTimeMillis() - current; params.put("result", res); params.put("searchTime", response.getElapsedTime()); params.put("numFound", list.getNumFound()); if (list.getNumFound() > query.getOffset() + SearchViewer.SEARCH_ROWS) { params.put("nextLink", "/search.jsp?" + query.getQuery(query.getOffset() + SearchViewer.SEARCH_ROWS)); } if (query.getOffset() - SearchViewer.SEARCH_ROWS >= 0) { params.put("prevLink", "/search.jsp?" + query.getQuery(query.getOffset() - SearchViewer.SEARCH_ROWS)); } params.put("time", time); } return "search"; }
From source file:ru.org.linux.spring.SearchController.java
License:Apache License
@RequestMapping(value = "/search.jsp", method = { RequestMethod.GET, RequestMethod.HEAD }) public ModelAndView search(@RequestParam(value = "q", defaultValue = "") String q, @RequestParam(value = "include", required = false) String includeString, @RequestParam(value = "noinclude_title", defaultValue = "false") boolean noinclude_title, @RequestParam(value = "date", required = false) String dateString, @RequestParam(value = "section", required = false) Integer section, @RequestParam(value = "sort", required = false) Integer sort, @RequestParam(value = "username", required = false) String username, @RequestParam(value = "usertopic", defaultValue = "false") boolean usertopic) throws Exception { Map<String, Object> params = new HashMap<String, Object>(); boolean initial = q.isEmpty(); params.put("initial", initial); params.put("usertopic", usertopic); params.put("q", q); int include = parseInclude(includeString); params.put("include", include); params.put("noinclude_title", noinclude_title); SearchViewer.SearchInterval date = parseInterval(dateString); params.put("date", date); if (section == null) { section = 0;//www. j av a2 s . com } params.put("section", section); if (sort == null) { sort = SearchViewer.SORT_R; } params.put("sort", sort); if (username == null) { username = ""; } params.put("username", username); if (!initial) { SearchViewer sv = new SearchViewer(q); sv.setInterval(date); sv.setInclude(include, noinclude_title); sv.setSection(section); sv.setSort(sort); sv.setUser(username); sv.setUserTopic(usertopic); Connection db = null; try { long current = System.currentTimeMillis(); db = LorDataSource.getConnection(); QueryResponse response = sv.performSearch(solrServer, db); SolrDocumentList list = response.getResults(); List<SearchItem> res = new ArrayList<SearchItem>(list.size()); for (SolrDocument doc : list) { res.add(new SearchItem(db, doc)); } long time = System.currentTimeMillis() - current; params.put("result", res); params.put("searchTime", response.getElapsedTime()); params.put("numFound", list.getNumFound()); params.put("time", time); } finally { JdbcUtils.closeConnection(db); } } return new ModelAndView("search", params); }
From source file:se.nrm.dina.inventory.client.solr.SolrClient.java
private TaxonStatisticData buildTaxonStatisticDataBySolrDocuments(SolrDocumentList documents) {
    SolrDocument document = documents.get(0);
    StringBuilder sb = new StringBuilder();
    String fullName = (String) document.getFieldValue("ftx");
    sb.append("+ftx:");
    sb.append(fullName.trim());
    sb.append(" +cln:");
    sb.append(COLLECTION_ID);
    Integer[] counts = addTrapAndEventCount(sb.toString().trim());
    return new TaxonStatisticData(fullName, fullName, (int) documents.getNumFound(), counts[0], counts[1]);
}
From source file:se.nrm.dina.naturarv.portal.solr.SolrSearch.java
private CollectionData buildCollectionData(NamedList d) {
    SolrDocumentList sdl = (SolrDocumentList) d.get("doclist");
    SolrDocument document = sdl.get(0);
    String code = (String) d.getAll("groupValue").get(0);
    int totalDocsInThisGroup = (int) sdl.getNumFound();
    return new CollectionData(code, (String) document.getFieldValue("clnm"), totalDocsInThisGroup);
}
From source file:se.nrm.dina.naturarv.portal.solr.SolrSearch.java
/**
 * getLastTenYearsRegistedData - get registered data from last ten years.
 *
 * @param fromYear - the start year
 * @param toYear - the current year
 * @return Map<String, Integer>
 */
public Map<String, Integer> getLastTenYearsRegistereddData(int fromYear, int toYear, String collectionCode) {
    log.info("getLastTenYearsRegistedData : {} - {}", fromYear, toYear);
    buildText(collectionCode);
    Map<String, Integer> map = new HashMap<>();
    int accumlateCount = getPreviousRegisteredData(fromYear, collectionCode);
    map.put(String.valueOf(fromYear), accumlateCount);
    String searchText;
    for (int i = fromYear + 1; i <= toYear; i++) {
        searchText = "ctdyr:" + i;
        query = new SolrQuery();
        query.setQuery(searchText);
        try {
            SolrDocumentList documents = client.query(query).getResults();
            accumlateCount += (int) documents.getNumFound();
            map.put(String.valueOf(i), accumlateCount);
        } catch (SolrServerException | IOException ex) {
            log.warn(ex.getMessage());
        }
    }
    return map;
}
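Note that the three examples above cast the long returned by getNumFound() to int. A minimal sketch of an alternative, assuming counts are expected to fit in an int and that overflow should fail loudly rather than truncate silently:

// getNumFound() returns a long; a plain (int) cast silently truncates very large counts.
// Math.toIntExact (Java 8+) throws ArithmeticException on overflow instead of wrapping.
long numFound = documents.getNumFound();
int total = Math.toIntExact(numFound);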
From source file:se.simonsoft.cms.indexing.xml.HandlerXmlIntegrationTest.java
License:Apache License
@Test
public void testTinyInline() throws Exception {
    FilexmlSourceClasspath repoSource = new FilexmlSourceClasspath(
            "se/simonsoft/cms/indexing/xml/datasets/tiny-inline");
    CmsRepositoryFilexml repo = new CmsRepositoryFilexml("http://localtesthost/svn/tiny-inline", repoSource);
    FilexmlRepositoryReadonly filexml = new FilexmlRepositoryReadonly(repo);
    indexing.enable(new ReposTestBackendFilexml(filexml));

    SolrServer reposxml = indexing.getCore("reposxml");
    SolrDocumentList x1 = reposxml.query(new SolrQuery("*:*")).getResults();
    assertEquals(4, x1.getNumFound());
    assertEquals("should get 'repoid' from repositem", "localtesthost/svn/tiny-inline",
            x1.get(0).getFieldValue("repoid"));

    SolrServer repositem = indexing.getCore("repositem");
    SolrDocumentList flagged = repositem.query(new SolrQuery("flag:hasxml")).getResults();
    assertEquals("Documents that got added to reposxml should be flagged 'hasxml' in repositem", 1,
            flagged.getNumFound());
    Collection<Object> flags = flagged.get(0).getFieldValues("flag");
    assertFalse("Flag - not empty string", flagged.get(0).getFieldValues("flag").contains(""));
    assertTrue("Flag 'hasxml'", flagged.get(0).getFieldValues("flag").contains("hasxml"));
    assertFalse("Flag 'hasridduplicate'", flagged.get(0).getFieldValues("flag").contains("hasridduplicate"));
    assertEquals("Issue with duplicate flag?", 1, flags.size());

    // Statistics in repositem schema
    assertEquals("Should count elements", 4L, flagged.get(0).getFieldValue("count_elements"));
    assertEquals("Should count words", 3L, flagged.get(0).getFieldValue("count_words_text"));

    // Reposxml
    assertEquals("Should index all elements", 4, reposxml.query(new SolrQuery("*:*")).getResults().size());
    assertEquals("document/root element name", "doc", x1.get(3).getFieldValue("name"));
    assertEquals("word count identical to repositem (document element)", 3,
            x1.get(3).getFieldValue("words_text"));
    // The "typename" is quite debatable because the test document has an incorrect DOCTYPE declaration
    // (root element is "doc" not "document").
    assertEquals("should set root element name", "document", x1.get(0).getFieldValue("typename"));
    assertEquals("should set systemid", "techdoc.dtd", x1.get(0).getFieldValue("typesystem"));
    assertEquals("should set publicid", "-//Simonsoft//DTD TechDoc Base V1.0 Techdoc//EN",
            x1.get(0).getFieldValue("typepublic"));
    assertEquals("should extract source", "<elem>text</elem>", x1.get(0).getFieldValue("source"));
}