List of usage examples for org.apache.solr.common.SolrDocumentList#getNumFound()
public long getNumFound()
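Before the project examples below, here is a minimal standalone sketch of the typical pattern: run a query, read the current page of documents from getResults(), and use getNumFound() for the total hit count, which is usually larger than the page's size(). The URL, core name, and the choice of SolrJ's HttpSolrClient builder are illustrative assumptions only; adjust them to the local installation. The same numFound-versus-size() distinction drives the paging and counting logic in the snippets that follow.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;

public class NumFoundExample {
    public static void main(String[] args) throws Exception {
        // Placeholder URL and core name; adjust to your Solr installation.
        try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
            SolrQuery query = new SolrQuery("*:*");
            query.setStart(0);
            query.setRows(10); // request only the first page of 10 documents

            QueryResponse response = client.query(query);
            SolrDocumentList results = response.getResults();

            // Total number of documents matching the query across the whole index ...
            long total = results.getNumFound();
            // ... versus the number of documents actually returned in this page.
            int returned = results.size();

            System.out.println("numFound=" + total + ", returned=" + returned
                    + ", start=" + results.getStart());
        }
    }
}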
From source file:org.springframework.data.solr.core.SolrTemplateTest.java
License:Apache License
@Test
public void testCountWhenPagingSet() throws SolrServerException {
    ArgumentCaptor<SolrQuery> captor = ArgumentCaptor.forClass(SolrQuery.class);

    QueryResponse responseMock = Mockito.mock(QueryResponse.class);
    SolrDocumentList resultList = new SolrDocumentList();
    resultList.setNumFound(10);
    Mockito.when(responseMock.getResults()).thenReturn(resultList);
    Mockito.when(solrServerMock.query(Mockito.any(SolrQuery.class))).thenReturn(responseMock);

    Query query = new SimpleQuery(new Criteria("field_1").is("value1"));
    query.setPageRequest(new PageRequest(0, 5));

    long result = solrTemplate.count(query);
    Assert.assertEquals(resultList.getNumFound(), result);

    Mockito.verify(solrServerMock, Mockito.times(1)).query(captor.capture());
    Assert.assertEquals(Integer.valueOf(0), captor.getValue().getStart());
    Assert.assertEquals(Integer.valueOf(0), captor.getValue().getRows());
}
From source file:org.string_db.psicquic.index.SolrServerConnection.java
License:Apache License
@Override
public Long countIndexedDocuments() {
    try {
        final SolrDocumentList results = solrServer.query(new SolrQuery("*:*")).getResults();
        return results.getNumFound();
    } catch (SolrServerException e) {
        throw new RuntimeException(e);
    }
}
From source file:org.swissbib.sru.targets.solr.SolrStringRepresentation.java
License:Open Source License
@Override
public Representation getRepresentation() {
    SolrDocumentList result = qR.getResults();
    startPage = result.getStart();
    long incrementalStart = result.getStart();
    Iterator<SolrDocument> iterator = qR.getResults().iterator();

    String uH = queryParams.getFirstValue("x-info-10-get-holdings");
    boolean useHoldings = uH != null ? Boolean.valueOf(uH) : false;

    StringBuilder sB = new StringBuilder();
    sB.append(schema != RequestedSchema.jsonswissbib
            ? this.createSRUXMLHeader(String.valueOf(qR.getResults().getNumFound()))
            : this.createSRUJsonHeader(String.valueOf(qR.getResults().getNumFound()),
                    String.valueOf(qR.getResults().getStart())));

    while (iterator.hasNext()) {
        SolrDocument doc = iterator.next();
        switch (schema) {
        case dcswissbib:
            sB.append(createDCswissbib(doc));
            break;
        case dcOCLC:
            sB.append(createDCoclc(doc));
            break;
        case marcswissbib:
            sB.append(createMarcNoNS(doc, incrementalStart, useHoldings));
            break;
        case marcOCLC:
            sB.append(createMarcNS(doc, incrementalStart, useHoldings));
            break;
        case jsonswissbib:
            sB.append(createJson(doc, incrementalStart, useHoldings)).append(", ");
            break;
        default:
        }
        incrementalStart++;
    }

    if (schema == RequestedSchema.jsonswissbib && qR.getResults().getNumFound() > 0
            && qR.getResults().getStart() < qR.getResults().getNumFound()) {
        String t = sB.toString();
        // remove the trailing comma separator, only in the case of JSON entities
        sB = new StringBuilder(t.substring(0, t.length() - 2));
    }

    String nextPage = (startPage + result.size() < result.getNumFound() - 1)
            ? String.valueOf(startPage + result.size())
            : null;

    sB.append(schema != RequestedSchema.jsonswissbib
            ? this.createSRUXMLFooter(String.valueOf(result.size()), nextPage)
            : this.createSRUJsonFooter(String.valueOf(result.size()), nextPage));

    MediaType mt = schema != RequestedSchema.jsonswissbib ? MediaType.TEXT_XML : MediaType.APPLICATION_JSON;
    return new StringRepresentation(sB.toString(), mt);
}
From source file:org.teiid.translator.solr.SolrQueryExecution.java
License:Open Source License
public void nextBatch() throws TranslatorException {
    SolrQuery query = this.visitor.getSolrQuery();
    if (!this.visitor.isLimitInUse()) {
        query.setStart(this.offset);
        query.setRows(this.executionContext.getBatchSize());
    }

    QueryResponse queryResponse = connection.query(this.visitor.getSolrQuery());
    SolrDocumentList docList = queryResponse.getResults();
    this.resultSize = docList.getNumFound();
    this.resultsItr = docList.iterator();
}
From source file:org.vootoo.client.netty.NettySolrClientTest.java
License:Apache License
protected void assertIdResult(QueryResponse queryResponse, String idValue) {
    SolrDocumentList results = queryResponse.getResults();
    Assert.assertEquals(results.getNumFound(), 1);
    Assert.assertEquals(idValue, results.get(0).getFieldValue("id"));
}
From source file:org.wso2.carbon.registry.indexing.IndexingHandler.java
License:Open Source License
@Override
public Collection searchContent(RequestContext requestContext) throws RegistryException {
    String searchQuery = requestContext.getKeywords();
    UserRegistry registry = CurrentSession.getUserRegistry();
    SolrClient client;
    List<String> filteredResults = new ArrayList<String>();
    try {
        client = SolrClient.getInstance();
        SolrDocumentList results = client.query(searchQuery, CurrentSession.getTenantId());
        if (log.isDebugEnabled()) {
            log.debug("result received " + results);
        }
        for (int i = 0; i < results.getNumFound(); i++) {
            SolrDocument solrDocument = results.get(i);
            String path = getPathFromId((String) solrDocument.getFirstValue("id"));
            //if (AuthorizationUtils.authorize(path, ActionConstants.GET)) {
            if (isAuthorized(registry, path, ActionConstants.GET)) {
                filteredResults.add(path);
            }
        }
        if (log.isDebugEnabled()) {
            log.debug("filtered results " + filteredResults + " for user " + registry.getUserName());
        }
    } catch (IndexerException e) {
        log.error("Unable to do Content Search", e);
    }
    String[] resourcePaths = filteredResults.toArray(new String[filteredResults.size()]);
    Collection searchResults = new CollectionImpl();
    searchResults.setContent(resourcePaths);
    return searchResults;
}
From source file:org.xwiki.query.solr.internal.SolrQueryExecutor.java
License:Open Source License
/**
 * Filter out results from the response that the current user does not have access to view.
 *
 * @param response the Solr response to filter
 */
protected void filterResponse(QueryResponse response) {
    SolrDocumentList results = response.getResults();
    long numFound = results.getNumFound();

    // Since we are modifying the results collection, we need to iterate over its copy.
    for (SolrDocument result : new ArrayList<SolrDocument>(results)) {
        try {
            DocumentReference resultDocumentReference = new DocumentReference(
                (String) result.get(FieldUtils.WIKI), (String) result.get(FieldUtils.SPACE),
                (String) result.get(FieldUtils.NAME));

            if (!documentAccessBridge.exists(resultDocumentReference)
                || !documentAccessBridge.isDocumentViewable(resultDocumentReference)) {
                // Remove the current incompatible result.
                results.remove(result);

                // Decrement the number of results.
                numFound--;

                // FIXME: We should update maxScore as well when removing the top scored item. How do we do that?
                // Sorting based on score might be a not so expensive option.

                // FIXME: What about highlighting, facets and all the other data inside the QueryResponse?
            }
        } catch (Exception e) {
            this.logger.warn("Skipping bad result: {}", result, e);
        }
    }

    // Update the new number of results, excluding the filtered ones.
    if (numFound < 0) {
        // Lower bound guard for the total number of results.
        numFound = 0;
    }
    results.setNumFound(numFound);
}
From source file:org.xwiki.repository.internal.resources.SearchRESTResource.java
License:Open Source License
@POST
public ExtensionsSearchResult searchPost(ExtensionQuery query) throws QueryException {
    ExtensionsSearchResult result = this.extensionObjectFactory.createExtensionsSearchResult();

    Query solrQuery = this.queryManager.createQuery(toSolrStatement(query.getQuery()), "solr");

    // /////////////////
    // Search only in the current wiki
    // /////////////////

    solrQuery.setWiki(this.xcontextProvider.get().getWikiId());

    // /////////////////
    // Limit and offset
    // /////////////////

    solrQuery.setLimit(query.getLimit());
    solrQuery.setOffset(query.getOffset());

    // /////////////////
    // Rights
    // /////////////////

    if (query instanceof SecureQuery) {
        // Show only what the current user has the right to see
        ((SecureQuery) query).checkCurrentUser(true);
    }

    // /////////////////
    // Boost
    // /////////////////

    solrQuery.bindValue("qf", DEFAULT_BOOST);

    // /////////////////
    // Fields
    // /////////////////

    solrQuery.bindValue("fl", DEFAULT_FL);

    // /////////////////
    // Ordering
    // /////////////////

    List<String> sortClauses = new ArrayList<String>(query.getSortClauses().size() + 1);
    for (SortClause sortClause : query.getSortClauses()) {
        String solrField = XWikiRepositoryModel.toSolrField(sortClause.getField());
        if (solrField != null) {
            sortClauses.add(solrField + ' ' + sortClause.getOrder().name().toLowerCase());
        }
    }
    // Sort by score by default
    sortClauses.add("score desc");
    solrQuery.bindValue("sort", sortClauses);

    // /////////////////
    // Filtering
    // /////////////////

    List<String> fq = new ArrayList<String>(query.getFilters().size() + 1);
    // TODO: should we filter only on the current wiki?
    // We want only valid extensions documents
    fq.add(XWikiRepositoryModel.SOLRPROP_EXTENSION_VALIDEXTENSION + ":true");
    // Request filters
    for (Filter filter : query.getFilters()) {
        String solrField = XWikiRepositoryModel.toSolrField(filter.getField());
        if (solrField != null) {
            StringBuilder builder = new StringBuilder();
            builder.append(solrField);
            builder.append(':');
            if (filter.getComparison() == COMPARISON.EQUAL) {
                builder.append(filter.getValueString());
            } else {
                builder.append('*' + filter.getValueString() + '*');
            }
            fq.add(builder.toString());
        }
    }
    solrQuery.bindValue("fq", fq);

    // /////////////////
    // Execute
    // /////////////////

    QueryResponse response = (QueryResponse) solrQuery.execute().get(0);

    SolrDocumentList documents = response.getResults();

    result.setOffset((int) documents.getStart());
    result.setTotalHits((int) documents.getNumFound());

    // 0 means unset for Solr, but we want it to be interpreted literally to stay consistent with previous behavior
    // and with the behavior of other searches
    if (query.getLimit() != 0) {
        for (SolrDocument document : documents) {
            result.getExtensions().add(createExtensionVersionFromSolrDocument(document));
        }
    }

    return result;
}
From source file:org.xwiki.search.solr.internal.job.IndexerJob.java
License:Open Source License
/**
 * Remove Solr documents not in the database anymore.
 *
 * @throws SolrIndexerException when failing to clean the Solr index
 * @throws SolrServerException when failing to clean the Solr index
 * @throws IllegalArgumentException when failing to clean the Solr index
 */
private void removeMissing() throws SolrIndexerException, SolrServerException, IllegalArgumentException {
    this.logger.info("Remove Solr documents not in the database anymore");

    SolrInstance solrInstance = this.solrInstanceProvider.get();

    // Clean existing index
    SolrQuery solrQuery = new SolrQuery(this.solrResolver.getQuery(getRequest().getRootReference()));
    solrQuery.setFields(FieldUtils.NAME, FieldUtils.SPACE, FieldUtils.WIKI, FieldUtils.DOCUMENT_LOCALE);
    solrQuery.addFilterQuery(FieldUtils.TYPE + ':' + EntityType.DOCUMENT.name());

    // TODO: be nicer with the memory when there is a lot of indexed documents and do smaller batches or stream
    // the results
    QueryResponse response = solrInstance.query(solrQuery);

    SolrDocumentList results = response.getResults();

    notifyPushLevelProgress((int) results.getNumFound());

    XWikiContext xcontext = xcontextProvider.get();

    try {
        for (SolrDocument solrDocument : results) {
            DocumentReference reference = createDocumentReference((String) solrDocument.get(FieldUtils.WIKI),
                (String) solrDocument.get(FieldUtils.SPACE), (String) solrDocument.get(FieldUtils.NAME),
                (String) solrDocument.get(FieldUtils.DOCUMENT_LOCALE));
            if (!xcontext.getWiki().exists(reference, xcontext)) {
                this.indexer.delete(reference, true);
            }

            notifyStepPropress();
        }
    } finally {
        notifyPopLevelProgress();
    }
}
From source file:org.xwiki.search.solr.internal.job.SolrDocumentIteratorTest.java
License:Open Source License
@Test
public void size() throws Exception {
    SolrDocumentList results = mock(SolrDocumentList.class);
    when(results.getNumFound()).thenReturn(12L);

    QueryResponse response = mock(QueryResponse.class);
    when(response.getResults()).thenReturn(results);

    when(solr.query(any(SolrQuery.class))).thenReturn(response);

    DocumentIterator<String> iterator = mocker.getComponentUnderTest();

    WikiReference rootReference = new WikiReference("wiki");
    iterator.setRootReference(rootReference);

    assertEquals(12, iterator.size());

    SolrReferenceResolver resolver = mocker.getInstance(SolrReferenceResolver.class);
    verify(resolver).getQuery(rootReference);
}