List of usage examples for org.apache.solr.common.SolrDocumentList.setNumFound
public void setNumFound(long numFound)
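Before the source-file examples, here is a minimal, self-contained sketch of the call (not taken from any of the projects below; the "id" field name and the counts are made up for illustration). setNumFound records the total number of matching documents, which can be larger than the number of documents actually held in the list, since the list typically carries only one page of results.

import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;

public class SetNumFoundExample {
    public static void main(String[] args) {
        // Build one page of results by hand, e.g. when stubbing a Solr response in a test.
        SolrDocumentList results = new SolrDocumentList();

        SolrDocument doc = new SolrDocument();
        doc.setField("id", "doc-1"); // "id" is a hypothetical field name
        results.add(doc);

        // setNumFound stores the total hit count; it may exceed results.size(),
        // because the list usually holds only the current page of documents.
        results.setNumFound(42L);
        results.setStart(0L);
        results.setMaxScore(1.0f);

        System.out.println("page size = " + results.size() + ", numFound = " + results.getNumFound());
    }
}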
From source file:edu.cornell.mannlib.vitro.webapp.searchengine.solr.SolrSearchResultDocumentList.java
License:Open Source License
public SolrSearchResultDocumentList(SolrDocumentList solrDocs) {
    if (solrDocs == null) {
        SolrDocumentList list = new SolrDocumentList();
        list.setStart(0L);
        list.setNumFound(0L);
        list.setMaxScore(0.0F);
        this.solrDocs = list;
    } else {
        this.solrDocs = solrDocs;
    }
}
From source file:fr.mcc.ginco.tests.solr.SearcherServiceUtilTest.java
License:CeCILL license
@Test
public void testGetSearchResultList() {
    SolrDocumentList fakeDocList = new SolrDocumentList();
    SolrDocument fakeDoc = new SolrDocument();
    fakeDoc.addField(SolrField.IDENTIFIER, "id1");
    fakeDoc.addField(SolrField.LEXICALVALUE, "lex1");
    fakeDoc.addField(SolrField.THESAURUSID, "th1");
    fakeDoc.addField(SolrField.THESAURUSTITLE, "title1");
    fakeDoc.addField(SolrField.TYPE, ThesaurusTerm.class.getSimpleName());
    fakeDoc.addField(SolrField.EXT_TYPE, ExtEntityType.TERM_NON_PREF);
    fakeDoc.addField(SolrField.MODIFIED, DateUtil.dateFromString("2013-11-21 18:19:47"));
    fakeDoc.addField(SolrField.CREATED, DateUtil.dateFromString("2013-11-21 15:51:00"));
    fakeDoc.addField(SolrField.STATUS, 0);
    fakeDoc.addField(SolrField.LANGUAGE, "lang1");
    fakeDocList.add(fakeDoc);
    fakeDocList.setNumFound(1);

    SearchResultList searchResultList = searcherServiceUtil.getSearchResultList(fakeDocList);

    Assert.assertEquals(1, searchResultList.getNumFound());
    SearchResult searchResult = searchResultList.get(0);
    Assert.assertEquals(searchResult.getIdentifier(), "id1");
    Assert.assertEquals(searchResult.getLexicalValue(), "lex1");
    Assert.assertEquals(searchResult.getThesaurusId(), "th1");
    Assert.assertEquals(searchResult.getThesaurusTitle(), "title1");
    Assert.assertEquals(searchResult.getType(), ThesaurusTerm.class.getSimpleName());
    Assert.assertEquals(searchResult.getTypeExt(), String.valueOf(ExtEntityType.TERM_NON_PREF));
    Assert.assertEquals(searchResult.getModified(), "2013-11-21 18:19:47");
    Assert.assertEquals(searchResult.getCreated(), "2013-11-21 15:51:00");
    Assert.assertEquals(searchResult.getStatus(), Integer.valueOf(0));
    Assert.assertEquals(searchResult.getLanguages().get(0), "lang1");
}
From source file:net.yacy.cora.federate.solr.connector.EmbeddedSolrConnector.java
License:Open Source License
/**
 * Conversion from a SolrQueryResponse (which is a Solr-internal data format) to a
 * SolrDocumentList (which is a solrj format). The conversion is done inside the solrj API
 * using the BinaryResponseWriter and a very complex unfolding process via
 * org.apache.solr.common.util.JavaBinCodec.marshal.
 * @param req
 * @param rsp
 * @return
 */
public SolrDocumentList SolrQueryResponse2SolrDocumentList(final SolrQueryRequest req,
        final SolrQueryResponse rsp) {
    SolrDocumentList sdl = new SolrDocumentList();
    NamedList<?> nl = rsp.getValues();
    ResultContext resultContext = (ResultContext) nl.get("response");
    DocList response = resultContext == null
            ? new DocSlice(0, 0, new int[0], new float[0], 0, 0.0f)
            : resultContext.docs;
    sdl.setNumFound(response == null ? 0 : response.matches());
    sdl.setStart(response == null ? 0 : response.offset());
    String originalName = Thread.currentThread().getName();
    if (response != null) {
        try {
            SolrIndexSearcher searcher = req.getSearcher();
            final int responseCount = response.size();
            DocIterator iterator = response.iterator();
            for (int i = 0; i < responseCount; i++) {
                int docid = iterator.nextDoc();
                Thread.currentThread()
                        .setName("EmbeddedSolrConnector.SolrQueryResponse2SolrDocumentList: " + docid);
                Document responsedoc = searcher.doc(docid, (Set<String>) null);
                SolrDocument sordoc = doc2SolrDoc(responsedoc);
                sdl.add(sordoc);
            }
        } catch (IOException e) {
            ConcurrentLog.logException(e);
        }
    }
    Thread.currentThread().setName(originalName);
    return sdl;
}
From source file:net.yacy.cora.federate.solr.logic.AbstractTerm.java
License:Open Source License
/**
 * Create a hit subset of the given SolrDocumentList according to the conjunction defined
 * in this object.
 * @param sdl the SolrDocumentList
 * @return a manufactured subset-clone of the given SolrDocumentList containing the documents that match the term as given in this object
 */
@Override
public SolrDocumentList apply(SolrDocumentList sdl) {
    SolrDocumentList r = new SolrDocumentList();
    int numFound = 0;
    for (SolrDocument d : sdl) {
        if (matches(d)) {
            r.add(d);
            numFound++;
        }
    }
    r.setNumFound(numFound);
    return r;
}
From source file:opennlp.tools.similarity.apps.solr.IterativeSearchRequestHandler.java
License:Apache License
public void handleRequestBody1(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    // extract params from request
    SolrParams params = req.getParams();
    String q = params.get(CommonParams.Q);
    String[] fqs = params.getParams(CommonParams.FQ);
    int start = 0;
    try {
        start = Integer.parseInt(params.get(CommonParams.START));
    } catch (Exception e) {
        /* default */
    }
    int rows = 0;
    try {
        rows = Integer.parseInt(params.get(CommonParams.ROWS));
    } catch (Exception e) {
        /* default */
    }
    //SolrPluginUtils.setReturnFields(req, rsp);

    // build initial data structures
    SolrDocumentList results = new SolrDocumentList();
    SolrIndexSearcher searcher = req.getSearcher();
    Map<String, SchemaField> fields = req.getSchema().getFields();
    int ndocs = start + rows;
    Filter filter = buildFilter(fqs, req);
    Set<Integer> alreadyFound = new HashSet<Integer>();

    // invoke the various sub-handlers in turn and return results
    doSearch1(results, searcher, q, filter, ndocs, req, fields, alreadyFound);
    // ... more sub-handler calls here ...

    // build and write response
    float maxScore = 0.0F;
    int numFound = 0;
    List<SolrDocument> slice = new ArrayList<SolrDocument>();
    for (Iterator<SolrDocument> it = results.iterator(); it.hasNext();) {
        SolrDocument sdoc = it.next();
        Float score = (Float) sdoc.getFieldValue("score");
        if (maxScore < score) {
            maxScore = score;
        }
        if (numFound >= start && numFound < start + rows) {
            slice.add(sdoc);
        }
        numFound++;
    }
    results.clear();
    results.addAll(slice);
    results.setNumFound(numFound);
    results.setMaxScore(maxScore);
    results.setStart(start);
    rsp.add("response", results);
}
From source file:org.apache.blur.slur.BlurResultHelper.java
License:Apache License
private static void convertMetadata(BlurResults results, SolrDocumentList docResults) {
    docResults.setNumFound(results.getTotalResults());
    docResults.setStart(results.getQuery().getStart());
}
From source file:org.apache.jackrabbit.oak.plugins.index.solr.query.LMSEstimatorTest.java
License:Apache License
@Test
public void testMultipleUpdates() throws Exception {
    LMSEstimator lmsEstimator = new LMSEstimator();
    Filter filter = mock(Filter.class);
    FullTextExpression fte = new FullTextTerm("foo", "bar", false, false, "");
    when(filter.getFullTextConstraint()).thenReturn(fte);
    SolrDocumentList docs = new SolrDocumentList();
    lmsEstimator.update(filter, docs);

    long actualCount = 10;
    docs.setNumFound(actualCount);

    long estimate = lmsEstimator.estimate(filter);
    long diff = actualCount - estimate;

    // update causes weight adjustment
    lmsEstimator.update(filter, docs);
    long estimate2 = lmsEstimator.estimate(filter);
    long diff2 = actualCount - estimate2;
    assertTrue(diff2 < diff); // new estimate is more accurate than the previous one

    // update doesn't cause weight adjustments, therefore the estimate stays unchanged
    lmsEstimator.update(filter, docs);
    long estimate3 = lmsEstimator.estimate(filter);
    assertEquals(estimate3, estimate2);
}
From source file:org.apache.metron.solr.dao.SolrSearchDaoTest.java
License:Apache License
@Test
public void buildSearchResponseShouldReturnSearchResponse() {
    SearchRequest searchRequest = new SearchRequest();
    searchRequest.setFields(Collections.singletonList("id"));
    searchRequest.setFacetFields(Collections.singletonList("facetField"));
    QueryResponse queryResponse = mock(QueryResponse.class);

    SolrDocument solrDocument1 = new SolrDocument();
    solrDocument1.setField(Constants.GUID, "id1");
    solrDocument1.setField("id", "id1");
    SolrDocument solrDocument2 = new SolrDocument();
    solrDocument2.setField(Constants.GUID, "id2");
    solrDocument2.setField("id", "id2");

    solrSearchDao = spy(new SolrSearchDao(client, accessConfig));
    SolrDocumentList solrDocumentList = new SolrDocumentList();
    solrDocumentList.add(solrDocument1);
    solrDocumentList.add(solrDocument2);
    solrDocumentList.setNumFound(100);
    when(queryResponse.getResults()).thenReturn(solrDocumentList);

    SearchResult searchResult1 = new SearchResult();
    searchResult1.setId("id1");
    HashMap<String, Object> source1 = new HashMap<>();
    source1.put("id", "id1");
    searchResult1.setSource(source1);
    SearchResult searchResult2 = new SearchResult();
    searchResult2.setId("id2");
    HashMap<String, Object> source2 = new HashMap<>();
    source2.put("id", "id2");
    searchResult2.setSource(source2);

    Map<String, Map<String, Long>> facetCounts = new HashMap<String, Map<String, Long>>() {
        {
            put("id", new HashMap<String, Long>() {
                {
                    put("id1", 1L);
                    put("id2", 1L);
                }
            });
        }
    };
    doReturn(facetCounts).when(solrSearchDao).getFacetCounts(Collections.singletonList("facetField"),
            queryResponse);

    SearchResponse expectedSearchResponse = new SearchResponse();
    SearchResult expectedSearchResult1 = new SearchResult();
    expectedSearchResult1.setId("id1");
    expectedSearchResult1.setSource(source1);
    SearchResult expectedSearchResult2 = new SearchResult();
    expectedSearchResult2.setId("id2");
    expectedSearchResult2.setSource(source2);
    expectedSearchResponse.setResults(Arrays.asList(expectedSearchResult1, expectedSearchResult2));
    expectedSearchResponse.setTotal(100);
    expectedSearchResponse.setFacetCounts(facetCounts);

    assertEquals(expectedSearchResponse, solrSearchDao.buildSearchResponse(searchRequest, queryResponse));
}
From source file:org.opencms.search.solr.CmsSolrIndex.java
License:Open Source License
/**
 * Performs the actual search.<p>
 *
 * @param cms the current OpenCms context
 * @param ignoreMaxRows <code>true</code> to return all requested rows, <code>false</code> to use max rows
 * @param query the OpenCms Solr query
 * @param response the servlet response to write the query result to, may also be <code>null</code>
 * @param ignoreSearchExclude if set to false, only contents with search_exclude unset or "false" will be found - typical for the non-gallery case
 * @param filter the resource filter to use
 *
 * @return the found documents
 *
 * @throws CmsSearchException if something goes wrong
 *
 * @see #search(CmsObject, CmsSolrQuery, boolean)
 */
@SuppressWarnings("unchecked")
public CmsSolrResultList search(CmsObject cms, final CmsSolrQuery query, boolean ignoreMaxRows,
        ServletResponse response, boolean ignoreSearchExclude, CmsResourceFilter filter)
        throws CmsSearchException {

    // check if the user is allowed to access this index
    checkOfflineAccess(cms);

    if (!ignoreSearchExclude) {
        query.addFilterQuery(CmsSearchField.FIELD_SEARCH_EXCLUDE + ":\"false\"");
    }

    int previousPriority = Thread.currentThread().getPriority();
    long startTime = System.currentTimeMillis();

    // remember the initial query
    SolrQuery initQuery = query.clone();
    query.setHighlight(false);
    LocalSolrQueryRequest solrQueryRequest = null;
    try {
        // initialize the search context
        CmsObject searchCms = OpenCms.initCmsObject(cms);

        // change thread priority in order to reduce search impact on overall system performance
        if (getPriority() > 0) {
            Thread.currentThread().setPriority(getPriority());
        }

        // the lists storing the found documents that will be returned
        List<CmsSearchResource> resourceDocumentList = new ArrayList<CmsSearchResource>();
        SolrDocumentList solrDocumentList = new SolrDocumentList();

        // Initialize rows, offset, end and the current page.
        int rows = query.getRows() != null ? query.getRows().intValue() : CmsSolrQuery.DEFAULT_ROWS.intValue();
        if (!ignoreMaxRows && (rows > ROWS_MAX)) {
            rows = ROWS_MAX;
        }
        int start = query.getStart() != null ? query.getStart().intValue() : 0;
        int end = start + rows;
        int page = 0;
        if (rows > 0) {
            page = Math.round(start / rows) + 1;
        }

        // set the start to '0' and expand the rows before performing the query
        query.setStart(new Integer(0));
        query.setRows(new Integer((5 * rows * page) + start));

        // perform the Solr query and remember the original Solr response
        QueryResponse queryResponse = m_solr.query(query);
        long solrTime = System.currentTimeMillis() - startTime;

        // initialize the counts
        long hitCount = queryResponse.getResults().getNumFound();
        start = -1;
        end = -1;
        if ((rows > 0) && (page > 0) && (hitCount > 0)) {
            // calculate the final size of the search result
            start = rows * (page - 1);
            end = start + rows;
            // ensure that both i and n are inside the range of foundDocuments.size()
            start = new Long((start > hitCount) ? hitCount : start).intValue();
            end = new Long((end > hitCount) ? hitCount : end).intValue();
        } else {
            // return all found documents in the search result
            start = 0;
            end = new Long(hitCount).intValue();
        }
        long visibleHitCount = hitCount;
        float maxScore = 0;

        // If we're using a postprocessor, (re-)initialize it before using it
        if (m_postProcessor != null) {
            m_postProcessor.init();
        }

        // process found documents
        List<CmsSearchResource> allDocs = new ArrayList<CmsSearchResource>();
        int cnt = 0;
        for (int i = 0; (i < queryResponse.getResults().size()) && (cnt < end); i++) {
            try {
                SolrDocument doc = queryResponse.getResults().get(i);
                CmsSolrDocument searchDoc = new CmsSolrDocument(doc);
                if (needsPermissionCheck(searchDoc)) {
                    // only if the document is an OpenCms internal resource perform the permission check
                    CmsResource resource = filter == null
                            ? getResource(searchCms, searchDoc)
                            : getResource(searchCms, searchDoc, filter);
                    if (resource != null) {
                        // permission check performed successfully: the user has read permissions!
                        if (cnt >= start) {
                            if (m_postProcessor != null) {
                                doc = m_postProcessor.process(searchCms, resource,
                                        (SolrInputDocument) searchDoc.getDocument());
                            }
                            resourceDocumentList.add(new CmsSearchResource(resource, searchDoc));
                            if (null != doc) {
                                solrDocumentList.add(doc);
                            }
                            maxScore = maxScore < searchDoc.getScore() ? searchDoc.getScore() : maxScore;
                        }
                        allDocs.add(new CmsSearchResource(resource, searchDoc));
                        cnt++;
                    } else {
                        visibleHitCount--;
                    }
                } else {
                    // if permission check is not required for this index,
                    // add a pseudo resource together with document to the results
                    resourceDocumentList.add(new CmsSearchResource(PSEUDO_RES, searchDoc));
                    solrDocumentList.add(doc);
                    maxScore = maxScore < searchDoc.getScore() ? searchDoc.getScore() : maxScore;
                    cnt++;
                }
            } catch (Exception e) {
                // should not happen, but if it does we want to go on with the next result nevertheless
                LOG.warn(Messages.get().getBundle().key(Messages.LOG_SOLR_ERR_RESULT_ITERATION_FAILED_0), e);
            }
        }

        // the last documents were all secret so let's take the last found docs
        if (resourceDocumentList.isEmpty() && (allDocs.size() > 0)) {
            page = Math.round(allDocs.size() / rows) + 1;
            int showCount = allDocs.size() % rows;
            showCount = showCount == 0 ? rows : showCount;
            start = allDocs.size() - new Long(showCount).intValue();
            end = allDocs.size();
            if (allDocs.size() > start) {
                resourceDocumentList = allDocs.subList(start, end);
                for (CmsSearchResource r : resourceDocumentList) {
                    maxScore = maxScore < r.getDocument().getScore() ? r.getDocument().getScore() : maxScore;
                    solrDocumentList.add(((CmsSolrDocument) r.getDocument()).getSolrDocument());
                }
            }
        }
        long processTime = System.currentTimeMillis() - startTime - solrTime;

        // create and return the result
        solrDocumentList.setStart(start);
        solrDocumentList.setMaxScore(new Float(maxScore));
        solrDocumentList.setNumFound(visibleHitCount);

        queryResponse.getResponse().setVal(queryResponse.getResponse().indexOf(QUERY_RESPONSE_NAME, 0),
                solrDocumentList);
        queryResponse.getResponseHeader().setVal(queryResponse.getResponseHeader().indexOf(QUERY_TIME_NAME, 0),
                new Integer(new Long(System.currentTimeMillis() - startTime).intValue()));
        long highlightEndTime = System.currentTimeMillis();
        SolrCore core = m_solr instanceof EmbeddedSolrServer
                ? ((EmbeddedSolrServer) m_solr).getCoreContainer().getCore(getCoreName())
                : null;
        CmsSolrResultList result = null;
        try {
            SearchComponent highlightComponenet = null;
            if (core != null) {
                highlightComponenet = core.getSearchComponent("highlight");
                solrQueryRequest = new LocalSolrQueryRequest(core, queryResponse.getResponseHeader());
            }
            SolrQueryResponse solrQueryResponse = null;
            if (solrQueryRequest != null) {
                // create and initialize the solr response
                solrQueryResponse = new SolrQueryResponse();
                solrQueryResponse.setAllValues(queryResponse.getResponse());
                int paramsIndex = queryResponse.getResponseHeader().indexOf(HEADER_PARAMS_NAME, 0);
                NamedList<Object> header = null;
                Object o = queryResponse.getResponseHeader().getVal(paramsIndex);
                if (o instanceof NamedList) {
                    header = (NamedList<Object>) o;
                    header.setVal(header.indexOf(CommonParams.ROWS, 0), new Integer(rows));
                    header.setVal(header.indexOf(CommonParams.START, 0), new Long(start));
                }
                // set the OpenCms Solr query as parameters to the request
                solrQueryRequest.setParams(initQuery);

                // perform the highlighting
                if ((header != null) && (initQuery.getHighlight()) && (highlightComponenet != null)) {
                    header.add(HighlightParams.HIGHLIGHT, "on");
                    if ((initQuery.getHighlightFields() != null) && (initQuery.getHighlightFields().length > 0)) {
                        header.add(HighlightParams.FIELDS,
                                CmsStringUtil.arrayAsString(initQuery.getHighlightFields(), ","));
                    }
                    String formatter = initQuery.getParams(HighlightParams.FORMATTER) != null
                            ? initQuery.getParams(HighlightParams.FORMATTER)[0]
                            : null;
                    if (formatter != null) {
                        header.add(HighlightParams.FORMATTER, formatter);
                    }
                    if (initQuery.getHighlightFragsize() != 100) {
                        header.add(HighlightParams.FRAGSIZE, new Integer(initQuery.getHighlightFragsize()));
                    }
                    if (initQuery.getHighlightRequireFieldMatch()) {
                        header.add(HighlightParams.FIELD_MATCH,
                                new Boolean(initQuery.getHighlightRequireFieldMatch()));
                    }
                    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(initQuery.getHighlightSimplePost())) {
                        header.add(HighlightParams.SIMPLE_POST, initQuery.getHighlightSimplePost());
                    }
                    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(initQuery.getHighlightSimplePre())) {
                        header.add(HighlightParams.SIMPLE_PRE, initQuery.getHighlightSimplePre());
                    }
                    if (initQuery.getHighlightSnippets() != 1) {
                        header.add(HighlightParams.SNIPPETS, new Integer(initQuery.getHighlightSnippets()));
                    }
                    ResponseBuilder rb = new ResponseBuilder(solrQueryRequest, solrQueryResponse,
                            Collections.singletonList(highlightComponenet));
                    try {
                        rb.doHighlights = true;
                        DocListAndSet res = new DocListAndSet();
                        SchemaField idField = OpenCms.getSearchManager().getSolrServerConfiguration()
                                .getSolrSchema().getUniqueKeyField();

                        int[] luceneIds = new int[rows];
                        int docs = 0;
                        for (SolrDocument doc : solrDocumentList) {
                            String idString = (String) doc.getFirstValue(CmsSearchField.FIELD_ID);
                            int id = solrQueryRequest.getSearcher().getFirstMatch(
                                    new Term(idField.getName(), idField.getType().toInternal(idString)));
                            luceneIds[docs++] = id;
                        }
                        res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0);
                        rb.setResults(res);
                        rb.setQuery(QParser.getParser(initQuery.getQuery(), null, solrQueryRequest).getQuery());
                        rb.setQueryString(initQuery.getQuery());
                        highlightComponenet.prepare(rb);
                        highlightComponenet.process(rb);
                        highlightComponenet.finishStage(rb);
                    } catch (Exception e) {
                        LOG.error(e.getMessage() + " in query: " + initQuery, new Exception(e));
                    }

                    // Make highlighting also available via the CmsSolrResultList
                    queryResponse.setResponse(solrQueryResponse.getValues());

                    highlightEndTime = System.currentTimeMillis();
                }
            }

            result = new CmsSolrResultList(initQuery, queryResponse, solrDocumentList, resourceDocumentList,
                    start, new Integer(rows), end, page, visibleHitCount, new Float(maxScore), startTime,
                    highlightEndTime);
            if (LOG.isDebugEnabled()) {
                Object[] logParams = new Object[] {
                        new Long(System.currentTimeMillis() - startTime),
                        new Long(result.getNumFound()),
                        new Long(solrTime),
                        new Long(processTime),
                        new Long(result.getHighlightEndTime() != 0 ? result.getHighlightEndTime() - startTime : 0) };
                LOG.debug(query.toString() + "\n"
                        + Messages.get().getBundle().key(Messages.LOG_SOLR_SEARCH_EXECUTED_5, logParams));
            }
            if (response != null) {
                writeResp(response, solrQueryRequest, solrQueryResponse);
            }
        } finally {
            if (solrQueryRequest != null) {
                solrQueryRequest.close();
            }
            if (core != null) {
                core.close();
            }
        }
        return result;
    } catch (Exception e) {
        throw new CmsSearchException(Messages.get().container(Messages.LOG_SOLR_ERR_SEARCH_EXECUTION_FAILD_1,
                CmsEncoder.decode(query.toString()), e), e);
    } finally {
        if (solrQueryRequest != null) {
            solrQueryRequest.close();
        }
        // re-set thread to previous priority
        Thread.currentThread().setPriority(previousPriority);
    }
}
From source file:org.opencommercesearch.RuleManagerTest.java
License:Apache License
@Test
public void testSetRuleParamsAndSetFilterQueries() throws RepositoryException, SolrServerException {
    // make sure that the facetManager gets addFacet called when we supply facets
    final FacetManager facetManager = mock(FacetManager.class);
    RuleManager mgr = new RuleManager(repository, builder, server) {
        @Override
        public FacetManager getFacetManager() {
            return facetManager;
        }
    };

    // we need to make sure that we test filterQueries here...
    SolrDocumentList solrDocumentList = new SolrDocumentList();

    // ---------- set up docs with a rule type -----------
    setUpRuleData(null, "description facetRule1", "facetRule1", facetRule, facetRuleItem1, false,
            solrDocumentList);
    // note that we do NOT add this into the Repository so that we have a null rule in loadRules,
    // this causes this document to not go into the rules
    SolrDocument rule = new SolrDocument();
    rule.addField("description", "description facetRule2");
    rule.addField("id", "facetRule2");
    solrDocumentList.add(rule);
    setUpRuleData(null, "description facetRule3", "facetRule3", boostRule, boostRuleItem1, false,
            solrDocumentList);

    // ----------- set up doclist attributes ----------
    solrDocumentList.setNumFound(solrDocumentList.size());
    solrDocumentList.setStart(0L);
    // solrDocumentList.setMaxScore(1000.0);
    when(queryResponse.getResults()).thenReturn(solrDocumentList);
    when(server.query(any(SolrParams.class))).thenReturn(queryResponse);

    // ------------ make the call to load the rules etc -------------
    RepositoryItem catalog = mock(RepositoryItem.class);
    when(catalog.getRepositoryId()).thenReturn("bobcatalog");
    SolrQuery query = mock(SolrQuery.class);
    when(query.getQuery()).thenReturn("jackets");
    FilterQuery[] filterQueries = new FilterQuery[] {
            new FilterQuery("category", "jackets"), // is a multi
            new FilterQuery("category", "12.jackets"), // is a multi
            new FilterQuery("hasPinStripes", "redstripes"),
            new FilterQuery("hasFeathers", "socks&stuff"),
            new FilterQuery("hasLaces", "raingear"), // is a multi
            new FilterQuery("chopsticks", "lookout below") };

    // set up the facet items to catch all conditions
    RepositoryItem categoryFacetItem = mock(RepositoryItem.class);
    when(facetManager.getFacetItem("category")).thenReturn(categoryFacetItem);
    when(categoryFacetItem.getPropertyValue((FacetProperty.IS_MULTI_SELECT))).thenReturn(true);

    RepositoryItem hasPinStripesFacetItem = mock(RepositoryItem.class);
    when(facetManager.getFacetItem("hasPinStripes")).thenReturn(hasPinStripesFacetItem);
    when(hasPinStripesFacetItem.getPropertyValue((FacetProperty.IS_MULTI_SELECT))).thenReturn(false);

    RepositoryItem hasFeathersFacetItem = mock(RepositoryItem.class);
    when(facetManager.getFacetItem("hasFeathers")).thenReturn(hasFeathersFacetItem);
    // don't support multi for hasFeathers...

    RepositoryItem hasLacesFacetItem = mock(RepositoryItem.class);
    when(facetManager.getFacetItem("hasLaces")).thenReturn(hasLacesFacetItem);
    when(hasLacesFacetItem.getPropertyValue((FacetProperty.IS_MULTI_SELECT))).thenReturn(true);
    // and nothing for chopsticks

    mgr.setRuleParams(query, true, false, null, filterQueries, catalog, false, null);

    verify(query).setFacetPrefix("category", "1.bobcatalog.");
    verify(query).addFilterQuery("category:0.bobcatalog");
    verify(query).getQuery();
    verify(query, times(2)).getSortFields();
    verify(query).setSortField("isToos", ORDER.asc);
    verify(query).addSortField("score", ORDER.desc);
    verify(query).addSortField("_version_", ORDER.desc);
    verify(query).setFacetPrefix("category", "13.jackets.");

    // verify the single calls to addFilterQuery
    verify(query).addFilterQuery("hasPinStripes:redstripes"); // this will have a facet
    verify(query).addFilterQuery("hasFeathers:socks&stuff"); // this will have a facet, but not MULTI
    verify(query).addFilterQuery("chopsticks:lookout below"); // no facet for this one (test null path)

    // now verify the multi calls to addFilterQuery
    verify(query).addFilterQuery("{!tag=category}category:jackets OR category:12.jackets");
    verify(query).addFilterQuery("{!tag=hasLaces}hasLaces:raingear");
    verify(query).getParams("excludeRules");
    verify(query).getParams("includeRules");
    verifyNoMoreInteractions(query);
}