List of usage examples for org.apache.solr.common SolrDocumentList getNumFound
public long getNumFound()
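getNumFound() returns the total number of documents that matched the query on the server, which is usually larger than the number of documents actually contained in the returned SolrDocumentList page (that count is bounded by the rows parameter, and getStart() gives the page offset). Before the per-project examples below, here is a minimal sketch of that distinction, assuming a SolrJ HttpSolrClient pointed at a hypothetical local core named "articles"; the URL and core name are placeholders, not taken from the examples.

// Minimal sketch (hypothetical core name and URL); error handling kept to a bare minimum.
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;

public class NumFoundExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical endpoint; substitute your own Solr URL and core.
        HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/articles").build();
        try {
            SolrQuery query = new SolrQuery("*:*");
            query.setStart(0);
            query.setRows(10); // only the first 10 matching docs are returned in this page

            QueryResponse response = client.query(query);
            SolrDocumentList results = response.getResults();

            // getNumFound() is the total hit count across all pages;
            // results.size() is at most the "rows" value requested above.
            System.out.println("total matches: " + results.getNumFound());
            System.out.println("docs in this page: " + results.size());
        } finally {
            client.close();
        }
    }
}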
From source file: org.ambraproject.search.service.IndexingServiceTest.java
License: Apache License
@Test(dataProvider = "articleData", groups = { "originalConfig" }) public void testArticlePublished(Article article) throws Exception { String articleId = article.getDoi(); indexingService.articlePublished(articleId, DEFAULT_ADMIN_AUTHID); String solrID = articleId.replaceAll("info:doi/", ""); SolrQuery query = new SolrQuery("id:\"" + solrID + "\""); QueryResponse solrRes = solrServerFactory.getServer().query(query); SolrDocumentList sdl = solrRes.getResults(); assertEquals(1l, sdl.getNumFound(), "didn't send article to solr server"); }
From source file: org.ambraproject.search.service.IndexingServiceTest.java
License: Apache License
@Test(dataProvider = "articleData", groups = { "originalConfig" }, dependsOnMethods = { "testIndexArticle" }) public void testArticleDeleted(Article article) throws Exception { String articleId = article.getDoi(); indexingService.indexArticle(articleId); String solrID = articleId.replaceAll("info:doi/", ""); //delete it./* ww w. ja v a 2s . c om*/ indexingService.articleDeleted(articleId); //confirm it was removed. SolrQuery query = new SolrQuery("id:\"" + solrID + "\""); QueryResponse solrRes = solrServerFactory.getServer().query(query); SolrDocumentList sdl = solrRes.getResults(); assertEquals(0, sdl.getNumFound(), "failed to remove article from solr server"); }
From source file: org.ambraproject.search.service.SolrSearchService.java
License: Apache License
private SearchResultSinglePage readQueryResults(QueryResponse queryResponse, SolrQuery query) {
    SolrDocumentList documentList = queryResponse.getResults();

    if (log.isInfoEnabled()) {
        StringBuilder filterQueriesForLog = new StringBuilder();
        if (query.getFilterQueries() != null && query.getFilterQueries().length > 0) {
            for (String filterQuery : query.getFilterQueries()) {
                filterQueriesForLog.append(filterQuery).append(" , ");
            }
            if (filterQueriesForLog.length() > 3) {
                filterQueriesForLog.replace(filterQueriesForLog.length() - 3, filterQueriesForLog.length(), "");
            } else {
                filterQueriesForLog.append("No Filter Queries");
            }
        }
        log.info("query.getQuery():{ " + query.getQuery() + " }"
            + ", query.getSortFields():{ "
            + (query.getSortFields() == null ? null : Arrays.asList(query.getSortFields())) + " }"
            + ", query.getFilterQueries():{ " + filterQueriesForLog.toString() + " }"
            + ", found:" + documentList.getNumFound()
            + ", start:" + documentList.getStart()
            + ", max_score:" + documentList.getMaxScore()
            + ", QTime:" + queryResponse.getQTime() + "ms");

        // TODO: implement spell-checking in a meaningful manner. This loop exists only to generate log output.
        // TODO: Add "spellcheckAlternatives" or something like it to the SearchHits class so it can be displayed to the user like Google's "did you mean..."
        // TODO: Turn off spellchecking for the "author" field.
        if (queryResponse.getSpellCheckResponse() != null
            && queryResponse.getSpellCheckResponse().getSuggestionMap() != null
            && queryResponse.getSpellCheckResponse().getSuggestionMap().keySet().size() > 0) {
            StringBuilder sb = new StringBuilder("Spellcheck alternative suggestions:");
            for (String token : queryResponse.getSpellCheckResponse().getSuggestionMap().keySet()) {
                sb.append(" { ").append(token).append(" : ");
                if (queryResponse.getSpellCheckResponse().getSuggestionMap().get(token).getAlternatives().size() < 1) {
                    sb.append("NO ALTERNATIVES");
                } else {
                    for (String alternative : queryResponse.getSpellCheckResponse().getSuggestionMap()
                            .get(token).getAlternatives()) {
                        sb.append(alternative).append(", ");
                    }
                    sb.replace(sb.length() - 2, sb.length(), ""); // Remove last comma and space.
                }
                sb.append(" } ,");
            }
            log.info(sb.replace(sb.length() - 2, sb.length(), "").toString()); // Remove last comma and space.
        } else {
            log.info("Solr thinks everything in the query is spelled correctly.");
        }
    }

    Map<String, Map<String, List<String>>> highlightings = queryResponse.getHighlighting();
    List<SearchHit> searchResults = new ArrayList<SearchHit>();
    for (SolrDocument document : documentList) {
        String id = getFieldValue(document, "id", String.class, query.toString());
        String message = id == null ? query.toString() : id;
        Float score = getFieldValue(document, "score", Float.class, message);
        String title = getFieldValue(document, "title_display", String.class, message);
        Date publicationDate = getFieldValue(document, "publication_date", Date.class, message);
        String eissn = getFieldValue(document, "eissn", String.class, message);
        String journal = getFieldValue(document, "journal", String.class, message);
        String articleType = getFieldValue(document, "article_type", String.class, message);
        List<String> authorList = getFieldMultiValue(document, message, String.class, "author_display");

        String highlights = null;
        if (query.getHighlight()) {
            highlights = getHighlights(highlightings.get(id));
        }

        SearchHit hit = new SearchHit(score, id, title, highlights, authorList, publicationDate, eissn,
            journal, articleType);
        if (log.isDebugEnabled())
            log.debug(hit.toString());
        searchResults.add(hit);
    }

    // Here we assume that the number of hits is always going to be within the range of an int.
    SearchResultSinglePage results = new SearchResultSinglePage((int) documentList.getNumFound(), -1,
        searchResults, query.getQuery());

    if (queryResponse.getFacetField("subject_facet") != null) {
        results.setSubjectFacet(facetCountsToHashMap(queryResponse.getFacetField("subject_facet")));
    }
    if (queryResponse.getFacetField("author_facet") != null) {
        results.setAuthorFacet(facetCountsToHashMap(queryResponse.getFacetField("author_facet")));
    }
    if (queryResponse.getFacetField("editor_facet") != null) {
        results.setEditorFacet(facetCountsToHashMap(queryResponse.getFacetField("editor_facet")));
    }
    if (queryResponse.getFacetField("article_type_facet") != null) {
        results.setArticleTypeFacet(facetCountsToHashMap(queryResponse.getFacetField("article_type_facet")));
    }
    if (queryResponse.getFacetField("affiliate_facet") != null) {
        results.setInstitutionFacet(facetCountsToHashMap(queryResponse.getFacetField("affiliate_facet")));
    }
    if (queryResponse.getFacetField("cross_published_journal_key") != null) {
        results.setJournalFacet(facetCountsToHashMap(queryResponse.getFacetField("cross_published_journal_key")));
    }
    return results;
}
From source file: org.ambraproject.service.article.BrowseServiceImpl.java
License: Apache License
/**
 * Returns the list of articles in a given date range, from newest to oldest.
 *
 * @param params the collection class of parameters.
 * @return the articles
 */
private BrowseResult getArticlesByDateViaSolr(BrowseParameters params) {
    BrowseResult result = new BrowseResult();
    ArrayList<SearchHit> articles = new ArrayList<SearchHit>();
    long totalSize = 0;

    SolrQuery query = createCommonQuery(params.getJournalKey());
    query.addField("title_display");
    query.addField("author_display");
    query.addField("article_type");
    query.addField("publication_date");
    query.addField("id");
    query.addField("abstract_primary_display");
    query.addField("eissn");
    query.addField("striking_image");

    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
    String sDate = sdf.format(params.getStartDate().getTime());
    String eDate = sdf.format(params.getEndDate().getTime());
    sDate = sDate + "T00:00:00Z";
    eDate = eDate + "T00:00:00Z";
    query.addFilterQuery("publication_date:[" + sDate + " TO " + eDate + "]");

    StringBuffer sb = new StringBuffer();
    if (params.getArticleTypes() != null && params.getArticleTypes().size() > 0) {
        for (URI uri : params.getArticleTypes()) {
            String path = uri.getPath();
            int index = path.lastIndexOf("/");
            if (index != -1) {
                String articleType = path.substring(index + 1);
                sb.append("\"").append(articleType).append("\"").append(" OR ");
            }
        }
        String articleTypesQuery = sb.substring(0, sb.length() - 4);
        if (articleTypesQuery.length() > 0) {
            query.addFilterQuery("article_type_facet:" + articleTypesQuery);
        }
    }

    setSort(query, params);
    query.setStart(params.getPageNum() * params.getPageSize());
    query.setRows(params.getPageSize());
    log.info("getArticlesByDate Solr Query:" + query.toString());

    try {
        QueryResponse response = this.serverFactory.getServer().query(query);
        SolrDocumentList documentList = response.getResults();
        totalSize = documentList.getNumFound();
        for (SolrDocument document : documentList) {
            SearchHit sh = createArticleBrowseDisplay(document, query.toString());
            articles.add(sh);
        }
    } catch (SolrServerException e) {
        log.error("Unable to execute a query on the Solr Server.", e);
    }

    result.setArticles(articles);
    result.setTotal(totalSize);
    return result;
}
From source file: org.ambraproject.service.search.EmbeddedSolrServerFactoryTest.java
License: Apache License
@Test(dependsOnMethods = "testAddDocument") public void testBasicQuery() throws SolrServerException { SolrServer server = solrServerFactory.getServer(); SolrParams params = new SolrQuery("*:*"); SolrDocumentList results = server.query(params).getResults(); assertTrue(results.getNumFound() > 0, "didn't return any results"); }
From source file: org.ambraproject.service.search.EmbeddedSolrServerFactoryTest.java
License: Apache License
@Test(dependsOnMethods = "testBasicQuery", dataProvider = "documentMap") public void testIdQuery(Map<String, String[]> document) throws SolrServerException { String id = document.get("id")[0]; SolrServer server = solrServerFactory.getServer(); SolrParams params = new SolrQuery("id:" + id); SolrDocumentList results = server.query(params).getResults(); assertEquals(results.getNumFound(), 1, "didn't find article by id"); }
From source file: org.ambraproject.service.search.SolrSearchService.java
License: Apache License
@SuppressWarnings("unchecked") private SearchResultSinglePage readQueryResults(QueryResponse queryResponse, SolrQuery query) { SolrDocumentList documentList = queryResponse.getResults(); if (log.isInfoEnabled()) { StringBuilder filterQueriesForLog = new StringBuilder(); if (query.getFilterQueries() != null && query.getFilterQueries().length > 0) { for (String filterQuery : query.getFilterQueries()) { filterQueriesForLog.append(filterQuery).append(" , "); }/*w w w. jav a 2s . com*/ if (filterQueriesForLog.length() > 3) { filterQueriesForLog.replace(filterQueriesForLog.length() - 3, filterQueriesForLog.length(), ""); } else { filterQueriesForLog.append("No Filter Queries"); } } log.info("query.getQuery():{ " + query.getQuery() + " }" + ", query.getSortFields():{ " + (query.getSortFields() == null ? null : Arrays.asList(query.getSortFields())) + " }" + ", query.getFilterQueries():{ " + filterQueriesForLog.toString() + " }" + ", found:" + documentList.getNumFound() + ", start:" + documentList.getStart() + ", max_score:" + documentList.getMaxScore() + ", QTime:" + queryResponse.getQTime() + "ms"); // TODO: implement spell-checking in a meaningful manner. This loop exists only to generate log output. // TODO: Add "spellcheckAlternatives" or something like it to the SearchHits class so it can be displayed to the user like Google's "did you mean..." // TODO: Turn off spellchecking for the "author" field. if (queryResponse.getSpellCheckResponse() != null && queryResponse.getSpellCheckResponse().getSuggestionMap() != null && queryResponse.getSpellCheckResponse().getSuggestionMap().keySet().size() > 0) { StringBuilder sb = new StringBuilder("Spellcheck alternative suggestions:"); for (String token : queryResponse.getSpellCheckResponse().getSuggestionMap().keySet()) { sb.append(" { ").append(token).append(" : "); if (queryResponse.getSpellCheckResponse().getSuggestionMap().get(token).getAlternatives() .size() < 1) { sb.append("NO ALTERNATIVES"); } else { for (String alternative : queryResponse.getSpellCheckResponse().getSuggestionMap() .get(token).getAlternatives()) { sb.append(alternative).append(", "); } sb.replace(sb.length() - 2, sb.length(), ""); // Remove last comma and space. } sb.append(" } ,"); } log.info(sb.replace(sb.length() - 2, sb.length(), "").toString()); // Remove last comma and space. } else { log.info("Solr thinks everything in the query is spelled correctly."); } } List<SearchHit> searchResults = new ArrayList<SearchHit>(); for (SolrDocument document : documentList) { String id = SolrServiceUtil.getFieldValue(document, "id", String.class, query.toString()); String message = id == null ? 
query.toString() : id; Float score = SolrServiceUtil.getFieldValue(document, "score", Float.class, message); String title = SolrServiceUtil.getFieldValue(document, "title_display", String.class, message); Date publicationDate = SolrServiceUtil.getFieldValue(document, "publication_date", Date.class, message); String eissn = SolrServiceUtil.getFieldValue(document, "eissn", String.class, message); String journal = SolrServiceUtil.getFieldValue(document, "journal", String.class, message); String articleType = SolrServiceUtil.getFieldValue(document, "article_type", String.class, message); String strikingImage = SolrServiceUtil.getFieldValue(document, "striking_image", String.class, message); List<String> abstractText = SolrServiceUtil.getFieldMultiValue(document, "abstract", String.class, message); List<String> abstractPrimary = SolrServiceUtil.getFieldMultiValue(document, "abstract_primary_display", String.class, message); List<String> authorList = SolrServiceUtil.getFieldMultiValue(document, "author_display", String.class, message); // TODO create a dedicated field for checking the existence of assets for a given article. List<String> figureTableCaptions = SolrServiceUtil.getFieldMultiValue(document, "figure_table_caption", String.class, message); List<String> subjects = SolrServiceUtil.getFieldMultiValue(document, "subject", String.class, message); List<String> expressionOfconcern = SolrServiceUtil.getFieldMultiValue(document, "expression_of_concern", String.class, message); String retraction = SolrServiceUtil.getFieldValue(document, "retraction", String.class, message); String abstractResult = ""; //Use the primary abstract if it exists if (abstractPrimary.size() > 0) { abstractResult = StringUtils.join(abstractPrimary, ", "); } else { if (abstractText.size() > 0) { abstractResult = StringUtils.join(abstractText, ", "); } } //Flatten the list of subjects to a unique set Set<String> flattenedSubjects = new HashSet<String>(); for (String subject : subjects) { for (String temp : subject.split("/")) { if (temp.trim().length() > 0) { flattenedSubjects.add(temp); } } } SearchHit hit = SearchHit.builder().setHitScore(score).setUri(id).setTitle(title) .setListOfCreators(authorList).setDate(publicationDate).setIssn(eissn).setJournalTitle(journal) .setArticleTypeForDisplay(articleType).setAbstractText(abstractResult) .setStrikingImage(strikingImage).setHasAssets(figureTableCaptions.size() > 0) .setSubjects(flattenedSubjects).setSubjectsPolyhierarchy(subjects) .setExpressionOfConcern(expressionOfconcern).setRetraction(retraction).build(); if (log.isDebugEnabled()) log.debug(hit.toString()); searchResults.add(hit); } //here we assume that number of hits is always going to be withing range of int SearchResultSinglePage results = new SearchResultSinglePage((int) documentList.getNumFound(), -1, searchResults, query.getQuery()); if (queryResponse.getFacetField("subject_facet") != null) { List<Map> subjects = facetCountsToHashMap(queryResponse.getFacetField("subject_facet")); if (subjects != null) { List<Map> subjectResult = new ArrayList<Map>(); SortedMap<String, Long> topSubjects = null; try { topSubjects = getTopSubjects(); } catch (ApplicationException ex) { throw new RuntimeException(ex.getMessage(), ex); } //Remove top level 1 subjects from list, FEND-805 for (Map<String, Object> m : subjects) { if (!topSubjects.containsKey(m.get("name"))) { HashMap<String, Object> hm = new HashMap<String, Object>(); hm.put("name", m.get("name")); hm.put("count", m.get("count")); subjectResult.add(hm); } } 
results.setSubjectFacet(subjectResult); } else { results.setSubjectFacet(null); } } if (queryResponse.getFacetField("author_facet") != null) { results.setAuthorFacet(facetCountsToHashMap(queryResponse.getFacetField("author_facet"))); } if (queryResponse.getFacetField("editor_facet") != null) { results.setEditorFacet(facetCountsToHashMap(queryResponse.getFacetField("editor_facet"))); } if (queryResponse.getFacetField("article_type_facet") != null) { results.setArticleTypeFacet(facetCountsToHashMap(queryResponse.getFacetField("article_type_facet"))); } if (queryResponse.getFacetField("affiliate_facet") != null) { results.setInstitutionFacet(facetCountsToHashMap(queryResponse.getFacetField("affiliate_facet"))); } if (queryResponse.getFacetField("cross_published_journal_key") != null) { results.setJournalFacet( facetCountsToHashMap(queryResponse.getFacetField("cross_published_journal_key"))); } return results; }
From source file: org.apache.coheigea.bigdata.solr.SolrCloudTest.java
License: Apache License
@Test
public void testAddAndQuery() throws Exception {
    CloudSolrClient cloudSolrClient = server.getSolrClient();
    cloudSolrClient.setDefaultCollection("docs");

    // Add document
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("title", "Title of Doc");
    doc.addField("content", "Test Content");
    cloudSolrClient.add(doc);
    cloudSolrClient.commit();

    // Test it's uploaded
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("q", "*");
    QueryResponse qResp = cloudSolrClient.query(params);

    SolrDocumentList foundDocs = qResp.getResults();
    Assert.assertEquals(1, foundDocs.getNumFound());

    SolrDocument foundDoc = foundDocs.get(0);
    Assert.assertEquals("Title of Doc", foundDoc.getFieldValue("title"));
}
From source file: org.apache.coheigea.bigdata.solr.SolrTest.java
License: Apache License
@Test
public void testAddAndQuery() throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();

    // Add document
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("title", "Title of Doc");
    doc.addField("content", "Test Content");
    server.add(doc);
    server.commit();

    // Test it's uploaded
    params.set("q", "*");
    QueryResponse qResp = server.query(params);

    SolrDocumentList foundDocs = qResp.getResults();
    Assert.assertEquals(1, foundDocs.getNumFound());

    SolrDocument foundDoc = foundDocs.get(0);
    Assert.assertEquals("Title of Doc", foundDoc.getFieldValue("title"));
}
From source file: org.apache.jackrabbit.oak.plugins.index.solr.query.LMSEstimator.java
License: Apache License
synchronized void update(Filter filter, SolrDocumentList docs) {
    double[] updatedWeights = new double[weights.length];
    long estimate = estimate(filter);
    long numFound = docs.getNumFound();
    double delta = Math.pow(numFound - estimate, 2) / 2;
    if (Math.abs(delta) > threshold) {
        for (int i = 0; i < updatedWeights.length; i++) {
            double errors = delta * getInput(filter, i);
            updatedWeights[i] = weights[i] + alpha * errors;
        }
        // weights updated
        weights = Arrays.copyOf(updatedWeights, 5);
    }
}