Example usage for org.apache.solr.common.SolrDocumentList SolrDocumentList()

Introduction

On this page you can find example usage of the org.apache.solr.common.SolrDocumentList constructor, SolrDocumentList().

Prototype

SolrDocumentList()
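
As a quick orientation before the examples below, here is a minimal, self-contained sketch of the no-argument constructor in use. The field names and values are illustrative assumptions, not taken from any of the source files listed on this page:

import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;

public class SolrDocumentListSketch {
    public static void main(String[] args) {
        // SolrDocumentList extends ArrayList<SolrDocument>, so the empty list can be filled directly
        SolrDocumentList docs = new SolrDocumentList();

        SolrDocument doc = new SolrDocument();
        doc.setField("id", "example-1"); // illustrative field values
        doc.setField("title_txt", "example title");
        docs.add(doc);

        // metadata normally populated from a QueryResponse can also be set by hand, e.g. in tests
        docs.setNumFound(1);
        docs.setStart(0);
        docs.setMaxScore(1.0f);

        System.out.println("numFound=" + docs.getNumFound() + ", size=" + docs.size());
    }
}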

Usage

From source file:org.apache.jackrabbit.oak.plugins.index.solr.query.LMSEstimatorTest.java

License:Apache License

@Test
public void testMultipleUpdates() throws Exception {
    LMSEstimator lmsEstimator = new LMSEstimator();
    Filter filter = mock(Filter.class);
    FullTextExpression fte = new FullTextTerm("foo", "bar", false, false, "");
    when(filter.getFullTextConstraint()).thenReturn(fte);
    SolrDocumentList docs = new SolrDocumentList();
    lmsEstimator.update(filter, docs);

    long actualCount = 10;
    docs.setNumFound(actualCount);

    long estimate = lmsEstimator.estimate(filter);
    long diff = actualCount - estimate;

    // update causes a weight adjustment
    lmsEstimator.update(filter, docs);
    long estimate2 = lmsEstimator.estimate(filter);
    long diff2 = actualCount - estimate2;
    assertTrue(diff2 < diff); // new estimate is more accurate than previous one

    // update doesn't cause a weight adjustment, so the estimate stays unchanged
    lmsEstimator.update(filter, docs);
    long estimate3 = lmsEstimator.estimate(filter);
    assertEquals(estimate3, estimate2);
}

From source file:org.apache.metron.solr.dao.SolrSearchDaoTest.java

License:Apache License

@Test
public void getAllLatestShouldProperlyReturnDocuments() throws Exception {
    GetRequest broRequest1 = new GetRequest("bro-1", "bro");
    GetRequest broRequest2 = new GetRequest("bro-2", "bro");
    GetRequest snortRequest1 = new GetRequest("snort-1", "snort");
    GetRequest snortRequest2 = new GetRequest("snort-2", "snort");
    SolrDocument broSolrDoc1 = mock(SolrDocument.class);
    SolrDocument broSolrDoc2 = mock(SolrDocument.class);
    SolrDocument snortSolrDoc1 = mock(SolrDocument.class);
    SolrDocument snortSolrDoc2 = mock(SolrDocument.class);
    Document broDoc1 = SolrUtilities.toDocument(broSolrDoc1);
    Document broDoc2 = SolrUtilities.toDocument(broSolrDoc2);
    Document snortDoc1 = SolrUtilities.toDocument(snortSolrDoc1);
    Document snortDoc2 = SolrUtilities.toDocument(snortSolrDoc2);

    solrSearchDao = spy(new SolrSearchDao(client, accessConfig));
    SolrDocumentList broList = new SolrDocumentList();
    broList.add(broSolrDoc1);
    broList.add(broSolrDoc2);
    SolrDocumentList snortList = new SolrDocumentList();
    snortList.add(snortSolrDoc1);
    snortList.add(snortSolrDoc2);
    when(client.getById((Collection<String>) argThat(hasItems("bro-1", "bro-2")),
            argThat(new ModifiableSolrParamsMatcher(new ModifiableSolrParams().set("collection", "bro")))))
                    .thenReturn(broList);
    when(client.getById((Collection<String>) argThat(hasItems("snort-1", "snort-2")),
            argThat(new ModifiableSolrParamsMatcher(new ModifiableSolrParams().set("collection", "snort")))))
                    .thenReturn(snortList);
    assertEquals(Arrays.asList(broDoc1, broDoc2, snortDoc1, snortDoc2), solrRetrieveLatestDao
            .getAllLatest(Arrays.asList(broRequest1, broRequest2, snortRequest1, snortRequest2)));
}

From source file:org.apache.metron.solr.dao.SolrSearchDaoTest.java

License:Apache License

@Test
public void buildSearchResponseShouldReturnSearchResponse() {
    SearchRequest searchRequest = new SearchRequest();
    searchRequest.setFields(Collections.singletonList("id"));
    searchRequest.setFacetFields(Collections.singletonList("facetField"));
    QueryResponse queryResponse = mock(QueryResponse.class);
    SolrDocument solrDocument1 = new SolrDocument();
    solrDocument1.setField(Constants.GUID, "id1");
    solrDocument1.setField("id", "id1");
    SolrDocument solrDocument2 = new SolrDocument();
    solrDocument2.setField(Constants.GUID, "id2");
    solrDocument2.setField("id", "id2");

    solrSearchDao = spy(new SolrSearchDao(client, accessConfig));
    SolrDocumentList solrDocumentList = new SolrDocumentList();
    solrDocumentList.add(solrDocument1);
    solrDocumentList.add(solrDocument2);
    solrDocumentList.setNumFound(100);
    when(queryResponse.getResults()).thenReturn(solrDocumentList);
    SearchResult searchResult1 = new SearchResult();
    searchResult1.setId("id1");
    HashMap<String, Object> source1 = new HashMap<>();
    source1.put("id", "id1");
    searchResult1.setSource(source1);
    SearchResult searchResult2 = new SearchResult();
    searchResult2.setId("id2");
    HashMap<String, Object> source2 = new HashMap<>();
    source2.put("id", "id2");
    searchResult2.setSource(source2);
    Map<String, Map<String, Long>> facetCounts = new HashMap<String, Map<String, Long>>() {
        {
            put("id", new HashMap<String, Long>() {
                {
                    put("id1", 1L);
                    put("id2", 1L);
                }
            });
        }
    };
    doReturn(facetCounts).when(solrSearchDao).getFacetCounts(Collections.singletonList("facetField"),
            queryResponse);
    SearchResponse expectedSearchResponse = new SearchResponse();
    SearchResult expectedSearchResult1 = new SearchResult();
    expectedSearchResult1.setId("id1");
    expectedSearchResult1.setSource(source1);
    SearchResult expectedSearchResult2 = new SearchResult();
    expectedSearchResult2.setId("id2");
    expectedSearchResult2.setSource(source2);

    expectedSearchResponse.setResults(Arrays.asList(expectedSearchResult1, expectedSearchResult2));
    expectedSearchResponse.setTotal(100);
    expectedSearchResponse.setFacetCounts(facetCounts);

    assertEquals(expectedSearchResponse, solrSearchDao.buildSearchResponse(searchRequest, queryResponse));
}

From source file:org.apache.sentry.tests.e2e.solr.AbstractSolrSentryTestBase.java

License:Apache License

/**
 * Method to validate that Solr deletedocs succeeds.
 * @param solrUserName - User authenticated into Solr
 * @param collectionName - Name of the collection whose documents are to be deleted
 * @param allowZeroDocs - If false, assert that at least one Solr doc is present before deleting
 * @throws Exception if the delete request or the verification query fails
 */
protected void verifyDeletedocsPass(String solrUserName, String collectionName, boolean allowZeroDocs)
        throws Exception {
    String originalUser = getAuthenticatedUser();
    try {
        SolrDocumentList originalSolrDocs = getSolrDocs(collectionName, ALL_DOCS, true);
        if (!allowZeroDocs) {
            assertTrue("Solr should contain at least one Solr doc to run this test.",
                    originalSolrDocs.size() > 0);
        }

        setAuthenticationUser(solrUserName);
        CloudSolrServer cloudSolrServer = getCloudSolrServer(collectionName);
        try {
            cloudSolrServer.deleteByQuery(ALL_DOCS);
            cloudSolrServer.commit();
        } finally {
            cloudSolrServer.shutdown();
        }

        // Validate Solr doc count is zero
        SolrDocumentList solrRespDocs = getSolrDocs(collectionName, ALL_DOCS, true);
        validateSolrDocCountAndContent(new SolrDocumentList(), solrRespDocs);
    } finally {
        setAuthenticationUser(originalUser);
    }
}

From source file:org.apache.sentry.tests.e2e.solr.AbstractSolrSentryTestCase.java

License:Apache License

/**
 * Method to validate that Solr deletedocs succeeds.
 * @param solrUserName - User authenticated into Solr
 * @param collectionName - Name of the collection whose documents are to be deleted
 * @param allowZeroDocs - If false, assert that at least one Solr doc is present before deleting
 * @throws Exception if the delete request or the verification query fails
 */
protected void verifyDeletedocsPass(String solrUserName, String collectionName, boolean allowZeroDocs)
        throws Exception {
    String originalUser = getAuthenticatedUser();
    try {
        SolrDocumentList originalSolrDocs = getSolrDocs(solrUserName, collectionName, ALL_DOCS);
        if (!allowZeroDocs) {
            assertTrue("Solr should contain at least one Solr doc to run this test.",
                    originalSolrDocs.size() > 0);
        }

        setAuthenticationUser(solrUserName);
        cluster.getSolrClient().deleteByQuery(collectionName, ALL_DOCS);
        cluster.getSolrClient().commit(collectionName);

        // Validate Solr doc count is zero
        SolrDocumentList solrRespDocs = getSolrDocs(solrUserName, collectionName, ALL_DOCS);
        validateSolrDocCountAndContent(new SolrDocumentList(), solrRespDocs);
    } finally {
        setAuthenticationUser(originalUser);
    }
}

From source file:org.apache.sentry.tests.e2e.solr.AbstractSolrSentryTestCase.java

License:Apache License

protected SolrDocumentList expectedDocs(SolrInputDocument... docs) {
    SolrDocumentList result = new SolrDocumentList();

    for (SolrInputDocument doc : docs) {
        SolrDocument r = new SolrDocument();
        for (SolrInputField field : doc) {
            r.setField(field.getName(), field.getValue());
        }
        result.add(r);
    }
    return result;
}
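
A hypothetical call site for the helper above, shown only as a sketch: the collection name "test", the field values, and the @Test method name are illustrative assumptions, while cluster.getSolrClient() is the client used elsewhere in this test class.

@Test
public void expectedDocsUsageSketch() throws Exception {
    SolrInputDocument doc = new SolrInputDocument();
    doc.setField("id", "doc-1");
    doc.setField("description", "sample");

    // expectedDocs converts the input document into the SolrDocument form a query returns
    SolrDocumentList expected = expectedDocs(doc);
    SolrDocumentList actual = cluster.getSolrClient().query("test", new SolrQuery("*:*")).getResults();

    // SolrDocument does not override equals(), so compare sizes and individual field values
    assertEquals(expected.size(), actual.size());
    assertEquals(expected.get(0).getFieldValue("id"), actual.get(0).getFieldValue("id"));
}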

From source file:org.codice.ddf.persistence.internal.PersistentStoreImplTest.java

License:Open Source License

private SolrDocumentList getSolrDocuments(int numDocuments) {
    final SolrDocumentList docList = new SolrDocumentList();

    for (int i = 0; i < numDocuments; i++) {
        SolrDocument solrDocument = new SolrDocument();
        solrDocument.addField("id_txt", String.format("idvalue%d", i + 1));
        docList.add(solrDocument);
    }

    return docList;
}

From source file:org.dataconservancy.dcs.lineage.impl.LineageServiceImpl.java

License:Apache License

private Lineage getLineageForLineageId(String lineage_id) {
    Lineage lineage;
    QueryResponse response;
    SolrDocumentList docs = new SolrDocumentList();
    List<LineageEntry> entryList = new ArrayList<LineageEntry>();

    String query = SolrQueryUtil.createLiteralQuery(DcsSolrField.DeliverableUnitField.LINEAGE.solrName(),
            lineage_id);
    // we must anticipate having very large result sets
    // have tested this code with several small values of MAX_MATCHES
    try {
        int chunkSize = MAX_MATCHES;
        int offset = 0;
        String[] params = { "sort",
                DcsSolrField.EventField.DYNAMIC_DATE_TYPE_PREFIX.solrName() + "ingest.complete" + " desc" }; //get results in descending order time-wise (latest first)
        while (chunkSize == MAX_MATCHES) {
            response = solrService.search(query, offset, MAX_MATCHES, params);
            SolrDocumentList chunk = response.getResults();
            docs.addAll(chunk);
            offset += MAX_MATCHES;
            chunkSize = chunk.size();
        }
    } catch (SolrServerException e) {
        log.error("SolrService search error for query " + query, e);
    }

    //have query response
    if (!docs.isEmpty()) {
        for (SolrDocument doc : docs) {
            try {
                DcsEntity entity = solrService.asEntity(doc);
                if (entity instanceof DcsDeliverableUnit) {
                    DcsDeliverableUnit du = (DcsDeliverableUnit) entity;
                    String duId = du.getId();
                    long timestamp = solrService.lookupEntityLastModified(duId);
                    LineageEntry lineageEntry = new LineageEntryImpl(duId, lineage_id, timestamp);
                    entryList.add(lineageEntry);
                }
            } catch (IOException e) {
                log.error("IO error using SolrService", e);
            } catch (SolrServerException e) {
                log.error("SolrService error", e);
            }
        }
        lineage = (entryList.isEmpty()) ? null : new LineageImpl(entryList);
    } else {
        lineage = null;
    }
    return lineage;
}

From source file:org.mousephenotype.cda.solr.service.ExpressionService.java

License:Apache License

/**
 * @param acc
 *            mgi_accession for the gene
 * @param topMaNameFilter
 *            Only include images under the top level MA term specified here
 * @param parameterStableId TODO
 * @param imagesOverview
 *            If true, restrict the response to only certain fields, since we are only
 *            displaying annotations for a dataset rather than a specific thumbnail,
 *            and add image data/stats to the model for display in the tabbed pane on
 *            the gene page.
 * @throws SolrServerException
 * @throws IOException
 */
public ExpressionImagesBean getLacImageDataForGene(String acc, String topMaNameFilter, String parameterStableId,
        boolean imagesOverview) throws SolrServerException, IOException {

    QueryResponse laczResponse = null;
    String noTopTermId = "";
    String topLevelField = ""; // MA or EMAP ImageDTO field for top level terms
    String termIdField = "";
    if (parameterStableId != null && parameterStableId.contains("ELZ")) { // use EMAP terms and top level terms
        noTopTermId = "TS20 embryo or Unassigned";// currently if unassigned they either have embryo TS20 as there EMAP id but our system doesn't find any selected_top_level emap or nothing is assigned but we know they are embryo so assign this id to unassigned
        topLevelField = ImageDTO.SELECTED_TOP_LEVEL_ANATOMY_TERM;
        termIdField = ImageDTO.ANATOMY_ID;
        if (imagesOverview) {
            laczResponse = getEmbryoLaczImageFacetsForGene(acc, parameterStableId, ImageDTO.OMERO_ID,
                    ImageDTO.JPEG_URL, ImageDTO.THUMBNAIL_URL, topLevelField,
                    ImageDTO.PARAMETER_ASSOCIATION_NAME, ImageDTO.PARAMETER_ASSOCIATION_VALUE,
                    ImageDTO.ANATOMY_ID, ImageDTO.ANATOMY_TERM);
        } else {
            laczResponse = getEmbryoLaczImageFacetsForGene(acc, parameterStableId, ImageDTO.OMERO_ID,
                    ImageDTO.JPEG_URL, ImageDTO.THUMBNAIL_URL, topLevelField,
                    ImageDTO.PARAMETER_ASSOCIATION_NAME, ImageDTO.PARAMETER_ASSOCIATION_VALUE,
                    ImageDTO.ZYGOSITY, ImageDTO.SEX, ImageDTO.ALLELE_SYMBOL, ImageDTO.DOWNLOAD_URL,
                    ImageDTO.IMAGE_LINK, ImageDTO.ANATOMY_ID, ImageDTO.ANATOMY_TERM);
        }

    } else {
        noTopTermId = "Unassigned Top Level MA";
        topLevelField = ImageDTO.SELECTED_TOP_LEVEL_ANATOMY_TERM;
        termIdField = ImageDTO.ANATOMY_ID;
        if (imagesOverview) {
            laczResponse = getAdultLaczImageFacetsForGene(acc, parameterStableId, ImageDTO.OMERO_ID,
                    ImageDTO.JPEG_URL, ImageDTO.THUMBNAIL_URL, topLevelField,
                    ImageDTO.PARAMETER_ASSOCIATION_NAME, ImageDTO.PARAMETER_ASSOCIATION_VALUE,
                    ImageDTO.ANATOMY_ID, ImageDTO.UBERON_ID, ImageDTO.EFO_ID);
        } else {
            laczResponse = getAdultLaczImageFacetsForGene(acc, parameterStableId, ImageDTO.OMERO_ID,
                    ImageDTO.JPEG_URL, ImageDTO.THUMBNAIL_URL, topLevelField,
                    ImageDTO.PARAMETER_ASSOCIATION_NAME, ImageDTO.PARAMETER_ASSOCIATION_VALUE,
                    ImageDTO.ZYGOSITY, ImageDTO.SEX, ImageDTO.ALLELE_SYMBOL, ImageDTO.DOWNLOAD_URL,
                    ImageDTO.IMAGE_LINK, ImageDTO.ANATOMY_ID, ImageDTO.UBERON_ID, ImageDTO.EFO_ID);
        }

    }
    SolrDocumentList imagesResponse = laczResponse.getResults();
    List<FacetField> fields = laczResponse.getFacetFields();
    // we have the unique ma top level terms associated and all the images
    // now we need lists of images with these top level ma terms in their
    // annotation
    Map<String, SolrDocumentList> expFacetToDocs = new HashMap<>();
    Map<String, Boolean> haveImpcImages = new HashMap<>();
    expFacetToDocs.put(noTopTermId, new SolrDocumentList());

    for (SolrDocument doc : imagesResponse) {
        List<String> tops = getListFromCollection(doc.getFieldValues(topLevelField));

        // work out list of uberon/efo ids with/without expressions
        // noTopLevelCount.setCount(c);
        if (tops.isEmpty()) { // if no top level term was found for this image, add it to the "No top level" docs so orphaned terms and images can still be displayed
            expFacetToDocs.get(noTopTermId).add(doc);
        } else {

            for (String top : tops) {
                SolrDocumentList list = null;
                if (!expFacetToDocs.containsKey(top)) {
                    expFacetToDocs.put(top, new SolrDocumentList());
                }
                list = expFacetToDocs.get(top);
                list.add(doc);
            }
        }
    }

    List<Count> topLevelAnatomyTerms = new ArrayList<>();
    List<Count> filteredTopLevelAnatomyTerms = new ArrayList<>();

    topLevelAnatomyTerms.addAll(fields.get(0).getValues());
    if (expFacetToDocs.get(noTopTermId).size() > 0) { // only add the "no top level found" facet if there are any such images
        Count dummyCountForImagesWithNoHigherLevelAnatomy = new Count(new FacetField(noTopTermId), noTopTermId,
                expFacetToDocs.get(noTopTermId).size());
        topLevelAnatomyTerms.add(dummyCountForImagesWithNoHigherLevelAnatomy);
    }

    if (topMaNameFilter != null) {
        for (Count topLevel : topLevelAnatomyTerms) {
            if (topLevel.getName().equals(topMaNameFilter)) {
                filteredTopLevelAnatomyTerms.add(topLevel);
            }
        }
    } else {
        filteredTopLevelAnatomyTerms = topLevelAnatomyTerms;
    }

    for (Count count : filteredTopLevelAnatomyTerms) {
        haveImpcImages.put(count.getName(), count.getCount() > 0);
    }

    ImageServiceUtil.sortHigherLevelTermCountsAlphabetically(filteredTopLevelAnatomyTerms);
    ImageServiceUtil.sortDocsByExpressionAlphabetically(expFacetToDocs);

    ExpressionImagesBean bean = new ExpressionImagesBean(filteredTopLevelAnatomyTerms, haveImpcImages, expFacetToDocs);
    return bean;
}

From source file:org.mousephenotype.cda.solr.service.ExpressionService.java

License:Apache License

private Map<String, SolrDocumentList> getAnatomyToDocs(SolrDocumentList controlResponse) {

    Map<String, SolrDocumentList> anatomyToDocs = new HashMap<>();

    for (SolrDocument doc : controlResponse) {

        List<String> anatomies = getListFromCollection(doc.getFieldValues(ImageDTO.PARAMETER_ASSOCIATION_NAME));
        if (anatomies != null) {

            SolrDocumentList anatomyList = null;
            for (String anatomy : anatomies) {
                anatomy = anatomy.toLowerCase();

                if (!anatomyToDocs.containsKey(anatomy)) {
                    anatomyToDocs.put(anatomy, new SolrDocumentList());
                }
                anatomyList = anatomyToDocs.get(anatomy);

                anatomyList.add(doc);
            }
        }

    }
    return anatomyToDocs;
}