Example usage for org.apache.solr.common SolrDocumentList getNumFound

List of usage examples for org.apache.solr.common SolrDocumentList getNumFound

Introduction

On this page you can find example usage for org.apache.solr.common SolrDocumentList getNumFound.

Prototype

public long getNumFound() 
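
Before the project examples below, here is a minimal, self-contained sketch of the method in context. It assumes a SolrJ 4.x-style HttpSolrServer pointed at a placeholder core URL (http://localhost:8983/solr/collection1); the URL and query are illustrative only. The point to note is that getNumFound() reports the total number of documents matching the query, which can be larger than the number of documents actually returned (bounded by setRows).

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;

public class GetNumFoundExample {
    public static void main(String[] args) throws SolrServerException {
        // Placeholder core URL; point this at a real Solr core.
        HttpSolrServer solr = new HttpSolrServer("http://localhost:8983/solr/collection1");

        // Match everything but return at most 10 rows.
        QueryResponse rsp = solr.query(new SolrQuery("*:*").setRows(10));
        SolrDocumentList results = rsp.getResults();

        // getNumFound() is the total hit count for the query;
        // results.size() is at most the requested number of rows.
        System.out.println("numFound=" + results.getNumFound()
                + ", returned=" + results.size());
    }
}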

Usage

From source file:se.simonsoft.cms.indexing.xml.HandlerXmlIntegrationTest.java

License:Apache License

/** 
 * This kind of join is not used, just work in progress.
 * @throws SolrServerException
 */
@Test
@Ignore
public void testJoinReleasetranslationRid() throws SolrServerException {
    FilexmlSourceClasspath repoSource = new FilexmlSourceClasspath(
            "se/simonsoft/cms/indexing/xml/datasets/releasetranslation");
    CmsRepositoryFilexml repo = new CmsRepositoryFilexml("http://localtesthost/svn/testaut1", repoSource);
    FilexmlRepositoryReadonly filexml = new FilexmlRepositoryReadonly(repo);

    SolrServer reposxml = indexing.enable(new ReposTestBackendFilexml(filexml)).getCore("reposxml");

    // search for the first title
    SolrDocumentList findUsingRid = reposxml
            .query(new SolrQuery("a_cms.rid:2gyvymn15kv0001 AND -prop_abx.TranslationLocale:*")).getResults();
    assertEquals("Should find the first title in the release (though actually a future one)", 1,
            findUsingRid.getNumFound());
    String wantedReleaseSha1 = (String) findUsingRid.get(0).getFieldValue("c_sha1_source_reuse");

    SolrQuery q = new SolrQuery(
            "c_sha1_source_reuse:" + wantedReleaseSha1 + " AND -prop_abx.TranslationLocale:*" // probably as fq for performance, needed because we join on same field so translations would match themselves            
                    + " AND {!join to=a_cms.rid from=a_cms.rid}reusevaluelocale:1sv-SE");
    SolrDocumentList findReusevalue = reposxml.query(q).getResults();
    assertEquals(1, findReusevalue.getNumFound());
    String ridForSourceAndReusereadyLookup = (String) findReusevalue.get(0).getFieldValue("a_cms.rid");
    assertEquals("2gyvymn15kv0001", ridForSourceAndReusereadyLookup);

    // TODO we don't get a cartesian product, so can we sort on released first (as we do with xincludes)
    // otherwise we would risk getting lots of reuseready=0 hits first, and the benefit of joining would be gone
    // The following query syntax fails
    //q.addSort(new SortClause("{!join to=pathfull from=reuserelease}reuseready", ORDER.desc));
    //SolrDocumentList findReusevalueReleasedFirst = reposxml.query(q).getResults();
    //assertEquals(1, findReusevalueReleasedFirst.getNumFound());
}

From source file:se.simonsoft.cms.indexing.xml.HandlerXmlIntegrationTest.java

License:Apache License

/**
 * Test covering the search algorithm actually implemented in CMS 3.0.
 * The join is performed on RID to match the Sha1 on the Release side while the Translation is the "primary" side of the join.
 * @throws SolrServerException
 */
@Test
public void testJoinReleasetranslationRidSha1() throws SolrServerException {
    FilexmlSourceClasspath repoSource = new FilexmlSourceClasspath(
            "se/simonsoft/cms/indexing/xml/datasets/releasetranslation");
    CmsRepositoryFilexml repo = new CmsRepositoryFilexml("http://localtesthost/svn/testaut1", repoSource);
    FilexmlRepositoryReadonly filexml = new FilexmlRepositoryReadonly(repo);

    SolrServer reposxml = indexing.enable(new ReposTestBackendFilexml(filexml)).getCore("reposxml");

    // search for the first title
    SolrDocumentList findUsingRid = reposxml
            .query(new SolrQuery("a_cms.rid:2gyvymn15kv0001 AND -prop_abx.TranslationLocale:*")).getResults();
    assertEquals("Should find the first title in the release (though actually a future one)", 1,
            findUsingRid.getNumFound());
    String wantedReleaseSha1 = (String) findUsingRid.get(0).getFieldValue("c_sha1_source_reuse");

    String locale = "sv-SE";
    // this join does not know that the remote element is actually in a Release
    // it could be another not-yet-translated translation, but that would typically not be an issue.
    SolrQuery query = new SolrQuery("prop_abx.TranslationLocale:" + locale
            + " AND {!join from=a_cms.rid to=a_cms.rid}c_sha1_source_reuse:" + wantedReleaseSha1);

    query.addFilterQuery("reusevalue:[1 TO *]");
    query.addFilterQuery("patharea:translation");
    // Filter on repository and parent path.
    query.addFilterQuery("repo:" + repo.getName());
    query.addFilterQuery("repoparent:" + "\\/svn");

    // Prefer higher reuseready integer, prefers Released over other status values.
    query.addSort(SolrQuery.SortClause.desc("reuseready"));
    // Prefer the highest RID, i.e. latest finalized.
    query.addSort(SolrQuery.SortClause.desc("a_cms.rid"));

    SolrDocumentList findReusevalue = reposxml.query(query).getResults();
    assertEquals(1, findReusevalue.getNumFound());
    String ridForSourceAndReusereadyLookup = (String) findReusevalue.get(0).getFieldValue("a_cms.rid");
    assertEquals("2gyvymn15kv0001", ridForSourceAndReusereadyLookup);
    assertEquals(1, findReusevalue.get(0).getFieldValue("reuseready"));
}

From source file:se.simonsoft.cms.indexing.xml.HandlerXmlLargeFileTest.java

License:Apache License

@Test
public void testSingle860k() throws Exception {

    // NOTE: The test will be skipped if T501007.xml is not provided.
    FilexmlSourceClasspath repoSource = new FilexmlSourceClasspath(classPath.concat("single-860k"));
    assumeResourceExists(repoSource, "/T501007.xml");
    CmsRepositoryFilexml repo = new CmsRepositoryFilexml("http://localtesthost/svn/flir", repoSource);
    FilexmlRepositoryReadonly filexml = new FilexmlRepositoryReadonly(repo);

    indexing.enable(new ReposTestBackendFilexml(filexml), injector);

    SolrServer reposxml = indexing.getCore("reposxml");
    SolrDocumentList all = reposxml.query(new SolrQuery("*:*").setRows(1)/*.addSort("depth", ORDER.asc)*/)
            .getResults();
    assertEquals(11488, all.getNumFound()); // haven't verified this number, got it from first test

    SolrDocument e1 = all.get(0);

    //assertEquals(80, e1.getFieldNames().size());
    //assertEquals("...", e1.getFieldValue("pathname"));
    /* Can not assert on props since repositem is not involved.
    assertEquals("xml", e1.getFieldValue("prop_abx.ContentType"));
    assertNull(e1.getFieldValue("prop_abx.Dependencies"));
    */

    assertChecksums(reposxml);
}

From source file:se.simonsoft.cms.indexing.xml.HandlerXmlNamespaceTest.java

License:Apache License

@Test
public void testNamespaceXhtml() throws Exception {
    FilexmlSourceClasspath repoSource = new FilexmlSourceClasspath(
            "se/simonsoft/cms/indexing/xml/datasets/namespace-xhtml");
    assumeResourceExists(repoSource, "/test1.xml");
    CmsRepositoryFilexml repo = new CmsRepositoryFilexml("http://localtesthost/svn/namespace", repoSource);
    FilexmlRepositoryReadonly filexml = new FilexmlRepositoryReadonly(repo);

    indexing.enable(new ReposTestBackendFilexml(filexml));

    SolrServer reposxml = indexing.getCore("reposxml");
    SolrDocumentList all = reposxml.query(new SolrQuery("*:*").setRows(2).setSort("pos", ORDER.asc))
            .getResults();
    assertEquals(13, all.getNumFound());

    SolrDocument e1 = all.get(0);
    assertEquals("html", e1.getFieldValue("name"));
    // Solr allows the wildcard part of dynamic fields to be empty.
    // TODO: Is ns_ what we want or would we like to define as "ns"? 
    assertEquals("declared ns", "http://www.w3.org/1999/xhtml", e1.getFieldValue("ns_"));
    assertEquals("inherited and declared ns", "http://www.w3.org/1999/xhtml", e1.getFieldValue("ins_"));

    SolrDocument e2 = all.get(1);
    assertEquals("head", e2.getFieldValue("name"));
    assertNull("not declared here", e2.getFieldValue("ns_"));
    assertEquals("inherited ns", "http://www.w3.org/1999/xhtml", e2.getFieldValue("ins_"));

}

From source file:se.simonsoft.cms.indexing.xml.HandlerXmlNamespaceTest.java

License:Apache License

@Test
public void testNamespaceXml() throws Exception {
    FilexmlSourceClasspath repoSource = new FilexmlSourceClasspath(
            "se/simonsoft/cms/indexing/xml/datasets/namespace-xml");
    assumeResourceExists(repoSource, "/test1.xml");
    CmsRepositoryFilexml repo = new CmsRepositoryFilexml("http://localtesthost/svn/namespace", repoSource);
    FilexmlRepositoryReadonly filexml = new FilexmlRepositoryReadonly(repo);

    indexing.enable(new ReposTestBackendFilexml(filexml));

    SolrServer reposxml = indexing.getCore("reposxml");
    SolrDocumentList all = reposxml.query(new SolrQuery("*:*").setRows(5).setSort("pos", ORDER.asc))
            .getResults();
    assertEquals(5, all.getNumFound());

    SolrDocument e1 = all.get(0);
    assertEquals("doc", e1.getFieldValue("name"));

    assertEquals("declared ns", "http://www.simonsoft.se/namespace/cms", e1.getFieldValue("ns_cms"));
    assertEquals("inherited and declared ns", "http://www.simonsoft.se/namespace/cms",
            e1.getFieldValue("ins_cms"));

    assertEquals("declared ns", "http://www.simonsoft.se/namespace/test1", e1.getFieldValue("ns_cms1"));
    assertEquals("declared ns", "http://www.simonsoft.se/namespace/test2", e1.getFieldValue("ns_cms2"));
    assertEquals("declared ns", "http://www.simonsoft.se/namespace/test3", e1.getFieldValue("ns_cms3"));
    e1 = null;

    SolrDocument e2 = all.get(1);
    assertEquals("elem", e2.getFieldValue("name"));
    assertNull("not declared here", e2.getFieldValue("ns_cms"));
    assertNull("not declared here", e2.getFieldValue("ns_cms1"));
    assertNull("not declared here", e2.getFieldValue("ns_cms2"));
    assertNull("not declared here", e2.getFieldValue("ns_cms3"));

    assertNotNull("inherited", e2.getFieldValue("ins_cms"));
    assertNotNull("inherited", e2.getFieldValue("ins_cms1"));
    assertNotNull("inherited", e2.getFieldValue("ins_cms2"));
    assertNotNull("inherited", e2.getFieldValue("ins_cms3"));

    //System.out.println(e2.getFieldValue("ns_unused"));
    assertEquals("unused namespaces",
            "[http://www.simonsoft.se/namespace/cms\nhttp://www.simonsoft.se/namespace/test2\nhttp://www.simonsoft.se/namespace/test3\n]",
            e2.getFieldValue("ns_unused").toString());
    e2 = null;

    SolrDocument e3 = all.get(2);
    assertEquals("cms1:elem", e3.getFieldValue("name"));
    assertNull("not declared here", e3.getFieldValue("ns_cms"));
    assertNull("not declared here", e3.getFieldValue("ns_cms1"));
    assertNull("not declared here", e3.getFieldValue("ns_cms2"));
    assertNull("not declared here", e3.getFieldValue("ns_cms3"));

    assertNotNull("inherited", e3.getFieldValue("ins_cms"));
    assertNotNull("inherited", e3.getFieldValue("ins_cms1"));
    assertNotNull("inherited", e3.getFieldValue("ins_cms2"));
    assertNotNull("inherited", e3.getFieldValue("ins_cms3"));

    //System.out.println(e3.getFieldValue("ns_unused"));
    assertEquals("unused namespaces",
            "[http://www.simonsoft.se/namespace/cms\nhttp://www.simonsoft.se/namespace/test2\nhttp://www.simonsoft.se/namespace/test3\n]",
            e3.getFieldValue("ns_unused").toString());
    e3 = null;

    SolrDocument e4 = all.get(3);
    assertEquals("elem", e4.getFieldValue("name"));
    assertNull("not declared here", e4.getFieldValue("ns_cms"));
    assertNull("not declared here", e4.getFieldValue("ns_cms1"));
    assertNull("not declared here", e4.getFieldValue("ns_cms2"));
    assertNull("not declared here", e4.getFieldValue("ns_cms3"));

    assertNotNull("inherited", e4.getFieldValue("ins_cms"));
    assertNotNull("inherited", e4.getFieldValue("ins_cms1"));
    assertNotNull("inherited", e4.getFieldValue("ins_cms2"));
    assertNotNull("inherited", e4.getFieldValue("ins_cms3"));

    //System.out.println(e4.getFieldValue("ns_unused"));
    assertEquals("unused namespaces",
            "[http://www.simonsoft.se/namespace/cms\nhttp://www.simonsoft.se/namespace/test1\nhttp://www.simonsoft.se/namespace/test3\n]",
            e4.getFieldValue("ns_unused").toString());
    e4 = null;

}

From source file:Solr.SolrManager.java

License:Open Source License

public SolrDocumentList getSolrDocs() throws SolrServerException, IOException {
    LOG.info("Fetching SolDocs");

    int MAX_FETCH_ROWS = conf.getInt("solr.fetch.size");

    String SolrCatalogUrl = conf.getString("solr.url");
    CommonsHttpSolrServer solrCatalog = new CommonsHttpSolrServer(SolrCatalogUrl);

    SolrDocumentList docsCatalog;

    SolrQuery solrQuery = new SolrQuery()
            .setQuery(/* " (searchable:false AND */ "keywords:[ 1 TO *] " /* )" *OR ( searchable:true) " */)
            .setRows(MAX_FETCH_ROWS)
            .addSortField("book_id", SolrQuery.ORDER.asc);

    QueryResponse rsp = solrCatalog.query(solrQuery);

    docsCatalog = rsp.getResults();

    //        SolrInputDocument InputDoc;
    //        for (SolrDocument CurrentSolrDoc : docsCatalog) {
    //            CurrentSolrDoc.removeFields("searchable");
    //            CurrentSolrDoc.addField("searchable", false);
    //            InputDoc = ClientUtils.toSolrInputDocument(CurrentSolrDoc);
    //            solrCatalog.add(InputDoc);
    //        }
    //        solrCatalog.commit();

    LOG.info("Doc Number : " + docsCatalog.getNumFound());

    return docsCatalog;
}

From source file:solrbook.ch11.solrj.cli.command.SearchCommand.java

License:Apache License

@Override
public void mainProcess(Map<String, Object> parameters) throws Exception {
    try {
        /*
         * Build the SolrQuery from the query string.
         */
        SolrQuery solrQuery = new SolrQuery(queryString);

        /*
         * Set the paging parameters (start offset and number of rows).
         */
        solrQuery.setStart(start);
        solrQuery.setRows(rows);

        /*
         * Apply sorting if both a sort field and a sort order were specified.
         */
        if (StringUtils.isNotEmpty(sortField) && StringUtils.isNotEmpty(sortOrder)) {
            /*
             * Convert the sort order string to SolrQuery.ORDER and set the sort.
             */
            solrQuery.setSort(sortField,
                    Enum.valueOf(org.apache.solr.client.solrj.SolrQuery.ORDER.class, sortOrder));
        }

        /*
         * Add each requested field from the comma-separated field list.
         */
        for (String f : fieldList.split(",")) {
            if (StringUtils.isNotEmpty(f)) {
                solrQuery.addField(f.trim());
            }
        }

        /*
         * Execute the query through the SolrClient.
         */
        QueryResponse queryResponse = solrClient.query(solrQuery);

        /*
         * Prepare a List to hold the result documents as Maps.
         */
        List<Map<String, Object>> documentList = new LinkedList<Map<String, Object>>();

        /*
         * Get the result document list.
         */
        SolrDocumentList solrDocumentList = queryResponse.getResults();

        /*
         * Iterate over the result documents.
         */
        for (SolrDocument solrDocument : solrDocumentList) {
            /*
             * Copy each document's fields into a Map.
             */
            Map<String, Object> documentMap = new HashMap<String, Object>();

            for (String fieldName : solrDocument.getFieldNames()) {
                Object fieldValue = solrDocument.getFieldValue(fieldName);
                documentMap.put(fieldName, fieldValue);
            }

            /*
             * Also include the relevance score.
             */
            documentMap.put("score", solrDocument.getFieldValue("score"));

            /*
             * Add the document Map to the result list.
             */
            documentList.add(documentMap);
        }

        /*
         * Query time in milliseconds.
         */
        response.put("QTime", queryResponse.getQTime());

        /*
         * Maximum relevance score.
         */
        response.put("maxScore", solrDocumentList.getMaxScore());

        /*
         * Total number of matching documents.
         */
        response.put("numFound", solrDocumentList.getNumFound());

        /*
         * The result documents.
         */
        response.put("result", documentList);

        status = STATUS_SUCCESS;
        message = SUCCESS_MESSAGE;
    } catch (Exception e) {
        /*
         * On error, record the status and the exception message.
         */
        status = STATUS_ERROR;
        message = e.getMessage();
    }
}

From source file:uk.ac.ebi.phenotype.imaging.springrest.images.dao.ImagesSolrJ.java

License:Apache License

private List<String> getIds(String query, int start, int length) throws SolrServerException {

    SolrDocumentList result = runQuery(query, start, length);

    log.debug("number found=" + result.getNumFound());
    this.setNumberFound(result.getNumFound());
    if (result.size() > 0) {
        List<String> ids = new ArrayList<String>();
        for (int i = 0; i < result.size(); i++) {
            SolrDocument doc = result.get(i);
            ids.add((String) doc.getFieldValue("id"));
        }
        return ids;

    }
    return Collections.emptyList();

}

From source file:uk.ac.ebi.phenotype.service.ObservationService.java

License:Apache License

public Map<String, List<DiscreteTimePoint>> getTimeSeriesMutantData(String parameter, List<String> genes,
        ArrayList<String> strains, String[] center, String[] sex) throws SolrServerException {

    Map<String, List<DiscreteTimePoint>> finalRes = new HashMap<String, List<DiscreteTimePoint>>(); // <allele_accession, timeSeriesData>

    SolrQuery query = new SolrQuery().addFilterQuery(ObservationDTO.BIOLOGICAL_SAMPLE_GROUP + ":experimental")
            .addFilterQuery(ObservationDTO.PARAMETER_STABLE_ID + ":" + parameter);

    String q = (strains.size() > 1) ? "(" + ObservationDTO.STRAIN_ACCESSION_ID + ":\""
            + StringUtils.join(strains.toArray(), "\" OR " + ObservationDTO.STRAIN_ACCESSION_ID + ":\"") + "\")"
            : ObservationDTO.STRAIN_ACCESSION_ID + ":\"" + strains.get(0) + "\"";

    if (genes != null && genes.size() > 0) {
        q += " AND (";
        q += (genes.size() > 1) ? ObservationDTO.GENE_ACCESSION_ID + ":\""
                + StringUtils.join(genes.toArray(), "\" OR " + ObservationDTO.GENE_ACCESSION_ID + ":\"") + "\""
                : ObservationDTO.GENE_ACCESSION_ID + ":\"" + genes.get(0) + "\"";
        q += ")";
    }

    if (center != null && center.length > 0) {
        q += " AND (";
        q += (center.length > 1)
                ? ObservationDTO.PHENOTYPING_CENTER + ":\""
                        + StringUtils.join(center, "\" OR " + ObservationDTO.PHENOTYPING_CENTER + ":\"") + "\""
                : ObservationDTO.PHENOTYPING_CENTER + ":\"" + center[0] + "\"";
        q += ")";
    }

    if (sex != null && sex.length == 1) {
        q += " AND " + ObservationDTO.SEX + ":\"" + sex[0] + "\"";
    }

    query.setQuery(q);
    query.set("group.field", ObservationDTO.GENE_SYMBOL);
    query.set("group", true);
    query.set("fl", ObservationDTO.DATA_POINT + "," + ObservationDTO.DISCRETE_POINT);
    query.set("group.limit", 100000); // number of documents to be returned
    // per group
    query.set("group.sort", ObservationDTO.DISCRETE_POINT + " asc");
    query.setRows(10000);

    // System.out.println("+_+_+ " + solr.getBaseURL() + "/select?" +
    // query);
    List<Group> groups = solr.query(query).getGroupResponse().getValues().get(0).getValues();
    // for mutants it doesn't seem we need binning
    // groups are the alleles
    for (Group gr : groups) {
        SolrDocumentList resDocs = gr.getResult();
        DescriptiveStatistics stats = new DescriptiveStatistics();
        float discreteTime = (float) resDocs.get(0).getFieldValue(ObservationDTO.DISCRETE_POINT);
        ArrayList<DiscreteTimePoint> res = new ArrayList<DiscreteTimePoint>();
        for (int i = 0; i < resDocs.getNumFound(); i++) {
            SolrDocument doc = resDocs.get(i);
            stats.addValue((float) doc.getFieldValue(ObservationDTO.DATA_POINT));
            if (discreteTime != (float) doc.getFieldValue(ObservationDTO.DISCRETE_POINT)
                    || i == resDocs.getNumFound() - 1) { // we are at the end of the document list
                // add to list
                float discreteDataPoint = (float) stats.getMean();
                DiscreteTimePoint dp = new DiscreteTimePoint(discreteTime, discreteDataPoint,
                        new Float(stats.getStandardDeviation()));
                List<Float> errorPair = new ArrayList<>();
                Float lower = new Float(discreteDataPoint);
                Float higher = new Float(discreteDataPoint);
                errorPair.add(lower);
                errorPair.add(higher);
                dp.setErrorPair(errorPair);
                res.add(dp);
                // update discrete point
                discreteTime = Float.valueOf(doc.getFieldValue(ObservationDTO.DISCRETE_POINT).toString());
                // update stats
                stats = new DescriptiveStatistics();
            }
        }
        // add list
        finalRes.put(gr.getGroupValue(), res);
    }
    return finalRes;
}

From source file:uk.ac.ebi.phenotype.service.ObservationService.java

License:Apache License

public List<DiscreteTimePoint> getTimeSeriesControlData(String parameter, ArrayList<String> strains,
        String[] center, String[] sex) throws SolrServerException {

    ArrayList<DiscreteTimePoint> res = new ArrayList<DiscreteTimePoint>();
    SolrQuery query = new SolrQuery().addFilterQuery(ObservationDTO.BIOLOGICAL_SAMPLE_GROUP + ":control")
            .addFilterQuery(ObservationDTO.PARAMETER_STABLE_ID + ":" + parameter);
    String q = (strains.size() > 1) ? "(" + ObservationDTO.STRAIN_ACCESSION_ID + ":\""
            + StringUtils.join(strains.toArray(), "\" OR " + ObservationDTO.STRAIN_ACCESSION_ID + ":\"") + "\")"
            : ObservationDTO.STRAIN_ACCESSION_ID + ":\"" + strains.get(0) + "\"";

    if (center != null && center.length > 0) {
        q += " AND (";
        q += (center.length > 1)
                ? ObservationDTO.PHENOTYPING_CENTER + ":\""
                        + StringUtils.join(center, "\" OR " + ObservationDTO.PHENOTYPING_CENTER + ":\"") + "\""
                : ObservationDTO.PHENOTYPING_CENTER + ":\"" + center[0] + "\"";
        q += ")";
    }

    if (sex != null && sex.length == 1) {
        q += " AND " + ObservationDTO.SEX + ":\"" + sex[0] + "\"";
    }

    query.setQuery(q);
    query.set("group.field", ObservationDTO.DISCRETE_POINT);
    query.set("group", true);
    query.set("fl", ObservationDTO.DATA_POINT + "," + ObservationDTO.DISCRETE_POINT);
    query.set("group.limit", 100000); // number of documents to be returned
    // per group
    query.set("sort", ObservationDTO.DISCRETE_POINT + " asc");
    query.setRows(10000);

    // System.out.println("+_+_+ " + solr.getBaseURL() + "/select?" +
    // query);
    List<Group> groups = solr.query(query).getGroupResponse().getValues().get(0).getValues();
    boolean rounding = false;
    // decide if binning is needed, i.e. the increment points are too scattered, as for calorimetry
    if (groups.size() > 30) { // arbitrary value, just picked because it seems reasonable for the size of our graphs
        if (Float.valueOf(groups.get(groups.size() - 1).getGroupValue())
                - Float.valueOf(groups.get(0).getGroupValue()) <= 30) { // then rounding will be enough
            rounding = true;
        }
    }
    if (rounding) {
        int bin = Math.round(Float.valueOf(groups.get(0).getGroupValue()));
        for (Group gr : groups) {
            int discreteTime = Math.round(Float.valueOf(gr.getGroupValue()));
            // for calorimetry ignore what's before -5 and after 16
            if (parameter.startsWith("IMPC_CAL") || parameter.startsWith("ESLIM_003_001")
                    || parameter.startsWith("M-G-P_003_001")) {
                if (discreteTime < -5) {
                    continue;
                } else if (discreteTime > 16) {
                    break;
                }
            }
            float sum = 0;
            SolrDocumentList resDocs = gr.getResult();
            DescriptiveStatistics stats = new DescriptiveStatistics();
            for (SolrDocument doc : resDocs) {
                sum += (float) doc.getFieldValue(ObservationDTO.DATA_POINT);
                stats.addValue((float) doc.getFieldValue(ObservationDTO.DATA_POINT));
            }
            if (bin < discreteTime || groups.indexOf(gr) == groups.size() - 1) { // finished the groups or filled the bin
                float discreteDataPoint = sum / resDocs.getNumFound();
                DiscreteTimePoint dp = new DiscreteTimePoint((float) discreteTime, discreteDataPoint,
                        new Float(stats.getStandardDeviation()));
                List<Float> errorPair = new ArrayList<>();
                double std = stats.getStandardDeviation();
                Float lower = new Float(discreteDataPoint - std);
                Float higher = new Float(discreteDataPoint + std);
                errorPair.add(lower);
                errorPair.add(higher);
                dp.setErrorPair(errorPair);
                res.add(dp);
                bin = discreteTime;
            }
        }
    } else {
        for (Group gr : groups) {
            Float discreteTime = Float.valueOf(gr.getGroupValue());
            float sum = 0;
            SolrDocumentList resDocs = gr.getResult();
            DescriptiveStatistics stats = new DescriptiveStatistics();
            for (SolrDocument doc : resDocs) {
                sum += (float) doc.getFieldValue(ObservationDTO.DATA_POINT);
                stats.addValue((float) doc.getFieldValue(ObservationDTO.DATA_POINT));
            }
            float discreteDataPoint = sum / resDocs.getNumFound();
            DiscreteTimePoint dp = new DiscreteTimePoint(discreteTime, discreteDataPoint,
                    new Float(stats.getStandardDeviation()));
            List<Float> errorPair = new ArrayList<>();
            double std = stats.getStandardDeviation();
            Float lower = new Float(discreteDataPoint - std);
            Float higher = new Float(discreteDataPoint + std);
            errorPair.add(lower);
            errorPair.add(higher);
            dp.setErrorPair(errorPair);
            res.add(dp);
        }
    }
    return res;
}