Example usage for org.apache.lucene.search Sort Sort

List of usage examples for org.apache.lucene.search Sort Sort

Introduction

On this page you can find example usage of the org.apache.lucene.search Sort(SortField...) constructor.

Prototype

public Sort(SortField... fields) 

Source Link

Document

Sets the sort to the given criteria in succession: the first SortField is checked first, but if it produces a tie, then the second SortField is used to break the tie, etc.

Usage

From source file:org.hibernate.search.test.sorting.SortingTest.java

License:LGPL

/**
 * Builds an HSQuery matching Person documents whose given field is null,
 * sorted on that same field using the requested sort type.
 */
private HSQuery queryForValueNullAndSorting(String fieldName, SortField.Type sortType) {
    ExtendedSearchIntegrator searchIntegrator = factoryHolder.getSearchFactory();
    QueryBuilder personQueryBuilder = searchIntegrator.buildQueryBuilder().forEntity(Person.class).get();
    Query nullValueQuery = personQueryBuilder.keyword().onField(fieldName).matching(null).createQuery();

    HSQuery hsQuery = searchIntegrator.createHSQuery(nullValueQuery, Person.class);
    hsQuery.sort(new Sort(new SortField(fieldName, sortType)));
    return hsQuery;
}

From source file:org.hibernate.search.test.spatial.SpatialIndexingTest.java

License:Open Source License

/**
 * Indexes six POIs around the (24, 32) center point, runs a spatial query
 * sorted by distance with the distance projected, then cleans up.
 */
public void testDistanceSort() throws Exception {
    // POIs whose descriptions record their distance from the (24, 32) center.
    POI[] allPois = {
            new POI(1, "Distance to 24,32 : 0", 24.0d, 32.0d, ""),
            new POI(2, "Distance to 24,32 : 24.45", 24.2d, 31.9d, ""),
            new POI(3, "Distance to 24,32 : 10.16", 24.0d, 31.9d, ""),
            new POI(4, "Distance to 24,32 : 15.06", 23.9d, 32.1d, ""),
            new POI(5, "Distance to 24,32 : 11.12", 23.9d, 32.0d, ""),
            new POI(6, "Distance to 24,32 : 22.24", 24.2d, 32.0d, "") };

    FullTextSession session = Search.getFullTextSession(openSession());

    // Index the entities in two batches, each in its own transaction.
    Transaction tx = session.beginTransaction();
    for (int i = 0; i < 3; i++) {
        session.save(allPois[i]);
    }
    tx.commit();
    tx = session.beginTransaction();
    for (int i = 3; i < 6; i++) {
        session.save(allPois[i]);
    }
    tx.commit();

    tx = session.beginTransaction();
    double centerLatitude = 24.0d;
    double centerLongitude = 32.0d;

    final QueryBuilder builder = session.getSearchFactory().buildQueryBuilder().forEntity(POI.class)
            .get();

    org.apache.lucene.search.Query luceneQuery = builder.spatial().onCoordinates("location")
            .within(100, Unit.KM).ofLatitude(centerLatitude).andLongitude(centerLongitude).createQuery();

    FullTextQuery hibQuery = session.createFullTextQuery(luceneQuery, POI.class);
    hibQuery.setSort(new Sort(new DistanceSortField(centerLatitude, centerLongitude, "location")));
    hibQuery.setProjection(FullTextQuery.THIS, FullTextQuery.SPATIAL_DISTANCE);
    hibQuery.setSpatialParameters(centerLatitude, centerLongitude, "location");
    List results = hibQuery.list();

    // Remove every indexed POI before closing the session.
    for (Object entity : session.createQuery("from " + POI.class.getName()).list()) {
        session.delete(entity);
    }
    tx.commit();
    session.close();
}

From source file:org.hibernate.search.test.spatial.SpatialIndexingTest.java

License:Open Source License

/**
 * Indexes six non-geo POIs (the first with a null longitude), runs a
 * match-all query sorted by distance from (24, 32) with the distance
 * projected, then cleans up.
 */
public void testNonGeoDistanceSort() throws Exception {
    // Descriptions record each entity's distance from the (24, 32) center;
    // the first one deliberately has a null longitude.
    NonGeoPOI[] allPois = {
            new NonGeoPOI(1, "Distance to 24,32 : 0", 24.0d, null, ""),
            new NonGeoPOI(2, "Distance to 24,32 : 24.45", 24.2d, 31.9d, ""),
            new NonGeoPOI(3, "Distance to 24,32 : 10.16", 24.0d, 31.9d, ""),
            new NonGeoPOI(4, "Distance to 24,32 : 15.06", 23.9d, 32.1d, ""),
            new NonGeoPOI(5, "Distance to 24,32 : 11.12", 23.9d, 32.0d, ""),
            new NonGeoPOI(6, "Distance to 24,32 : 22.24", 24.2d, 32.0d, "") };

    FullTextSession session = Search.getFullTextSession(openSession());

    // Index the entities in two batches, each in its own transaction.
    Transaction tx = session.beginTransaction();
    for (int i = 0; i < 3; i++) {
        session.save(allPois[i]);
    }
    tx.commit();
    tx = session.beginTransaction();
    for (int i = 3; i < 6; i++) {
        session.save(allPois[i]);
    }
    tx.commit();

    tx = session.beginTransaction();
    double centerLatitude = 24.0d;
    double centerLongitude = 32.0d;

    final QueryBuilder builder = session.getSearchFactory().buildQueryBuilder()
            .forEntity(NonGeoPOI.class).get();

    org.apache.lucene.search.Query luceneQuery = builder.all().createQuery();

    FullTextQuery hibQuery = session.createFullTextQuery(luceneQuery, NonGeoPOI.class);
    hibQuery.setSort(new Sort(new DistanceSortField(centerLatitude, centerLongitude, "location")));
    hibQuery.setProjection(FullTextQuery.THIS, FullTextQuery.SPATIAL_DISTANCE);
    hibQuery.setSpatialParameters(centerLatitude, centerLongitude, "location");
    List results = hibQuery.list();

    // Remove every indexed entity before closing the session.
    for (Object entity : session.createQuery("from " + NonGeoPOI.class.getName()).list()) {
        session.delete(entity);
    }
    tx.commit();
    session.close();
}

From source file:org.hibernate.search.test.spatial.SpatialSearchSortByDistanceAndPaging.java

License:Open Source License

/**
 * Searches GeoEntities within {@code distance} km of ({@code startLat},
 * {@code startLon}), optionally sorted by distance, and returns the requested
 * result page with each entity's distance copied from the projection.
 */
private List distanceSearch(double startLat, double startLon, double distance, int firstResult, int maxResult,
        boolean sortByDistance) {

    Session session = openSession();
    session.beginTransaction();

    FullTextSession fullTextSession = Search.getFullTextSession(session);

    QueryBuilder queryBuilder = fullTextSession.getSearchFactory().buildQueryBuilder()
            .forEntity(GeoEntity.class).get();

    org.apache.lucene.search.Query spatialQuery = queryBuilder.spatial().onDefaultCoordinates()
            .within(distance, Unit.KM).ofLatitude(startLat).andLongitude(startLon).createQuery();

    FullTextQuery fullTextQuery = fullTextSession.createFullTextQuery(spatialQuery, GeoEntity.class);
    fullTextQuery.setProjection(FullTextQuery.THIS, FullTextQuery.SPATIAL_DISTANCE);
    fullTextQuery.setSpatialParameters(startLat, startLon, AbstractDocumentBuilder.COORDINATES_DEFAULT_FIELD);

    if (sortByDistance) {
        fullTextQuery.setSort(new Sort(
                new DistanceSortField(startLat, startLon, AbstractDocumentBuilder.COORDINATES_DEFAULT_FIELD)));
    }

    fullTextQuery.setFirstResult(firstResult).setMaxResults(maxResult);

    // Each projected row is [entity, distance]; stash the distance on the entity.
    List entities = new ArrayList();
    for (Object[] row : (List<Object[]>) fullTextQuery.list()) {
        GeoEntity geoEntity = (GeoEntity) row[0];
        geoEntity.setDistance((Double) row[1]);
        entities.add(geoEntity);
    }

    session.getTransaction().commit();
    session.close();
    return entities;
}

From source file:org.hibernate.search.test.spatial.SpatialSearchSortByDistanceAndPagingTest.java

License:LGPL

/**
 * Searches GeoEntities within {@code distance} km of ({@code startLat},
 * {@code startLon}), optionally sorted by distance, and returns the requested
 * result page with each entity's distance copied from the projection.
 */
private List distanceSearch(double startLat, double startLon, double distance, int firstResult, int maxResult,
        boolean sortByDistance) {

    Session session = openSession();
    session.beginTransaction();

    FullTextSession fullTextSession = Search.getFullTextSession(session);

    QueryBuilder queryBuilder = fullTextSession.getSearchFactory().buildQueryBuilder()
            .forEntity(GeoEntity.class).get();

    org.apache.lucene.search.Query spatialQuery = queryBuilder.spatial().within(distance, Unit.KM)
            .ofLatitude(startLat).andLongitude(startLon).createQuery();

    FullTextQuery fullTextQuery = fullTextSession.createFullTextQuery(spatialQuery, GeoEntity.class);
    fullTextQuery.setProjection(FullTextQuery.THIS, FullTextQuery.SPATIAL_DISTANCE);
    fullTextQuery.setSpatialParameters(startLat, startLon, Spatial.COORDINATES_DEFAULT_FIELD);

    if (sortByDistance) {
        fullTextQuery.setSort(new Sort(
                new DistanceSortField(startLat, startLon, Spatial.COORDINATES_DEFAULT_FIELD)));
    }

    fullTextQuery.setFirstResult(firstResult).setMaxResults(maxResult);

    // Each projected row is [entity, distance]; stash the distance on the entity.
    List entities = new ArrayList();
    for (Object[] row : (List<Object[]>) fullTextQuery.list()) {
        GeoEntity geoEntity = (GeoEntity) row[0];
        geoEntity.setDistance((Double) row[1]);
        entities.add(geoEntity);
    }

    session.getTransaction().commit();
    session.close();
    return entities;
}

From source file:org.hippoecm.repository.FacetedNavigationEngineImpl.java

License:Apache License

/**
 * Computes a faceted navigation view. The initial query, facet and range
 * constraints, inherited filters and authorization are intersected into a
 * document-id set; then either the facet value counts for the single facet in
 * {@code resultset} are populated, or (when {@code resultset} is null) the
 * matching node ids are collected, honoring any requested sort and paging.
 *
 * @param queryName       name of the query (not used directly in this method)
 * @param initialQuery    optional scoping query (scopes and facet filters)
 * @param contextImpl     per-session context providing the authorization query
 * @param facetsQueryList key/value facet constraints to apply
 * @param rangeQuery      facet range constraints to apply
 * @param openQuery       optional free-text (JackRabbit) query with its own sort
 * @param resultset       when non-null, must contain exactly one facet whose
 *                        value counts are filled in; when null, node hits are returned
 * @param inheritedFilter filter inherited from the parent navigation context
 * @param hitsRequested   paging / ordering / result-shape options
 * @return the resulting count and (optionally) node-id hits; an empty result on error
 */
public Result doView(String queryName, QueryImpl initialQuery, ContextImpl contextImpl,
        List<KeyValue<String, String>> facetsQueryList, List<FacetRange> rangeQuery, QueryImpl openQuery,
        Map<String, Map<String, Count>> resultset, Map<String, String> inheritedFilter,
        HitsRequested hitsRequested) throws UnsupportedOperationException, IllegalArgumentException {
    NamespaceMappings nsMappings = getNamespaceMappings();

    IndexReader indexReader = null;
    try {
        indexReader = getIndexReader(false);

        IndexSearcher searcher = new IndexSearcher(indexReader);
        SetDocIdSetBuilder matchingDocsSetBuilder = new SetDocIdSetBuilder();

        // Intersect facet, range and inherited-filter constraints into the candidate doc set.
        BooleanQuery facetsQuery = new FacetsQuery(facetsQueryList, nsMappings).getQuery();
        matchingDocsSetBuilder.add(filterDocIdSetPlainLuceneQuery(facetsQuery, indexReader));

        BooleanQuery facetRangeQuery = new FacetRangeQuery(rangeQuery, nsMappings, this).getQuery();
        matchingDocsSetBuilder.add(filterDocIdSetPlainLuceneQuery(facetRangeQuery, indexReader));

        BooleanQuery inheritedFilterQuery = new InheritedFilterQuery(inheritedFilter, nsMappings).getQuery();
        matchingDocsSetBuilder.add(filterDocIdSetPlainLuceneQuery(inheritedFilterQuery, indexReader));

        // Scope constraint: one term query per scope path, OR-ed together when multiple.
        org.apache.lucene.search.Query initialLuceneQuery = null;
        if (initialQuery != null && initialQuery.scopes != null && initialQuery.scopes.length > 0) {
            if (initialQuery.scopes.length == 1) {
                initialLuceneQuery = new TermQuery(
                        new Term(ServicingFieldNames.HIPPO_PATH, initialQuery.scopes[0]));
            } else {
                initialLuceneQuery = new BooleanQuery(true);
                for (String scope : initialQuery.scopes) {
                    ((BooleanQuery) initialLuceneQuery)
                            .add(new TermQuery(new Term(ServicingFieldNames.HIPPO_PATH, scope)), Occur.SHOULD);
                }
            }
        }
        matchingDocsSetBuilder.add(filterDocIdSetPlainLuceneQuery(initialLuceneQuery, indexReader));

        FacetFiltersQuery facetFiltersQuery = null;
        if (initialQuery != null && initialQuery.facetFilters != null) {
            facetFiltersQuery = new FacetFiltersQuery(initialQuery.facetFilters, nsMappings,
                    this.getTextAnalyzer(), this.getSynonymProvider());
        }

        // Restrict to documents this session is authorized to see, when applicable.
        final BooleanQuery authorizationQuery = contextImpl.getAuthorizationQuery();
        if (authorizationQuery != null) {
            final DocIdSet authorisationIdSet = contextImpl.getAuthorisationIdSet(indexReader);
            if (authorisationIdSet != null) {
                matchingDocsSetBuilder.add(authorisationIdSet);
            }
        }

        if (resultset != null) {
            // If there is more than one facet in the 'resultset' we return an empty result as this is not allowed
            if (resultset.size() > 1) {
                log.error("The resultset cannot contain multiple facets");
                return new ResultImpl(0, null);
            }

            int cardinality = 0;
            for (String namespacedFacet : resultset.keySet()) {

                // Not a search involving scoring, thus compute bitsets for facetFiltersQuery & freeSearchInjectedSort
                if (facetFiltersQuery != null) {
                    if (facetFiltersQuery.isPlainLuceneQuery()) {
                        matchingDocsSetBuilder
                                .add(filterDocIdSetPlainLuceneQuery(facetFiltersQuery.getQuery(), indexReader));
                    } else {
                        matchingDocsSetBuilder
                                .add(filterDocIdSetJackRabbitQuery(facetFiltersQuery.getQuery(), indexReader));
                    }
                }

                if (openQuery != null) {
                    QueryAndSort queryAndSort = openQuery.getLuceneQueryAndSort(contextImpl);
                    // open query is always a jackrabbit query
                    matchingDocsSetBuilder.add(filterDocIdSetJackRabbitQuery(queryAndSort.query, indexReader));
                }

                OpenBitSet matchingDocs = matchingDocsSetBuilder.toBitSet();
                cardinality = (int) matchingDocs.cardinality();
                /*
                 * Nodes not having this facet still should be counted if they are a hit
                 * in the query without this facet. Therefore, first get the count query without
                 * FacetPropExistsQuery.
                 */
                int numHits = 0;
                if (hitsRequested.isFixedDrillPath()) {
                    // only in the case of the fixed drillpath we use the count where the facet does not need to exist
                    numHits = (int) matchingDocs.cardinality();
                }

                ParsedFacet parsedFacet;
                try {
                    parsedFacet = ParsedFacet.getInstance(namespacedFacet);
                } catch (Exception e) {
                    log.error("Error parsing facet: ", e);
                    return new ResultImpl(0, null);
                }

                String propertyName = ServicingNameFormat.getInteralPropertyPathName(nsMappings,
                        parsedFacet.getNamespacedProperty());

                /*
                 * facetPropExists: the node must have the property as facet
                 */

                matchingDocsSetBuilder.add(filterDocIdSetPlainLuceneQuery(
                        new FacetPropExistsQuery(propertyName).getQuery(), indexReader));

                matchingDocs = matchingDocsSetBuilder.toBitSet();
                cardinality = (int) matchingDocs.cardinality();
                // this method populates the facetValueCountMap for the current facet

                // index reader is instance of JackrabbitIndexReader : we need the wrapped multi-index reader as
                // cache key : since during deletes only, the backing index reader can stay the same, we
                // also need to use numDocs to be sure we get the right cached values
                Object[] keyObjects = { matchingDocs, propertyName, parsedFacet, indexReader.getCoreCacheKey(),
                        indexReader.numDocs() };
                FVCKey fvcKey = new FVCKey(keyObjects);

                Map<String, Count> facetValueCountMap = facetValueCountCache.getIfPresent(fvcKey);
                if (facetValueCountMap == null) {
                    facetValueCountMap = new HashMap<String, Count>();
                    populateFacetValueCountMap(propertyName, parsedFacet, facetValueCountMap, matchingDocs,
                            indexReader);
                    facetValueCountCache.put(fvcKey, facetValueCountMap);
                    log.debug("Caching new facet value count map");
                } else {
                    log.debug("Reusing previously cached facet value count map");
                }

                Map<String, Count> resultFacetValueCountMap = resultset.get(namespacedFacet);
                resultFacetValueCountMap.putAll(facetValueCountMap);

                // set the numHits value
                if (hitsRequested.isFixedDrillPath()) {
                    return new ResultImpl(numHits, null);
                }
            }

            return new ResultImpl(cardinality, null);

        } else {
            // resultset is null, so search for HippoNodeType.HIPPO_RESULTSET
            if (!hitsRequested.isResultRequested()) {
                // No search with SCORING involved, thus everything can be done with BitSets
                if (facetFiltersQuery != null && facetFiltersQuery.getQuery().clauses().size() > 0) {
                    matchingDocsSetBuilder
                            .add(filterDocIdSetPlainLuceneQuery(facetFiltersQuery.getQuery(), indexReader));
                }

                if (openQuery != null) {
                    QueryAndSort queryAndSort = openQuery.getLuceneQueryAndSort(contextImpl);
                    matchingDocsSetBuilder.add(filterDocIdSetJackRabbitQuery(queryAndSort.query, indexReader));
                }

                int size = (int) matchingDocsSetBuilder.toBitSet().cardinality();
                return new ResultImpl(size, null);

            } else {

                // Actual hits requested: build a scoring query plus an optional injected sort.
                BooleanQuery searchQuery = new BooleanQuery(false);
                Sort freeSearchInjectedSort = null;
                if (facetFiltersQuery != null && facetFiltersQuery.getQuery().clauses().size() > 0) {
                    searchQuery.add(facetFiltersQuery.getQuery(), Occur.MUST);
                }

                if (openQuery != null) {
                    QueryAndSort queryAndSort = openQuery.getLuceneQueryAndSort(contextImpl);
                    if (queryAndSort.query != null) {
                        searchQuery.add(queryAndSort.query, Occur.MUST);
                    }
                    freeSearchInjectedSort = queryAndSort.sort;
                }

                // Only the UUID field is needed from each hit document.
                Set<String> fieldNames = new HashSet<String>();
                fieldNames.add(FieldNames.UUID);
                FieldSelector fieldSelector = new SetBasedFieldSelector(fieldNames, new HashSet<String>());

                int fetchTotal = hitsRequested.getOffset() + hitsRequested.getLimit();
                Sort sort = null;
                if (freeSearchInjectedSort != null) {
                    // we already have a sort from the xpath or sql free search. Use this one
                    sort = freeSearchInjectedSort;
                } else if (hitsRequested.getOrderByList().size() > 0) {
                    // Translate the requested order-by properties into Lucene sort fields,
                    // skipping any property whose name cannot be resolved.
                    List<Path> orderPropertiesList = new ArrayList<Path>();
                    List<Boolean> ascSpecsList = new ArrayList<Boolean>();
                    for (OrderBy orderBy : hitsRequested.getOrderByList()) {
                        try {
                            Name orderByProp = NameFactoryImpl.getInstance().create(orderBy.getName());
                            boolean isAscending = !orderBy.isDescending();
                            orderPropertiesList.add(createPath(orderByProp));
                            ascSpecsList.add(isAscending);
                        } catch (IllegalArgumentException e) {
                            log.warn("Skip property '{}' because cannot create a Name for it: {}",
                                    orderBy.getName(), e.toString());
                        }
                    }
                    if (orderPropertiesList.size() > 0) {
                        Path[] orderProperties = orderPropertiesList
                                .toArray(new Path[orderPropertiesList.size()]);
                        boolean[] ascSpecs = new boolean[ascSpecsList.size()];
                        int i = 0;
                        for (Boolean b : ascSpecsList) {
                            ascSpecs[i] = b;
                            i++;
                        }
                        sort = new Sort(createSortFields(orderProperties, ascSpecs,
                                new String[orderProperties.length]));
                    }
                }

                boolean sortScoreAscending = false;
                // if the sort is on score descending, we can set it to null as this is the default and more efficient
                if (sort != null && sort.getSort().length == 1
                        && sort.getSort()[0].getType() == SortField.SCORE) {

                    if (sort.getSort()[0].getReverse()) {
                        sortScoreAscending = true;
                    } else {
                        // we can skip sort as it is on score descending
                        sort = null;
                    }
                }

                TopDocs tfDocs;
                org.apache.lucene.search.Query query = searchQuery;
                if (searchQuery.clauses().size() == 0) {
                    // no clauses at all: fall back to a match-all query
                    query = new MatchAllDocsQuery();
                }

                if (sort == null) {
                    // when sort == null, use the filtered search without sort, as this is more efficient
                    Filter filterToApply = new DocIdSetFilter(matchingDocsSetBuilder.toBitSet());
                    tfDocs = searcher.search(query, filterToApply, fetchTotal);
                } else {
                    if (sortScoreAscending) {
                        // we need the entire searchQuery because scoring is involved
                        Filter filterToApply = new DocIdSetFilter(matchingDocsSetBuilder.toBitSet());
                        tfDocs = searcher.search(query, filterToApply, fetchTotal, sort);
                    } else {
                        // because we have at least one explicit sort, scoring can be skipped. We can use cached bitsets combined with a match all query
                        if (facetFiltersQuery != null) {
                            matchingDocsSetBuilder.add(
                                    filterDocIdSetPlainLuceneQuery(facetFiltersQuery.getQuery(), indexReader));
                        }
                        if (openQuery != null) {
                            QueryAndSort queryAndSort = openQuery.getLuceneQueryAndSort(contextImpl);
                            matchingDocsSetBuilder
                                    .add(filterDocIdSetJackRabbitQuery(queryAndSort.query, indexReader));
                        }

                        Filter filterToApply = new DocIdSetFilter(matchingDocsSetBuilder.toBitSet());
                        // set query to MatchAllDocsQuery because we have everything as filter now
                        query = new MatchAllDocsQuery();
                        tfDocs = searcher.search(query, filterToApply, fetchTotal, sort);
                    }

                }

                ScoreDoc[] hits = tfDocs.scoreDocs;
                int position = hitsRequested.getOffset();

                // LinkedHashSet because ordering should be kept!
                Set<NodeId> nodeIdHits = new LinkedHashSet<NodeId>();
                while (position < hits.length) {
                    Document d = indexReader.document(hits[position].doc, fieldSelector);
                    Field uuidField = d.getField(FieldNames.UUID);
                    if (uuidField != null) {
                        nodeIdHits.add(NodeId.valueOf(uuidField.stringValue()));
                    }
                    position++;
                }
                return new ResultImpl(nodeIdHits.size(), nodeIdHits);
            }
        }

    } catch (IllegalNameException e) {
        log.error("Error during creating view: ", e);
    } catch (IOException e) {
        log.error("Error during creating view: ", e);
    } finally {

        if (indexReader != null) {
            try {
                // do not call indexReader.close() as ref counting is taken care of by
                // org.apache.jackrabbit.core.query.lucene.Util#closeOrRelease
                Util.closeOrRelease(indexReader);
            } catch (IOException e) {
                log.error("Exception while closing index reader", e);
            }
        }
    }
    return new ResultImpl(0, null);
}

From source file:org.hippoecm.repository.query.lucene.ServicingSearchIndex.java

License:Apache License

/**
 * Executes the query on the search index and returns hits that release the
 * shared index reader when closed.
 *
 * @param session         the session that executes the query
 * @param queryImpl       the query impl
 * @param query           the lucene query
 * @param orderProps      names of the properties used for the sort order
 * @param orderSpecs      order spec per sort property: <code>true</code> for
 *                        ascending, <code>false</code> for descending
 * @param orderFuncs      functions applied to the sort order properties
 * @param resultFetchHint a hint on how many results should be fetched
 * @return the query hits
 * @throws IOException if an error occurs while searching the index
 */
public MultiColumnQueryHits executeQuery(SessionImpl session, AbstractQueryImpl queryImpl, Query query,
        Path[] orderProps, boolean[] orderSpecs, String[] orderFuncs, long resultFetchHint) throws IOException {
    checkOpen();

    final IndexReader reader = getIndexReader();
    // A null authorization filter means: no bitset filtering for this session.
    CachingMultiReaderQueryFilter authorizationFilter = getAuthorizationFilter(session);
    final HippoIndexSearcher searcher = new HippoIndexSearcher(session, reader, getItemStateManager(),
            authorizationFilter);
    searcher.setSimilarity(getSimilarity());

    Sort sort = new Sort(createSortFields(orderProps, orderSpecs, orderFuncs));
    MultiColumnQueryHits hits = searcher.execute(query, sort, resultFetchHint,
            QueryImpl.DEFAULT_SELECTOR_NAME);
    return new FilterMultiColumnQueryHits(hits) {
        public void close() throws IOException {
            try {
                super.close();
            } finally {
                // Release rather than close: reader ref counting is handled by Util.
                Util.closeOrRelease(reader);
            }
        }
    };
}

From source file:org.inbio.neoportal.core.dao.impl.OccurrenceDAOImpl.java

License:Open Source License

/**
 * Runs a raw Lucene query string against the occurrence index.
 *
 * @param luceneQuery       the Lucene query string to parse and execute
 * @param sortField         name of the string index field to sort on
 * @param resultTransformer transformer applied to each result row
 * @param offset            index of the first result to return
 * @param quantity          maximum number of results to return
 * @return the transformed result list
 * @throws IllegalArgumentException if the query string cannot be parsed
 */
@Override
public List searchLucene(String luceneQuery, String sortField, ResultTransformer resultTransformer, int offset,
        int quantity) {
    Session session = getSessionFactory().getCurrentSession();
    FullTextSession fullTextSession = Search.getFullTextSession(session);

    QueryParser parser = new QueryParser(NeoportalCoreConstants.LuceneVersion, "",
            new StandardAnalyzer(NeoportalCoreConstants.LuceneVersion));

    org.apache.lucene.search.Query query;
    try {
        query = parser.parse(luceneQuery);
    } catch (ParseException e) {
        // Fail fast instead of continuing with a null query, which would NPE downstream.
        throw new IllegalArgumentException("Unparsable Lucene query: " + luceneQuery, e);
    }

    FullTextQuery fQuery = fullTextSession.createFullTextQuery(query, OccurrenceDwc.class);

    fQuery.setSort(new Sort(new SortField(sortField, SortField.STRING)));
    fQuery.setResultTransformer(resultTransformer);
    fQuery.setFirstResult(offset);
    fQuery.setMaxResults(quantity);

    return fQuery.list();
}

From source file:org.inbio.neoportal.core.dao.impl.TaxonDAOImpl.java

License:Open Source License

/**
 * Runs a raw Lucene query string against the taxon index.
 *
 * @param luceneQuery the Lucene query string to parse and execute
 * @param sortField   name of the string index field to sort on
 * @param offset      index of the first result to return
 * @param quantity    maximum number of results to return
 * @return the matching taxa as lite DTOs
 * @throws IllegalArgumentException if the query string cannot be parsed
 */
@Override
public List<TaxonLiteCDTO> search(String luceneQuery, String sortField, int offset, int quantity) {

    Session session = getSessionFactory().getCurrentSession();
    FullTextSession fullTextSession = Search.getFullTextSession(session);

    // wrap Lucene query in a org.hibernate.Query
    QueryParser parser = new QueryParser(Version.LUCENE_33, "",
            fullTextSession.getSearchFactory().getAnalyzer(Taxon.class));

    Query query;
    try {
        query = parser.parse(luceneQuery);
    } catch (ParseException e) {
        // Fail fast instead of continuing with a null query, which would NPE downstream.
        throw new IllegalArgumentException("Unparsable Lucene query: " + luceneQuery, e);
    }

    org.hibernate.search.FullTextQuery hQuery = fullTextSession.createFullTextQuery(query, Taxon.class);

    hQuery.setSort(new Sort(new SortField(sortField, SortField.STRING)));
    hQuery.setResultTransformer(new TaxonLiteTransformer());
    hQuery.setFirstResult(offset);
    hQuery.setMaxResults(quantity);

    return hQuery.list();
}

From source file:org.infinispan.query.blackbox.AbstractLocalQueryTest.java

License:Open Source License

/**
 * Verifies that applying a sort on the "age" field changes the order of the
 * cache query hits while keeping the same result count.
 */
public void testSetSort() throws ParseException {
    person2.setAge(35);
    person3.setAge(12);

    queryParser = createQueryParser("name");
    luceneQuery = queryParser.parse("Goat");
    cacheQuery = new QueryFactory(cache, qh).getQuery(luceneQuery);

    // Without a sort: both matches come back.
    found = cacheQuery.list();
    assert found.size() == 2;

    // With a string sort on "age": same hits, now in sorted order.
    Sort ageSort = new Sort(new SortField("age", SortField.STRING));
    cacheQuery.setSort(ageSort);
    found = cacheQuery.list();

    assert found.size() == 2;
    assert found.get(0).equals(person2);
    assert found.get(1).equals(person3);
}