Example usage for org.apache.lucene.search Sort Sort

List of usage examples for org.apache.lucene.search Sort Sort

Introduction

On this page you can find example usages of the org.apache.lucene.search Sort constructor Sort(SortField... fields).

Prototype

public Sort(SortField... fields) 

Source Link

Document

Sets the sort to the given criteria in succession: the first SortField is checked first, but if it produces a tie, then the second SortField is used to break the tie, etc.

Usage

From source file:nl.strohalm.cyclos.dao.ads.AdDAOImpl.java

License:Open Source License

/**
 * Builds the elements (query, filters and sort) used to run a full-text search for ads.
 *
 * @param adQuery the ad search parameters (keywords, category, owner, period, status, ...)
 * @return the assembled {@code FullTextSearchElements} to be executed by the caller
 * @throws QueryParseException if the keywords cannot be parsed by the Lucene query parser
 */
private FullTextSearchElements prepare(final FullTextAdQuery adQuery) {
    final String keywords = adQuery.getKeywords();
    final Calendar today = DateHelper.truncate(Calendar.getInstance());
    // Sort stays null for keyword searches, which rely on relevance ordering instead
    Sort sort = null;
    Query query;
    Analyzer analyzer = adQuery.getAnalyzer();
    if (keywords == null) {
        query = new MatchAllDocsQuery();
        // When not using keywords, return newer first
        sort = new Sort(new SortField("baseDate", SortField.STRING, true));
    } else {
        try {
            query = getQueryParser(analyzer).parse(keywords);
        } catch (final ParseException e) {
            throw new QueryParseException(e);
        }
    }

    // Translate each populated query attribute into an index filter
    // (null/empty values are presumably ignored by Filters — TODO confirm)
    final Filters filters = new Filters();
    filters.addTerms("id", adQuery.getId());
    filters.addTerms("membersNotified", adQuery.getMembersNotified());
    filters.addTerms("category", adQuery.getCategoriesIds());
    filters.addTerms("currency", adQuery.getCurrency());
    filters.addTerms("externalPublication", adQuery.getExternalPublication());
    filters.addRange("price", adQuery.getInitialPrice(), adQuery.getFinalPrice());
    final TimePeriod since = adQuery.getSince();
    if (since != null && since.isValid()) {
        // Only ads whose publication began after (today - since)
        final Calendar sinceDate = since.remove(today);
        filters.addRange("publicationBegin", sinceDate, null);
    }
    filters.addPeriod("publicationBegin", adQuery.getPeriod());
    filters.addTerms("owner", adQuery.getOwner());
    filters.addTerms("owner.group", adQuery.getGroups());
    filters.addTerms("tradeType", adQuery.getTradeType());
    // Ad-level and owner-level custom field values
    if (CollectionUtils.isNotEmpty(adQuery.getAdValues())) {
        for (final AdCustomFieldValue fieldValue : adQuery.getAdValues()) {
            addCustomField(filters, analyzer, fieldValue);
        }
    }
    if (CollectionUtils.isNotEmpty(adQuery.getMemberValues())) {
        for (final MemberCustomFieldValue fieldValue : adQuery.getMemberValues()) {
            addCustomField(filters, analyzer, fieldValue, "owner.customValues.%s");
        }
    }
    if (adQuery.isWithImagesOnly()) {
        filters.addTerms("hasImages", true);
    }
    // Status
    final Status status = adQuery.getStatus();
    if (status != null) {
        final Filter isPermanent = Filters.terms("permanent", true);
        final Filter isNotPermanent = Filters.terms("permanent", false);
        Filter endRange;
        Filter beginRange;
        switch (status) {
        case PERMANENT:
            // permanent = true
            filters.add(isPermanent);
            break;
        case ACTIVE:
            // permanent = true or (end > today and begin <= today) // neither begin / end are null
            beginRange = Filters.range("publicationBegin", null, today);
            endRange = Filters.range("publicationEnd", today, null);
            filters.add(Filters.or(isPermanent, Filters.and(endRange, beginRange)));
            break;
        case SCHEDULED:
            // permanent = false and begin >= today
            beginRange = Filters.range("publicationBegin", today, null);
            filters.add(Filters.and(isNotPermanent, beginRange));
            break;
        case EXPIRED:
            // permanent = false and end <= today
            // NOTE(review): the trailing booleans presumably control bound inclusiveness —
            // TODO confirm against the Filters.range contract
            endRange = Filters.range("publicationEnd", null, today, false, false);
            filters.add(Filters.and(isNotPermanent, endRange));
            break;
        }
    }
    return new FullTextSearchElements(query, filters, sort);
}

From source file:nl.strohalm.cyclos.dao.members.ElementDAOImpl.java

License:Open Source License

/**
 * Performs a full-text search for elements of the nature given by the query.
 * <p>
 * Without keywords, all documents are matched and sorted by creation date (newest first);
 * keyword searches use relevance order. Member and admin searches may later replace the
 * sort via {@code decideSorting}.
 *
 * @param elementQuery the search parameters; its nature selects the element class searched
 * @return the matching elements; empty when the nature is null, or when an operator search
 *         has no member to scope it
 * @throws QueryParseException if the keywords cannot be parsed
 */
public List<? extends Element> fullTextSearch(final FullTextElementQuery elementQuery) {
    final String keywords = StringUtils.trimToNull(elementQuery.getKeywords());
    final Nature nature = elementQuery.getNature();
    // We cannot search on a null nature
    if (nature == null) {
        return Collections.emptyList();
    }
    // When searching by keywords, use the full-text query
    Analyzer analyzer = elementQuery.getAnalyzer();
    Query query;
    Sort sort = null;
    if (keywords == null) {
        query = new MatchAllDocsQuery();
        sort = new Sort(new SortField("creationDate", SortField.STRING, true));
    } else {
        try {
            // keywords is guaranteed non-null in this branch
            // (removed a redundant "keywords == null ? ... : ..." re-check that was dead code)
            query = getQueryParser(analyzer).parse(keywords);
        } catch (final ParseException e) {
            throw new QueryParseException(e);
        }
    }
    final Filters filters = new Filters();
    filters.addTerms("active", elementQuery.getEnabled());
    filters.addTerms("group", elementQuery.getGroups());
    final Collection<? extends CustomFieldValue> customValues = elementQuery.getCustomValues();
    if (CollectionUtils.isNotEmpty(customValues)) {
        for (final CustomFieldValue fieldValue : customValues) {
            addCustomField(filters, analyzer, fieldValue);
        }
    }
    if (CollectionUtils.isNotEmpty(elementQuery.getExcludeElements())) {
        // Exclude the given elements by id
        Collection<Long> excludeIds = EntityHelper.toIdsAsList(elementQuery.getExcludeElements());
        filters.add(Filters.andNot(Filters.terms("id", excludeIds)));
    }

    if (elementQuery instanceof FullTextMemberQuery) {
        final FullTextMemberQuery memberQuery = (FullTextMemberQuery) elementQuery;
        filters.addPeriod("activationDate", memberQuery.getActivationPeriod());
        filters.addTerms("broker", memberQuery.getBroker());
        if (memberQuery.isWithImagesOnly()) {
            filters.addTerms("hasImages", true);
        }
        // Member searches always use the configured member sort order
        sort = decideSorting(memberQuery);

    } else if (elementQuery instanceof FullTextOperatorQuery) {
        final FullTextOperatorQuery operatorQuery = (FullTextOperatorQuery) elementQuery;
        final Member member = operatorQuery.getMember();
        if (member == null) {
            // Cannot search operators without a member
            return Collections.emptyList();
        }
        filters.addTerms("member", member);
    } else if (elementQuery instanceof FullTextAdminQuery) {
        sort = decideSorting(elementQuery);
    }
    return list(nature.getElementClass(), elementQuery, query, filters, sort);
}

From source file:nl.strohalm.cyclos.dao.members.ElementDAOImpl.java

License:Open Source License

/**
 * Resolves the sort to apply to an element search.
 * <p>
 * The system-wide member sort order is the default; a member query may override it.
 * Chronological ordering sorts by creation date descending, otherwise the sort is
 * alphabetical on whichever field matches the configured name display.
 *
 * @param elementQuery the query whose sort is being decided
 * @return the Lucene sort to use (never null)
 */
private Sort decideSorting(final FullTextElementQuery elementQuery) {
    // Start from the system-wide default, then let a member query override it
    SortOrder order = settingsService.getLocalSettings().getMemberSortOrder();
    if (elementQuery instanceof FullTextMemberQuery) {
        final SortOrder queryOrder = ((FullTextMemberQuery) elementQuery).getMemberSortOrder();
        if (queryOrder != null) {
            order = queryOrder;
        }
    }
    if (order == SortOrder.CHRONOLOGICAL) {
        // Newest elements first
        return new Sort(new SortField("creationDate", SortField.STRING, true));
    }
    // Alphabetical: pick the index field that matches how names are displayed
    final String sortField = elementQuery.getNameDisplay() == MemberResultDisplay.NAME ? "nameForSort"
            : "usernameForSort";
    return new Sort(new SortField(sortField, SortField.STRING));
}

From source file:nl.strohalm.cyclos.dao.members.MemberRecordDAOImpl.java

License:Open Source License

/**
 * Performs a full-text search over member records.
 * <p>
 * Without keywords, all records are matched and returned newest first; with keywords,
 * relevance ordering is used.
 *
 * @param recordQuery the record search parameters
 * @return the matching member records
 * @throws QueryParseException if the keywords cannot be parsed
 */
public List<MemberRecord> fullTextSearch(final FullTextMemberRecordQuery recordQuery) {
    final String keywords = recordQuery.getKeywords();
    final Analyzer analyzer = recordQuery.getAnalyzer();
    Sort sort = null;
    Query query;
    if (keywords != null) {
        try {
            query = getQueryParser(analyzer).parse(keywords);
        } catch (final ParseException e) {
            throw new QueryParseException(e);
        }
    } else {
        // Without keywords there is no relevance score: match everything and
        // return the newest records first
        query = new MatchAllDocsQuery();
        sort = new Sort(new SortField("date", SortField.STRING, true));
    }

    // Narrow the result by each populated query attribute
    final Filters filters = new Filters();
    filters.addTerms("type", recordQuery.getType());
    filters.addTerms("element", recordQuery.getElement());
    filters.addTerms("element.group", recordQuery.getGroups());
    filters.addTerms("element.broker", recordQuery.getBroker());
    filters.addTerms("by", recordQuery.getBy());
    filters.addPeriod("date", recordQuery.getPeriod());

    // Custom fields
    if (CollectionUtils.isNotEmpty(recordQuery.getCustomValues())) {
        for (final MemberRecordCustomFieldValue value : recordQuery.getCustomValues()) {
            addCustomField(filters, analyzer, value);
        }
    }
    return list(MemberRecord.class, recordQuery, query, filters, sort);
}

From source file:ntu.searchengine.Searcher.java

/**
 * Parses and runs the given query string against the index, returning the filtered
 * top results.
 *
 * @param queryString the raw user query
 * @return the score-filtered search results
 * @throws ParseException if the query string cannot be parsed
 * @throws IOException    on index access errors
 */
public ArrayList<SearchResultModel> search(String queryString) throws ParseException, IOException {
    parser.prepare(queryString);
    final Query parsedQuery = parser.parse(queryString);

    final TopDocs hits;
    if (Main.sortResultByDocId) {
        // When an explicit sort is applied, Lucene reports NaN for the score field
        final Sort byDocId = new Sort(new SortedNumericSortField(FieldConstants.DOCNAME, SortField.Type.INT));
        hits = is.search(parsedQuery, Main.topNResult, byDocId);
    } else {
        hits = is.search(parsedQuery, Main.topNResult);
    }

    System.out.println("Result for query: " + queryString);
    return ScoringFiltering.filterScores(getResult(hits, parsedQuery));
}

From source file:org.ala.lucene.Autocompleter.java

License:Open Source License

/**
 * Suggests up to {@code maxHits} autocomplete terms for the given prefix term.
 *
 * @param term    the (unescaped) prefix typed by the user
 * @param maxHits the maximum number of suggestions to return
 * @return the source words of the best-matching grammed entries, most frequent first
 * @throws IOException on index access errors
 */
public List<String> suggestTermsFor(String term, Integer maxHits) throws IOException {
    // Look up grammed-word entries for the escaped term, ranked by count descending
    Query query = new TermQuery(new Term(GRAMMED_WORDS_FIELD, ClientUtils.escapeQueryChars(term)));
    SortField sf = new SortField(COUNT_FIELD, SortField.Type.INT, true); // true = descending
    Sort sort = new Sort(sf);

    TopDocs docs = autoCompleteSearcher.search(query, null, maxHits, sort);
    List<String> suggestions = new ArrayList<String>();
    for (ScoreDoc doc : docs.scoreDocs) {
        // Map each hit back to its original (un-grammed) source word
        suggestions.add(autoCompleteReader.document(doc.doc).get(SOURCE_WORD_FIELD));
    }

    return suggestions;
}

From source file:org.alfresco.repo.search.impl.lucene.LuceneAlfrescoLuceneQueryLanguage.java

License:Open Source License

/**
 * Executes a legacy Lucene query against the ADM index.
 * <p>
 * Parses the query, builds the sort fields from the search parameters (expanding attribute
 * fields, handling {@code .size}/{@code .mimetype} pseudo-properties and locale/tokenisation
 * quirks), then either sorts in Lucene or defers to an in-memory post-sort. The result is
 * wrapped for paging.
 *
 * @param searchParameters  the query text, sort definitions, locale and operator settings
 * @param admLuceneSearcher the searcher providing index access and configuration
 * @return a paged result set (empty when no index exists)
 * @throws SearcherException on parse failures, I/O errors, or unsupported sort requests
 */
public ResultSet executeQuery(SearchParameters searchParameters, ADMLuceneSearcherImpl admLuceneSearcher) {
    try {
        // Map the requested default operator onto the parser's operator constants
        Operator defaultOperator;
        if (searchParameters.getDefaultOperator() == SearchParameters.AND) {
            defaultOperator = LuceneQueryParser.AND_OPERATOR;
        } else {
            defaultOperator = LuceneQueryParser.OR_OPERATOR;
        }

        ClosingIndexSearcher searcher = admLuceneSearcher.getClosingIndexSearcher();
        if (searcher == null) {
            // no index return an empty result set
            return new EmptyResultSet();
        }
        Query query = LuceneQueryParser.parse(searchParameters.getQuery(),
                searchParameters.getDefaultFieldName(),
                new LuceneAnalyser(admLuceneSearcher.getDictionaryService(),
                        searchParameters.getMlAnalaysisMode() == null
                                ? admLuceneSearcher.getLuceneConfig().getDefaultMLSearchAnalysisMode()
                                : searchParameters.getMlAnalaysisMode()),
                admLuceneSearcher.getNamespacePrefixResolver(), admLuceneSearcher.getDictionaryService(),
                admLuceneSearcher.getTenantService(), defaultOperator, searchParameters,
                admLuceneSearcher.getLuceneConfig().getDefaultMLSearchAnalysisMode(),
                searcher.getIndexReader());
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Query is " + query.toString());
        }

        Hits hits;

        boolean requiresDateTimePostSort = false;
        SortField[] fields = new SortField[searchParameters.getSortDefinitions().size()];

        if (searchParameters.getSortDefinitions().size() > 0) {
            int index = 0;
            for (SearchParameters.SortDefinition sd : searchParameters.getSortDefinitions()) {
                switch (sd.getSortType()) {
                case FIELD:
                    Locale sortLocale = searchParameters.getSortLocale();
                    String field = sd.getField();
                    if (field.startsWith("@")) {
                        // Attribute field: expand and look up its property definition
                        field = admLuceneSearcher.expandAttributeFieldName(field);
                        PropertyDefinition propertyDef = admLuceneSearcher.getDictionaryService()
                                .getProperty(QName.createQName(field.substring(1)));

                        if (propertyDef == null) {
                            // NOTE(review): if the nested getProperty lookup below also returns
                            // null, the getDataType() dereference throws an NPE — confirm whether
                            // an explicit SearcherException is wanted here
                            if (field.endsWith(".size")) {
                                propertyDef = admLuceneSearcher.getDictionaryService()
                                        .getProperty(QName.createQName(field.substring(1, field.length() - 5)));
                                if (!propertyDef.getDataType().getName().equals(DataTypeDefinition.CONTENT)) {
                                    throw new SearcherException(
                                            "Order for .size only supported on content properties");
                                }
                            } else if (field.endsWith(".mimetype")) {
                                propertyDef = admLuceneSearcher.getDictionaryService()
                                        .getProperty(QName.createQName(field.substring(1, field.length() - 9)));
                                if (!propertyDef.getDataType().getName().equals(DataTypeDefinition.CONTENT)) {
                                    throw new SearcherException(
                                            "Order for .mimetype only supported on content properties");
                                }
                            } else {
                                // nothing
                            }
                        } else {
                            if (propertyDef.getDataType().getName().equals(DataTypeDefinition.CONTENT)) {
                                throw new SearcherException(
                                        "Order on content properties is not currently supported");
                            }

                            else if (propertyDef.getDataType().getName().equals(DataTypeDefinition.TEXT)) {
                                if (propertyDef.getIndexTokenisationMode() == IndexTokenisationMode.FALSE) {
                                    // use field as is
                                } else {
                                    // Prefer the locale-independent companion field when indexed
                                    String noLocalField = field + ".no_locale";
                                    for (Object current : searcher.getIndexReader()
                                            .getFieldNames(FieldOption.INDEXED)) {
                                        String currentString = (String) current;
                                        if (currentString.equals(noLocalField)) {
                                            field = noLocalField;
                                        }
                                    }

                                    if (!field.endsWith(".no_locale")) {
                                        field = admLuceneSearcher.findSortField(searchParameters, searcher,
                                                field, sortLocale);
                                    }
                                }
                            }

                            else if (propertyDef.getDataType().getName().equals(DataTypeDefinition.MLTEXT)) {

                                field = admLuceneSearcher.findSortField(searchParameters, searcher, field,
                                        sortLocale);

                            } else if (propertyDef.getDataType().getName()
                                    .equals(DataTypeDefinition.DATETIME)) {
                                // Date-times may be tokenised in a way Lucene cannot sort on
                                // directly; fall back to a post-sort where needed
                                DataTypeDefinition dataType = propertyDef.getDataType();
                                String analyserClassName = propertyDef.resolveAnalyserClassName();
                                if (analyserClassName.equals(DateTimeAnalyser.class.getCanonicalName())) {
                                    switch (propertyDef.getIndexTokenisationMode()) {
                                    case TRUE:
                                        requiresDateTimePostSort = true;
                                        break;
                                    case BOTH:
                                        field = field + ".sort";
                                        break;
                                    case FALSE:
                                        // Should be able to sort on actual field OK
                                        break;
                                    }
                                } else {
                                    requiresDateTimePostSort = true;
                                }
                            }
                        }
                    }

                    if (LuceneUtils.fieldHasTerm(searcher.getReader(), field)) {
                        fields[index++] = new SortField(field, sortLocale, !sd.isAscending());
                    } else {
                        // No indexed terms: fall back to document order
                        fields[index++] = new SortField(null, SortField.DOC, !sd.isAscending());
                    }
                    break;
                case DOCUMENT:
                    fields[index++] = new SortField(null, SortField.DOC, !sd.isAscending());
                    break;
                case SCORE:
                    // Score is naturally high to low -ie desc
                    fields[index++] = new SortField(null, SortField.SCORE, sd.isAscending());
                    break;
                }

            }
        }

        hits = searcher.search(query);

        // Decide between Lucene-side sorting and in-memory post-sorting
        // (removed a stray empty statement here)
        boolean postSort = false;
        if (fields.length > 0) {
            postSort = searchParameters.usePostSort(hits.length(),
                    admLuceneSearcher.getLuceneConfig().getUseInMemorySort(),
                    admLuceneSearcher.getLuceneConfig().getMaxRawResultSetSizeForInMemorySort());
            if (postSort == false) {
                hits = searcher.search(query, new Sort(fields));
            }
        }

        ResultSet answer;
        ResultSet result = new LuceneResultSet(hits, searcher, admLuceneSearcher.getNodeService(),
                admLuceneSearcher.getTenantService(), searchParameters, admLuceneSearcher.getLuceneConfig());
        if (postSort
                || (admLuceneSearcher.getLuceneConfig().getPostSortDateTime() && requiresDateTimePostSort)) {
            ResultSet sorted = new SortedResultSet(result, admLuceneSearcher.getNodeService(),
                    searchParameters.getSortDefinitions(), admLuceneSearcher.getNamespacePrefixResolver(),
                    admLuceneSearcher.getDictionaryService(), searchParameters.getSortLocale());
            answer = sorted;
        } else {
            answer = result;
        }
        ResultSet rs = new PagingLuceneResultSet(answer, searchParameters, admLuceneSearcher.getNodeService());
        return rs;
    } catch (ParseException e) {
        throw new SearcherException("Failed to parse query: " + searchParameters.getQuery(), e);
    } catch (IOException e) {
        throw new SearcherException("IO exception during search", e);
    }
}

From source file:org.alfresco.repo.search.impl.querymodel.impl.lucene.LegacyLuceneQueryParserAdaptor.java

License:Open Source License

/**
 * Translates the query-model orderings into a Lucene {@link Sort}.
 * <p>
 * Property orderings resolve to their backing Lucene sort field (falling back to document
 * order when the field has no indexed terms); score orderings map to Lucene's score sort.
 *
 * @param orderings       the orderings requested by the query model
 * @param functionContext used to resolve property names to Lucene sort fields
 * @return the assembled sort, one {@code SortField} per ordering
 * @throws ParseException declared by the interface contract
 * @throws IllegalStateException for orderings that cannot be mapped
 */
@Override
public Sort buildSort(List<Ordering> orderings, FunctionEvaluationContext functionContext)
        throws ParseException {
    final SortField[] sortFields = new SortField[orderings.size()];
    int i = 0;

    for (final Ordering ordering : orderings) {
        final boolean descending = ordering.getOrder() == Order.DESCENDING;
        final String functionName = ordering.getColumn().getFunction().getName();

        if (functionName.equals(PropertyAccessor.NAME)) {
            // Ordering on a property column: resolve the backing Lucene field
            final PropertyArgument property = (PropertyArgument) ordering.getColumn().getFunctionArguments()
                    .get(PropertyAccessor.ARG_PROPERTY);
            if (property == null) {
                throw new IllegalStateException();
            }
            final String luceneField = functionContext.getLuceneSortField(this, property.getPropertyName());
            if (luceneField == null) {
                throw new IllegalStateException();
            }
            if (LuceneUtils.fieldHasTerm(lqp.getIndexReader(), luceneField)) {
                final Locale locale = this.getSearchParameters().getSortLocale();
                sortFields[i++] = new SortField(luceneField, locale, descending);
            } else {
                // No indexed terms for this field: fall back to document order
                sortFields[i++] = new SortField(null, SortField.DOC, descending);
            }
        } else if (functionName.equals(Score.NAME)) {
            // Lucene's score sort is naturally descending, hence the negation
            sortFields[i++] = new SortField(null, SortField.SCORE, !descending);
        } else {
            throw new IllegalStateException();
        }
    }

    return new Sort(sortFields);
}

From source file:org.alfresco.solr.SolrInformationServer.java

License:Open Source License

/**
 * Returns the identifiers (tenant + ACL id + DB id) of documents whose content is marked
 * dirty or new, restricted to a window of transactions starting at the lowest unprocessed
 * transaction id.
 * <p>
 * Also periodically purges the in-memory clean-content cache so failed content fetches can
 * be retried (see the inline explanation below).
 *
 * @param start appears unused in this implementation — TODO confirm; windowing is driven by
 *              the transaction floor instead
 * @param rows  appears unused in this implementation — TODO confirm
 * @return the doc ids with unclean content, possibly empty
 * @throws IOException on index access errors
 */
@Override
public List<TenantAclIdDbId> getDocsWithUncleanContent(int start, int rows) throws IOException {
    RefCounted<SolrIndexSearcher> refCounted = null;
    try {
        List<TenantAclIdDbId> docIds = new ArrayList<>();
        refCounted = this.core.getSearcher();
        SolrIndexSearcher searcher = refCounted.get();

        /*
        *  Below is the code for purging the cleanContentCache.
        *  The cleanContentCache is an in-memory LRU cache of the transactions that have already
        *  had their content fetched. This is needed because the ContentTracker does not have an up-to-date
        *  snapshot of the index to determine which nodes are marked as dirty/new. The cleanContentCache is used
        *  to filter out nodes that belong to transactions that have already been processed, which stops them from
        *  being re-processed.
        *
        *  The cleanContentCache needs to be purged periodically to support retrying of failed content fetches.
        *  This is because fetches for individual nodes within the transaction may have failed, but the transaction will still be in the
        *  cleanContentCache, which prevents it from being retried.
        *
        *  Once a transaction is purged from the cleanContentCache it will be retried automatically if it is marked dirty/new
        *  in current snapshot of the index.
        *
        *  The code below runs every two minutes and purges transactions from the
        *  cleanContentCache that is more then 20 minutes old.
        *
        */
        long purgeTime = System.currentTimeMillis();
        if (purgeTime - cleanContentLastPurged > 120000) {
            Iterator<Entry<Long, Long>> entries = cleanContentCache.entrySet().iterator();
            while (entries.hasNext()) {
                Entry<Long, Long> entry = entries.next();
                long txnTime = entry.getValue();
                if (purgeTime - txnTime > 1200000) {
                    //Purge the clean content cache of records more then 20 minutes old.
                    entries.remove();
                }
            }
            cleanContentLastPurged = purgeTime;
        }

        long txnFloor;
        // Find the single lowest transaction id among unprocessed dirty/new documents
        Sort sort = new Sort(new SortField(FIELD_INTXID, SortField.Type.LONG));
        sort = sort.rewrite(searcher);
        TopFieldCollector collector = TopFieldCollector.create(sort, 1, null, false, false, false);

        DelegatingCollector delegatingCollector = new TxnCacheFilter(cleanContentCache); //Filter transactions that have already been processed.
        delegatingCollector.setLastDelegate(collector);
        searcher.search(dirtyOrNewContentQuery(), delegatingCollector);

        if (collector.getTotalHits() == 0) {
            // Nothing unprocessed: return the empty list
            return docIds;
        }

        // Read the transaction id of the floor document via doc values
        ScoreDoc[] scoreDocs = collector.topDocs().scoreDocs;
        List<LeafReaderContext> leaves = searcher.getTopReaderContext().leaves();
        int index = ReaderUtil.subIndex(scoreDocs[0].doc, leaves);
        LeafReaderContext context = leaves.get(index);
        NumericDocValues longs = context.reader().getNumericDocValues(FIELD_INTXID);
        txnFloor = longs.get(scoreDocs[0].doc - context.docBase);

        //Find the next N transactions
        //The TxnCollector collects the transaction ids from the matching documents
        //The txnIds are limited to a range >= the txnFloor and < an arbitrary transaction ceiling.
        TxnCollector txnCollector = new TxnCollector(txnFloor);
        searcher.search(dirtyOrNewContentQuery(), txnCollector);
        LongHashSet txnSet = txnCollector.getTxnSet();

        if (txnSet.size() == 0) {
            //This should really never be the case, at a minimum the transaction floor should be collected.
            return docIds;
        }

        // Build a disjunction filter matching exactly the collected transaction ids
        FieldType fieldType = searcher.getSchema().getField(FIELD_INTXID).getType();
        BooleanQuery.Builder builder = new BooleanQuery.Builder();

        for (LongCursor cursor : txnSet) {
            long txnID = cursor.value;
            //Build up the query for the filter of transactions we need to pull the dirty content for.
            TermQuery txnIDQuery = new TermQuery(
                    new Term(FIELD_INTXID, fieldType.readableToIndexed(Long.toString(txnID))));
            builder.add(new BooleanClause(txnIDQuery, BooleanClause.Occur.SHOULD));
        }

        BooleanQuery txnFilterQuery = builder.build();

        //Get the docs with dirty content for the transactions gathered above.
        DocListCollector docListCollector = new DocListCollector();
        BooleanQuery.Builder builder2 = new BooleanQuery.Builder();

        builder2.add(dirtyOrNewContentQuery(), BooleanClause.Occur.MUST);
        builder2.add(new QueryWrapperFilter(txnFilterQuery), BooleanClause.Occur.MUST);

        searcher.search(builder2.build(), docListCollector);
        IntArrayList docList = docListCollector.getDocs();
        int size = docList.size();

        // Decode each matching document's id, skipping transactions already marked clean
        List<Long> processedTxns = new ArrayList<>();
        for (int i = 0; i < size; ++i) {
            int doc = docList.get(i);
            Document document = searcher.doc(doc, REQUEST_ONLY_ID_FIELD);
            index = ReaderUtil.subIndex(doc, leaves);
            context = leaves.get(index);
            longs = context.reader().getNumericDocValues(FIELD_INTXID);

            long txnId = longs.get(doc - context.docBase);

            if (!cleanContentCache.containsKey(txnId)) {
                processedTxns.add(txnId);
                IndexableField id = document.getField(FIELD_SOLR4_ID);
                String idString = id.stringValue();
                TenantAclIdDbId tenantAndDbId = AlfrescoSolrDataModel.decodeNodeDocumentId(idString);
                docIds.add(tenantAndDbId);
            }
        }

        long txnTime = System.currentTimeMillis();

        for (Long l : processedTxns) {
            //Save the indexVersion so we know when we can clean out this entry
            cleanContentCache.put(l, txnTime);
        }

        return docIds;
    } finally {
        ofNullable(refCounted).ifPresent(RefCounted::decref);
    }
}

From source file:org.alfresco.solr.SolrInformationServer.java

License:Open Source License

/**
 * Fetches up to {@code num} transactions flagged for cascade processing, lowest
 * transaction id first, marking each as handled in the cascade cache.
 *
 * @param num the maximum number of transactions to return
 * @return the transactions (id and commit time populated), possibly empty
 * @throws IOException on index access errors
 */
@Override
public List<Transaction> getCascades(int num) throws IOException {
    RefCounted<SolrIndexSearcher> refCounted = null;
    try {
        refCounted = this.core.getSearcher();
        final SolrIndexSearcher searcher = refCounted.get();

        // Collect the lowest-txid matches first, up to num
        final TopFieldCollector topFieldCollector = TopFieldCollector.create(
                new Sort(new SortField(FIELD_TXID, SortField.Type.LONG)), num, null, false, false, false);

        // Skip transactions whose cascades were already handled
        final DelegatingCollector cascadeFilter = new TxnCacheFilter(cleanCascadeCache);
        cascadeFilter.setLastDelegate(topFieldCollector);

        // Match documents whose cascade flag is set
        final LegacyNumericRangeQuery cascadeQuery = LegacyNumericRangeQuery.newIntRange(FIELD_CASCADE_FLAG, 1,
                1, true, true);
        searcher.search(cascadeQuery, cascadeFilter);

        final ScoreDoc[] scoreDocs = topFieldCollector.topDocs().scoreDocs;

        // Only the stored txid and commit-time fields are needed from each document
        final Set<String> wantedFields = new HashSet<>();
        wantedFields.add(FIELD_S_TXID);
        wantedFields.add(FIELD_S_TXCOMMITTIME);

        final List<Transaction> transactions = new ArrayList<>(scoreDocs.length);
        for (final ScoreDoc scoreDoc : scoreDocs) {
            final Document doc = searcher.doc(scoreDoc.doc, wantedFields);

            final long txnId = doc.getField(FIELD_S_TXID).numericValue().longValue();
            // Remember this transaction so it is not returned again
            cleanCascadeCache.put(txnId, null);

            final Transaction transaction = new Transaction();
            transaction.setId(txnId);
            transaction.setCommitTimeMs(doc.getField(FIELD_S_TXCOMMITTIME).numericValue().longValue());
            transactions.add(transaction);
        }

        return transactions;
    } finally {
        ofNullable(refCounted).ifPresent(RefCounted::decref);
    }
}