Example usage for java.util.LinkedList.size()

List of usage examples for java.util.LinkedList.size()

Introduction

This page collects example usages of java.util.LinkedList.size() from several open-source projects.

Prototype

public int size()
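
For reference, a minimal sketch of how size() behaves on a LinkedList. The class name SizeExample and the values are illustrative only, not taken from the usage examples below:

import java.util.LinkedList;

public class SizeExample {
    public static void main(String[] args) {
        LinkedList<String> list = new LinkedList<String>();
        System.out.println(list.size()); // 0 for an empty list

        list.add("a");
        list.add("b");
        System.out.println(list.size()); // 2 after two elements are added

        list.removeFirst();
        System.out.println(list.size()); // 1 after one element is removed
    }
}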


Usage

From source file:com.projity.grouping.core.hierarchy.MutableNodeHierarchy.java

private List internalIndent(List nodes, int deltaLevel, int actionType) {
    if (deltaLevel != 1 && deltaLevel != -1)
        return null;

    //Indent only parents
    LinkedList nodesToChange = new LinkedList();
    HierarchyUtils.extractParents(nodes, nodesToChange);

    List modifiedVoids = new ArrayList();

    //exclude Assignments and VoidNodes
    if (deltaLevel > 0) {
        for (ListIterator i = nodesToChange.listIterator(); i.hasNext();) {
            if (!internalIndent((Node) i.next(), deltaLevel, actionType & NodeModel.UNDO, modifiedVoids))
                i.remove();
            for (Iterator j = modifiedVoids.iterator(); j.hasNext();) {
                i.add(j.next());
            }
            modifiedVoids.clear();
        }
    } else {
        for (ListIterator i = nodesToChange.listIterator(nodesToChange.size()); i.hasPrevious();) {
            if (!internalIndent((Node) i.previous(), deltaLevel, actionType & NodeModel.UNDO, modifiedVoids))
                i.remove();
            for (Iterator j = modifiedVoids.iterator(); j.hasNext();) {
                i.add(j.next());
            }
            modifiedVoids.clear();
        }
    }

    if (isEvent(actionType) && nodesToChange.size() > 0)
        fireNodesChanged(this, nodesToChange.toArray());
    return nodesToChange;
}
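
The else branch above seeds a ListIterator at nodesToChange.size() so the list is walked from the tail. A standalone sketch of that pattern, with hypothetical element values and assuming nothing beyond java.util:

import java.util.LinkedList;
import java.util.ListIterator;

public class ReverseIterationExample {
    public static void main(String[] args) {
        LinkedList<String> nodes = new LinkedList<String>();
        nodes.add("first");
        nodes.add("second");
        nodes.add("third");

        // listIterator(nodes.size()) positions the cursor past the last element,
        // so previous() visits the elements in reverse order.
        for (ListIterator<String> i = nodes.listIterator(nodes.size()); i.hasPrevious();) {
            System.out.println(i.previous()); // prints "third", then "second", then "first"
        }
    }
}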

From source file:net.semanticmetadata.lire.imageanalysis.bovw.LocalFeatureHistogramBuilderKmeansPlusPlus.java

/**
 * Uses an existing index, where each and every document should have a set of local features. A number of
 * random images (numDocsForVocabulary) is selected and clustered to get a vocabulary of visual words
 * (the cluster means). For all images a histogram on the visual words is created and added to the documents.
 * Pre-existing histograms are deleted, so this method can be used for re-indexing.
 *
 * @throws java.io.IOException
 */
public void index() throws IOException {
    df.setMaximumFractionDigits(3);
    // find the documents for building the vocabulary:
    HashSet<Integer> docIDs = selectVocabularyDocs();
    System.out.println("Using " + docIDs.size() + " documents to build the vocabulary.");
    KMeansPlusPlusClusterer kpp = new KMeansPlusPlusClusterer(numClusters, 15);
    // fill the KMeans object:
    LinkedList<DoublePoint> features = new LinkedList<DoublePoint>();
    // Needed to check whether a document has been deleted.
    Bits liveDocs = MultiFields.getLiveDocs(reader);
    for (Iterator<Integer> iterator = docIDs.iterator(); iterator.hasNext();) {
        int nextDoc = iterator.next();
        if (reader.hasDeletions() && !liveDocs.get(nextDoc))
            continue; // if it is deleted, just ignore it.
        Document d = reader.document(nextDoc);
        //            features.clear();
        IndexableField[] fields = d.getFields(localFeatureFieldName);
        String file = d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        for (int j = 0; j < fields.length; j++) {
            LireFeature f = getFeatureInstance();
            f.setByteArrayRepresentation(fields[j].binaryValue().bytes, fields[j].binaryValue().offset,
                    fields[j].binaryValue().length);
            // copy the data over to new array ...
            double[] feat = new double[f.getDoubleHistogram().length];
            System.arraycopy(f.getDoubleHistogram(), 0, feat, 0, feat.length);
            features.add(new DoublePoint(feat));
        }
    }
    if (features.size() < numClusters) {
        // this cannot work. You need more data points than clusters.
        throw new UnsupportedOperationException("Only " + features.size() + " features found to cluster in "
                + numClusters + ". Try to use fewer clusters or more images.");
    }
    // do the clustering:
    System.out.println("Number of local features: " + df.format(features.size()));
    System.out.println("Starting clustering ...");
    List<CentroidCluster<DoublePoint>> clusterList = kpp.cluster(features);
    // TODO: Serializing clusters to a file on the disk ...
    System.out.println("Clustering finished, " + clusterList.size() + " clusters found");
    clusters = new LinkedList<double[]>();
    for (Iterator<CentroidCluster<DoublePoint>> iterator = clusterList.iterator(); iterator.hasNext();) {
        CentroidCluster<DoublePoint> centroidCluster = iterator.next();
        clusters.add(centroidCluster.getCenter().getPoint());
    }
    System.out.println("Creating histograms ...");
    int[] tmpHist = new int[numClusters];
    IndexWriter iw = LuceneUtils.createIndexWriter(((DirectoryReader) reader).directory(), true,
            LuceneUtils.AnalyzerType.WhitespaceAnalyzer, 256d);

    // careful: copy reader to RAM for faster access when reading ...
    //        reader = IndexReader.open(new RAMDirectory(reader.directory()), true);
    LireFeature f = getFeatureInstance();
    for (int i = 0; i < reader.maxDoc(); i++) {
        try {
            if (reader.hasDeletions() && !liveDocs.get(i))
                continue;
            for (int j = 0; j < tmpHist.length; j++) {
                tmpHist[j] = 0;
            }
            Document d = reader.document(i);
            IndexableField[] fields = d.getFields(localFeatureFieldName);
            // remove the fields if they are already there ...
            d.removeField(visualWordsFieldName);
            d.removeField(localFeatureHistFieldName);

            // find the appropriate cluster for each feature:
            for (int j = 0; j < fields.length; j++) {
                f.setByteArrayRepresentation(fields[j].binaryValue().bytes, fields[j].binaryValue().offset,
                        fields[j].binaryValue().length);
                tmpHist[clusterForFeature(f, clusters)]++;
            }
            //                System.out.println(Arrays.toString(tmpHist));
            d.add(new StoredField(localFeatureHistFieldName,
                    SerializationUtils.toByteArray(normalize(tmpHist))));
            quantize(tmpHist);
            d.add(new TextField(visualWordsFieldName, arrayToVisualWordString(tmpHist), Field.Store.YES));

            // remove local features to save some space if requested:
            if (DELETE_LOCAL_FEATURES) {
                d.removeFields(localFeatureFieldName);
            }
            // now write the new one. we use the identifier to update ;)
            iw.updateDocument(new Term(DocumentBuilder.FIELD_NAME_IDENTIFIER,
                    d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0]), d);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    iw.commit();
    // this one does the "old" commit(); it removes the deleted local features.
    iw.forceMerge(1);
    iw.close();
    System.out.println("Finished.");
}

From source file:com.vladium.emma.report.html.ReportGenerator.java

private IItem[] getParentPath(IItem item) {
    final LinkedList /* IItem */ _result = new LinkedList();

    for (; item != null; item = item.getParent()) {
        _result.add(item);
    }

    final IItem[] result = new IItem[_result.size()];
    int j = result.length - 1;
    for (Iterator i = _result.iterator(); i.hasNext(); --j) {
        result[j] = (IItem) i.next();
    }

    return result;
}

From source file:com.baidu.rigel.biplatform.tesseract.isservice.index.service.impl.IndexServiceImpl.java

@Override
public boolean initMiniCubeIndex(List<Cube> cubeList, DataSourceInfo dataSourceInfo, boolean indexAsap,
        boolean limited) throws IndexAndSearchException {
    /**
     * MiniCubeConnection.publishCubes(List<String> cubes, DataSourceInfo
     * dataSourceInfo);
     */
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_BEGIN, "initMiniCubeIndex",
            "[cubeList:" + cubeList + "][dataSourceInfo:" + dataSourceInfo + "][indexAsap:" + indexAsap
                    + "][limited:" + limited + "]"));

    // step 1 process cubeList and fill indexMeta information
    List<IndexMeta> idxMetaList = this.indexMetaService.initMiniCubeIndexMeta(cubeList, dataSourceInfo);

    if (idxMetaList.size() == 0) {
        LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_PROCESS, "initMiniCubeIndex",
                "[cubeList:" + cubeList + "][dataSourceInfo:" + dataSourceInfo + "][indexAsap:" + indexAsap
                        + "][limited:" + limited + "]",
                "Init MiniCube IndexMeta failed"));
        return false;
    } else {
        LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_PROCESS_NO_PARAM, "initMiniCubeIndex",
                "Success init " + idxMetaList.size() + " MiniCube"));
    }

    // step 2 merge indexMeta with exist indexMetas and update indexMeta
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_PROCESS_NO_PARAM, "initMiniCubeIndex",
            "Merging IndexMeta with exist indexMetas"));

    LinkedList<IndexMeta> idxMetaListForIndex = new LinkedList<IndexMeta>();
    for (IndexMeta idxMeta : idxMetaList) {
        idxMeta = this.indexMetaService.mergeIndexMeta(idxMeta);

        LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_PROCESS_NO_PARAM, "initMiniCubeIndex",
                "Merge indexMeta success. After merge:[" + idxMeta.toString() + "]"));

        idxMetaListForIndex.add(idxMeta);
    }

    // step 3 if(indexAsap) then call doIndex else return

    if (indexAsap) {
        LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_PROCESS_NO_PARAM, "initMiniCubeIndex",
                "index as soon as possible"));
        // if need index as soon as possible
        IndexAction idxAction = IndexAction.INDEX_INIT;
        if (limited) {
            idxAction = IndexAction.INDEX_INIT_LIMITED;
        }
        while (idxMetaListForIndex.size() > 0) {
            IndexMeta idxMeta = idxMetaListForIndex.poll();
            if (idxMeta.getIdxState().equals(IndexState.INDEX_AVAILABLE_MERGE)) {
                idxMeta.setIdxState(IndexState.INDEX_AVAILABLE);
                this.indexMetaService.saveOrUpdateIndexMeta(idxMeta);
                continue;
            } else if (idxMeta.getIdxState().equals(IndexState.INDEX_AVAILABLE_NEEDMERGE)) {
                idxAction = IndexAction.INDEX_MERGE;
            }

            try {
                doIndexByIndexAction(idxMeta, idxAction, null);

            } catch (Exception e) {
                LOGGER.error(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_EXCEPTION,
                        "initMiniCubeIndex", "[cubeList:" + cubeList + "][dataSourceInfo:" + dataSourceInfo
                                + "][indexAsap:" + indexAsap + "][limited:" + limited + "]"),
                        e);

                String message = TesseractExceptionUtils.getExceptionMessage(
                        IndexAndSearchException.INDEXEXCEPTION_MESSAGE,
                        IndexAndSearchExceptionType.INDEX_EXCEPTION);
                throw new IndexAndSearchException(message, e, IndexAndSearchExceptionType.INDEX_EXCEPTION);
            } finally {
                LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_PROCESS_NO_PARAM,
                        "initMiniCubeIndex", "[Index indexmeta : " + idxMeta.toString()));
            }
        }
    }
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_END, "initMiniCubeIndex",
            "[cubeList:" + cubeList + "][dataSourceInfo:" + dataSourceInfo + "][indexAsap:" + indexAsap
                    + "][limited:" + limited + "]"));
    return true;
}

From source file:org.openmeetings.app.data.user.Organisationmanagement.java

/**
 * TODO
 * 
 * @param org
 * @param users
 * @return
 */
@SuppressWarnings({ "unused", "rawtypes" })
private Long updateOrganisationUsersByHashMap(Organisation org, LinkedHashMap users, long insertedby) {
    try {
        LinkedList<Long> usersToAdd = new LinkedList<Long>();
        LinkedList<Long> usersToDel = new LinkedList<Long>();

        List usersStored = this.getUsersByOrganisationId(org.getOrganisation_id());

        for (Iterator it = users.keySet().iterator(); it.hasNext();) {
            Integer key = (Integer) it.next();
            Long userIdToAdd = Long.valueOf(users.get(key).toString()).longValue();
            log.error("userIdToAdd: " + userIdToAdd);
            if (!this.checkUserAlreadyStored(userIdToAdd, usersStored))
                usersToAdd.add(userIdToAdd);
        }

        for (Iterator it = usersStored.iterator(); it.hasNext();) {
            Users us = (Users) it.next();
            Long userIdStored = us.getUser_id();
            log.error("userIdStored: " + userIdStored);
            if (!this.checkUserShouldBeStored(userIdStored, users))
                usersToDel.add(userIdStored);
        }

        log.debug("usersToAdd.size " + usersToAdd.size());
        log.debug("usersToDel.size " + usersToDel.size());

        for (Iterator<Long> it = usersToAdd.iterator(); it.hasNext();) {
            Long user_id = it.next();
            this.addUserToOrganisation(user_id, org.getOrganisation_id(), insertedby);
        }

        for (Iterator<Long> it = usersToDel.iterator(); it.hasNext();) {
            Long user_id = it.next();
            this.deleteUserFromOrganisation(new Long(3), user_id, org.getOrganisation_id());
        }

    } catch (Exception err) {
        log.error("updateOrganisationUsersByHashMap", err);
    }
    return null;
}

From source file:com.mirth.connect.plugins.dashboardstatus.DashboardConnectorEventListener.java

@Override
protected void processEvent(Event event) {
    if (event instanceof ConnectionStatusEvent) {
        ConnectionStatusEvent connectionStatusEvent = (ConnectionStatusEvent) event;
        String channelId = connectionStatusEvent.getChannelId();
        Integer metaDataId = connectionStatusEvent.getMetaDataId();
        String information = connectionStatusEvent.getMessage();
        Timestamp timestamp = new Timestamp(event.getDateTime());

        String connectorId = channelId + "_" + metaDataId;

        ConnectionStatusEventType eventType = connectionStatusEvent.getState();

        ConnectionStatusEventType connectionStatusEventType = eventType;
        Integer connectorCount = null;
        Integer maximum = null;

        if (event instanceof ConnectorCountEvent) {
            ConnectorCountEvent connectorCountEvent = (ConnectorCountEvent) connectionStatusEvent;

            maximum = connectorCountEvent.getMaximum();
            Boolean increment = connectorCountEvent.isIncrement();

            if (maximum != null) {
                maxConnectionMap.put(connectorId, maximum);
            } else {
                maximum = maxConnectionMap.get(connectorId);
            }

            AtomicInteger count = connectorCountMap.get(connectorId);

            if (count == null) {
                count = new AtomicInteger();
                connectorCountMap.put(connectorId, count);
            }

            if (increment != null) {
                if (increment) {
                    count.incrementAndGet();
                } else {
                    count.decrementAndGet();
                }
            }

            connectorCount = count.get();

            if (connectorCount == 0) {
                connectionStatusEventType = ConnectionStatusEventType.IDLE;
            } else {
                connectionStatusEventType = ConnectionStatusEventType.CONNECTED;
            }
        }

        String stateString = null;
        if (connectionStatusEventType.isState()) {
            Color color = getColor(connectionStatusEventType);
            stateString = connectionStatusEventType.toString();
            if (connectorCount != null) {
                if (maximum != null && connectorCount.equals(maximum)) {
                    stateString += " <font color='red'>(" + connectorCount + ")</font>";
                } else if (connectorCount > 0) {
                    stateString += " (" + connectorCount + ")";
                }
            }

            connectorStateMap.put(connectorId, new Object[] { color, stateString });
        }

        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss.SSS");
        String channelName = "";
        String connectorType = "";

        LinkedList<String[]> channelLog = null;

        Channel channel = ControllerFactory.getFactory().createChannelController()
                .getDeployedChannelById(channelId);

        if (channel != null) {
            channelName = channel.getName();
            // grab the channel's log from the HashMap; if it doesn't exist,
            // create one.
            if (connectorInfoLogs.containsKey(channelId)) {
                channelLog = connectorInfoLogs.get(channelId);
            } else {
                channelLog = new LinkedList<String[]>();
            }

            if (metaDataId == 0) {
                connectorType = "Source: " + channel.getSourceConnector().getTransportName() + "  ("
                        + channel.getSourceConnector().getTransformer().getInboundDataType().toString() + " -> "
                        + channel.getSourceConnector().getTransformer().getOutboundDataType().toString() + ")";
            } else {
                Connector connector = getConnectorFromMetaDataId(channel.getDestinationConnectors(),
                        metaDataId);
                connectorType = "Destination: " + connector.getTransportName() + " - " + connector.getName();
            }
        }

        if (channelLog != null) {
            synchronized (this) {
                if (channelLog.size() == MAX_LOG_SIZE) {
                    channelLog.removeLast();
                }
                channelLog.addFirst(
                        new String[] { String.valueOf(logId), channelName, dateFormat.format(timestamp),
                                connectorType, ((ConnectionStatusEvent) event).getState().toString(),
                                information, channelId, Integer.toString(metaDataId) });

                if (entireConnectorInfoLogs.size() == MAX_LOG_SIZE) {
                    entireConnectorInfoLogs.removeLast();
                }
                entireConnectorInfoLogs.addFirst(
                        new String[] { String.valueOf(logId), channelName, dateFormat.format(timestamp),
                                connectorType, ((ConnectionStatusEvent) event).getState().toString(),
                                information, channelId, Integer.toString(metaDataId) });

                logId++;

                // put the channel log into the HashMap.
                connectorInfoLogs.put(channelId, channelLog);
            }
        }

    }
}

From source file:eu.stratosphere.nephele.instance.ec2.EC2CloudManager.java

/**
 * Checks whether there are floating instances of the specified type. If instances are available,
 * they will be removed from the list and returned.
 *
 * @param awsAccessId
 *        the access ID into AWS
 * @param awsSecretKey
 *        the secret key used to generate signatures for authentication
 * @param type
 *        the type of the floating instance, which is checked
 * @return a list of suitable floating instances.
 * @throws InstanceException
 *         if something is wrong with the global configuration
 */
private LinkedList<FloatingInstance> anyFloatingInstancesAvailable(final String awsAccessId,
        final String awsSecretKey, final InstanceType type, final int count) throws InstanceException {

    LOG.info("Checking for up to " + count + " floating instance of type " + type.getIdentifier());

    final LinkedList<FloatingInstance> foundfloatinginstances = new LinkedList<FloatingInstance>();

    synchronized (this.floatingInstances) {

        final Iterator<Map.Entry<InstanceConnectionInfo, FloatingInstance>> it = this.floatingInstances
                .entrySet().iterator();
        while (it.hasNext()) {
            final FloatingInstance i = it.next().getValue();
            // Check if we own this instance
            if (i.isFromThisOwner(awsAccessId, awsSecretKey)) {
                // Yes it is.. now check if it is of the desired type..
                if (i.getType().equals(type)) {
                    // Found..
                    it.remove();
                    foundfloatinginstances.add(i);
                    if (foundfloatinginstances.size() >= count) {
                        // We have enough floating instances!
                        break;
                    }
                }
            }
        }

    }

    LOG.info("Found " + foundfloatinginstances.size() + " suitable floating instances.");

    return foundfloatinginstances;

}

From source file:com.robonobo.eon.SEONConnection.java

private int trimQueue(LinkedList<SEONPacket> q, SEONPacket recvdPkt) {
    if (q.size() == 0)
        return 0;

    int bytesAcked = 0;
    long[] sackBegins = recvdPkt.getSackBegins();
    long[] sackEnds = recvdPkt.getSackEnds();

    // Take note of the highest num we're acknowledging, so we don't have to
    // process the entire list every time
    long maxSackEnd = -1;
    if (sackBegins != null) {
        for (int i = 0; i < sackEnds.length; i++) {
            if (maxSackEnd == -1 || mod.gt(sackEnds[i], maxSackEnd))
                maxSackEnd = sackEnds[i];
        }
    }

    // Iterate over our queue, and remove it if it has been acknowledged,
    // either through the normal ACK number or else through SACK blocks
    for (Iterator<SEONPacket> iter = q.iterator(); iter.hasNext();) {
        SEONPacket pkt = iter.next();
        long firstSeqNum = pkt.getSequenceNumber();
        long lastSeqNum;
        if (pkt.getPayload() != null && pkt.getPayload().limit() > 1)
            lastSeqNum = mod.add(firstSeqNum, pkt.getPayload().limit() - 1);
        else
            lastSeqNum = firstSeqNum;
        boolean pktIsAcked = false;
        if (lastSeqNum < sendUna) {
            // This pkt has been acknowledged through the normal ack number
            pktIsAcked = true;
        } else {
            // Check to see if this pkt has been acknowledged through sack
            // blocks
            if (sackBegins != null) {
                for (int i = 0; i < sackBegins.length; i++) {
                    if (mod.gte(firstSeqNum, sackBegins[i]) && mod.lt(lastSeqNum, sackEnds[i])) {
                        pktIsAcked = true;
                        break;
                    }
                }
            }
        }
        if (pktIsAcked) {
            if (pkt.getPayload() != null)
                bytesAcked += pkt.getPayload().limit();
            iter.remove();
            Long thisSN = new Long(firstSeqNum);
            if (transmissionTimes.containsKey(thisSN)) {
                int rtt = (int) (TimeUtil.now().getTime() - ((Long) transmissionTimes.get(thisSN)).longValue());
                updateRTO(rtt);
                transmissionTimes.remove(thisSN);
            }
        } else {
            // Check to see if we can stop processing the list
            if (maxSackEnd < 0 || mod.gt(pkt.getSequenceNumber(), maxSackEnd))
                break;
        }
    }
    return bytesAcked;
}

From source file:net.sf.jvifm.ui.FileLister.java

private String getLastLongestPath(String path) {

    if (path.indexOf(File.separator) < 0)
        return path;
    LinkedList<String> list = historyManager.getFullHistory();
    if (list.size() == 0)
        return path;

    if (!path.endsWith(File.separator))
        path = path + File.separator;
    String longestPath = path;

    int lastMatchIndex = -1;
    for (int i = list.size() - 1; i > 0; i--) {
        String his = (String) list.get(i);
        if (his.startsWith(path)) {
            lastMatchIndex = i;
            longestPath = his;
            break;
        }
    }
    while (true) {
        if (lastMatchIndex < 1)
            break;
        String tmp = (String) list.get(lastMatchIndex--);

        if (!tmp.startsWith(longestPath))
            break;
        longestPath = tmp;
    }
    if (longestPath.endsWith(File.separator))
        longestPath = longestPath.substring(0, longestPath.length() - 1);
    return longestPath;

}

From source file:com.erudika.para.persistence.CassandraDAO.java

@Override
public <P extends ParaObject> List<P> readPage(String appid, Pager pager) {
    LinkedList<P> results = new LinkedList<P>();
    if (StringUtils.isBlank(appid)) {
        return results;
    }
    if (pager == null) {
        pager = new Pager();
    }
    try {
        Statement st = new SimpleStatement(
                "SELECT json FROM " + CassandraUtils.getTableNameForAppid(appid) + ";");
        st.setFetchSize(pager.getLimit());
        String lastPage = pager.getLastKey();
        if (lastPage != null) {
            if ("end".equals(lastPage)) {
                return results;
            } else {
                st.setPagingState(PagingState.fromString(lastPage));
            }
        }
        ResultSet rs = getClient().execute(st);
        PagingState nextPage = rs.getExecutionInfo().getPagingState();

        int remaining = rs.getAvailableWithoutFetching();
        for (Row row : rs) {
            P obj = fromRow(row.getString("json"));
            if (obj != null) {
                results.add(obj);
            }
            if (--remaining == 0) {
                break;
            }
        }

        if (nextPage != null) {
            pager.setLastKey(nextPage.toString());
        } else {
            pager.setLastKey("end");
        }

        if (!results.isEmpty()) {
            pager.setCount(pager.getCount() + results.size());
        }
    } catch (Exception e) {
        logger.error(null, e);
    }
    logger.debug("readPage() page: {}, results: {}", pager.getPage(), results.size());
    return results;
}