List of usage examples for com.mongodb.DBCursor.limit()
public DBCursor limit(final int limit)
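Before the source-file examples below, here is a minimal, self-contained sketch of the typical call pattern with the legacy driver API; the "testdb" database, "events" collection, and field names are hypothetical. limit() caps how many documents the find() cursor returns (a limit of 0 is equivalent to unlimited, as noted in the MongoFeatureSource example below), and it returns the cursor itself, so it chains with sort() and skip().

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;

public class DBCursorLimitExample {
    public static void main(String[] args) {
        // Connect with the legacy driver API; host, port, database and collection names are placeholders.
        MongoClient client = new MongoClient("localhost", 27017);
        try {
            DB db = client.getDB("testdb");                 // hypothetical database
            DBCollection coll = db.getCollection("events"); // hypothetical collection

            // limit(10) caps the result set at 10 documents; limit(0) would mean "no limit".
            // limit() returns the cursor, so it chains with sort() and skip().
            DBCursor cursor = coll.find(new BasicDBObject("type", "click"))
                    .sort(new BasicDBObject("timestamp", -1))
                    .limit(10);
            try {
                while (cursor.hasNext()) {
                    DBObject doc = cursor.next();
                    System.out.println(doc);
                }
            } finally {
                cursor.close();
            }
        } finally {
            client.close();
        }
    }
}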
From source file:org.basex.modules.MongoDB.java
License:BSD License
/**
 * MongoDB find with all parameters.
 * @param handler Database handler
 * @param col collection
 * @param query Query parameters
 * @param opt options in Map like: {"limit":2}
 * @param projection Projection
 * @return Item
 * @throws QueryException
 */
public Item find(final Str handler, final Item col, final Item query, final Item opt,
        final Item projection) throws QueryException {
    final DB db = getDbHandler(handler);
    db.requestStart();
    try {
        DBObject p = null;
        if (opt != null && opt instanceof Str) {
            p = getDbObjectFromStr(opt);
        } else if (projection != null && projection instanceof Str) {
            p = getDbObjectFromStr(projection);
        }
        final DBObject q = query != null ? getDbObjectFromStr(query) : null;
        final DBCollection coll = db.getCollection(itemToString(col));
        final DBCursor cursor = coll.find(q, p);
        Map options = null;
        options = (opt != null && opt instanceof Map) ? (Map) opt
                : (projection != null && projection instanceof Map) ? (Map) projection : null;
        if (options != null) {
            Value keys = options.keys();
            for (final Item key : keys) {
                if (!(key instanceof Str))
                    throw MongoDBErrors.generalExceptionError("String expected " + key.toJava());
                final String k = ((Str) key).toJava();
                final Value v = options.get(key, null);
                if (v instanceof Str || v.type().instanceOf(SeqType.ITR)) {
                    if (k.equals(LIMIT)) {
                        if (v.type().instanceOf(SeqType.ITR_OM)) {
                            long l = ((Item) v).itr(null);
                            cursor.limit((int) l);
                        } else {
                            throw MongoDBErrors
                                    .generalExceptionError("Number Expected for key '" + key.toJava() + "'");
                        }
                    } else if (k.equals(SKIP)) {
                        //cursor.skip(Token.toInt(v));
                    } else if (k.equals(SORT)) {
                        BasicDBObject sort = new BasicDBObject(k, v);
                        sort.append("name", "-1");
                        cursor.sort((DBObject) sort);
                    } else if (k.equals(COUNT)) {
                        int count = cursor.count();
                        BasicDBObject res = new BasicDBObject();
                        res.append("count", count);
                        return objectToItem(handler, res);
                    } else if (k.equals(EXPLAIN)) {
                        DBObject result = cursor.explain();
                        return objectToItem(handler, result);
                    }
                } else if (v instanceof Map) {
                } else {
                    throw MongoDBErrors.generalExceptionError("Invalid value 2...");
                }
            }
        }
        return cursorToItem(handler, cursor);
    } catch (MongoException e) {
        throw MongoDBErrors.generalExceptionError(e.getMessage());
    } finally {
        db.requestDone();
    }
}
From source file:org.basex.modules.nosql.MongoDB.java
License:BSD License
/**
 * MongoDB find with all parameters.
 * @param handler database handler
 * @param col collection
 * @param query Query parameters
 * @param opt options in Map like: {"limit":2}
 * @param projection projection (selection field)
 * @return Item
 * @throws QueryException query exception
 */
public Item find(final Str handler, final Item col, final Item query, final Item opt,
        final Item projection) throws QueryException {
    final DB db = getDbHandler(handler);
    db.requestStart();
    try {
        DBObject p = null;
        if (opt != null && opt instanceof Str) {
            p = getDbObjectFromItem(opt);
        } else if (projection != null && projection instanceof Str) {
            p = getDbObjectFromItem(projection);
        }
        final DBObject q = query != null ? getDbObjectFromItem(query) : null;
        final DBCollection coll = db.getCollection(itemToString(col));
        final DBCursor cursor = coll.find(q, p);
        Map options = null;
        options = (opt != null && opt instanceof Map) ? (Map) opt
                : (projection != null && projection instanceof Map) ? (Map) projection : null;
        if (options != null) {
            Value keys = options.keys();
            for (final Item key : keys) {
                if (!(key instanceof Str))
                    throw MongoDBErrors.generalExceptionError("String expected " + key.toJava());
                final String k = ((Str) key).toJava();
                final Value v = options.get(key, null);
                if (v instanceof Str || v.seqType().instanceOf(SeqType.ITR)) {
                    if (k.equals(LIMIT)) {
                        if (v.seqType().instanceOf(SeqType.ITR_OM)) {
                            long l = ((Item) v).itr(null);
                            cursor.limit((int) l);
                        } else {
                            throw MongoDBErrors
                                    .generalExceptionError("Number Expected for key '" + key.toJava() + "'");
                        }
                    } else if (k.equals(SKIP)) {
                        //cursor.skip(Token.toInt(v));
                    } else if (k.equals(SORT)) {
                        BasicDBObject sort = new BasicDBObject(k, v);
                        sort.append("name", "-1");
                        cursor.sort(sort);
                    } else if (k.equals(COUNT)) {
                        int count = cursor.count();
                        BasicDBObject res = new BasicDBObject();
                        res.append("count", count);
                        return objectToItem(handler, res);
                    } else if (k.equals(EXPLAIN)) {
                        DBObject result = cursor.explain();
                        return objectToItem(handler, result);
                    }
                } else if (v instanceof Map) {
                } else {
                    throw MongoDBErrors.generalExceptionError("Invalid value 2...");
                }
            }
        }
        return cursorToItem(handler, cursor);
    } catch (MongoException e) {
        throw MongoDBErrors.generalExceptionError(e.getMessage());
    } finally {
        db.requestDone();
    }
}
From source file:org.broad.igv.plugin.mongocollab.MongoFeatureSource.java
License:Open Source License
/**
 * @param queryObject
 * @param limit Limitation on the number of results returned. Setting to 0 is equivalent to unlimited
 * @return
 * @throws IOException
 */
private Collection<DBFeature.IGVFeat> getFeatures(DBObject queryObject, int limit) throws IOException {
    DBCursor cursor = this.collection.find(queryObject);
    cursor.limit(limit >= 0 ? limit : 0);

    //Sort by increasing start value
    //Only do this if we have an index, otherwise might be too memory intensive
    if (hasLocusIndex) {
        cursor.sort(new BasicDBObject("Start", 1));
    }

    boolean isSorted = true;
    int lastStart = -1;

    List<DBFeature.IGVFeat> features = new ArrayList<DBFeature.IGVFeat>();
    while (cursor.hasNext()) {
        DBObject obj = cursor.next();
        DBFeature feat = (DBFeature) obj;
        features.add(feat.createIGVFeature());
        isSorted &= feat.getStart() >= lastStart;
        lastStart = feat.getStart();
    }

    if (!isSorted) {
        FeatureUtils.sortFeatureList(features);
    }

    return features;
}
From source file:org.canedata.provider.mongodb.entity.MongoEntity.java
License:Apache License
public List<Fields> list(int offset, int count) {
    if (logger.isDebug())
        logger.debug("Listing entities, Database is {0}, Collection is {1}, offset is {2}, count is {3}.",
                getSchema(), getName(), offset, count);

    List<Fields> rlt = new ArrayList<Fields>();
    BasicDBObject options = new BasicDBObject();
    DBCursor cursor = null;
    try {
        validateState();

        MongoExpressionFactory expFactory = new MongoExpressionFactory.Impl();
        BasicDBObject projection = new BasicDBObject();
        Limiter limiter = new Limiter.Default();
        BasicDBObject sorter = new BasicDBObject();

        IntentParser.parse(getIntent(), expFactory, null, projection, limiter, sorter, options);

        if (!options.isEmpty())
            prepareOptions(options);

        if (null != getCache()) { // cache
            cursor = getCollection().find(expFactory.toQuery(), new BasicDBObject().append("_id", 1));
        } else { // no cache
            // projection
            if (projection.isEmpty())
                cursor = getCollection().find(expFactory.toQuery());
            else
                cursor = getCollection().find(expFactory.toQuery(), projection);
        }

        // sort
        if (!sorter.isEmpty())
            cursor.sort(sorter);

        if (offset > 0)
            limiter.offset(offset);
        if (count > 0)
            limiter.count(count);

        if (limiter.offset() > 0)
            cursor.skip(limiter.offset());
        if (limiter.count() > 0)
            cursor.limit(limiter.count());

        if (null != getCache()) {
            Map<Object, MongoFields> missedCacheHits = new HashMap<Object, MongoFields>();

            while (cursor.hasNext()) {
                BasicDBObject dbo = (BasicDBObject) cursor.next();
                Object key = dbo.get("_id");
                String cacheKey = getKey().concat("#").concat(key.toString());

                MongoFields ele = null;
                if (getCache().isAlive(cacheKey)) { // load from cache
                    MongoFields mf = (MongoFields) getCache().restore(cacheKey);
                    if (null != mf)
                        ele = mf.clone(); // pooling
                }

                if (null != ele && !projection.isEmpty())
                    ele.project(projection.keySet());

                if (null == ele) {
                    ele = new MongoFields(this, getIntent());
                    missedCacheHits.put(key, ele);
                }

                rlt.add(ele);
            }

            // load missed cache hits.
            if (!missedCacheHits.isEmpty()) {
                loadForMissedCacheHits(missedCacheHits, projection.keySet());
                missedCacheHits.clear();
            }

            if (logger.isDebug())
                logger.debug("Listed entities hit cache ...");
        } else {
            while (cursor.hasNext()) {
                BasicDBObject dbo = (BasicDBObject) cursor.next();
                rlt.add(new MongoFields(this, getIntent(), dbo));
            }

            if (logger.isDebug())
                logger.debug("Listed entities ...");
        }

        return rlt;
    } catch (AnalyzeBehaviourException abe) {
        if (logger.isDebug())
            logger.debug(abe, "Analyzing behaviour failure, cause by: {0}.", abe.getMessage());

        throw new RuntimeException(abe);
    } finally {
        if (!options.getBoolean(Options.RETAIN))
            getIntent().reset();

        if (cursor != null)
            cursor.close();
    }
}
From source file:org.datanucleus.store.mongodb.MongoDBUtils.java
License:Open Source License
/**
 * Convenience method to return all objects of the candidate type (optionally allowing subclasses).
 * @param q Query
 * @param db Mongo DB
 * @param filterObject Optional filter object
 * @param orderingObject Optional ordering object
 * @param options Set of options for controlling this query
 * @param skip Number of records to skip
 * @param limit Max number of records to return
 * @return List of all candidate objects (implements QueryResult)
 */
public static List getObjectsOfCandidateType(Query q, DB db, BasicDBObject filterObject,
        BasicDBObject orderingObject, Map<String, Object> options, Integer skip, Integer limit) {
    LazyLoadQueryResult qr = new LazyLoadQueryResult(q);

    // Find the DBCollections we need to query
    ExecutionContext ec = q.getExecutionContext();
    StoreManager storeMgr = ec.getStoreManager();
    ClassLoaderResolver clr = ec.getClassLoaderResolver();
    List<AbstractClassMetaData> cmds = MetaDataUtils.getMetaDataForCandidates(q.getCandidateClass(),
            q.isSubclasses(), ec);
    Map<String, List<AbstractClassMetaData>> classesByCollectionName = new HashMap();
    for (AbstractClassMetaData cmd : cmds) {
        if (cmd instanceof ClassMetaData && ((ClassMetaData) cmd).isAbstract()) {
            // Omit any classes that are not instantiable (e.g abstract)
        } else {
            String collectionName = storeMgr.getNamingFactory().getTableName(cmd);
            List<AbstractClassMetaData> cmdsForCollection = classesByCollectionName.get(collectionName);
            if (cmdsForCollection == null) {
                cmdsForCollection = new ArrayList();
                classesByCollectionName.put(collectionName, cmdsForCollection);
            }
            cmdsForCollection.add(cmd);
        }
    }

    // Add a query for each DBCollection we need
    Iterator<Map.Entry<String, List<AbstractClassMetaData>>> iter = classesByCollectionName.entrySet()
            .iterator();
    while (iter.hasNext()) {
        Map.Entry<String, List<AbstractClassMetaData>> entry = iter.next();
        String collectionName = entry.getKey();
        List<AbstractClassMetaData> cmdsForCollection = entry.getValue();
        AbstractClassMetaData rootCmd = cmdsForCollection.get(0);
        int[] fpMembers = q.getFetchPlan().getFetchPlanForClass(rootCmd).getMemberNumbers();
        BasicDBObject fieldsSelection = new BasicDBObject();
        if (fpMembers != null && fpMembers.length > 0) {
            fieldsSelection = new BasicDBObject();
            for (int i = 0; i < fpMembers.length; i++) {
                AbstractMemberMetaData mmd = rootCmd
                        .getMetaDataForManagedMemberAtAbsolutePosition(fpMembers[i]);
                RelationType relationType = mmd.getRelationType(clr);
                if (mmd.isEmbedded() && RelationType.isRelationSingleValued(relationType)) {
                    boolean nested = true;
                    String nestedStr = mmd.getValueForExtension("nested");
                    if (nestedStr != null && nestedStr.equalsIgnoreCase("false")) {
                        nested = false;
                    }

                    if (nested) {
                        // Nested Embedded field, so include field
                        String fieldName = storeMgr.getNamingFactory().getColumnName(mmd, ColumnType.COLUMN);
                        fieldsSelection.append(fieldName, 1);
                    } else {
                        // Flat Embedded field, so add all fields of sub-objects
                        selectAllFieldsOfEmbeddedObject(mmd, fieldsSelection, ec, clr);
                    }
                } else {
                    String fieldName = storeMgr.getNamingFactory().getColumnName(mmd, ColumnType.COLUMN);
                    fieldsSelection.append(fieldName, 1);
                }
            }
        }
        if (rootCmd.getIdentityType() == IdentityType.DATASTORE) {
            fieldsSelection.append(
                    storeMgr.getNamingFactory().getColumnName(rootCmd, ColumnType.DATASTOREID_COLUMN), 1);
        }
        if (rootCmd.isVersioned()) {
            VersionMetaData vermd = rootCmd.getVersionMetaDataForClass();
            if (vermd.getFieldName() != null) {
                AbstractMemberMetaData verMmd = rootCmd.getMetaDataForMember(vermd.getFieldName());
                String fieldName = storeMgr.getNamingFactory().getColumnName(verMmd, ColumnType.COLUMN);
                fieldsSelection.append(fieldName, 1);
            } else {
                fieldsSelection.append(
                        storeMgr.getNamingFactory().getColumnName(rootCmd, ColumnType.VERSION_COLUMN), 1);
            }
        }
        if (rootCmd.hasDiscriminatorStrategy()) {
            fieldsSelection.append(
                    storeMgr.getNamingFactory().getColumnName(rootCmd, ColumnType.DISCRIMINATOR_COLUMN), 1);
        }

        BasicDBObject query = new BasicDBObject();
        if (filterObject != null) {
            Iterator<Map.Entry<String, Object>> filterEntryIter = filterObject.entrySet().iterator();
            while (filterEntryIter.hasNext()) {
                Map.Entry<String, Object> filterEntry = filterEntryIter.next();
                query.put(filterEntry.getKey(), filterEntry.getValue());
            }
        }

        if (rootCmd.hasDiscriminatorStrategy() && cmdsForCollection.size() == 1) {
            // TODO Add this restriction on *all* possible cmds for this DBCollection
            // Discriminator present : Add restriction on the discriminator value for this class
            query.put(storeMgr.getNamingFactory().getColumnName(rootCmd, ColumnType.DISCRIMINATOR_COLUMN),
                    rootCmd.getDiscriminatorValue());
        }

        if (storeMgr.getStringProperty(PropertyNames.PROPERTY_TENANT_ID) != null) {
            // Multitenancy discriminator present : Add restriction for this tenant
            if ("true".equalsIgnoreCase(rootCmd.getValueForExtension("multitenancy-disable"))) {
                // Don't bother with multitenancy for this class
            } else {
                String fieldName = storeMgr.getNamingFactory().getColumnName(rootCmd,
                        ColumnType.MULTITENANCY_COLUMN);
                String value = storeMgr.getStringProperty(PropertyNames.PROPERTY_TENANT_ID);
                query.put(fieldName, value);
            }
        }

        DBCollection dbColl = db.getCollection(collectionName);
        Object val = (options != null ? options.get("slave-ok") : Boolean.FALSE);
        if (val == Boolean.TRUE) {
            dbColl.setReadPreference(ReadPreference.secondaryPreferred());
        }

        if (NucleusLogger.DATASTORE_NATIVE.isDebugEnabled()) {
            NucleusLogger.DATASTORE_NATIVE
                    .debug("Performing find() using query on collection " + collectionName + " for fields="
                            + fieldsSelection + " with filter=" + query + " and ordering=" + orderingObject);
        }
        DBCursor curs = dbColl.find(query, fieldsSelection);
        if (ec.getStatistics() != null) {
            // Add to statistics
            ec.getStatistics().incrementNumReads();
        }

        if (classesByCollectionName.size() == 1) {
            if (orderingObject != null) {
                curs = curs.sort(orderingObject);
                qr.setOrderProcessed(true);
            }

            // We have a single DBCursor so apply the range specification directly to this DBCursor
            if (skip != null && skip > 0) {
                curs = curs.skip(skip);
                qr.setRangeProcessed(true);
            }
            if (limit != null && limit > 0) {
                curs = curs.limit(limit);
                qr.setRangeProcessed(true);
            }
        }

        qr.addCandidateResult(rootCmd, curs, fpMembers);
    }

    return qr;
}
From source file:org.eclipse.birt.data.oda.mongodb.internal.impl.MDbMetaData.java
License:Open Source License
/**
 * Returns all fields' name and corresponding metadata found in the specified collection.
 * @param collectionName name of MongoDB collection (i.e. table)
 * @param searchLimit maximum number of documents, i.e. rows to search for available fields;
 *          a zero or negative value would adopt the default limit
 * @param runtimeProps an instance of QueryProperties containing the data set runtime property values;
 *          may be null to apply all default values in finding the available fields metadata
 * @return the DocumentsMetaData object that contains the list of available field names and
 *          corresponding metadata;
 *          an empty list is returned if no available fields are found, or
 *          if the specified collection does not exist
 * @throws OdaException
 */
public DocumentsMetaData getAvailableFields(String collectionName, int searchLimit,
        QueryProperties runtimeProps) throws OdaException {
    DBCollection collection = getCollection(collectionName);
    if (collection == null && !runtimeProps.hasRunCommand()) {
        if (runtimeProps.getOperationType() == CommandOperationType.RUN_DB_COMMAND
                && runtimeProps.getOperationExpression().isEmpty())
            throw new OdaException(Messages.bind(Messages.mDbMetaData_missingCmdExprText,
                    runtimeProps.getOperationType().displayName()));
        else
            throw new OdaException(Messages.bind(Messages.mDbMetaData_invalidCollectionName, collectionName));
    }

    if (searchLimit <= 0) // no limit specified, applies meta data design-time default
        searchLimit = DEFAULT_META_DATA_SEARCH_LIMIT;

    // handle optional command operation
    if (runtimeProps.hasValidCommandOperation()) {
        QueryModel.validateCommandSyntax(runtimeProps.getOperationType(),
                runtimeProps.getOperationExpression());

        Iterable<DBObject> commandResults = null;
        if (runtimeProps.hasAggregateCommand())
            commandResults = MDbOperation.callAggregateCmd(collection, runtimeProps);
        else if (runtimeProps.hasMapReduceCommand()) {
            MapReduceOutput mapReduceOut = MDbOperation.callMapReduceCmd(collection, runtimeProps);
            commandResults = mapReduceOut.results();
            // skip running $query on output collection in discovering metadata
        } else if (runtimeProps.hasRunCommand())
            commandResults = MDbOperation.callDBCommand(m_connectedDB, runtimeProps);

        if (commandResults != null)
            return getMetaData(commandResults, searchLimit);
        return sm_emptyFields;
    }

    // run search query operation by default
    DBCursor rowsCursor = collection.find();
    if (searchLimit > 0)
        rowsCursor.limit(searchLimit);

    QueryProperties mdCursorProps = runtimeProps != null ? runtimeProps : QueryProperties.defaultValues();
    MDbOperation.applyPropertiesToCursor(rowsCursor, mdCursorProps, false);

    return getMetaData(rowsCursor);
}
From source file:org.eclipse.birt.data.oda.mongodb.internal.impl.MDbOperation.java
License:Open Source License
private void applyPropertiesToCursor(DBCursor rowsCursor, QueryProperties queryProps,
        boolean includeMetaDataSearchLimit, boolean includeSortExpr) {
    if (includeMetaDataSearchLimit) {
        Integer searchLimit = getModel().getEffectiveMDSearchLimit(queryProps);
        if (searchLimit > 0)
            rowsCursor.limit(searchLimit);
    }

    applyPropertiesToCursor(rowsCursor, queryProps, includeSortExpr);
}
From source file:org.eclipse.jetty.nosql.mongodb.MongoSessionIdManager.java
License:Open Source License
/**
 * Purge is a process that cleans the mongodb cluster of old sessions that are no
 * longer valid.
 *
 * There are two checks being done here:
 *
 * - if the accessed time is older than the current time minus the purge invalid age
 *   and it is no longer valid then remove that session
 * - if the accessed time is older than the current time minus the purge valid age
 *   then we consider this a lost record and remove it
 *
 * NOTE: if your system supports long lived sessions then the purge valid age should be
 * set to zero so the check is skipped.
 *
 * The second check was added to catch sessions that were being managed on machines
 * that might have crashed without marking their sessions as 'valid=false'
 */
protected void purge() {
    __log.debug("PURGING");

    BasicDBObject invalidQuery = new BasicDBObject();
    invalidQuery.put(MongoSessionManager.__VALID, false);
    invalidQuery.put(MongoSessionManager.__ACCESSED,
            new BasicDBObject("$lt", System.currentTimeMillis() - _purgeInvalidAge));

    DBCursor oldSessions = _sessions.find(invalidQuery, new BasicDBObject(MongoSessionManager.__ID, 1));
    if (_purgeLimit > 0) {
        oldSessions.limit(_purgeLimit);
    }

    for (DBObject session : oldSessions) {
        String id = (String) session.get("id");
        __log.debug("MongoSessionIdManager:purging invalid session {}", id);
        _sessions.remove(session);
    }

    if (_purgeValidAge != 0) {
        BasicDBObject validQuery = new BasicDBObject();
        validQuery.put(MongoSessionManager.__VALID, true);
        validQuery.put(MongoSessionManager.__ACCESSED,
                new BasicDBObject("$lt", System.currentTimeMillis() - _purgeValidAge));

        oldSessions = _sessions.find(validQuery, new BasicDBObject(MongoSessionManager.__ID, 1));
        if (_purgeLimit > 0) {
            oldSessions.limit(_purgeLimit);
        }

        for (DBObject session : oldSessions) {
            String id = (String) session.get(MongoSessionManager.__ID);
            __log.debug("MongoSessionIdManager:purging valid session {}", id);
            _sessions.remove(session);
        }
    }
}
From source file:org.eclipselabs.mongoemf.streams.MongoInputStream.java
License:Open Source License
@Override
public void loadResource(Resource resource) throws IOException {
    // We need to set up the XMLResource.URIHandler so that proxy URIs are handled properly.
    XMLResource.URIHandler uriHandler = (XMLResource.URIHandler) options.get(XMLResource.OPTION_URI_HANDLER);
    if (uriHandler == null)
        uriHandler = new org.eclipse.emf.ecore.xmi.impl.URIHandlerImpl();

    if (resource.getURI().hasQuery())
        uriHandler.setBaseURI(resource.getURI().trimSegments(1).appendSegment("-1"));
    else
        uriHandler.setBaseURI(resource.getURI());

    boolean includeAttributesForProxyReferences = Boolean.TRUE
            .equals(options.get(Options.OPTION_PROXY_ATTRIBUTES));
    EObjectBuilder builder = builderFactory.createObjectBuilder(converterService, uriHandler,
            includeAttributesForProxyReferences, eClassCache);

    // If the URI contains a query string, use it to locate a collection of objects from
    // MongoDB, otherwise simply get the object from MongoDB using the id.
    EList<EObject> contents = resource.getContents();

    if (uri.query() != null) {
        if (queryEngine == null)
            throw new IOException("The query engine was not found");

        MongoQuery mongoQuery = queryEngine.buildDBObjectQuery(uri);
        DBCursor resultCursor = null;

        if (mongoQuery.getProjection() == null)
            resultCursor = collection.find(mongoQuery.getFilter());
        else
            resultCursor = collection.find(mongoQuery.getFilter(), mongoQuery.getProjection());

        if (mongoQuery.getSkip() != null)
            resultCursor.skip(mongoQuery.getSkip());

        if (mongoQuery.getSort() != null)
            resultCursor = resultCursor.sort(mongoQuery.getSort());

        if (mongoQuery.getLimit() != null)
            resultCursor = resultCursor.limit(mongoQuery.getLimit());

        boolean createCursor = Boolean.TRUE.equals(options.get(Options.OPTION_QUERY_CURSOR));

        if (createCursor) {
            MongoCursor cursor = ModelFactory.eINSTANCE.createMongoCursor();
            cursor.setDbCollection(collection);
            cursor.setDbCursor(resultCursor);
            cursor.setObjectBuilder(builder);
            contents.add(cursor);
        } else {
            EReferenceCollection eCollection = EmodelingFactory.eINSTANCE.createEReferenceCollection();
            InternalEList<EObject> values = (InternalEList<EObject>) eCollection.getValues();

            for (DBObject dbObject : resultCursor)
                values.addUnique(builder.buildEObject(collection, dbObject, resource, true));

            contents.add(eCollection);
        }
    } else {
        DBObject dbObject = collection.findOne(new BasicDBObject(Keywords.ID_KEY, MongoUtils.getID(uri)));

        if (dbObject != null) {
            EObject eObject = builder.buildEObject(collection, dbObject, resource, false);

            if (eObject != null)
                contents.add(eObject);

            response.put(URIConverter.RESPONSE_TIME_STAMP_PROPERTY, dbObject.get(Keywords.TIME_STAMP_KEY));
        }
    }
}
From source file:org.envirocar.server.mongo.dao.MongoMeasurementDao.java
License:Open Source License
private Measurements query(DBObject query, Pagination p) {
    final Mapper mapper = this.mongoDB.getMapper();
    final Datastore ds = this.mongoDB.getDatastore();
    final DBCollection coll = ds.getCollection(MongoMeasurement.class);

    DBCursor cursor = coll.find(query, null);
    long count = 0;

    cursor.setDecoderFactory(ds.getDecoderFact());
    if (p != null) {
        count = coll.count(query);
        if (p.getOffset() > 0) {
            cursor.skip(p.getOffset());
        }
        if (p.getLimit() > 0) {
            cursor.limit(p.getLimit());
        }
    }

    cursor.sort(QueryImpl.parseFieldsString(MongoMeasurement.TIME, MongoMeasurement.class, mapper, true));
    Iterable<MongoMeasurement> i = new MorphiaIterator<MongoMeasurement, MongoMeasurement>(cursor, mapper,
            MongoMeasurement.class, coll.getName(), mapper.createEntityCache());
    return createPaginatedIterable(i, p, count);
}