List of usage examples for com.mongodb.client FindIterable skip
FindIterable<TResult> skip(int skip);
From source file:org.bananaforscale.cormac.dao.document.DocumentDataServiceImpl.java
License:Apache License
/** * Returns all the documents in a collection. * * @param databaseName the database/*from w w w . j av a2 s .c om*/ * @param collectionName the collection * @param query a JSON query param in the style of mongo * @param fields fields to return * @param skip the amount of documents to skip * @param limit the amount of documents to limit the result to * @param orderBy order ascending or descending by property * @param includeId determines whether to include the Mongo "_id" field * @return the documents in a collection * @throws DatasourceException * @throws NotFoundException */ @Override public List<String> getAll(String databaseName, String collectionName, String query, String fields, String skip, String limit, String orderBy, boolean includeId) throws DatasourceException, NotFoundException { try { if (!databaseExists(databaseName)) { throw new NotFoundException("The database doesn't exist in the datasource"); } if (!collectionExists(databaseName, collectionName)) { throw new NotFoundException("The collection doesn't exist in the datasource"); } Integer intSkip, intLimit; try { intSkip = Integer.parseInt(skip); } catch (NumberFormatException ex) { intSkip = 0; } try { intLimit = Integer.parseInt(limit); } catch (NumberFormatException ex) { intLimit = 0; } // 1 or -1 to specify an ascending or descending sort respectively. Document orderByObject = null; if (orderBy != null && !orderBy.isEmpty()) { if (orderBy.contains("ascending")) { String[] parts = orderBy.split(":"); orderByObject = new Document(parts[0], 1); } else if (orderBy.contains("descending")) { String[] parts = orderBy.split(":"); orderByObject = new Document(parts[0], -1); } } MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName); MongoCollection collection = mongoDatabase.getCollection(collectionName); FindIterable iterable = (query == null || query.isEmpty()) ? 
collection.find() : collection.find(Document.parse(query)); // TODO: Figure out how to do this in new API // if (fields != null && !fields.isEmpty()) { // // expect the form to be field:value,field:value // Document document = new Document(); // String[] parts = fields.split(","); // for (String part : parts) { // String[] tempParts = part.split(":"); // document.append(tempParts[0], tempParts[1]); // } // iterable.projection(document); // } iterable.skip(intSkip); iterable.limit(intLimit); if (orderByObject != null) { iterable.sort(orderByObject); } Iterator<Document> curIter = iterable.iterator(); List<String> documentList = new ArrayList<>(); while (curIter.hasNext()) { Document current = curIter.next(); if (!includeId) { current.remove("_id"); } documentList.add(JSON.serialize(current)); } return documentList; } catch (MongoException ex) { logger.error("An error occured while retrieving the document list", ex); throw new DatasourceException("An error occured while retrieving the document list"); } }
From source file:org.helm.rest.MongoDB.java
public JSONObject List(String table, String cols, BsonDocument where, BsonDocument sortby, int page, int countperpage) { if (page < 1) page = 1;//from ww w .j ava2s . co m if (countperpage < 1) countperpage = 10; long count; FindIterable iter; MongoCollection coll = db.getCollection(table); if (where == null) { count = coll.count(); iter = coll.find(); } else { count = coll.count(where); iter = coll.find(where); } if (sortby != null) iter = iter.sort(sortby); if (cols != null) { String[] ss = cols.split(","); Document fields = new Document("_id", false); for (int i = 0; i < ss.length; ++i) { fields.append(ss[i].trim().toLowerCase(), true); } iter = iter.projection(fields); } long mod = count % countperpage; long pages = (count - mod) / countperpage + (mod == 0 ? 0 : 1); if (page > 1) iter = iter.skip((page - 1) * countperpage); iter = iter.limit(countperpage); MongoCursor cur = iter.iterator(); JSONObject ret = new JSONObject(); ret.put("page", page); ret.put("pages", pages); ret.put("rows", ResultSet2Json(cur)); cur.close(); return ret; }
From source file:org.opencb.commons.datastore.mongodb.MongoDBNativeQuery.java
License:Apache License
public FindIterable<Document> find(Bson query, Bson projection, QueryOptions options) { if (projection == null) { projection = getProjection(projection, options); }/*from www . j a v a 2 s. c o m*/ FindIterable<Document> findIterable = dbCollection.find(query).projection(projection); int limit = (options != null) ? options.getInt(QueryOptions.LIMIT, 0) : 0; if (limit > 0) { findIterable.limit(limit); } int skip = (options != null) ? options.getInt(QueryOptions.SKIP, 0) : 0; if (skip > 0) { findIterable.skip(skip); } Object sortObject = (options != null) ? options.get(QueryOptions.SORT) : null; if (sortObject != null) { if (sortObject instanceof Bson) { findIterable.sort(((Bson) sortObject)); } else if (sortObject instanceof String) { String order = options.getString(QueryOptions.ORDER, "DESC"); if (order.equalsIgnoreCase(QueryOptions.ASCENDING) || order.equalsIgnoreCase("ASC") || order.equals("1")) { findIterable.sort(Sorts.ascending(((String) sortObject))); } else { findIterable.sort(Sorts.descending(((String) sortObject))); } } } if (options != null && options.containsKey(MongoDBCollection.BATCH_SIZE)) { findIterable.batchSize(options.getInt(MongoDBCollection.BATCH_SIZE, 20)); } if (options != null && options.containsKey(QueryOptions.TIMEOUT)) { findIterable.maxTime(options.getLong(QueryOptions.TIMEOUT), TimeUnit.MILLISECONDS); } return findIterable; }
From source file:org.restheart.db.CollectionDAO.java
License:Open Source License
ArrayList<BsonDocument> getCollectionData(final MongoCollection<BsonDocument> coll, final int page, final int pagesize, final BsonDocument sortBy, final BsonDocument filters, final BsonDocument keys, CursorPool.EAGER_CURSOR_ALLOCATION_POLICY eager) throws JSONParseException { ArrayList<BsonDocument> ret = new ArrayList<>(); int toskip = pagesize * (page - 1); SkippedFindIterable _cursor = null;//from w ww . j a v a2 s .c o m if (eager != CursorPool.EAGER_CURSOR_ALLOCATION_POLICY.NONE) { _cursor = CursorPool.getInstance().get(new CursorPoolEntryKey(coll, sortBy, filters, keys, toskip, 0), eager); } int _pagesize = pagesize; // in case there is not cursor in the pool to reuse FindIterable<BsonDocument> cursor; if (_cursor == null) { cursor = getFindIterable(coll, sortBy, filters, keys); cursor.skip(toskip); MongoCursor<BsonDocument> mc = cursor.iterator(); while (_pagesize > 0 && mc.hasNext()) { ret.add(mc.next()); _pagesize--; } } else { int alreadySkipped; cursor = _cursor.getFindIterable(); alreadySkipped = _cursor.getAlreadySkipped(); long startSkipping = 0; int cursorSkips = alreadySkipped; if (LOGGER.isDebugEnabled()) { startSkipping = System.currentTimeMillis(); } LOGGER.debug("got cursor from pool with skips {}. " + "need to reach {} skips.", alreadySkipped, toskip); MongoCursor<BsonDocument> mc = cursor.iterator(); while (toskip > alreadySkipped && mc.hasNext()) { mc.next(); alreadySkipped++; } if (LOGGER.isDebugEnabled()) { LOGGER.debug("skipping {} times took {} msecs", toskip - cursorSkips, System.currentTimeMillis() - startSkipping); } while (_pagesize > 0 && mc.hasNext()) { ret.add(mc.next()); _pagesize--; } } // the pool is populated here because, skipping with cursor.next() is heavy operation // and we want to minimize the chances that pool cursors are allocated in parallel CursorPool.getInstance().populateCache(new CursorPoolEntryKey(coll, sortBy, filters, keys, toskip, 0), eager); return ret; }
From source file:org.restheart.db.CursorPool.java
License:Open Source License
private void populateCacheLinear(CursorPoolEntryKey key) { if (key.getSkipped() < SKIP_SLICE_LINEAR_WIDTH) { return;/*from w w w . j a v a 2 s .c o m*/ } int firstSlice = key.getSkipped() / SKIP_SLICE_LINEAR_WIDTH; try { POOL_POPULATOR.submit(() -> { int slice = firstSlice; for (int tohave : SKIP_SLICES_HEIGHTS) { int sliceSkips = slice * SKIP_SLICE_LINEAR_WIDTH - SKIP_SLICE_LINEAR_DELTA; CursorPoolEntryKey sliceKey = new CursorPoolEntryKey(key.getCollection(), key.getSort(), key.getFilter(), key.getFilter(), sliceSkips, -1); long existing = getSliceHeight(sliceKey); long tocreate = tohave - existing; for (long cont = tocreate; cont > 0; cont--) { // create the first cursor FindIterable<BsonDocument> cursor = dbsDAO.getFindIterable(key.getCollection(), key.getSort(), key.getFilter(), key.getKeys()); cursor.skip(sliceSkips); // TODO check after refactoring cursor.iterator(); // this forces the actual skipping CursorPoolEntryKey newkey = new CursorPoolEntryKey(key.getCollection(), key.getSort(), key.getFilter(), key.getKeys(), sliceSkips, System.nanoTime()); cache.put(newkey, cursor); LOGGER.debug("{} cursor in pool: {}", ansi().fg(YELLOW).bold().a("new").reset().toString(), newkey); } slice++; } }); } catch (RejectedExecutionException rej) { // this happens if the thread executor (whose pool size is 1) // is already creating a cursor LOGGER.trace("creation of new cursor pool {}", ansi().fg(RED).bold().a("rejected").reset().toString()); } }
From source file:rapture.repo.mongodb.MongoDbDataStore.java
License:Open Source License
@Override public RaptureQueryResult runNativeQuery(final String repoType, final List<String> queryParams) { if (repoType.toUpperCase().equals(MONGODB)) { MongoRetryWrapper<RaptureQueryResult> wrapper = new MongoRetryWrapper<RaptureQueryResult>() { @Override//from w ww. j av a2 s . c o m public FindIterable<Document> makeCursor() { // Here we go, the queryParams are basically // (1) the searchCriteria Document queryObj = getQueryObjFromQueryParams(queryParams); // Document fieldObj = // getFieldObjFromQueryParams(queryParams); MongoCollection<Document> collection = MongoDBFactory.getCollection(instanceName, tableName); FindIterable<Document> find = collection.find(queryObj).batchSize(100); if (queryParams.size() > 2) { Map<String, Object> options = JacksonUtil.getMapFromJson(queryParams.get(2)); if (options.containsKey(SKIP)) { find.skip((Integer) options.get(SKIP)); } if (options.containsKey(LIMIT)) { find.limit((Integer) options.get(LIMIT)); } if (options.containsKey(SORT)) { Map<String, Object> sortInfo = JacksonUtil.getMapFromJson(options.get(SORT).toString()); Document sortInfoObject = new Document(); sortInfoObject.putAll(sortInfo); find.sort(sortInfoObject); } } return find; } @Override public RaptureQueryResult action(FindIterable<Document> iterable) { RaptureQueryResult res = new RaptureQueryResult(); for (Document d : iterable) { res.addRowContent(new JsonContent(d.toString())); } return res; } }; return wrapper.doAction(); } else { throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_BAD_REQUEST, mongoMsgCatalog.getMessage("Mismatch", repoType)); } }
From source file:rapture.repo.mongodb.MongoDbDataStore.java
License:Open Source License
@Override public RaptureNativeQueryResult runNativeQueryWithLimitAndBounds(String repoType, final List<String> queryParams, final int limit, final int offset) { if (repoType.toUpperCase().equals(MONGODB)) { MongoRetryWrapper<RaptureNativeQueryResult> wrapper = new MongoRetryWrapper<RaptureNativeQueryResult>() { @Override//w ww .j ava 2 s . co m public FindIterable<Document> makeCursor() { // This is like a native query, except that we need to (a) // add in // the displayname (the key) into the // results, and force a limit and offset (so the queryParams // will // only be of size 2). Document queryObj = getQueryObjFromQueryParams(queryParams); // Document fieldObj = // getFieldObjFromQueryParams(queryParams); // if (!fieldObj.keySet().isEmpty()) { // fieldObj.put(KEY, "1"); // } MongoCollection<Document> collection = MongoDBFactory.getCollection(instanceName, tableName); FindIterable<Document> cursor = collection.find(queryObj); cursor = cursor.skip(offset).limit(limit); return cursor; } @Override public RaptureNativeQueryResult action(FindIterable<Document> iterable) { RaptureNativeQueryResult res = new RaptureNativeQueryResult(); for (Document d : iterable) { RaptureNativeRow row = new RaptureNativeRow(); row.setName(d.get(KEY).toString()); row.setContent(new JsonContent(d.get(VALUE).toString())); res.addRowContent(row); } return res; } }; return wrapper.doAction(); } else { throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_INTERNAL_ERROR, mongoMsgCatalog.getMessage("Mismatch", repoType)); } }
From source file:rapture.table.mongodb.MongoIndexHandler.java
License:Open Source License
@Override public List<TableRecord> queryTable(final TableQuery querySpec) { MongoRetryWrapper<List<TableRecord>> wrapper = new MongoRetryWrapper<List<TableRecord>>() { @Override/*from w w w.j a v a 2 s .com*/ public FindIterable<Document> makeCursor() { FindIterable<Document> ret; MongoCollection<Document> collection = MongoDBFactory.getCollection(instanceName, tableName); // Convert the query into that for a mongodb collection, then // call find Document query = EpochManager.getNotEqualEpochQueryObject(); if (querySpec.getFieldTests() != null) { for (TableSelect sel : querySpec.getFieldTests()) { switch (sel.getOper()) { case "=": query.append(sel.getFieldName(), sel.getTestValue()); break; case ">": Document gt = new Document(); gt.append("$gt", sel.getTestValue()); query.append(sel.getFieldName(), gt); break; case "<": Document lt = new Document(); lt.append("$lt", sel.getTestValue()); query.append(sel.getFieldName(), lt); break; case "LIKE": query.append(sel.getFieldName(), java.util.regex.Pattern.compile(sel.getTestValue().toString())); break; } } } // That's the query, now determine the return fields... List<String> projection = querySpec.getFieldReturns(); if (projection != null) { projection.add(KEY); } // Now we need to do the query, with a limit and skip applied if // necessary Document sort = new Document(); if (querySpec.getSortFields() != null) { for (TableColumnSort sortField : querySpec.getSortFields()) { sort.put(sortField.getFieldName(), sortField.getAscending() ? 1 : -1); } } ret = collection.find(query).projection(Projections .include((List<String>) ((projection == null) ? 
Collections.emptyList() : projection))); if (!sort.isEmpty()) { ret = ret.sort(sort); } if (querySpec.getSkip() != 0) { ret = ret.skip(querySpec.getSkip()); } if (querySpec.getLimit() != 0) { ret = ret.limit(querySpec.getLimit()); } return ret; } @Override public List<TableRecord> action(FindIterable<Document> cursor) { List<TableRecord> records = new ArrayList<>(); if (cursor != null) { for (Document obj : cursor) { if (obj != null) { TableRecord rec = new TableRecord(); rec.setKeyName(obj.getString(KEY)); rec.setFields(obj); rec.setContent(obj.toString()); records.add(rec); } } } return records; } }; return wrapper.doAction(); }
From source file:rapture.table.mongodb.MongoIndexHandler.java
License:Open Source License
/**
 * Executes an {@link IndexQuery} against the index collection.
 * <p>
 * Three paths, because Mongo can't do distinct based on multiple fields:
 * non-distinct queries run a plain find with server-side skip/limit; distinct
 * queries over several fields fetch everything and de-duplicate/page client-side;
 * distinct queries over a single field use the driver's distinct() command.
 *
 * @param indexQuery the parsed query (select, where, order-by, distinct, skip, limit)
 * @return the query result with column names and rows populated
 */
public TableQueryResult query(final IndexQuery indexQuery) {
    if (log.isDebugEnabled()) {
        log.debug("Parsed query " + indexQuery);
    }
    TableQueryResult res = new TableQueryResult();
    final Document mongoQuery = getClause(indexQuery.getWhere());
    final MongoCollection<Document> collection = MongoDBFactory.getCollection(instanceName, tableName);
    List<List<Object>> rows = new ArrayList<>();
    List<String> fieldList = indexQuery.getSelect().getFieldList();
    // Mongo can't do distinct based on multiple fields for some reason
    if (!indexQuery.isDistinct()) {
        // What fields to return
        final Document fields = new Document();
        for (String fieldName : indexQuery.getSelect().getFieldList()) {
            log.debug("Adding return field " + fieldName);
            fields.put(fieldName, 1);
        }
        res.setColumnNames(indexQuery.getSelect().getFieldList());
        fields.put(KEY, 1);
        MongoRetryWrapper<List<List<Object>>> wrapper = new MongoRetryWrapper<List<List<Object>>>() {
            @Override
            public FindIterable<Document> makeCursor() {
                FindIterable<Document> ret;
                if (fields.isEmpty()) {
                    ret = collection.find(mongoQuery);
                } else {
                    fields.put(KEY, 1);
                    ret = collection.find(mongoQuery).projection(fields);
                }
                if (indexQuery.getOrderBy().getFieldList().size() > 0) {
                    // 1 = ascending, -1 = descending
                    Document sort = new Document();
                    for (String field : indexQuery.getOrderBy().getFieldList()) {
                        sort.put(field, indexQuery.getDirection() != OrderDirection.DESC ? 1 : -1);
                    }
                    ret = ret.sort(sort);
                }
                // Server-side paging: only applied when positive.
                int skip = indexQuery.getSkip();
                if (skip > 0) {
                    ret = ret.skip(skip);
                }
                int limit = indexQuery.getLimit();
                if (limit > 0) {
                    // By specifying a negative limit we tell Mongo that it can close the
                    // cursor after returning a single batch.
                    ret = ret.limit(-(limit));
                }
                return ret;
            }

            @Override
            public List<List<Object>> action(FindIterable<Document> cursor) {
                List<List<Object>> rows = new ArrayList<>();
                for (Document obj : cursor) {
                    List<Object> row = new ArrayList<>();
                    for (String field : indexQuery.getSelect().getFieldList()) {
                        row.add(obj.get(field));
                    }
                    rows.add(row);
                }
                return rows;
            }
        };
        res.setRows(wrapper.doAction());
        return res; // We are done.
    } else if (fieldList.size() > 1) {
        // Distinct over multiple fields: fetch all matching rows, then de-duplicate
        // and page on the client side after this branch.
        // What fields to return
        final Document fields = new Document();
        for (String fieldName : indexQuery.getSelect().getFieldList()) {
            log.debug("Adding return field " + fieldName);
            fields.put(fieldName, 1);
        }
        res.setColumnNames(indexQuery.getSelect().getFieldList());
        fields.put(KEY, 1);
        MongoRetryWrapper<List<List<Object>>> wrapper = new MongoRetryWrapper<List<List<Object>>>() {
            @Override
            public FindIterable<Document> makeCursor() {
                FindIterable<Document> ret;
                if (fields.isEmpty()) {
                    ret = collection.find(mongoQuery);
                } else {
                    fields.put(KEY, 1);
                    ret = collection.find(mongoQuery).projection(fields);
                }
                if (indexQuery.getOrderBy().getFieldList().size() > 0) {
                    Document sort = new Document();
                    for (String field : indexQuery.getOrderBy().getFieldList()) {
                        sort.put(field, indexQuery.getDirection() != OrderDirection.DESC ? 1 : -1);
                    }
                    ret = ret.sort(sort);
                }
                // We can't apply SKIP and LIMIT here because we must drop the fields that
                // aren't distinct; Mongo doesn't appear to support distinct on multiple keys
                return ret;
            }

            @Override
            public List<List<Object>> action(FindIterable<Document> cursor) {
                // Read at most skip+limit unique rows; 0 means "no bound".
                int limit = (indexQuery.getSkip()) + (indexQuery.getLimit());
                if (limit == 0)
                    limit = Integer.MAX_VALUE;
                List<List<Object>> rows = new ArrayList<>();
                for (Document obj : cursor) {
                    List<Object> row = new ArrayList<>();
                    for (String field : indexQuery.getSelect().getFieldList()) {
                        row.add(obj.get(field));
                    }
                    if (indexQuery.isDistinct() && rows.contains(row))
                        continue;
                    rows.add(row);
                    if (rows.size() > limit)
                        break;
                }
                return rows;
            }
        };
        rows = wrapper.doAction();
        // We are not done - still need to apply skip and limit
    } else {
        // Distinct over a single field: use the driver's distinct command directly.
        String key = fieldList.get(0);
        DistinctIterable<String> values = collection.distinct(key, mongoQuery, String.class);
        for (String v : values) {
            rows.add(ImmutableList.of(v));
        }
        res.setColumnNames(ImmutableList.of(key));
        if (indexQuery.getOrderBy().getFieldList().size() > 0) {
            List<String> columnNames = indexQuery.getSelect().getFieldList();
            Collections.sort(rows, RowComparatorFactory.createComparator(indexQuery.getOrderBy().getFieldList(),
                    columnNames, indexQuery.getDirection()));
            if (indexQuery.getDirection() == OrderDirection.DESC) {
                Collections.reverse(rows);
            }
        }
    }
    // Client-side paging for the two distinct branches.
    int skip = (indexQuery.getSkip());
    if (skip < rows.size()) {
        int limit = indexQuery.getLimit();
        if ((limit > 0) && (rows.size() - skip > limit)) {
            res.setRows(rows.subList(skip, skip + limit));
        } else
            // NOTE(review): this branch returns ALL rows without applying the skip --
            // looks inconsistent with the subList branch above; confirm intended.
            res.setRows(rows);
    }
    // else all rows are skipped
    return res;
}
From source file:step.core.accessors.Collection.java
License:Open Source License
public CollectionFind<Document> find(Bson query, SearchOrder order, Integer skip, Integer limit) { // StringBuilder query = new StringBuilder(); // List<Object> parameters = new ArrayList<>(); // if(queryFragments!=null&&queryFragments.size()>0) { // query.append("{$and:["); // Iterator<String> it = queryFragments.iterator(); // while(it.hasNext()) { // String criterium = it.next(); // query.append("{"+criterium+"}"); // if(it.hasNext()) { // query.append(","); // } // }// w ww . j a va 2s. c om // query.append("]}"); // } // StringBuilder sort = new StringBuilder(); // sort.append("{").append(order.getAttributeName()).append(":") // .append(Integer.toString(order.getOrder())).append("}"); long count = collection.count(); CountOptions option = new CountOptions(); option.skip(0).limit(DEFAULT_LIMIT); long countResults = collection.count(query, option); FindIterable<Document> find = collection.find(query); if (order != null) { Document sortDoc = new Document(order.getAttributeName(), order.getOrder()); find.sort(sortDoc); } if (skip != null) { find.skip(skip); } if (limit != null) { find.limit(limit); } return new CollectionFind<Document>(count, countResults, find.iterator()); }