List of usage examples for com.mongodb.client MongoCollection find
FindIterable<TDocument> find();
FindIterable<TDocument> find(Bson filter);
FindIterable<TDocument> find(ClientSession clientSession);
FindIterable<TDocument> find(ClientSession clientSession, Bson filter);
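The examples below exercise these overloads in different combinations. As a minimal, self-contained sketch (the database, collection, and field names here are hypothetical, and the MongoClients factory assumes driver 3.7+; several examples below use the older new MongoClient() constructor instead), a typical filtered query chains a sort and a projection onto the returned FindIterable and iterates it with a cursor:

import static com.mongodb.client.model.Filters.eq;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.Projections;
import com.mongodb.client.model.Sorts;
import org.bson.Document;

public class FindExample {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> users = client.getDatabase("test").getCollection("users");
            // find(Bson filter) is lazy: no query runs until the iterator is consumed
            try (MongoCursor<Document> cursor = users.find(eq("status", "active"))
                    .sort(Sorts.ascending("name"))
                    .projection(Projections.include("name", "status"))
                    .iterator()) {
                while (cursor.hasNext()) {
                    System.out.println(cursor.next().toJson());
                }
            }
        }
    }
}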
From source file:if4031.ServerHandler.java
@Override
public boolean isChannelExist(String channel) throws TException {
    MongoCollection<Document> channelCollection = database.getCollection("Channel");
    return channelCollection.find(eq("name", channel)).first() != null;
}
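find(...).first() pulls the entire first matching document just to test existence. A lighter variant (a sketch, not from the source project; countDocuments is available since driver 3.8) lets the server stop counting after the first match:

import static com.mongodb.client.model.Filters.eq;
import com.mongodb.client.model.CountOptions;

// Returns true as soon as one match is counted; no document is transferred.
public boolean isChannelExist(String channel) {
    return database.getCollection("Channel")
            .countDocuments(eq("name", channel), new CountOptions().limit(1)) > 0;
}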
From source file:info.bunji.mongodb.synces.CollectionExtractor.java
License:Apache License
@Override
protected void execute() throws Exception {
    Set<String> includeFields = config.getIncludeFields();
    Set<String> excludeFields = config.getExcludeFields();
    String index = config.getDestDbName();
    String syncName = config.getSyncName();

    try (MongoClient client = MongoClientService.getClient(config)) {
        logger.info("[{}] start initial import from db [{}]", syncName, config.getMongoDbName());

        // Resolve the source database
        MongoDatabase db = client.getDatabase(config.getMongoDbName());

        Object lastId = null;
        for (String collection : getTargetColectionList(db)) {
            logger.info("[{}] start initial import. [{}]", syncName, collection);
            MongoCollection<Document> conn = db.getCollection(collection);
            BasicDBObject filter = getFilterForInitialImport(new BasicDBObject(), lastId);
            long count = conn.count(filter);

            // Stream every matching document in _id order and emit an INSERT operation for each
            long processed = 0;
            FindIterable<Document> results = conn.find(filter).sort(new BasicDBObject("_id", 1));
            for (Document doc : results) {
                Document filteredDoc = DocumentUtils.applyFieldFilter(doc, includeFields, excludeFields);
                append(new SyncOperation(Operation.INSERT, index, collection, filteredDoc, null));
                if ((++processed % LOGGING_INTERVAL) == 0) {
                    logger.info("[{}] processing initial import. [{}({}/{})]", syncName, collection,
                            processed, count);
                }
            }
            logger.info("[{}] initial import finished. [{}(total:{})]", syncName, collection, processed);
        }

        logger.info("[{}] finish import collection(s).", syncName);
        config.setStatus(Status.RUNNING);
        config.setLastOpTime(timestamp);
        append(SyncOperation.fromConfig(config));
    } catch (Throwable t) {
        config.setStatus(Status.INITIAL_IMPORT_FAILED);
        logger.error("[{}] initial import failed.({})", syncName, t.getMessage(), t);
        throw t;
    }
}
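The lastId argument to getFilterForInitialImport suggests an id-based resumable scan. A common shape for that pattern (a sketch only; lastSeenId and the batch size are hypothetical, not taken from this project) filters on _id greater than the last id seen and keeps the same ascending _id sort:

import static com.mongodb.client.model.Filters.gt;
import com.mongodb.client.model.Sorts;

// Hypothetical resumable scan: fetch the next batch strictly after lastSeenId in _id order.
FindIterable<Document> batch = conn.find(gt("_id", lastSeenId))
        .sort(Sorts.ascending("_id"))
        .limit(1000);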
From source file:io.debezium.connector.mongodb.Replicator.java
License:Apache License
/**
 * Use the given primary to read the oplog.
 *
 * @param primary the connection to the replica set's primary node; may not be null
 */
protected void readOplog(MongoClient primary) {
    BsonTimestamp oplogStart = source.lastOffsetTimestamp(replicaSet.replicaSetName());
    logger.info("Reading oplog for '{}' primary {} starting at {}", replicaSet, primary.getAddress(),
            oplogStart);

    // Include none of the cluster-internal operations and only those events since the previous timestamp ...
    MongoCollection<Document> oplog = primary.getDatabase("local").getCollection("oplog.rs");
    Bson filter = Filters.and(Filters.gt("ts", oplogStart), // start just after our last position
            Filters.exists("fromMigrate", false)); // skip internal movements across shards
    FindIterable<Document> results = oplog.find(filter)
            .sort(new Document("$natural", 1)) // force forwards collection scan
            .oplogReplay(true) // tells Mongo to not rely on indexes
            .noCursorTimeout(true) // don't timeout waiting for events
            .cursorType(CursorType.TailableAwait); // tail and await new data

    // Read as much of the oplog as we can ...
    ServerAddress primaryAddress = primary.getAddress();
    try (MongoCursor<Document> cursor = results.iterator()) {
        while (running.get() && cursor.hasNext()) {
            if (!handleOplogEvent(primaryAddress, cursor.next())) {
                // Something happened, and we're supposed to stop reading
                return;
            }
        }
    }
}
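Tailing oplog.rs directly requires connecting to a replica-set primary and knowing cluster internals. Since MongoDB 3.6 the driver also exposes change streams, which provide a resumable event cursor over the same mechanism; a minimal sketch (the handler body is illustrative only):

import com.mongodb.client.ChangeStreamIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.changestream.ChangeStreamDocument;
import org.bson.Document;

// Blocks on next() until a change arrives; resumeAfter(token) would restart after a failure.
void watchCollection(MongoCollection<Document> collection) {
    ChangeStreamIterable<Document> stream = collection.watch();
    try (MongoCursor<ChangeStreamDocument<Document>> cursor = stream.iterator()) {
        while (cursor.hasNext()) {
            ChangeStreamDocument<Document> event = cursor.next();
            System.out.println(event.getOperationType() + ": " + event.getDocumentKey());
        }
    }
}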
From source file:io.lumeer.storage.mongodb.dao.collection.MongoDataDao.java
License:Open Source License
@Override
public List<DataDocument> getData(final String collectionId, final SearchQuery query) {
    final MongoCollection<Document> collection = dataCollection(collectionId);
    final FindIterable<Document> mongoIterable = collection.find(createFilter(collectionId, query));
    addPaginationToSuggestionQuery(mongoIterable, query);
    final List<DataDocument> foundDocuments = MongoUtils.convertIterableToList(mongoIterable);
    return foundDocuments;
}
From source file:io.lumeer.storage.mongodb.MongoDbStorage.java
License:Open Source License
@Override
public List<DataDocument> search(String collectionName, DataFilter filter, final DataSort sort,
        List<String> attributes, final int skip, int limit) {
    MongoCollection<Document> collection = database.getCollection(collectionName);
    FindIterable<Document> documents = filter != null ? collection.find(filter.<Bson>get())
            : collection.find();
    if (sort != null) {
        documents = documents.sort(sort.<Bson>get());
    }
    if (attributes != null && !attributes.isEmpty()) {
        documents = documents.projection(Projections.fields(Projections.include(attributes)));
    }
    if (skip > 0) {
        documents = documents.skip(skip);
    }
    if (limit > 0) {
        documents = documents.limit(limit);
    }
    return MongoUtils.convertIterableToList(documents);
}
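Each setter above mutates and returns the same lazy FindIterable, so nothing reaches the server until MongoUtils.convertIterableToList iterates it. The driver's own into method performs the same terminal step; a one-line sketch:

import java.util.ArrayList;
import java.util.List;

// Equivalent terminal step: the query executes only when into() starts iterating.
List<Document> results = documents.into(new ArrayList<>());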
From source file:it.terrinoni.hw2.Homework.java
public static void main(String[] args) {
    MongoClient client = new MongoClient();
    MongoDatabase database = client.getDatabase("students");
    MongoCollection<Document> collection = database.getCollection("grades");

    Bson filter = eq("type", "homework");
    Bson sort = ascending(asList("student_id", "score"));
    MongoCursor<Document> cursor = collection.find(filter).sort(sort).iterator();

    double last_student_id = -1;
    try {
        while (cursor.hasNext()) {
            Document doc = cursor.next();
            if (doc.getDouble("student_id") != last_student_id) {
                // First document per student in this sort order is the lowest homework score: delete it
                last_student_id = doc.getDouble("student_id");
                collection.deleteOne(doc);
                System.out.println("Document for " + last_student_id + " with score "
                        + String.valueOf(doc.getDouble("score")) + " eliminated");
            }
            Helpers.printJson(doc);
        }
    } finally {
        cursor.close();
    }
}
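Passing the whole document to deleteOne works here because every field of the filter must match, but it is fragile if the document is large or contains fields that may not round-trip exactly. A tighter variant (a sketch, not from the original homework) filters on _id alone:

import static com.mongodb.client.model.Filters.eq;

// Deletes exactly the document the cursor just returned, matching on its unique _id.
collection.deleteOne(eq("_id", doc.get("_id")));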
From source file:javamongo.TweetMethod.java
public void expand_tweet(String username) {
    if (getUser(username)) {
        collection = db.getCollection("userline");
        // Find every tweet id in the user's userline ...
        try (MongoCursor<Document> cursor = collection.find(eq("username", username)).iterator()) {
            while (cursor.hasNext()) {
                String tweet_id = cursor.next().get("tweet_id").toString();
                // ... and resolve each one against the tweets collection
                MongoCollection<Document> timeline = db.getCollection("tweets");
                try (MongoCursor<Document> cursorTimeline = timeline.find(eq("tweet_id", tweet_id))
                        .iterator()) {
                    while (cursorTimeline.hasNext()) {
                        System.out.println(cursorTimeline.next().get("body"));
                    }
                }
            }
        }
    } else {
        System.out.println("Username doesn't exist");
    }
}
From source file:javamongo.TweetMethod.java
public void timeline() {
    collection = db.getCollection("timeline");
    // Find every tweet id in the timeline, then resolve each body from the tweets collection
    try (MongoCursor<Document> cursor = collection.find(eq("username", username)).iterator()) {
        while (cursor.hasNext()) {
            String tweet_id = cursor.next().get("tweet_id").toString();
            MongoCollection<Document> timeline = db.getCollection("tweets");
            try (MongoCursor<Document> cursorTimeline = timeline.find(eq("tweet_id", tweet_id)).iterator()) {
                while (cursorTimeline.hasNext()) {
                    System.out.println(cursorTimeline.next().get("body"));
                }
            }
        }
    }
}
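Both of the last two examples run one tweets query per tweet_id (an N+1 pattern). Collecting the ids first and resolving them with a single in filter trims this to two queries; a sketch over the same collections:

import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Filters.in;
import java.util.ArrayList;
import java.util.List;

// Gather all tweet ids for the user, then fetch every matching tweet in one query.
List<String> tweetIds = new ArrayList<>();
try (MongoCursor<Document> cursor = collection.find(eq("username", username)).iterator()) {
    while (cursor.hasNext()) {
        tweetIds.add(cursor.next().get("tweet_id").toString());
    }
}
for (Document tweet : db.getCollection("tweets").find(in("tweet_id", tweetIds))) {
    System.out.println(tweet.get("body"));
}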
From source file:javamongo.TweetMethod.java
public void tweet(String text) {
    String uuid = UUID.randomUUID().toString();

    // insert into tweets
    MongoCollection<Document> collectionTweets = db.getCollection("tweets");
    Document docTweets = new Document("tweet_id", uuid).append("username", username).append("body", text);
    collectionTweets.insertOne(docTweets);

    // insert into userline
    MongoCollection<Document> collectionUserline = db.getCollection("userline");
    Document docUserline = new Document("username", username).append("tweet_id", uuid);
    collectionUserline.insertOne(docUserline);

    // insert into timeline
    MongoCollection<Document> collectionTimeline = db.getCollection("timeline");
    Document docTimeline = new Document("username", username).append("tweet_id", uuid);
    collectionTimeline.insertOne(docTimeline);

    // insert into every follower's timeline
    MongoCollection<Document> collectionFollower = db.getCollection("followers");
    try (MongoCursor<Document> cursor = collectionFollower.find(eq("username", username)).iterator()) {
        while (cursor.hasNext()) {
            String follower = cursor.next().getString("followers");
            MongoCollection<Document> followerTimeline = db.getCollection("timeline");
            Document docFollower = new Document("username", follower).append("tweet_id", uuid);
            followerTimeline.insertOne(docFollower);
        }
    }
}
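The fan-out loop issues one insertOne round trip per follower. When follower lists are large, batching the documents and writing them with a single insertMany call cuts the round trips; a sketch under the same collection layout:

import java.util.ArrayList;
import java.util.List;

// Collect one timeline document per follower, then write them in a single batch.
List<Document> fanOut = new ArrayList<>();
try (MongoCursor<Document> cursor = collectionFollower.find(eq("username", username)).iterator()) {
    while (cursor.hasNext()) {
        String follower = cursor.next().getString("followers");
        fanOut.add(new Document("username", follower).append("tweet_id", uuid));
    }
}
if (!fanOut.isEmpty()) {
    db.getCollection("timeline").insertMany(fanOut);
}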
From source file:joliex.mongodb.MongoDbConnector.java
@RequestResponse
public Value find(Value request) throws FaultException {
    Value v = Value.create();
    FindIterable<BsonDocument> iterable = null;
    try {
        String collectionName = request.getFirstChild("collection").strValue();
        MongoCollection<BsonDocument> collection = db.getCollection(collectionName, BsonDocument.class);
        if (request.hasChildren("readConcern")) {
            ReadConcern readConcern = new ReadConcern(
                    ReadConcernLevel.fromString(request.getFirstChild("readConcern").strValue()));
            // withReadConcern returns a new collection instance, so the result must be kept
            collection = collection.withReadConcern(readConcern);
        }
        if (request.hasChildren("filter")) {
            BsonDocument bsonQueryDocument = BsonDocument.parse(request.getFirstChild("filter").strValue());
            prepareBsonQueryData(bsonQueryDocument, request.getFirstChild("filter"));
            printlnJson("Query filter", bsonQueryDocument);
            if (request.hasChildren("sort") && request.hasChildren("limit")) {
                BsonDocument bsonSortDocument = BsonDocument.parse(request.getFirstChild("sort").strValue());
                prepareBsonQueryData(bsonSortDocument, request.getFirstChild("sort"));
                printlnJson("Query sort", bsonSortDocument);
                int limitQuery = request.getFirstChild("limit").intValue();
                iterable = collection.find(bsonQueryDocument).sort(bsonSortDocument).limit(limitQuery);
            }
            if (request.hasChildren("sort") && request.hasChildren("limit") && request.hasChildren("skip")) {
                BsonDocument bsonSortDocument = BsonDocument.parse(request.getFirstChild("sort").strValue());
                prepareBsonQueryData(bsonSortDocument, request.getFirstChild("sort"));
                printlnJson("Query sort", bsonSortDocument);
                int limitQuery = request.getFirstChild("limit").intValue();
                int skipPosition = request.getFirstChild("skip").intValue();
                iterable = collection.find(bsonQueryDocument).sort(bsonSortDocument).limit(limitQuery)
                        .skip(skipPosition);
            }
            if (request.hasChildren("sort") && !request.hasChildren("limit")) {
                BsonDocument bsonSortDocument = BsonDocument.parse(request.getFirstChild("sort").strValue());
                prepareBsonQueryData(bsonSortDocument, request.getFirstChild("sort"));
                printlnJson("Query sort", bsonSortDocument);
                iterable = collection.find(bsonQueryDocument).sort(bsonSortDocument);
            }
            if (!request.hasChildren("sort") && request.hasChildren("limit")) {
                int limitQuery = request.getFirstChild("limit").intValue();
                iterable = collection.find(bsonQueryDocument).limit(limitQuery);
            }
            if (!request.hasChildren("sort") && !request.hasChildren("limit")) {
                iterable = collection.find(bsonQueryDocument);
            }
        } else {
            if (request.hasChildren("sort") && request.hasChildren("limit")) {
                BsonDocument bsonSortDocument = BsonDocument.parse(request.getFirstChild("sort").strValue());
                prepareBsonQueryData(bsonSortDocument, request.getFirstChild("sort"));
                printlnJson("Query sort", bsonSortDocument);
                int findLimit = request.getFirstChild("limit").intValue();
                // apply the requested limit as well as the sort
                iterable = collection.find(new Document()).sort(bsonSortDocument).limit(findLimit);
            }
            if (request.hasChildren("sort") && !request.hasChildren("limit")) {
                BsonDocument bsonSortDocument = BsonDocument.parse(request.getFirstChild("sort").strValue());
                prepareBsonQueryData(bsonSortDocument, request.getFirstChild("sort"));
                printlnJson("Query sort", bsonSortDocument);
                iterable = collection.find(new Document()).sort(bsonSortDocument);
            }
            if (!request.hasChildren("sort") && request.hasChildren("limit")) {
                int limitQuery = request.getFirstChild("limit").intValue();
                iterable = collection.find(new Document()).limit(limitQuery);
            }
            if (!request.hasChildren("sort") && !request.hasChildren("limit")) {
                iterable = collection.find();
            }
        }
        iterable.forEach(new Block<BsonDocument>() {
            @Override
            public void apply(BsonDocument t) {
                Value queryValue = processQueryRow(t);
                printlnJson("Query document", t);
                v.getChildren("document").add(queryValue);
            }
        });
        showLog();
    } catch (MongoException ex) {
        showLog();
        throw new FaultException("MongoException", ex);
    } catch (JsonParseException ex) {
        showLog();
        throw new FaultException("JsonParseException", ex);
    }
    return v;
}
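The branch explosion above exists only because sort, limit, and skip are decided up front. Since every FindIterable modifier returns the iterable itself, the same logic collapses into one chain built conditionally; a sketch using the same request fields:

// Start from the filter (or an empty query) and bolt on each modifier only if requested.
FindIterable<BsonDocument> it = request.hasChildren("filter")
        ? collection.find(BsonDocument.parse(request.getFirstChild("filter").strValue()))
        : collection.find();
if (request.hasChildren("sort")) {
    it = it.sort(BsonDocument.parse(request.getFirstChild("sort").strValue()));
}
if (request.hasChildren("limit")) {
    it = it.limit(request.getFirstChild("limit").intValue());
}
if (request.hasChildren("skip")) {
    it = it.skip(request.getFirstChild("skip").intValue());
}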