List of usage examples for com.mongodb.client.MongoCollection#bulkWrite
BulkWriteResult bulkWrite(List<? extends WriteModel<? extends TDocument>> requests);
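Before the project examples, here is a minimal, self-contained sketch of the call (3.x sync driver). The class name BulkWriteSketch, the database/collection names, and the localhost connection are illustrative assumptions, not taken from any of the sources below; the sketch shows mixing write models in one ordered request and re-running the same models unordered via BulkWriteOptions.

import static java.util.Arrays.asList;

import java.util.List;

import com.mongodb.MongoClient;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteOneModel;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.WriteModel;
import org.bson.Document;

public class BulkWriteSketch {
    public static void main(String[] args) {
        // assumes a mongod listening on localhost:27017; database and collection names are illustrative
        MongoClient mongoClient = new MongoClient();
        MongoCollection<Document> collection = mongoClient.getDatabase("mydb").getCollection("bulkExample");
        collection.drop();

        // mix inserts, an update and a delete in a single round trip
        List<WriteModel<Document>> requests = asList(
                new InsertOneModel<>(new Document("_id", 1).append("x", 1)),
                new InsertOneModel<>(new Document("_id", 2).append("x", 2)),
                new UpdateOneModel<>(new Document("_id", 1), new Document("$set", new Document("x", 10))),
                new DeleteOneModel<>(new Document("_id", 2)));

        // ordered (the default): the driver stops at the first failing operation
        BulkWriteResult result = collection.bulkWrite(requests);
        System.out.println("inserted=" + result.getInsertedCount()
                + " modified=" + result.getModifiedCount()
                + " deleted=" + result.getDeletedCount());

        // unordered: the server may reorder operations and continues past individual failures
        collection.drop();
        collection.bulkWrite(requests, new BulkWriteOptions().ordered(false));

        mongoClient.close();
    }
}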
From source file:com.yahoo.ycsb.db3.MongoDbClient.java
License:Open Source License
/**
 * Insert a record in the database. Any field/value pairs in the specified
 * values HashMap will be written into the record with the specified record
 * key.
 *
 * @param table
 *          The name of the table
 * @param key
 *          The record key of the record to insert.
 * @param values
 *          A HashMap of field/value pairs to insert in the record
 * @return Zero on success, a non-zero error code on error. See the {@link DB}
 *         class's description for a discussion of error codes.
 */
@Override
public Status insert(String table, String key, HashMap<String, ByteIterator> values) {
    try {
        MongoCollection<Document> collection = database.getCollection(table);
        Document toInsert = new Document("_id", key);
        for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
            toInsert.put(entry.getKey(), entry.getValue().toArray());
        }

        if (batchSize == 1) {
            if (useUpsert) {
                // this is effectively an insert, but using an upsert instead due
                // to current inability of the framework to clean up after itself
                // between test runs.
                collection.replaceOne(new Document("_id", toInsert.get("_id")), toInsert, UPDATE_WITH_UPSERT);
            } else {
                collection.insertOne(toInsert);
            }
        } else {
            bulkInserts.add(toInsert);
            if (bulkInserts.size() == batchSize) {
                if (useUpsert) {
                    List<UpdateOneModel<Document>> updates =
                            new ArrayList<UpdateOneModel<Document>>(bulkInserts.size());
                    for (Document doc : bulkInserts) {
                        updates.add(new UpdateOneModel<Document>(
                                new Document("_id", doc.get("_id")), doc, UPDATE_WITH_UPSERT));
                    }
                    collection.bulkWrite(updates);
                } else {
                    collection.insertMany(bulkInserts, INSERT_UNORDERED);
                }
                bulkInserts.clear();
            } else {
                return OptionsSupport.BATCHED_OK;
            }
        }
        return Status.OK;
    } catch (Exception e) {
        System.err.println("Exception while trying bulk insert with " + bulkInserts.size());
        e.printStackTrace();
        return Status.ERROR;
    }
}
From source file:examples.tour.QuickTour.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = new MongoClient();
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new Document("x", 203).append("y", 102));
    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where; 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Aggregation
    collection
            .aggregate(asList(match(gt("i", 0)),
                    project(Document.parse("{ITimes10: {$multiply: ['$i', 10]}}"))))
            .forEach(printBlock);

    myDoc = collection.aggregate(singletonList(group(null, sum("total", "$i")))).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), set("i", 110));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100), inc("i", 100));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(new UpdateOneModel<Document>(new Document("_id", 1),
            new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3),
            new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    //collection.find().forEach(printBlock);

    // Clean up
    database.drop();

    // release resources
    mongoClient.close();
}
From source file:mongodb.QuickTour.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    // represents a pool of connections to the database
    MongoClient mongoClient = new MongoClient("10.9.17.105", 27017);

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("test");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new Document("x", 203).append("y", 102));
    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where; 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(new UpdateOneModel<Document>(new Document("_id", 1),
            new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3),
            new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    collection.find().forEach(printBlock);

    // Clean up
    // database.drop();

    // release resources
    mongoClient.close();
}
From source file:mongoSample.MongoSample.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args
 *            takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    String mongoServer = args[0];

    MongoClient mongoClient = new MongoClient(mongoServer);
    MongoDatabase database = mongoClient.getDatabase("sakila");
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new Document("x", 203).append("y", 102));
    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc);
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where; 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(new UpdateOneModel<Document>(new Document("_id", 1),
            new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3),
            new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    // collection.find().forEach(printBlock);

    // Clean up
    // database.drop();

    // release resources
    mongoClient.close();
}
From source file:org.netbeans.modules.mongodb.util.Importer.java
License:Open Source License
@SuppressWarnings("unchecked") private void importFrom(Reader reader) throws IOException { final MongoCollection<BsonDocument> collection = db.getCollection(properties.getCollection(), BsonDocument.class); final BufferedReader br = new BufferedReader(reader); String line;//from w w w . j ava2 s .com List<WriteModel<BsonDocument>> requests = new ArrayList<>(); while ((line = br.readLine()) != null) { if (Thread.interrupted()) { return; } line = line.trim(); if (line.isEmpty()) { continue; } if (line.charAt(0) == '[') { List<BsonDocument> array = (List<BsonDocument>) Bsons.fromJson(line, BsonType.ARRAY); for (BsonDocument document : array) { requests.add(new InsertOneModel<>(document)); } } else { requests.add(new InsertOneModel<>(BsonDocument.parse(line))); } } if (requests.isEmpty() == false) { collection.bulkWrite(requests); } }
From source file:org.restheart.db.DAOUtils.java
License:Open Source License
public static BulkOperationResult bulkUpsertDocuments(MongoCollection<BsonDocument> coll,
        final BsonArray documents, BsonDocument shardKeys) {
    Objects.requireNonNull(coll);
    Objects.requireNonNull(documents);

    ObjectId newEtag = new ObjectId();

    List<WriteModel<BsonDocument>> wm = getBulkWriteModel(coll, documents, shardKeys, newEtag);

    BulkWriteResult result = coll.bulkWrite(wm);

    return new BulkOperationResult(HttpStatus.SC_OK, newEtag, result);
}
From source file:org.restheart.db.DocumentDAO.java
License:Open Source License
@Override
public BulkOperationResult bulkDeleteDocuments(String dbName, String collName, BsonDocument filter,
        BsonDocument shardedKeys) {
    MongoDatabase mdb = client.getDatabase(dbName);
    MongoCollection<BsonDocument> mcoll = mdb.getCollection(collName, BsonDocument.class);

    List<WriteModel<BsonDocument>> deletes = new ArrayList<>();

    Bson _filter;
    if (shardedKeys != null) {
        _filter = and(filter, shardedKeys);
    } else {
        _filter = filter;
    }

    deletes.add(new DeleteManyModel<>(_filter));

    BulkWriteResult result = mcoll.bulkWrite(deletes);

    return new BulkOperationResult(HttpStatus.SC_OK, null, result);
}
From source file:org.restheart.db.DocumentDAO.java
License:Open Source License
@Override
public BulkOperationResult bulkPatchDocuments(String dbName, String collName, BsonDocument filter,
        BsonDocument shardedKeys, BsonDocument data) {
    MongoDatabase mdb = client.getDatabase(dbName);
    MongoCollection<BsonDocument> mcoll = mdb.getCollection(collName, BsonDocument.class);

    List<WriteModel<BsonDocument>> patches = new ArrayList<>();

    Bson _filter;
    if (shardedKeys != null) {
        _filter = and(filter, shardedKeys);
    } else {
        _filter = filter;
    }

    patches.add(new UpdateManyModel<>(_filter, DAOUtils.getUpdateDocument(data), DAOUtils.U_NOT_UPSERT_OPS));

    BulkWriteResult result = mcoll.bulkWrite(patches);

    return new BulkOperationResult(HttpStatus.SC_OK, null, result);
}
From source file:tour.NewQuickTour.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = new MongoClient();
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");
    database.drop();

    // get a list of the collections in this database and print them out
    List<String> collectionNames = database.listCollectionNames().into(new ArrayList<String>());
    for (final String s : collectionNames) {
        System.out.println(s);
    }

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new Document("x", 203).append("y", 102));
    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc);

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur);
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc);

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // max time
    collection.find().maxTime(1, TimeUnit.SECONDS).first();

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(new UpdateOneModel<Document>(new Document("_id", 1),
            new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3),
            new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));

    // getting a list of databases
    for (String name : mongoClient.listDatabaseNames()) {
        System.out.println(name);
    }

    // drop a database
    mongoClient.dropDatabase("databaseToBeDropped");

    // create a collection
    database.createCollection("cappedCollection",
            new CreateCollectionOptions().capped(true).sizeInBytes(0x100000));

    for (String name : database.listCollectionNames()) {
        System.out.println(name);
    }

    // create an ascending index on the "i" field
    collection.createIndex(new Document("i", 1));

    // list the indexes on the collection
    for (final Document index : collection.listIndexes()) {
        System.out.println(index);
    }

    // create a text index on the "content" field
    collection.createIndex(new Document("content", "text"));

    collection.insertOne(new Document("_id", 0).append("content", "textual content"));
    collection.insertOne(new Document("_id", 1).append("content", "additional content"));
    collection.insertOne(new Document("_id", 2).append("content", "irrelevant content"));

    // Find using the text index
    Document search = new Document("$search", "textual content -irrelevant");
    Document textSearch = new Document("$text", search);
    long matchCount = collection.count(textSearch);
    System.out.println("Text search matches: " + matchCount);

    // Find using the $language operator
    textSearch = new Document("$text", search.append("$language", "english"));
    matchCount = collection.count(textSearch);
    System.out.println("Text search matches (english): " + matchCount);

    // Find the highest scoring match
    Document projection = new Document("score", new Document("$meta", "textScore"));
    myDoc = collection.find(textSearch).projection(projection).first();
    System.out.println("Highest scoring document: " + myDoc);

    // release resources
    mongoClient.close();
}