List of usage examples for com.mongodb.client MongoCollection updateMany
UpdateResult updateMany(Bson filter, Bson update);
UpdateResult updateMany(Bson filter, Bson update, UpdateOptions updateOptions);
UpdateResult updateMany(Bson filter, List<? extends Bson> update);  // aggregation-pipeline form
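For orientation before the project examples, here is a minimal, self-contained sketch of the common filter-plus-update form. The connection target, database, collection, and field names are illustrative placeholders, not taken from any project below:

import static com.mongodb.client.model.Filters.lt;
import static com.mongodb.client.model.Updates.inc;

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.result.UpdateResult;
import org.bson.Document;

public class UpdateManyExample {
    public static void main(String[] args) {
        MongoClient mongoClient = new MongoClient(); // localhost:27017
        MongoCollection<Document> collection =
                mongoClient.getDatabase("mydb").getCollection("test");

        // increment "i" by 100 in every document where i < 100
        UpdateResult result = collection.updateMany(lt("i", 100), inc("i", 100));
        System.out.println("matched: " + result.getMatchedCount()
                + ", modified: " + result.getModifiedCount());

        mongoClient.close();
    }
}

updateMany applies the update to every document matching the filter and reports the matched and modified counts on the returned UpdateResult.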
From source file:com.redhat.thermostat.gateway.common.mongodb.executor.MongoExecutor.java
License:Open Source License
public MongoDataResultContainer execPutRequest(MongoCollection<Document> collection, String body,
        List<String> queries, Set<String> realms) {
    Document inputDocument = Document.parse(body);
    MongoDataResultContainer metaDataContainer = new MongoDataResultContainer();

    Document setDocument = inputDocument.get("set", Document.class);
    setDocument.remove(KeycloakFields.REALMS_KEY);
    final Bson fields = new Document("$set", setDocument);
    final Bson bsonQueries = MongoRequestFilters.buildQuery(queries, realms);

    collection.updateMany(bsonQueries, fields);
    metaDataContainer.setPutReqMatches(collection.count(bsonQueries));
    return metaDataContainer;
}
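Note that this example re-queries the collection (collection.count(bsonQueries)) just to report how many documents matched. The UpdateResult returned by updateMany already carries that number, so a sketch along these lines (same variables as above) avoids the extra round trip, and also avoids counting documents that changed between the two calls:

UpdateResult result = collection.updateMany(bsonQueries, fields);
metaDataContainer.setPutReqMatches(result.getMatchedCount());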
From source file:com.redhat.thermostat.gateway.common.mongodb.MongoStorageHandler.java
License:Open Source License
public void updateOneSystemObject(MongoCollection<Document> collection, final String systemId, String queries,
        String body) {
    Bson sysQuery = buildEq(ThermostatFields.SYSTEM_ID, systemId);
    Bson query = buildAnd(sysQuery, MongoRequestFilters.buildQueriesFilter(queries));

    BasicDBObject inputObject = (BasicDBObject) JSON.parse(body);
    BasicDBObject setObject = (BasicDBObject) inputObject.get(SET_FIELD_NAME);
    if (setObject.containsField(ThermostatFields.SYSTEM_ID)) {
        throw new UnsupportedOperationException(
                "Updating " + ThermostatFields.SYSTEM_ID + " field is not allowed");
    }

    final Bson fields = new Document("$set", setObject);
    collection.updateMany(query, fields);
}
From source file:com.redhat.thermostat.gateway.common.mongodb.MongoStorageHandler.java
License:Open Source License
public void updateOneJvmObject(MongoCollection<Document> collection, final String systemId, final String jvmId,
        String queries, String body) {
    Bson sysQuery = buildEq(ThermostatFields.SYSTEM_ID, systemId);
    Bson jvmQuery = buildEq(ThermostatFields.JVM_ID, jvmId);
    Bson query = buildAnd(buildAnd(sysQuery, jvmQuery), MongoRequestFilters.buildQueriesFilter(queries));

    BasicDBObject inputObject = (BasicDBObject) JSON.parse(body);
    BasicDBObject setObject = (BasicDBObject) inputObject.get(SET_FIELD_NAME);
    if (setObject.containsField(ThermostatFields.SYSTEM_ID)) {
        throw new UnsupportedOperationException(
                "Updating " + ThermostatFields.SYSTEM_ID + " field is not allowed");
    }
    if (setObject.containsField(ThermostatFields.JVM_ID)) {
        throw new UnsupportedOperationException(
                "Updating " + ThermostatFields.JVM_ID + " field is not allowed");
    }

    final Bson fields = new Document("$set", setObject);
    collection.updateMany(query, fields);
}
From source file:com.redhat.thermostat.gateway.service.jvms.mongo.JvmInfoMongoStorageHandler.java
License:Open Source License
public void updateTimestamps(MongoCollection<Document> collection, String body, String systemId,
        Long timeStamp) {
    final Bson filter;
    if (body != null && body.length() > 0) {
        List<String> jvms = (List<String>) JSON.parse(body);
        List<Bson> jvmFilters = new ArrayList<>();
        for (String id : jvms) {
            jvmFilters.add(eq(StorageFields.JVM_ID, id));
        }
        filter = and(eq(StorageFields.SYSTEM_ID, systemId), or(jvmFilters));
    } else {
        filter = eq(StorageFields.SYSTEM_ID, systemId);
    }

    final Bson lastUpdated = new Document(StorageFields.LAST_UPDATED, timeStamp);
    final Bson update = new Document(SET_KEY, lastUpdated);
    collection.updateMany(filter, update);
}
From source file:examples.tour.QuickTour.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;
    if (args.length == 0) {
        // connect to the local database server
        mongoClient = new MongoClient();
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new Document("x", 203).append("y", 102));
    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Aggregation
    collection
            .aggregate(
                    asList(match(gt("i", 0)), project(Document.parse("{ITimes10: {$multiply: ['$i', 10]}}"))))
            .forEach(printBlock);

    myDoc = collection.aggregate(singletonList(group(null, sum("total", "$i")))).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), set("i", 110));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100), inc("i", 100));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));
    collection.bulkWrite(writes);

    collection.drop();
    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    //collection.find().forEach(printBlock);

    // Clean up
    database.drop();

    // release resources
    mongoClient.close();
}
From source file:mongodb.QuickTour.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args unused; the connection target is hard-coded below
 */
public static void main(final String[] args) {
    // represents a pool of connections to the database
    MongoClient mongoClient = new MongoClient("10.9.17.105", 27017);

    // get handle to "test" database
    MongoDatabase database = mongoClient.getDatabase("test");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new Document("x", 203).append("y", 102));
    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));
    collection.bulkWrite(writes);

    collection.drop();
    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    collection.find().forEach(printBlock);

    // Clean up
    // database.drop();

    // release resources
    mongoClient.close();
}
From source file:mongoSample.MongoSample.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes a single required argument: the MongoDB server host
 */
public static void main(final String[] args) {
    String mongoServer = args[0];
    MongoClient mongoClient = new MongoClient(mongoServer);
    MongoDatabase database = mongoClient.getDatabase("sakila");
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new Document("x", 203).append("y", 102));
    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc);
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));
    collection.bulkWrite(writes);

    collection.drop();
    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    // collection.find().forEach(printBlock);

    // Clean up
    // database.drop();

    // release resources
    mongoClient.close();
}
From source file:net.acesinc.nifi.processors.mongodb.PartialUpdateMongo.java
protected UpdateResult performSingleUpdate(Document query, Document updateDocument, ProcessContext context,
        ProcessSession session) {
    final ProcessorLog logger = getLogger();
    StopWatch watch = new StopWatch(true);

    final String mode = context.getProperty(MODE).getValue();
    final WriteConcern writeConcern = getWriteConcern(context);
    final MongoCollection<Document> collection = getCollection(context).withWriteConcern(writeConcern);

    UpdateResult result = null;
    if (!updateDocument.isEmpty()) {
        watch.start();
        switch (mode) {
        case MODE_SINGLE:
            result = collection.updateOne(query, updateDocument);
            break;
        case MODE_MANY:
            result = collection.updateMany(query, updateDocument);
            break;
        }
        watch.stop();
        logger.info("Running Mongo Update with query: " + query + " and document: " + updateDocument
                + " took " + watch.getDuration(TimeUnit.MILLISECONDS) + "ms");
        return result;
    } else {
        // nothing to do
        return null;
    }
}
From source file:net.springfieldusa.storage.mongodb.comp.MongoStorageComponent.java
License:Open Source License
@Override
public <T extends EntityObject> long update(String collection, String query, T data) {
    Document jsonQuery = Document.parse(query);
    MongoCollection<Document> mongoCollection = getCollection(collection);

    Document document = new Document(data.getAttributes());
    document.put(ID, data.getId());
    document.put(META, data.getMeta());
    document.put(RELATIONSHIPS, createRelationships(data));

    // updateMany requires an update document built from update operators such as $set;
    // the server rejects a plain replacement-style document, so wrap it here
    UpdateResult result = mongoCollection.updateMany(jsonQuery, new Document("$set", document));
    return result.getMatchedCount();
}
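None of the examples on this page passes UpdateOptions. For completeness, a sketch of the three-argument overload with upsert enabled; collection is assumed to be a MongoCollection<Document>, and the field names "status" and "archived" are placeholders:

import static com.mongodb.client.model.Filters.eq;
import com.mongodb.client.model.UpdateOptions;

UpdateResult result = collection.updateMany(
        eq("status", "stale"),
        new Document("$set", new Document("archived", true)),
        new UpdateOptions().upsert(true));

With upsert(true), a single new document is inserted when nothing matches the filter; result.getUpsertedId() is then non-null.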
From source file:org.kaaproject.kaa.server.datamigration.CtlNotificationMigration.java
License:Apache License
@Override
protected List<Schema> transform() throws SQLException {
    List<Schema> res = super.transform();
    if (nosql.equals(Options.DEFAULT_NO_SQL)) {
        MongoDatabase database = client.getDatabase(dbName);
        MongoCollection<Document> notification = database.getCollection("notification");
        MongoCollection<Document> enpNotification = database.getCollection("endpoint_notification");

        FindIterable<Document> cursor = notification.find();
        for (Document document : cursor) {
            Object id = document.get("_id");
            Long schemaId = parseLong((String) document.get("notification_schema_id"));
            notification.updateMany(Filters.eq("_id", id),
                    Filters.eq("$set", Filters.eq("notification_schema_id", schemaId + idShift)));
        }

        cursor = enpNotification.find();
        for (Document document : cursor) {
            Object id = document.get("_id");
            Long schemaId = parseLong((String) document.get("notification.notification_schema_id"));
            // target the endpoint_notification collection; the original code updated the
            // notification collection here, which looks like a copy-paste slip
            enpNotification.updateMany(Filters.eq("_id", id),
                    Filters.eq("$set", Filters.eq("notification.notification_schema_id", schemaId + idShift)));
        }
    } else {
        Session session = cluster.connect(dbName);
        BatchStatement batchStatement = new BatchStatement();

        // notification
        ResultSet results = session.execute(select().from("notification"));
        for (Row row : results) {
            String id = row.getString("nf_id");
            Long schemaId = parseLong(row.getString("schema_id"));
            String[] ids = id.split("::");
            batchStatement.add(update("notification").with(set("schema_id", String.valueOf(schemaId + idShift)))
                    .where(eq("topic_id", ids[0])).and(eq("nf_type", ids[1]))
                    .and(eq("nf_version", Integer.valueOf(ids[2])))
                    .and(eq("seq_num", Integer.valueOf(ids[3]))));
        }

        // endpoint notification
        results = session.execute(select().from("ep_nfs"));
        for (Row row : results) {
            String id = row.getString("nf_id");
            Long schemaId = parseLong(row.getString("schema_id"));
            String[] ids = id.split("::");
            ByteBuffer epKeyHash = Bytes.fromHexString(ids[0]);
            Date lastModTime = new Date(Long.valueOf(ids[1]));
            batchStatement.add(update("ep_nfs").with(set("schema_id", String.valueOf(schemaId + idShift)))
                    .where(eq("ep_key_hash", epKeyHash)).and(eq("last_mod_time", lastModTime)));
        }
        session.execute(batchStatement);
        session.close();
        cluster.close();
    }
    return res;
}
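Aside: the Mongo branch above builds its update documents with Filters.eq("$set", ...), which appears to render the intended {$set: {...}} BSON but reads oddly. A more conventional sketch of the first loop's update, under the same assumptions (the notification collection and idShift offset from the code above), uses the Updates builder:

import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Updates.set;

notification.updateMany(eq("_id", id), set("notification_schema_id", schemaId + idShift));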