Usage examples for the `com.mongodb.client.MongoDatabase#drop()` method:
void drop();
From source file: org.kantega.respiro.mongodb.DroppingMongoDBPlugin.java
License:Apache License
public DroppingMongoDBPlugin() { this.modifier = dbp -> { logger.warn(" ******** Attention please *********"); logger.warn(/*w w w.j a va 2 s. c om*/ " You are using the respoiro-mongodb-test-driver-plugin, and you have just modified the MongoDatabase provider to be used for testing. Databases will always be dropped before they are handed over to you."); logger.warn( " If you are using this in production, switch to the respiro-mongodb-plugin if you want your data to survive server restarts."); logger.warn(" "); return dbname -> { MongoDatabase db = dbp.getDatabase(dbname); db.drop(); return db; }; }; }
From source file: org.restheart.db.DatabaseImpl.java
License:Open Source License
/** * * @param cs the client session/*w w w . java 2 s . c om*/ * @param dbName * @param requestEtag * @return */ @Override public OperationResult deleteDatabase(final ClientSession cs, final String dbName, final String requestEtag, final boolean checkEtag) { MongoDatabase mdb = client.getDatabase(dbName); MongoCollection<Document> mcoll = mdb.getCollection(META_COLLNAME); if (checkEtag) { var query = eq("_id", DB_META_DOCID); Document properties = cs == null ? mcoll.find(query).projection(FIELDS_TO_RETURN).first() : mcoll.find(cs, query).projection(FIELDS_TO_RETURN).first(); if (properties != null) { Object oldEtag = properties.get("_etag"); if (oldEtag != null) { if (requestEtag == null) { return new OperationResult(HttpStatus.SC_CONFLICT, oldEtag); } else if (!Objects.equals(oldEtag.toString(), requestEtag)) { return new OperationResult(HttpStatus.SC_PRECONDITION_FAILED, oldEtag); } } } } if (cs == null) { mdb.drop(); } else { mdb.drop(cs); } return new OperationResult(HttpStatus.SC_NO_CONTENT); }
From source file: org.restheart.db.DbsDAO.java
License:Open Source License
/**
 * Drops the database {@code dbName}, optionally enforcing optimistic locking
 * against the {@code _etag} stored in the {@code _properties} collection.
 *
 * @param dbName the database to delete
 * @param requestEtag the etag supplied by the client, compared against the stored one
 * @param checkEtag whether to perform the etag check at all
 * @return 204 on success, 409 when an etag exists but none was supplied,
 *         412 when the supplied etag does not match
 */
@Override
public OperationResult deleteDatabase(final String dbName, final String requestEtag, final boolean checkEtag) {
    final MongoDatabase database = client.getDatabase(dbName);
    final MongoCollection<Document> propsColl = database.getCollection("_properties");

    if (checkEtag) {
        final Document props = propsColl.find(eq("_id", "_properties")).projection(FIELDS_TO_RETURN).first();
        final Object storedEtag = props == null ? null : props.get("_etag");
        if (storedEtag != null) {
            // An etag exists: the client must have sent a matching one.
            if (requestEtag == null) {
                return new OperationResult(HttpStatus.SC_CONFLICT, storedEtag);
            }
            if (!Objects.equals(storedEtag.toString(), requestEtag)) {
                return new OperationResult(HttpStatus.SC_PRECONDITION_FAILED, storedEtag);
            }
        }
    }

    mdb_dropUnconditionally(database);
    return new OperationResult(HttpStatus.SC_NO_CONTENT);
}

// One-line helper: the actual drop, kept separate for symmetry with the session-aware overload.
private static void mdb_dropUnconditionally(final MongoDatabase database) {
    database.drop();
}
From source file: tour.JavaIntroduction.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * End-to-end tour of the MongoDB Spark connector: saving RDDs (with default and
 * custom WriteConfig), loading RDDs (with default and custom ReadConfig),
 * aggregation-pipeline filtering, Datasets/DataFrames with inferred and
 * bean-declared schemas, Spark SQL, and ObjectId helper functions.
 *
 * @param args takes an optional single argument for the connection string
 * @throws InterruptedException if a latch is interrupted
 */
public static void main(final String[] args) throws InterruptedException {
    JavaSparkContext jsc = createJavaSparkContext(args);

    // Create a RDD
    JavaRDD<Document> documents = jsc.parallelize(asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
            .map(new Function<Integer, Document>() {
                @Override
                public Document call(final Integer i) throws Exception {
                    return Document.parse("{test: " + i + "}");
                }
            });

    // Saving data from an RDD to MongoDB
    MongoSpark.save(documents);

    // Saving data with a custom WriteConfig
    Map<String, String> writeOverrides = new HashMap<String, String>();
    writeOverrides.put("collection", "spark");
    writeOverrides.put("writeConcern.w", "majority");
    WriteConfig writeConfig = WriteConfig.create(jsc).withOptions(writeOverrides);

    JavaRDD<Document> sparkDocuments = jsc.parallelize(asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
            .map(new Function<Integer, Document>() {
                @Override
                public Document call(final Integer i) throws Exception {
                    return Document.parse("{spark: " + i + "}");
                }
            });

    // Saving data from an RDD to MongoDB
    MongoSpark.save(sparkDocuments, writeConfig);

    // Loading and analyzing data from MongoDB
    JavaMongoRDD<Document> rdd = MongoSpark.load(jsc);
    System.out.println(rdd.count());
    System.out.println(rdd.first().toJson());

    // Loading data with a custom ReadConfig
    Map<String, String> readOverrides = new HashMap<String, String>();
    readOverrides.put("collection", "spark");
    readOverrides.put("readPreference.name", "secondaryPreferred");
    ReadConfig readConfig = ReadConfig.create(jsc).withOptions(readOverrides);
    JavaMongoRDD<Document> customRdd = MongoSpark.load(jsc, readConfig);
    System.out.println(customRdd.count());
    System.out.println(customRdd.first().toJson());

    // Filtering an rdd using an aggregation pipeline before passing data to Spark
    JavaMongoRDD<Document> aggregatedRdd = rdd
            .withPipeline(singletonList(Document.parse("{ $match: { test : { $gt : 5 } } }")));
    System.out.println(aggregatedRdd.count());
    System.out.println(aggregatedRdd.first().toJson());

    // Datasets

    // Drop database
    dropDatabase(getMongoClientURI(args));

    // Add Sample Data
    // NOTE(review): several of these names look garbled (e.g. "Kli", "in", "Fli") —
    // likely an extraction artifact of accented names from the upstream example; confirm
    // against the original source before relying on the data values.
    List<String> characters = asList("{'name': 'Bilbo Baggins', 'age': 50}", "{'name': 'Gandalf', 'age': 1000}",
            "{'name': 'Thorin', 'age': 195}", "{'name': 'Balin', 'age': 178}", "{'name': 'Kli', 'age': 77}",
            "{'name': 'Dwalin', 'age': 169}", "{'name': 'in', 'age': 167}", "{'name': 'Glin', 'age': 158}",
            "{'name': 'Fli', 'age': 82}", "{'name': 'Bombur'}");
    MongoSpark.save(jsc.parallelize(characters).map(new Function<String, Document>() {
        @Override
        public Document call(final String json) throws Exception {
            return Document.parse(json);
        }
    }));

    // Load inferring schema
    Dataset<Row> df = MongoSpark.load(jsc).toDF();
    df.printSchema();
    df.show();

    // Declare the Schema via a Java Bean
    SparkSession sparkSession = SparkSession.builder().getOrCreate();
    Dataset<Row> explicitDF = MongoSpark.load(jsc).toDF(Character.class);
    explicitDF.printSchema();

    // SQL
    explicitDF.registerTempTable("characters");
    Dataset<Row> centenarians = sparkSession.sql("SELECT name, age FROM characters WHERE age >= 100");

    // Saving DataFrame
    MongoSpark.write(centenarians).option("collection", "hundredClub").save();
    MongoSpark.load(sparkSession, ReadConfig.create(sparkSession).withOption("collection", "hundredClub"),
            Character.class).show();

    // Drop database
    MongoConnector.apply(jsc.sc()).withDatabaseDo(ReadConfig.create(sparkSession),
            new Function<MongoDatabase, Void>() {
                @Override
                public Void call(final MongoDatabase db) throws Exception {
                    db.drop();
                    return null;
                }
            });

    String objectId = "123400000000000000000000";
    List<Document> docs = asList(new Document("_id", new ObjectId(objectId)).append("a", 1),
            new Document("_id", new ObjectId()).append("a", 2));
    MongoSpark.save(jsc.parallelize(docs));

    // Set the schema using the ObjectId helper
    StructType schema = DataTypes.createStructType(asList(StructFields.objectId("_id", false),
            DataTypes.createStructField("a", DataTypes.IntegerType, false)));

    // Create a dataframe with the helper functions registered
    df = MongoSpark.read(sparkSession).schema(schema).option("registerSQLHelperFunctions", "true").load();

    // Query using the ObjectId string
    df.filter(format("_id = ObjectId('%s')", objectId)).show();
}
From source file: tour.NewQuickTour.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * Tour of the MongoDB Java driver's CRUD surface: connecting, dropping and
 * creating databases/collections, inserting one and many documents, query
 * filters and cursors, bulk writes, indexes, and text search.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = new MongoClient();
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");
    // start from a clean slate for this demo
    database.drop();

    // get a list of the collections in this database and print them out
    List<String> collectionNames = database.listCollectionNames().into(new ArrayList<String>());
    for (final String s : collectionNames) {
        System.out.println(s);
    }

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));
    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc);

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        // cursors must be closed explicitly when iterated manually
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur);
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc);

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // max time
    collection.find().maxTime(1, TimeUnit.SECONDS).first();

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    // unordered bulk writes: same models, but the server may apply them in any order
    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));

    // getting a list of databases
    for (String name : mongoClient.listDatabaseNames()) {
        System.out.println(name);
    }

    // drop a database
    mongoClient.dropDatabase("databaseToBeDropped");

    // create a collection
    database.createCollection("cappedCollection",
            new CreateCollectionOptions().capped(true).sizeInBytes(0x100000));

    for (String name : database.listCollectionNames()) {
        System.out.println(name);
    }

    // create an ascending index on the "i" field
    collection.createIndex(new Document("i", 1));

    // list the indexes on the collection
    for (final Document index : collection.listIndexes()) {
        System.out.println(index);
    }

    // create a text index on the "content" field
    collection.createIndex(new Document("content", "text"));

    collection.insertOne(new Document("_id", 0).append("content", "textual content"));
    collection.insertOne(new Document("_id", 1).append("content", "additional content"));
    collection.insertOne(new Document("_id", 2).append("content", "irrelevant content"));

    // Find using the text index
    Document search = new Document("$search", "textual content -irrelevant");
    Document textSearch = new Document("$text", search);
    long matchCount = collection.count(textSearch);
    System.out.println("Text search matches: " + matchCount);

    // Find using the $language operator
    textSearch = new Document("$text", search.append("$language", "english"));
    matchCount = collection.count(textSearch);
    System.out.println("Text search matches (english): " + matchCount);

    // Find the highest scoring match
    Document projection = new Document("score", new Document("$meta", "textScore"));
    myDoc = collection.find(textSearch).projection(projection).first();
    System.out.println("Highest scoring document: " + myDoc);

    // release resources
    mongoClient.close();
}
From source file: tour.PojoQuickTour.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * Tour of the driver's POJO support: registering a PojoCodecProvider so that
 * Person/Address beans are mapped to and from BSON automatically, then running
 * the usual CRUD operations against a typed collection.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = MongoClients.create();
    } else {
        mongoClient = MongoClients.create(args[0]);
    }

    // create codec registry for POJOs
    CodecRegistry pojoCodecRegistry = fromRegistries(MongoClientSettings.getDefaultCodecRegistry(),
            fromProviders(PojoCodecProvider.builder().automatic(true).build()));

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb").withCodecRegistry(pojoCodecRegistry);

    // get a handle to the "people" collection
    MongoCollection<Person> collection = database.getCollection("people", Person.class);

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Person ada = new Person("Ada Byron", 20, new Address("St James Square", "London", "W1"));
    System.out.println("Original Person Model: " + ada);
    collection.insertOne(ada);

    // Person will now have an ObjectId
    System.out.println("Mutated Person Model: " + ada);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Person somebody = collection.find().first();
    System.out.println(somebody);

    // now, lets add some more people so we can explore queries and cursors
    List<Person> people = asList(
            new Person("Charles Babbage", 45, new Address("5 Devonshire Street", "London", "W11")),
            new Person("Alan Turing", 28, new Address("Bletchley Hall", "Bletchley Park", "MK12")),
            new Person("Timothy Berners-Lee", 61, new Address("Colehill", "Wimborne", null)));
    collection.insertMany(people);
    System.out.println("total # of people " + collection.countDocuments());

    System.out.println("");
    // lets get all the documents in the collection and print them out
    Block<Person> printBlock = new Block<Person>() {
        @Override
        public void apply(final Person person) {
            System.out.println(person);
        }
    };
    collection.find().forEach(printBlock);

    System.out.println("");
    // now use a query to get 1 document out
    somebody = collection.find(eq("address.city", "Wimborne")).first();
    System.out.println(somebody);

    System.out.println("");
    // now lets find every over 30
    collection.find(gt("age", 30)).forEach(printBlock);

    System.out.println("");
    // Update One
    collection.updateOne(eq("name", "Ada Byron"), combine(set("age", 23), set("name", "Ada Lovelace")));

    System.out.println("");
    // Update Many
    UpdateResult updateResult = collection.updateMany(not(eq("zip", null)), set("zip", null));
    System.out.println(updateResult.getModifiedCount());

    System.out.println("");
    // Replace One
    updateResult = collection.replaceOne(eq("name", "Ada Lovelace"), ada);
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("address.city", "Wimborne"));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(eq("address.city", "London"));
    System.out.println(deleteResult.getDeletedCount());

    // Clean up
    database.drop();

    // release resources
    mongoClient.close();
}