Example usage for com.mongodb.client MongoCollection deleteOne

List of usage examples for com.mongodb.client MongoCollection deleteOne

Introduction

On this page you can find example usage for com.mongodb.client MongoCollection deleteOne.

Prototype

DeleteResult deleteOne(Bson filter);

Document

Removes at most one document from the collection that matches the given filter.
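For reference, here is a minimal, self-contained sketch of calling deleteOne. The connection string, database name "testdb", collection name "users", and filter values are placeholders, not taken from the examples below. The returned DeleteResult reports how many documents were removed.

import static com.mongodb.client.model.Filters.eq;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.result.DeleteResult;
import org.bson.Document;

public class DeleteOneExample {
    public static void main(String[] args) {
        // Placeholder connection string; adjust for your deployment.
        try (MongoClient mongoClient = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> users = mongoClient.getDatabase("testdb").getCollection("users");

            // Delete at most one document whose "username" field equals "alice123".
            DeleteResult result = users.deleteOne(eq("username", "alice123"));

            // getDeletedCount() is 0 when no document matched the filter.
            System.out.println("Deleted count: " + result.getDeletedCount());
        }
    }
}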

Usage

From source file:database.BFIdataTable.java

public void delete(String attr, Object val) {
    MongoCollection<Document> booklist = db.getCollection(col_name);
    // Remove at most one document whose attribute equals the given value
    // (eq is a static import of com.mongodb.client.model.Filters.eq).
    booklist.deleteOne(eq(attr, val));
}

From source file:documentation.ChangeStreamSamples.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = MongoClients.create("mongodb://localhost:27017,localhost:27018,localhost:27019");
    } else {
        mongoClient = MongoClients.create(args[0]);
    }

    // Select the MongoDB database.
    MongoDatabase database = mongoClient.getDatabase("testChangeStreams");
    database.drop();
    sleep();

    // Select the collection to query.
    MongoCollection<Document> collection = database.getCollection("documents");

    /*
     * Example 1
     * Create a simple change stream against an existing collection.
     */
    System.out.println("1. Initial document from the Change Stream:");

    // Create the change stream cursor.
    MongoChangeStreamCursor<ChangeStreamDocument<Document>> cursor = collection.watch().cursor();

    // Insert a test document into the collection.
    collection.insertOne(Document.parse("{username: 'alice123', name: 'Alice'}"));
    ChangeStreamDocument<Document> next = cursor.next();
    System.out.println(next);
    cursor.close();
    sleep();

    /*
     * Example 2
     * Create a change stream with 'lookup' option enabled.
     * The test document will be returned with a full version of the updated document.
     */
    System.out.println("2. Document from the Change Stream, with lookup enabled:");

    // Create the change stream cursor.
    cursor = collection.watch().fullDocument(FullDocument.UPDATE_LOOKUP).cursor();

    // Update the test document.
    collection.updateOne(Document.parse("{username: 'alice123'}"),
            Document.parse("{$set : { email: 'alice@example.com'}}"));

    // Block until the next result is returned
    next = cursor.next();
    System.out.println(next);
    cursor.close();
    sleep();

    /*
     * Example 3
     * Create a change stream with 'lookup' option using a $match and ($redact or $project) stage.
     */
    System.out.println(
            "3. Document from the Change Stream, with lookup enabled, matching `update` operations only: ");

    // Insert some dummy data.
    collection.insertMany(asList(Document.parse("{updateMe: 1}"), Document.parse("{replaceMe: 1}")));

    // Create $match pipeline stage.
    List<Bson> pipeline = singletonList(
            Aggregates.match(Filters.or(Document.parse("{'fullDocument.username': 'alice123'}"),
                    Filters.in("operationType", asList("update", "replace", "delete")))));

    // Create the change stream cursor with $match.
    cursor = collection.watch(pipeline).fullDocument(FullDocument.UPDATE_LOOKUP).cursor();

    // Forward to the end of the change stream
    next = cursor.tryNext();

    // Update the test document.
    collection.updateOne(Filters.eq("updateMe", 1), Updates.set("updated", true));
    next = cursor.next();
    System.out.println(format("Update operationType: %s %n %s", next.getUpdateDescription(), next));

    // Replace the test document.
    collection.replaceOne(Filters.eq("replaceMe", 1), Document.parse("{replaced: true}"));
    next = cursor.next();
    System.out.println(format("Replace operationType: %s", next));

    // Delete the test document.
    collection.deleteOne(Filters.eq("username", "alice123"));
    next = cursor.next();
    System.out.println(format("Delete operationType: %s", next));
    cursor.close();
    sleep();

    /**
     * Example 4
     * Resume a change stream using a resume token.
     */
    System.out.println("4. Document from the Change Stream including a resume token:");

    // Get the resume token from the last document we saw in the previous change stream cursor.
    BsonDocument resumeToken = cursor.getResumeToken();
    System.out.println(resumeToken);

    // Pass the resume token to the resume after function to continue the change stream cursor.
    cursor = collection.watch().resumeAfter(resumeToken).cursor();

    // Insert a test document.
    collection.insertOne(Document.parse("{test: 'd'}"));

    // Block until the next result is returned
    next = cursor.next();
    System.out.println(next);
    cursor.close();
}

From source file:DutyDatabase.DutyScheduleDB.java

License:Open Source License

/**
 * Removes a previously stored calendar from the database.
 * @param id ID of the user.
 * @param calendarName Name of the calendar to be removed.
 */
public void removeScheduledCalendar(String id, String calendarName) {
    // Access the collection of scheduled calendars.
    MongoCollection<Document> collection = db.getCollection("ScheduledCalendars");
    // Remove the specified calendar.
    collection.deleteOne(new Document("name", id + calendarName));
}

From source file:edu.uniandes.ecos.codeaholics.config.DataBaseUtil.java

/***
 * Deletes the given document from the specified collection.
 *
 * @param pRegister
 *            the document to delete
 * @param pCollection
 *            the target collection
 */
public static void delete(Document pRegister, String pCollection) throws MongoWriteException {

    log.debug("Deleting " + pRegister);
    log.debug("In Collection " + pCollection);
    MongoCollection<Document> collection = db.getCollection(pCollection);
    collection.deleteOne(pRegister);
    log.info("-----------------------------------");
    log.info("Successful Delete");
    log.info("-----------------------------------");
}

From source file:eu.vital.vitalcep.restApp.alert.Alerts.java

/**
 * Deletes an alert.
 *
 * @param filterId JSON payload containing the id of the alert to delete
 * @param req the HTTP servlet request
 * @return the HTTP response
 */
@DELETE
@Path("deletealert")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response deleteAlert(String filterId, @Context HttpServletRequest req) throws IOException {
    MongoClient mongo = null;
    MongoDatabase db = null;

    try {
        StringBuilder ck = new StringBuilder();
        Security slogin = new Security();

        Boolean token = slogin.login(req.getHeader("name"), req.getHeader("password"), false, ck);
        if (!token) {
            return Response.status(Response.Status.UNAUTHORIZED).build();
        }
        this.cookie = ck.toString();

        JSONObject jo = new JSONObject(filterId);
        String idjo = jo.getString("id");

        mongo = new MongoClient(new MongoClientURI(mongoURL));
        db = mongo.getDatabase(mongoDB);

        try {
            db.getCollection("alerts");
        } catch (Exception e) {
            //System.out.println("Mongo is down");
            mongo.close();
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
        }

        MongoCollection<Document> coll = db.getCollection("alerts");

        Bson filter = Filters.eq("id", idjo);

        FindIterable<Document> iterable = coll.find(filter);

        String cepInstance;

        CEP cepProcess = new CEP();

        if (iterable != null && iterable.first() != null) {
            Document doc = iterable.first();
            cepInstance = doc.getString("cepinstance");

            MongoCollection<Document> collInstances = db.getCollection("cepinstances");

            ObjectId ci = new ObjectId(cepInstance);
            Bson filterInstances = Filters.eq("_id", ci);

            FindIterable<Document> iterable2 = collInstances.find(filterInstances);

            if (iterable2 != null) {
                Document doc2 = iterable2.first();
                cepProcess.PID = doc2.getInteger("PID");
                cepProcess.fileName = doc2.getString("fileName");
                cepProcess.cepFolder = doc2.getString("cepFolder");
                cepProcess.type = CEP.CEPType.ALERT.toString();
                CepProcess cp = new CepProcess(null, null, null, null);
                cp.PID = doc2.getInteger("PID");

                cepProcess.cp = cp;

                if (!cepProcess.cepDispose()) {
                    java.util.logging.Logger.getLogger(Alerts.class.getName()).log(Level.SEVERE,
                            "bcep Instance not terminated");
                } else {

                    Bson filter1 = Filters.eq("_id", ci);
                    Bson update = new Document("$set", new Document("status", "terminated"));
                    UpdateOptions options = new UpdateOptions().upsert(false);
                    UpdateResult updateDoc = db.getCollection("cepinstances").updateOne(filter1, update,
                            options);

                }
                CepContainer.deleteCepProcess(cp.PID);

            }
        } else {
            return Response.status(Response.Status.NOT_FOUND).build();
        }

        DeleteResult deleteResult = coll.deleteOne(eq("id", idjo));

        if (deleteResult.getDeletedCount() < 1) {
            return Response.status(Response.Status.NOT_FOUND).build();
        } else {
            return Response.status(Response.Status.OK).build();
        }
    } catch (Exception e) {
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
    } finally {
        db = null;
        if (mongo != null) {
            mongo.close();
            mongo = null;
        }
    }
}

From source file:eu.vital.vitalcep.restApp.cepRESTApi.CEPICO.java

/**
 * Deletes a CEPICO.
 *
 * @param filterId JSON payload containing the id of the CEPICO to delete
 * @param req the HTTP servlet request
 * @return the HTTP response
 */
@DELETE
@Path("deletecepico")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response deleteCEPICO(String filterId, @Context HttpServletRequest req) throws IOException {

    StringBuilder ck = new StringBuilder();
    Security slogin = new Security();

    Boolean token = slogin.login(req.getHeader("name"), req.getHeader("password"), false, ck);
    if (!token) {
        return Response.status(Response.Status.UNAUTHORIZED).build();
    }
    this.cookie = ck.toString();

    JSONObject jo = new JSONObject(filterId);
    String idjo = jo.getString("id");

    MongoClient mongo = new MongoClient(new MongoClientURI(mongoURL));
    MongoDatabase db = mongo.getDatabase(mongoDB);

    try {
        db.getCollection("cepicos");
    } catch (Exception e) {
        //System.out.println("Mongo is down");
        mongo.close();
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
    }

    MongoCollection<Document> coll = db.getCollection("cepicos");

    Bson filter = Filters.eq("id", idjo);

    FindIterable<Document> iterable = coll.find(filter);

    String cepInstance;

    CEP cepProcess = new CEP();

    if (iterable != null && iterable.first() != null) {
        Document doc = iterable.first();
        cepInstance = doc.getString("cepinstance");

        MongoCollection<Document> collInstances = db.getCollection("cepinstances");

        ObjectId ci = new ObjectId(cepInstance);
        Bson filterInstances = Filters.eq("_id", ci);

        FindIterable<Document> iterable2 = collInstances.find(filterInstances);

        if (iterable2 != null) {
            Document doc2 = iterable2.first();
            cepProcess.PID = doc2.getInteger("PID");
            cepProcess.fileName = doc2.getString("fileName");
            cepProcess.cepFolder = doc2.getString("cepFolder");
            cepProcess.type = CEP.CEPType.CEPICO.toString();
            CepProcess cp = new CepProcess(null, null, null, null);
            cp.PID = doc2.getInteger("PID");

            cepProcess.cp = cp;

            if (!cepProcess.cepDispose()) {
                java.util.logging.Logger.getLogger(CEPICO.class.getName()).log(Level.SEVERE,
                        "bcep Instance not terminated");
            } else {

                Bson filter1 = Filters.eq("_id", ci);
                Bson update = new Document("$set", new Document("status", "terminated"));
                UpdateOptions options = new UpdateOptions().upsert(false);
                UpdateResult updateDoc = db.getCollection("cepinstances").updateOne(filter1, update, options);

            }

            CepContainer.deleteCepProcess(cp.PID);

        }
    } else {
        return Response.status(Response.Status.NOT_FOUND).build();
    }

    DeleteResult deleteResult = coll.deleteOne(eq("id", idjo));

    if (deleteResult.getDeletedCount() < 1) {
        return Response.status(Response.Status.NOT_FOUND).build();
    } else {
        return Response.status(Response.Status.OK).build();
    }
}

From source file:eu.vital.vitalcep.restApp.filteringApi.ContinuosFiltering.java

/**
 * Deletes a continuous filter.
 *
 * @param filterId JSON payload containing the id of the filter to delete
 * @param req the HTTP servlet request
 * @return the HTTP response
 */
@DELETE
@Path("deletecontinuousfilter")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response deletecontinuousfilter(String filterId, @Context HttpServletRequest req) throws IOException {

    StringBuilder ck = new StringBuilder();
    Security slogin = new Security();

    Boolean token = slogin.login(req.getHeader("name"), req.getHeader("password"), false, ck);
    if (!token) {
        return Response.status(Response.Status.UNAUTHORIZED).build();
    }
    this.cookie = ck.toString();

    JSONObject jo = new JSONObject(filterId);
    String idjo = jo.getString("id");

    MongoClient mongo = new MongoClient(new MongoClientURI(mongoURL));
    MongoDatabase db = mongo.getDatabase(mongoDB);

    try {
        db.getCollection("continuousfilters");
    } catch (Exception e) {
        //System.out.println("Mongo is down");
        db = null;
        if (mongo != null) {
            mongo.close();
            mongo = null;
        }
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();

    }

    MongoCollection<Document> coll = db.getCollection("continuousfilters");

    Bson filter = Filters.eq("id", idjo);

    FindIterable<Document> iterable = coll.find(filter);

    String cepInstance;

    CEP cepProcess = new CEP();

    if (iterable != null && iterable.first() != null) {
        Document doc = iterable.first();
        cepInstance = doc.getString("cepinstance");

        MongoCollection<Document> collInstances = db.getCollection("cepinstances");

        ObjectId ci = new ObjectId(cepInstance);
        Bson filterInstances = Filters.eq("_id", ci);

        FindIterable<Document> iterable2 = collInstances.find(filterInstances);

        if (iterable2 != null) {
            Document doc2 = iterable2.first();
            cepProcess.PID = doc2.getInteger("PID");
            cepProcess.fileName = doc2.getString("fileName");
            cepProcess.cepFolder = doc2.getString("cepFolder");
            cepProcess.type = CEP.CEPType.CONTINUOUS.toString();
            CepProcess cp = new CepProcess(null, null, null, null);
            cp.PID = doc2.getInteger("PID");

            cepProcess.cp = cp;

            if (!cepProcess.cepDispose()) {
                java.util.logging.Logger.getLogger(ContinuosFiltering.class.getName()).log(Level.SEVERE,
                        "bcep Instance not terminated");
            } else {

                Bson filter1 = Filters.eq("_id", ci);
                Bson update = new Document("$set", new Document("status", "terminated"));
                UpdateOptions options = new UpdateOptions().upsert(false);
                UpdateResult updateDoc = db.getCollection("cepinstances").updateOne(filter1, update, options);

            }
            CepContainer.deleteCepProcess(cp.PID);

        }
    }

    DeleteResult deleteResult = coll.deleteOne(eq("id", idjo));
    db = null;
    if (mongo != null) {
        mongo.close();
        mongo = null;
    }
    if (deleteResult.getDeletedCount() < 1) {
        return Response.status(Response.Status.NOT_FOUND).build();
    } else {

        return Response.status(Response.Status.OK).build();
    }
}

From source file:examples.tour.QuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = new MongoClient();
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it back (it's the only document, since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, let's add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // let's get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {

        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Aggregation
    collection
            .aggregate(
                    asList(match(gt("i", 0)), project(Document.parse("{ITimes10: {$multiply: ['$i', 10]}}"))))
            .forEach(printBlock);

    myDoc = collection.aggregate(singletonList(group(null, sum("total", "$i")))).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), set("i", 110));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100), inc("i", 100));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    //collection.find().forEach(printBlock);

    // Clean up
    database.drop();

    // release resources
    mongoClient.close();
}

From source file:it.terrinoni.Controller.java

public static void main(String[] args) {
    MongoClient client = new MongoClient();
    MongoDatabase database = client.getDatabase("photo-sharing");

    MongoCollection<Document> albums = database.getCollection("albums");
    MongoCollection<Document> images = database.getCollection("images");

    albums.createIndex(new Document("images", 1));

    // Get the iterator of the whole collection
    MongoCursor<Document> cursor = images.find().iterator();

    try {
        while (cursor.hasNext()) {
            Document currImg = cursor.next();
            Document foundImg = albums.find(eq("images", currImg.getDouble("_id"))).first();
            if (foundImg == null) {
                // The image is not referenced by any album; remove the orphan.
                //System.out.println(currImg.getDouble("_id") + " deleted.");
                images.deleteOne(currImg);
            }
            //System.out.println(currImg.getDouble("_id") + " is ok.");
        }
    } finally {
        cursor.close();
    }

    long numImgs = images.count(eq("tags", "sunrises"));
    System.out.println("The total number of images with the tag \"sunrises\" after the removal of orphans is: "
            + String.valueOf(numImgs));
}

From source file:it.terrinoni.hw2.Homework.java

public static void main(String[] args) {
    MongoClient client = new MongoClient();
    MongoDatabase database = client.getDatabase("students");
    MongoCollection<Document> collection = database.getCollection("grades");

    Bson filter = eq("type", "homework");
    Bson sort = ascending(asList("student_id", "score"));

    MongoCursor<Document> cursor = collection.find(filter).sort(sort).iterator();

    double last_student_id = -1;

    try {
        while (cursor.hasNext()) {
            Document doc = cursor.next();
            if (doc.getDouble("student_id") != last_student_id) {
                last_student_id = doc.getDouble("student_id");
                collection.deleteOne(doc);
                System.out.println("Document for " + last_student_id + " with score "
                        + String.valueOf(doc.getDouble("score")) + "  eliminated");
            }
            Helpers.printJson(doc);
        }
    } finally {
        cursor.close();
    }

}