Example usage for com.mongodb.client MongoCursor close

Introduction

This page collects usage examples for com.mongodb.client MongoCursor.close() from open-source projects.

Prototype

@Override
void close();
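
MongoCursor extends java.io.Closeable, so a try-with-resources statement can stand in for the explicit try/finally blocks used in the examples below. A minimal sketch, assuming a local mongod on the default port (host, database, and collection names are placeholders):

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import org.bson.Document;

public class CursorCloseSketch {
    public static void main(String[] args) {
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        MongoCollection<Document> collection = mongoClient.getDatabase("test").getCollection("test");

        // the cursor is closed automatically when the block exits,
        // even if hasNext() or next() throws
        try (MongoCursor<Document> cursor = collection.find().iterator()) {
            while (cursor.hasNext()) {
                System.out.println(cursor.next().toJson());
            }
        }

        mongoClient.close();
    }
}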

Usage

From source file:mongodb.QuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {

    //represents a pool of connections to the database
    MongoClient mongoClient = new MongoClient("10.9.17.105", 27017);

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("test");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it back (it's the only document there, since we dropped the collection earlier)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, let's add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // let's get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // use a range query again, this time printing results with a callback Block
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    collection.find().forEach(printBlock);

    // Clean up
    //        database.drop();

    // release resources
    mongoClient.close();
}

From source file:mongoSample.MongoSample.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args
 *            takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    String mongoServer = args[0];

    MongoClient mongoClient = new MongoClient(mongoServer);
    MongoDatabase database = mongoClient.getDatabase("sakila");
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it back (it's the only document there, since we dropped the collection earlier)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, let's add lots of little documents to the collection so we can
    // explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc);
    System.out.println(myDoc.toJson());

    // let's get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // use a range query again, this time printing results with a callback Block
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    // collection.find().forEach(printBlock);

    // Clean up
    //database.drop();

    // release resources
    mongoClient.close();
}

From source file:mongoSample.MyMongoDemo.java

License:Apache License

public static void main(final String[] args) {
    String mongoServer = args[0];

    MongoClient mongoClient = new MongoClient(mongoServer);
    MongoDatabase database = mongoClient.getDatabase("NGDBDemo");
    MongoCollection<Document> collection = database.getCollection("test");

    collection.drop();

    Document people = new Document(); // A document for a person
    people.put("Name", "Guy");
    people.put("Email", "guy@gmail.com");

    BasicDBList friendList = new BasicDBList(); // A list for the person's friends

    BasicDBObject friendDoc = new BasicDBObject(); // A document for each friend
    friendDoc.put("Name", "Jo");
    friendDoc.put("Email", "Jo@gmail.com");
    friendList.add(friendDoc);

    // use a fresh document for the second friend: clear() would empty the
    // instance already stored in the list, and reusing it would make both
    // list entries point at the same object
    friendDoc = new BasicDBObject();
    friendDoc.put("Name", "John");
    friendDoc.put("Email", "john@gmail.com");
    friendList.add(friendDoc);
    people.put("Friends", friendList); // attach the list of friends, not the last friend document

    collection.insertOne(people);

    System.out.println('1');
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }
    System.out.println('2');

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    System.out.println('3');
    // now use a query to get 1 document out
    Document myDoc = collection.find(eq("Name", "Guy")).first();
    System.out.println(myDoc.toJson());

    database = mongoClient.getDatabase("sakila");
    collection = database.getCollection("films");
    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }
    mongoClient.close();
}

From source file:org.apache.nifi.mongodb.MongoDBControllerService.java

License:Apache License

public Document findOne(Document query) {
    MongoCursor<Document> cursor = this.col.find(query).limit(1).iterator();
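    // note: calling next() without checking hasNext() throws NoSuchElementException
    // when the query matches nothing (compare the guarded variant in the next example)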
    Document retVal = cursor.next();
    cursor.close();

    return retVal;
}

From source file:org.apache.nifi.mongodb.MongoDBLookupService.java

License:Apache License

private Document findOne(Document query, Document projection) {
    MongoCollection col = controllerService.getDatabase(databaseName).getCollection(collection);
    MongoCursor<Document> it = (projection != null ? col.find(query).projection(projection) : col.find(query))
            .iterator();
    Document retVal = it.hasNext() ? it.next() : null;
    it.close();

    return retVal;
}

From source file:org.apache.nifi.processors.mongodb.GetMongo.java

License:Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();

    final Document query = context.getProperty(QUERY).isSet()
            ? Document.parse(context.getProperty(QUERY).evaluateAttributeExpressions().getValue())
            : null;
    final Document projection = context.getProperty(PROJECTION).isSet()
            ? Document.parse(context.getProperty(PROJECTION).evaluateAttributeExpressions().getValue())
            : null;
    final Document sort = context.getProperty(SORT).isSet()
            ? Document.parse(context.getProperty(SORT).evaluateAttributeExpressions().getValue())
            : null;
    final String jsonTypeSetting = context.getProperty(JSON_TYPE).getValue();
    configureMapper(jsonTypeSetting);

    final MongoCollection<Document> collection = getCollection(context);

    try {
        final FindIterable<Document> it = query != null ? collection.find(query) : collection.find();
        if (projection != null) {
            it.projection(projection);
        }
        if (sort != null) {
            it.sort(sort);
        }
        if (context.getProperty(LIMIT).isSet()) {
            it.limit(context.getProperty(LIMIT).evaluateAttributeExpressions().asInteger());
        }
        if (context.getProperty(BATCH_SIZE).isSet()) {
            it.batchSize(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger());
        }

        final MongoCursor<Document> cursor = it.iterator();
        ComponentLog log = getLogger();
        try {
            FlowFile flowFile = null;
            if (context.getProperty(RESULTS_PER_FLOWFILE).isSet()) {
                int ceiling = context.getProperty(RESULTS_PER_FLOWFILE).evaluateAttributeExpressions()
                        .asInteger();
                List<Document> batch = new ArrayList<>();

                while (cursor.hasNext()) {
                    batch.add(cursor.next());
                    if (batch.size() == ceiling) {
                        try {
                            if (log.isDebugEnabled()) {
                                log.debug("Writing batch...");
                            }
                            String payload = buildBatch(batch, jsonTypeSetting);
                            writeBatch(payload, context, session);
                            batch = new ArrayList<>();
                        } catch (IOException ex) {
                            getLogger().error("Error building batch", ex);
                        }
                    }
                }
                if (batch.size() > 0) {
                    try {
                        writeBatch(buildBatch(batch, jsonTypeSetting), context, session);
                    } catch (IOException ex) {
                        getLogger().error("Error sending remainder of batch", ex);
                    }
                }
            } else {
                while (cursor.hasNext()) {
                    flowFile = session.create();
                    flowFile = session.write(flowFile, new OutputStreamCallback() {
                        @Override
                        public void process(OutputStream out) throws IOException {
                            String json;
                            if (jsonTypeSetting.equals(JSON_TYPE_STANDARD)) {
                                json = mapper.writerWithDefaultPrettyPrinter()
                                        .writeValueAsString(cursor.next());
                            } else {
                                json = cursor.next().toJson();
                            }
                            IOUtils.write(json, out);
                        }
                    });
                    flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(),
                            "application/json");

                    session.getProvenanceReporter().receive(flowFile, getURI(context));
                    session.transfer(flowFile, REL_SUCCESS);
                }
            }

            session.commit();

        } finally {
            cursor.close();
        }

    } catch (final RuntimeException e) {
        context.yield();
        session.rollback();
        logger.error("Failed to execute query {} due to {}", new Object[] { query, e }, e);
    }
}

From source file:org.apache.nifi.processors.mongodb.gridfs.DeleteGridFS.java

License:Apache License

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = session.get();
    if (input == null) {
        return;
    }

    final String deleteQuery = getQuery(context, input);
    final String queryAttribute = context.getProperty(QUERY_ATTRIBUTE).isSet()
            ? context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(input).getValue()
            : null;
    GridFSBucket bucket = getBucket(input, context);

    try {
        Document query = Document.parse(deleteQuery);
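        // note: cursor.close() below only runs on the non-exception path;
        // a finally block or try-with-resources would guarantee cleanup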
        MongoCursor<GridFSFile> cursor = bucket.find(query).iterator();
        if (cursor.hasNext()) {
            GridFSFile file = cursor.next();
            bucket.delete(file.getObjectId());

            if (!StringUtils.isEmpty(queryAttribute)) {
                input = session.putAttribute(input, queryAttribute, deleteQuery);
            }

            session.transfer(input, REL_SUCCESS);
        } else {
            getLogger().error(String.format("Query %s did not delete anything in %s", deleteQuery,
                    bucket.getBucketName()));
            session.transfer(input, REL_FAILURE);
        }

        cursor.close();
    } catch (Exception ex) {
        getLogger().error(String.format("Error deleting using query: %s", deleteQuery), ex);
        session.transfer(input, REL_FAILURE);
    }
}

From source file:org.apache.nifi.processors.mongodb.gridfs.GridFSITTestBase.java

License:Apache License

public boolean fileExists(String name, String bucketName) {
    GridFSBucket bucket = GridFSBuckets.create(client.getDatabase(DB), bucketName);
    MongoCursor<GridFSFile> it = bucket.find(Document.parse(String.format("{ \"filename\": \"%s\" }", name))).iterator();
    boolean retVal = it.hasNext();
    it.close();

    return retVal;
}

From source file:org.apache.nifi.processors.mongodb.gridfs.GridFSITTestBase.java

License:Apache License

public boolean fileHasProperties(String name, String bucketName, Map<String, String> attrs) {
    GridFSBucket bucket = GridFSBuckets.create(client.getDatabase(DB), bucketName);
    MongoCursor<GridFSFile> it = bucket.find(Document.parse(String.format("{ \"filename\": \"%s\" }", name))).iterator();
    boolean retVal = false;

    if (it.hasNext()) {
        GridFSFile file = it.next();
        Document metadata = file.getMetadata();
        if (metadata != null && metadata.size() == attrs.size()) {
            retVal = true;
            for (Map.Entry<String, Object> entry : metadata.entrySet()) {
                Object val = attrs.get(entry.getKey());
                if (val == null || !entry.getValue().equals(val)) {
                    retVal = false;
                    break;
                }
            }
        }
    }

    it.close();

    return retVal;
}

From source file:org.apache.nifi.processors.mongodb.RunMongoAggregation.java

License:Apache License

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = null;
    if (context.hasIncomingConnection()) {
        flowFile = session.get();

        if (flowFile == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    final String query = context.getProperty(QUERY).evaluateAttributeExpressions(flowFile).getValue();
    final Boolean allowDiskUse = context.getProperty(ALLOW_DISK_USE).asBoolean();
    final String queryAttr = context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(flowFile)
            .getValue();
    final Integer batchSize = context.getProperty(BATCH_SIZE).asInteger();
    final Integer resultsPerFlowfile = context.getProperty(RESULTS_PER_FLOWFILE).asInteger();
    final String jsonTypeSetting = context.getProperty(JSON_TYPE).getValue();
    final String dateFormat = context.getProperty(DATE_FORMAT).evaluateAttributeExpressions(flowFile)
            .getValue();

    configureMapper(jsonTypeSetting, dateFormat);

    Map<String, String> attrs = new HashMap<>();
    if (queryAttr != null && queryAttr.trim().length() > 0) {
        attrs.put(queryAttr, query);
    }

    MongoCursor<Document> iter = null;

    try {
        MongoCollection<Document> collection = getCollection(context, flowFile);
        List<Bson> aggQuery = buildAggregationQuery(query);
        AggregateIterable<Document> it = collection.aggregate(aggQuery).allowDiskUse(allowDiskUse);
        it.batchSize(batchSize != null ? batchSize : 1);

        iter = it.iterator();
        List<Document> batch = new ArrayList<>();

        while (iter.hasNext()) {
            batch.add(iter.next());
            if (batch.size() == resultsPerFlowfile) {
                writeBatch(buildBatch(batch), flowFile, context, session, attrs, REL_RESULTS);
                batch = new ArrayList<>();
            }
        }

        if (batch.size() > 0) {
            writeBatch(buildBatch(batch), flowFile, context, session, attrs, REL_RESULTS);
        }

        if (flowFile != null) {
            session.transfer(flowFile, REL_ORIGINAL);
        }
    } catch (Exception e) {
        getLogger().error("Error running MongoDB aggregation query.", e);
        if (flowFile != null) {
            session.transfer(flowFile, REL_FAILURE);
        }
    } finally {
        if (iter != null) {
            iter.close();
        }
    }
}