Example usage for com.mongodb.client MongoCollection count

List of usage examples for com.mongodb.client MongoCollection count

Introduction

This page lists usage examples for com.mongodb.client MongoCollection count.

Prototype

@Deprecated
long count();

@Deprecated
long count(Bson filter);

Source Link

Document

Counts the number of documents in the collection, optionally restricted to documents matching a filter. These count() overloads are deprecated since driver 3.8 in favor of countDocuments() and estimatedDocumentCount(); the examples below use the no-argument and Bson-filter forms.
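
A minimal, self-contained sketch of the two overloads exercised by the examples below, assuming the 3.x Java driver, a mongod listening on localhost:27017, and a hypothetical "test.users" collection:

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.Filters;
import org.bson.Document;

public class CountExample {
    public static void main(String[] args) {
        // assumes a mongod listening on localhost:27017
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        try {
            MongoDatabase db = mongoClient.getDatabase("test");            // hypothetical database
            MongoCollection<Document> users = db.getCollection("users");   // hypothetical collection

            // count every document in the collection
            long total = users.count();

            // count only the documents matching a filter
            long active = users.count(Filters.eq("status", "active"));

            System.out.println("total=" + total + ", active=" + active);
        } finally {
            mongoClient.close();
        }
    }
}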

Usage

From source file:info.bunji.mongodb.synces.CollectionExtractor.java

License:Apache License

@Override
protected void execute() throws Exception {

    Set<String> includeFields = config.getIncludeFields();
    Set<String> excludeFields = config.getExcludeFields();
    String index = config.getDestDbName();
    String syncName = config.getSyncName();

    // open a client and run the initial import
    try (MongoClient client = MongoClientService.getClient(config)) {

        //=====================
        logger.info("[{}] start initial import from db [{}]", syncName, config.getMongoDbName());

        // get the target database
        MongoDatabase db = client.getDatabase(config.getMongoDbName());

        // import each target collection
        Object lastId = null;
        for (String collection : getTargetColectionList(db)) {
            logger.info("[{}] start initial import. [{}]", syncName, collection);

            MongoCollection<Document> conn = db.getCollection(collection);
            BasicDBObject filter = getFilterForInitialImport(new BasicDBObject(), lastId);

            long count = conn.count(filter);

            long processed = 0;
            FindIterable<Document> results = conn.find(filter).sort(new BasicDBObject("_id", 1));
            for (Document doc : results) {
                Document filteredDoc = DocumentUtils.applyFieldFilter(doc, includeFields, excludeFields);
                append(new SyncOperation(Operation.INSERT, index, collection, filteredDoc, null));
                if ((++processed % LOGGING_INTERVAL) == 0) {
                    logger.info("[{}] processing initial import. [{}({}/{})]", syncName, collection, processed,
                            count);
                }
            }
            logger.info("[{}] initial import finished. [{}(total:{})]", syncName, collection, processed);
        }
        //=====================
        logger.info("[{}] finish import collection(s).", syncName);

        //         append(DocumentUtils.makeStatusOperation(Status.RUNNING, config, timestamp));
        config.setStatus(Status.RUNNING);
        config.setLastOpTime(timestamp);
        //         config.setLastSyncTime(timestamp);
        //         append(DocumentUtils.makeStatusOperation(config));
        append(SyncOperation.fromConfig(config));

    } catch (Throwable t) {
        config.setStatus(Status.INITIAL_IMPORT_FAILED);
        logger.error("[{}] initial import failed.({})", syncName, t.getMessage(), t);
        throw t;
    }
}

From source file:io.lumeer.storage.mongodb.MongoDbStorage.java

License:Open Source License

@Override
public long count(final String collectionName, final DataFilter filter) {
    MongoCollection<Document> collection = database.getCollection(collectionName);

    return filter != null ? collection.count(filter.<Bson>get()) : collection.count();
}

From source file:it.terrinoni.Controller.java

public static void main(String[] args) {
    MongoClient client = new MongoClient();
    MongoDatabase database = client.getDatabase("photo-sharing");

    MongoCollection<Document> albums = database.getCollection("albums");
    MongoCollection<Document> images = database.getCollection("images");

    albums.createIndex(new Document("images", 1));

    // Get the iterator of the whole collection
    MongoCursor<Document> cursor = images.find().iterator();

    try {
        while (cursor.hasNext()) {
            Document currImg = cursor.next();
            Document foundImg = albums.find(eq("images", currImg.getDouble("_id"))).first();
            if (foundImg == null) {
                //System.out.println(currImg.getDouble("_id") + " deleted.");
                images.deleteOne(currImg);
            }
            //System.out.println(currImg.getDouble("_id") + " is ok.");
        }
    } finally {
        cursor.close();
    }

    long numImgs = images.count(eq("tags", "sunrises"));
    System.out.println("The total number of images with the tag \"sunrises\" after the removal of orphans is: "
            + String.valueOf(numImgs));
}

From source file:module.test.CustomExport.java

License:Open Source License

public CustomExport() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");

    // ===== Find exp_group in the database =====

    // === Query 1 ===
    /*
    String queryName = "breast_cancer_GPL570";
    List<Bson> filters = new ArrayList<Bson>();
    filters.add(Filters.eq("exp_group.id_platform", "GPL570"));
    filters.add(Filters.eq("exp_group.id_topology_group", "C50"));
    filters.add(Filters.eq("exp_group.id_tissue_status", 3)); // tumoral
     */

    // === Query 2 ===
    /*
    String queryName = "breast_normal_GPL570";
    List<Bson> filters = new ArrayList<Bson>();
    filters.add(Filters.eq("exp_group.id_platform", "GPL570"));
    filters.add(Filters.eq("exp_group.id_topology_group", "C50"));
    filters.add(Filters.eq("exp_group.id_tissue_status", 1)); // normal
    */

    // === Query 3 ===
    String queryName = "breast_cancer_with_survival_GPL570";
    List<Bson> filters = new ArrayList<Bson>();
    filters.add(Filters.eq("exp_group.id_platform", "GPL570"));
    filters.add(Filters.eq("exp_group.id_topology_group", "C50"));
    filters.add(Filters.eq("exp_group.id_tissue_status", 3)); // tumoral
    filters.add(Filters.or(Filters.ne("exp_group.os_months", null), Filters.ne("exp_group.dfss_months", null),
            Filters.ne("exp_group.relapsed", null), Filters.ne("exp_group.dead", null)));

    Bson filter = Filters.and(filters);
    Long nbSamples = collection.count(filter);
    List<String> listSeries = collection.distinct("exp_group.main_gse_number", filter, String.class)
            .into(new ArrayList<String>());
    queryName = queryName + "_" + nbSamples + "_samples_" + listSeries.size() + "_series";

    List<Document> docExpGroup = collection.find(filter)
            .projection(Projections.fields(Projections.include("exp_group"), Projections.excludeId()))
            .into(new ArrayList<Document>());

    List<Document> docParam = collection.find(filter)
            .projection(Projections.fields(Projections.include("parameters"), Projections.excludeId()))
            .into(new ArrayList<Document>());

    mongoClient.close();

    // ===== Load Exp Group into a matrix =====

    List<String> headerExpGroup = new ArrayList<String>();
    List<Object> dataExpGroup = new ArrayList<Object>();

    for (int i = 0; i < docExpGroup.size(); i++) {
        Map<String, String> expGroup = (Map<String, String>) docExpGroup.get(i).get("exp_group");
        if (i == 0) {
            headerExpGroup.addAll(expGroup.keySet());
        }

        Object[] dataLine = new Object[headerExpGroup.size()];
        for (int j = 0; j < headerExpGroup.size(); j++) {
            dataLine[j] = expGroup.get(headerExpGroup.get(j));
        }
        dataExpGroup.add(dataLine);
    }

    // ===== Load Params into a matrix =====

    Set<String> headerParamSet = new HashSet<String>();
    List<String> headerParam = new ArrayList<String>();
    List<Object> dataParam = new ArrayList<Object>();

    for (int i = 0; i < docParam.size(); i++) {
        Map<String, String> param = (Map<String, String>) docParam.get(i).get("parameters");
        headerParamSet.addAll(param.keySet());
    }
    headerParam.addAll(headerParamSet);
    Collections.sort(headerParam);

    for (int i = 0; i < docParam.size(); i++) {
        Map<String, String> param = (Map<String, String>) docParam.get(i).get("parameters");
        Object[] dataLine = new Object[headerParam.size()];
        for (int j = 0; j < headerParam.size(); j++) {
            dataLine[j] = param.get(headerParam.get(j));
        }
        // System.out.println(Arrays.toString(dataLine));
        dataParam.add(dataLine);

    }

    // === Output ===

    String fileName = this.getOutputDirectory() + this.getDirSeparator() + "EpiMed_database_" + queryName + "_"
            + dateFormat.format(new Date()) + ".xlsx";
    System.out.println(fileName);
    XSSFWorkbook workbook = fileService.createWorkbook();
    fileService.addSheet(workbook, "exp_group_" + dateFormat.format(new Date()), headerExpGroup, dataExpGroup);
    fileService.addSheet(workbook, "parameters_" + dateFormat.format(new Date()), headerParam, dataParam);
    fileService.writeWorkbook(workbook, fileName);

}

From source file:module.UpdateNumberSamples.java

License:Open Source License

public UpdateNumberSamples() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("sample");

    // String [] listIdSeries = {"TISSUE_SPECIFIC_GENES_MM"};
    // List<Document> listSeries = collectionSeries.find(Filters.in("_id", listIdSeries)).into(new ArrayList<Document>());

    List<Document> listSeries = collectionSeries.find().into(new ArrayList<Document>());

    for (Document ser : listSeries) {
        System.out.println(ser);

        String idSeries = ser.getString("_id");
        Bson filter = Filters.in("series", idSeries);

        Long nbSamples = collectionSamples.count(filter);

        System.out.println(idSeries + " " + nbSamples);

        ser.append("nb_samples", nbSamples);
        collectionSeries.updateOne(Filters.eq("_id", idSeries), new Document("$set", ser));

    }

    mongoClient.close();

}

From source file:mongodb.QuickTourAdmin.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args
 *            takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // no connection string given: connect to a hard-coded database server
        mongoClient = new MongoClient("10.9.17.105", 27017);
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "test" database
    MongoDatabase database = mongoClient.getDatabase("test");

    database.drop();

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // getting a list of databases
    for (String name : mongoClient.listDatabaseNames()) {
        System.out.println(name);
    }

    // drop a database
    mongoClient.getDatabase("databaseToBeDropped").drop();

    // create a collection
    database.createCollection("cappedCollection",
            new CreateCollectionOptions().capped(true).sizeInBytes(0x100000));

    for (String name : database.listCollectionNames()) {
        System.out.println(name);
    }

    // drop a collection:
    collection.drop();

    // create an ascending index on the "i" field
    // 1 ascending or -1 for descending
    collection.createIndex(new Document("i", 1));

    // list the indexes on the collection
    for (final Document index : collection.listIndexes()) {
        System.out.println(index.toJson());
    }

    // create a text index on the "content" field
    // text indexes to support text search of string content
    collection.createIndex(new Document("content", "text"));

    collection.insertOne(new Document("_id", 0).append("content", "textual content"));
    collection.insertOne(new Document("_id", 1).append("content", "additional content"));
    collection.insertOne(new Document("_id", 2).append("content", "irrelevant content"));

    // Find using the text index
    long matchCount = collection.count(text("textual content -irrelevant"));
    System.out.println("Text search matches: " + matchCount);

    // Find using the $language operator
    Bson textSearch = text("textual content -irrelevant", "english");
    matchCount = collection.count(textSearch);
    System.out.println("Text search matches (english): " + matchCount);

    // Find the highest scoring match
    Document projection = new Document("score", new Document("$meta", "textScore"));
    Document myDoc = collection.find(textSearch).projection(projection).first();
    System.out.println("Highest scoring document: " + myDoc.toJson());

    // Run a command
    Document buildInfo = database.runCommand(new Document("buildInfo", 1));
    System.out.println(buildInfo);

    // release resources
    database.drop();
    mongoClient.close();
}

From source file:net.netzgut.integral.mongo.internal.services.MongoODMImplementation.java

License:Apache License

@Override
public <T extends Serializable> long count(Bson filter, Class<T> entityClass) {
    MongoCollection<Document> collection = this.mongo.getCollection(entityClass);
    return collection.count(filter);
}

From source file:org.apache.metamodel.mongodb.mongo3.MongoDbDataContext.java

License:Apache License

@Override
protected Number executeCountQuery(Table table, List<FilterItem> whereItems,
        boolean functionApproximationAllowed) {
    final MongoCollection<Document> collection = _mongoDb.getCollection(table.getName());

    final Document query = createMongoDbQuery(table, whereItems);

    logger.info("Executing MongoDB 'count' query: {}", query);
    final long count = collection.count(query);

    return count;
}

From source file:org.bananaforscale.cormac.dao.document.DocumentDataServiceImpl.java

License:Apache License

/**
 * Returns the document of the given document identifier.
 *
 * @param databaseName the database
 * @param collectionName the collection
 * @param documentId the document identifier to query for
 * @return the document of the given identifier
 * @throws DatasourceException
 * @throws NotFoundException
 */
@Override
public String getById(String databaseName, String collectionName, String documentId)
        throws DatasourceException, NotFoundException {
    try {
        if (!databaseExists(databaseName)) {
            throw new NotFoundException("The database doesn't exist in the datasource");
        }
        if (!collectionExists(databaseName, collectionName)) {
            throw new NotFoundException("The collection doesn't exist in the datasource");
        }
        MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName);
        MongoCollection<Document> collection = mongoDatabase.getCollection(collectionName);
        Document query = new Document("_id", new ObjectId(documentId));
        if (collection.count(query) == 0) {
            throw new NotFoundException("The document doesn't exist in the datasource");
        }
        Document document = collection.find(query).first();
        document.remove("_id");
        return JSON.serialize(document);
    } catch (MongoException ex) {
        logger.error("An error occured while retrieving the document", ex);
        throw new DatasourceException("An error occured while retrieving the document");
    }
}

From source file:org.bananaforscale.cormac.dao.document.DocumentDataServiceImpl.java

License:Apache License

/**
 * Updates a document in the collection. If the document exists in the
 * collection it will be updated. If the document doesn't exist an error
 * will be thrown. If the specified database and collection do not exist
 * they will be created.
 *
 * @param databaseName the database
 * @param collectionName the collection
 * @param documentId the document identifier
 * @param content the JSON payload
 * @return a status message with the outcome of the operation
 * @throws DatasourceException
 * @throws DeserializeException
 * @throws IllegalArgumentException
 * @throws NotFoundException
 */
@Override
public boolean replaceById(String databaseName, String collectionName, String documentId, String content)
        throws DatasourceException, DeserializeException, IllegalArgumentException, NotFoundException {
    try {
        if (!validInputForAddOrUpdate(databaseName, collectionName, documentId, content)) {
            throw new IllegalArgumentException();
        }
        MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName);
        MongoCollection<Document> collection = mongoDatabase.getCollection(collectionName);
        Document query = new Document("_id", new ObjectId(documentId));
        Document document = Document.parse(content);
        if (collection.count(query) == 0) {
            throw new NotFoundException("The document doesn't exist in the collection");
        }
        collection.replaceOne(query, document);
        return true;
    } catch (IllegalArgumentException | ClassCastException | JSONParseException ex) {
        logger.error("The JSON payload is invalid", ex);
        throw new DeserializeException("The JSON payload is invalid");
    } catch (MongoException ex) {
        logger.error("An error occured while updating the document", ex);
        throw new DatasourceException("An error occured while updating the document");
    }
}